prompt (large_string, lengths 70 to 991k) | completion (large_string, lengths 0 to 1.02k)
---|---|
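Each row below is one fill-in-the-middle (FIM) sample: the prompt column wraps a single source file in `<|file_name|>…<|end_file_name|>` and `<|fim▁begin|>…<|fim▁end|>` markers with one `<|fim▁hole|>` cut, and the completion column holds the text that fills that hole (possibly empty). As a rough sketch, a row could be stitched back into its original file like this; the function and argument names are illustrative, not part of the dataset:

```python
import re

def reconstruct_file(prompt: str, completion: str) -> str:
    """Rebuild the original source file from one prompt/completion row."""
    # Drop the file-name header, if present.
    body = re.sub(r"<\|file_name\|>.*?<\|end_file_name\|>", "", prompt, flags=re.S)
    # Keep only the text between the begin/end markers.
    body = body.split("<|fim▁begin|>", 1)[-1].split("<|fim▁end|>", 1)[0]
    # Splice the completion into the hole.
    prefix, _, suffix = body.partition("<|fim▁hole|>")
    return prefix + completion + suffix
```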
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![feature(asm)]
#![feature(box_syntax)]
#![feature(test)]
#![feature(async_closure)]
extern crate static_assertions;
#[macro_use]
extern crate log;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate bifrost;
extern crate bifrost_hasher;
extern crate bifrost_plugins;
#[allow(unused_imports)]
#[macro_use]
pub extern crate dovahkiin;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate bitflags;
extern crate bincode;
extern crate byteorder;
extern crate core;
extern crate libc;
extern crate linked_hash_map;
extern crate num_cpus;
extern crate parking_lot;
extern crate rand;
extern crate serde;
#[allow(unused_imports)]
#[macro_use]
extern crate itertools;
#[macro_use]
extern crate smallvec;
extern crate owning_ref;
extern crate serde_json;
extern crate test;<|fim▁hole|>pub mod utils;
#[macro_use]
pub mod ram;
pub mod client;
pub mod index;
pub mod server;<|fim▁end|> | |
<|file_name|>motors.d.ts<|end_file_name|><|fim▁begin|>import IO = require('./io');
import IndexedDevice = IO.IndexedDevice;
export declare module Motor {
type CommandValue = 'run-forever' | 'run-to-abs-pos' | 'run-to-rel-pos' | 'run-timed' | 'run-direct' | 'stop' | 'reset';
type EncoderPolarityValue = 'normal' | 'inversed';
type PolarityValue = 'normal' | 'inversed';
type StateValue = 'running' | 'ramping' | 'holding' | 'overloaded' | 'stalled';
type StopActionValue = 'coast' | 'brake' | 'hold';
}
export declare module DcMotor {
type CommandValue = 'run-forever' | 'run-timed' | 'run-direct' | 'stop';
type PolarityValue = 'normal' | 'inversed';
type StopActionValue = 'coast' | 'brake';
}
export declare module ServoMotor {
type CommandValue = 'run' | 'float';
type PolarityValue = 'normal' | 'inversed';
}
export declare class MotorBase extends IndexedDevice {
constructor(driverTypeDirName: string, nameConvention?: string, targetAddress?: string, targetDriverName?: string | string[]);
}
export declare class Motor extends MotorBase {
constructor(port?: string, targetDriverName?: string[] | string);
commandValues: {
runForever: Motor.CommandValue;
runToAbsPos: Motor.CommandValue;
runToRelPos: Motor.CommandValue;
runTimed: Motor.CommandValue;
runDirect: Motor.CommandValue;
stop: Motor.CommandValue;
reset: Motor.CommandValue;
};
encoderPolarityValues: {
normal: Motor.EncoderPolarityValue;
inversed: Motor.EncoderPolarityValue;
};
polarityValues: {
normal: Motor.PolarityValue;
inversed: Motor.PolarityValue;
};
stateValues: {
running: Motor.StateValue;
ramping: Motor.StateValue;
holding: Motor.StateValue;<|fim▁hole|> };
stopActionValues: {
coast: Motor.StopActionValue;
brake: Motor.StopActionValue;
hold: Motor.StopActionValue;
};
reset(): void;
stop(): void;
address: string;
command: Motor.CommandValue;
commands: string[];
countPerRot: number;
countPerM: number;
driverName: string;
dutyCycle: number;
dutyCycleSp: number;
fullTravelCount: number;
polarity: Motor.PolarityValue;
position: number;
positionP: number;
positionI: number;
positionD: number;
positionSp: number;
maxSpeed: number;
speed: number;
speedSp: number;
rampUpSp: number;
rampDownSp: number;
speedP: number;
speedI: number;
speedD: number;
state: Motor.StateValue[];
stopAction: Motor.StopActionValue;
stopActions: string[];
timeSp: number;
sendCommand(commandName: Motor.CommandValue): void;
setStopAction(stopAction: Motor.StopActionValue): void;
runForever(sp?: number, stopAction?: Motor.StopActionValue): void;
start(sp?: number, stopAction?: Motor.StopActionValue): void;
runToPosition(position?: number, speedSp?: number, stopAction?: Motor.StopActionValue): void;
runToAbsolutePosition(position?: number, speedSp?: number, stopAction?: Motor.StopActionValue): void;
runForDistance(distance?: number, speedSp?: number, stopAction?: Motor.StopActionValue): void;
runToRelativePosition(relPos?: number, speedSp?: number, stopAction?: Motor.StopActionValue): void;
runForTime(timeMs: number, speedSp?: number, stopAction?: Motor.StopActionValue): void;
hasState(stateValue: Motor.StateValue): boolean;
isRunning: boolean;
isRamping: boolean;
isHolding: boolean;
isOverloaded: boolean;
isStalled: boolean;
}
export declare class LargeMotor extends Motor {
constructor(port?: string);
}
export declare class MediumMotor extends Motor {
constructor(port?: string);
}
export declare class DcMotor extends MotorBase {
constructor(port: string);
commandValues: {
runForever: DcMotor.CommandValue;
runTimed: DcMotor.CommandValue;
runDirect: DcMotor.CommandValue;
stop: DcMotor.CommandValue;
};
polarityValues: {
normal: DcMotor.PolarityValue;
inversed: DcMotor.PolarityValue;
};
stopActionValues: {
coast: DcMotor.StopActionValue;
brake: DcMotor.StopActionValue;
};
address: string;
command: DcMotor.CommandValue;
commands: string[];
driverName: string;
dutyCycle: number;
dutyCycleSp: number;
polarity: DcMotor.PolarityValue;
rampDownSp: number;
rampUpSp: number;
state: string[];
stopAction: DcMotor.StopActionValue;
stopActions: string[];
timeSp: number;
}
export declare class ServoMotor extends MotorBase {
constructor(port: string);
commandValues: {
run: ServoMotor.CommandValue;
float: ServoMotor.CommandValue;
};
polarityValues: {
normal: ServoMotor.PolarityValue;
inversed: ServoMotor.PolarityValue;
};
address: string;
command: ServoMotor.CommandValue;
driverName: string;
maxPulseSp: number;
midPulseSp: number;
minPulseSp: number;
polarity: ServoMotor.PolarityValue;
positionSp: number;
rateSp: number;
state: string[];
}<|fim▁end|> | overloaded: Motor.StateValue;
stalled: Motor.StateValue; |
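The declarations above mirror ev3dev's sysfs-backed motor interface: each property maps to an attribute file under the device node, and commands are plain strings such as `run-forever` written to `command`. A rough Python sketch of that underlying mechanism; the device path follows the usual ev3dev convention and is an assumption, not something stated in this file:

```python
from pathlib import Path

MOTOR = Path("/sys/class/tacho-motor/motor0")  # assumed device node

def set_speed_sp(speed: int) -> None:
    """Write the speed setpoint attribute."""
    (MOTOR / "speed_sp").write_text(str(speed))

def send_command(command: str) -> None:
    """Issue a motor command, e.g. 'run-forever' or 'stop'."""
    (MOTOR / "command").write_text(command)

# set_speed_sp(400); send_command("run-forever")
```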
<|file_name|>jquery.liveFilter.js<|end_file_name|><|fim▁begin|>/*
* jQuery.liveFilter
*
* Copyright (c) 2009 Mike Merritt
*
* Forked by Lim Chee Aun (cheeaun.com)
*
*/
(function($){
$.fn.liveFilter = function(inputEl, filterEl, options){
var defaults = {
filterChildSelector: null,
filter: function(el, val){
return $(el).text().toUpperCase().indexOf(val.toUpperCase()) >= 0;
},
before: function(){},
after: function(){}
};
var options = $.extend(defaults, options);
var el = $(this).find(filterEl);
if (options.filterChildSelector) el = el.find(options.filterChildSelector);
var filter = options.filter;
$(inputEl).keyup(function(){
var val = $(this).val();
var contains = el.filter(function(){
return filter(this, val);
});
var containsNot = el.not(contains);
if (options.filterChildSelector){
<|fim▁hole|> contains = contains.parents(filterEl);
containsNot = containsNot.parents(filterEl).hide();
}
options.before.call(this, contains, containsNot);
contains.show();
containsNot.hide();
if (val === '') {
contains.show();
containsNot.show();
}
options.after.call(this, contains, containsNot);
});
}
})(jQuery);<|fim▁end|> | |
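The plugin's default `filter` is a case-insensitive substring match on each element's text, and an empty query shows everything again. The core of that behaviour, restated as a small Python sketch (standalone illustration, not part of the plugin):

```python
def live_filter(items, query):
    """Split items into matches / non-matches, like the keyup handler above."""
    q = query.upper()
    matches = [item for item in items if q in item.upper()]
    non_matches = [item for item in items if q not in item.upper()]
    return matches, non_matches

# An empty query matches everything, mirroring the `val === ''` branch.
assert live_filter(["Alpha", "beta"], "") == (["Alpha", "beta"], [])
```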
<|file_name|>SharedPlanPriceSpec.ts<|end_file_name|><|fim▁begin|>import { CommonConstants } from '../../../../utils/CommonConstants';
import { ServerFarmSkuConstants } from '../../../../utils/scenario-checker/ServerFarmSku';
import { AppKind } from '../../../../utils/AppKind';
import { PriceSpec, PriceSpecInput, SpecColorCodes } from './PriceSpec';
import { style } from 'typestyle';
export abstract class SharedPlanPriceSpec extends PriceSpec {
constructor(t: (string) => string) {
super(t);
this.tier = ServerFarmSkuConstants.Tier.shared;
this.skuCode = ServerFarmSkuConstants.SkuCode.Shared.D1;
this.legacySkuName = 'shared';
this.topLevelFeatures = [t('pricing_sharedInfrastructure'), t('pricing_memory').format(1), t('pricing_computeLimit').format(240)];
this.featureItems = [
{
id: 'feature_customDomainsName',
iconUrl: 'image/custom-domains.svg',
title: t('feature_customDomainsName'),
description: t('feature_customDomainsInfo'),
},
];
this.hardwareItems = [
{<|fim▁hole|> iconUrl: 'image/app-service-plan.svg',
title: t('pricing_includedHardware_azureComputeUnits'),
description: t('pricing_computeDedicatedAcu'),
learnMoreUrl: CommonConstants.Links.azureComputeUnitLearnMore,
},
{
id: 'memory',
iconUrl: 'image/website-power.svg',
title: t('memory'),
description: t('pricing_sharedMemory'),
},
{
id: 'storage',
iconUrl: 'image/storage.svg',
title: t('storage'),
description: t('pricing_sharedDisk').format('1 GB'),
},
];
this.specResourceSet = {
id: this.skuCode,
firstParty: [
{
quantity: 744,
},
],
};
this.cssClass = style({
background: SpecColorCodes.PREMIUM,
});
}
public async runInitialization(input: PriceSpecInput): Promise<void> {
if (input.plan) {
if (
input.plan.properties.hostingEnvironmentProfile ||
input.plan.properties.isXenon ||
AppKind.hasAnyKind(input.plan, [CommonConstants.Kinds.linux, CommonConstants.Kinds.elastic])
) {
this.state = 'hidden';
}
} else if (input.specPickerInput.data) {
if (
input.specPickerInput.data.hostingEnvironmentName ||
input.specPickerInput.data.isLinux ||
input.specPickerInput.data.isXenon ||
input.specPickerInput.data.isElastic
) {
this.state = 'hidden';
}
}
return this.checkIfDreamspark(input.subscriptionId);
}
}<|fim▁end|> | id: 'pricing_includedHardware_azureComputeUnits', |
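`runInitialization` hides the Shared (D1) tier whenever the plan or the spec-picker input indicates an App Service Environment, Windows containers (Xenon), Linux, or an elastic plan. That decision, condensed into a Python predicate with shortened, illustrative field names:

```python
def shared_tier_hidden(env_name=None, is_linux=False, is_xenon=False,
                       is_elastic=False) -> bool:
    """True when the Shared (D1) tier should not be offered."""
    return bool(env_name) or is_linux or is_xenon or is_elastic

assert shared_tier_hidden(is_linux=True) is True
assert shared_tier_hidden() is False
```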
<|file_name|>values.go<|end_file_name|><|fim▁begin|>package source
import "fmt"
func Values() {
<|fim▁hole|> fmt.Println("go" + "lang")
fmt.Println("1+1 =", 1+1)
fmt.Println("7.0/3.0 =", 7.0/3.0)
fmt.Println(true && false)
fmt.Println(true || false)
fmt.Println(!true)
}<|fim▁end|> | |
<|file_name|>read_method.rs<|end_file_name|><|fim▁begin|>use super::branchify::generate_branchified_method;
use super::get_writer;
use std::io::IoResult;
pub fn generate(output_dir: Path) -> IoResult<()> {
let mut writer = get_writer(output_dir, "read_method.rs");
try!(writer.write(b"\
// This automatically generated file is included in request.rs.
pub mod dummy {
use std::io::{Stream, IoResult};
use method::Method;
use method::Method::{Connect, Delete, Get, Head, Options, Patch, Post, Put, Trace, ExtensionMethod};
use server::request::MAX_METHOD_LEN;
use rfc2616::{SP, is_token_item};
use buffer::BufferedStream;
#[inline]
pub fn read_method<S: Stream>(stream: &mut BufferedStream<S>) -> IoResult<Method> {
"));
try!(generate_branchified_method(
&mut *writer,
branchify!(case sensitive,
"CONNECT" => Connect,
"DELETE" => Delete,
"GET" => Get,
"HEAD" => Head,
"OPTIONS" => Options,<|fim▁hole|> "POST" => Post,
"PUT" => Put,
"TRACE" => Trace
),
1,
"stream.read_byte()",
"SP",
"MAX_METHOD_LEN",
"is_token_item(b)",
"ExtensionMethod({})"));
writer.write(b"}\n}\n")
}<|fim▁end|> | "PATCH" => Patch, |
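The generated matcher reads the method token byte by byte up to a space, rejects tokens longer than `MAX_METHOD_LEN`, and maps unknown-but-valid tokens to `ExtensionMethod`. A simplified Python stand-in for that contract; the length cap here is an assumption, the real value comes from `server::request::MAX_METHOD_LEN`:

```python
import io

KNOWN_METHODS = {"CONNECT", "DELETE", "GET", "HEAD", "OPTIONS",
                 "PATCH", "POST", "PUT", "TRACE"}
MAX_METHOD_LEN = 16  # assumed cap

def read_method(stream):
    """Read a method token up to the first space, then classify it."""
    buf = bytearray()
    while len(buf) < MAX_METHOD_LEN:
        b = stream.read(1)
        if not b or b == b" ":
            break
        buf += b
    token = buf.decode("ascii")
    return token if token in KNOWN_METHODS else ("ExtensionMethod", token)

assert read_method(io.BytesIO(b"GET / HTTP/1.1")) == "GET"
```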
<|file_name|>leap.js<|end_file_name|><|fim▁begin|>var Leap = require("leapjs");
var keyboard = require('node_keyboard');
//Each var is individually declared below so they reference different objects in memory, i.e. work independently.
//These vars log when a particular action / gesture last ran.
var last_fav = new Date().getTime();
var last_swipe = new Date().getTime();
var last_up = new Date().getTime();
var last_down = new Date().getTime();
var current_time;
var delay = 1000; //Number of milliseconds forced between each gesture.
var controller = Leap.loop({enableGestures: true}, function(frame){
if(frame.valid && frame.gestures.length > 0){
frame.gestures.forEach(function(gesture){
switch (gesture.type){
case "circle":
current_time = new Date().getTime();
if(last_fav+delay < current_time){
keyboard.press(keyboard.Key_Numpad0);
keyboard.release(keyboard.Key_Numpad0);
console.log("Circle Gesture");
last_fav = new Date().getTime();
}
break;
case "swipe":
current_time = new Date().getTime();<|fim▁hole|> if(last_swipe+delay < current_time){
//Classify swipe as either horizontal or vertical
var isHorizontal = Math.abs(gesture.direction[0]) > Math.abs(gesture.direction[1]);
//Classify as right-left or up-down
if(isHorizontal){
if(gesture.direction[0] > 0){
swipeDirection = "Swipe Right";
keyboard.press(keyboard.Key_Up); //Key_Up keycode and Key_Right keycode are swapped in the node_keyboard dependency
keyboard.release(keyboard.Key_Up);//Key_Up keycode and Key_Right keycode are swapped in the node_keyboard dependency
} else {
swipeDirection = "Swipe Left";
keyboard.press(keyboard.Key_Left);
keyboard.release(keyboard.Key_Left);
}
} else { //vertical
if(gesture.direction[1] > 0){
swipeDirection = "Swipe Up";
} else {
swipeDirection = "Swipe Down";
}
}
console.log(swipeDirection);
last_swipe = new Date().getTime();
}
break;
}
});
}
if(frame.pointables.length == 5){
var pointable = frame.pointables;
current_time = new Date().getTime();
if(last_up+delay < current_time){
if(pointable[0].direction[1] > 0.78 && pointable[1].direction[1] > 0.78 && pointable[2].direction[1] > 0.78 && pointable[3].direction[1] > 0.78 && pointable[4].direction[1] > 0.78 ){
console.log("Up Vote");
keyboard.press(keyboard.Key_NumpadAdd);
keyboard.release(keyboard.Key_NumpadAdd);
last_up = new Date().getTime();
}
}
if(last_down+delay < current_time){
if(pointable[0].direction[1] < -0.78 && pointable[1].direction[1] < -0.78 && pointable[2].direction[1] < -0.78 && pointable[3].direction[1] < -0.78 && pointable[4].direction[1] < -0.78 ){
console.log("Down Vote");
keyboard.press(keyboard.Key_NumpadSubtract);
keyboard.release(keyboard.Key_NumpadSubtract);
last_down = new Date().getTime();
}
}
}
});
console.log('Leap Imgur Controller Running');<|fim▁end|> | |
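The swipe branch classifies a gesture by its dominant axis (horizontal when `|direction[0]|` exceeds `|direction[1]|`), and the sign then picks the direction. The same decision logic as a small Python sketch, for illustration only:

```python
def classify_swipe(direction):
    """direction is the (x, y, z) vector from the gesture frame."""
    dx, dy = direction[0], direction[1]
    if abs(dx) > abs(dy):  # horizontal swipe
        return "Swipe Right" if dx > 0 else "Swipe Left"
    return "Swipe Up" if dy > 0 else "Swipe Down"

assert classify_swipe((0.9, -0.2, 0.0)) == "Swipe Right"
assert classify_swipe((0.1, -0.8, 0.0)) == "Swipe Down"
```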
<|file_name|>popular.js<|end_file_name|><|fim▁begin|>import Ember from 'ember';
var PluginsPopularRoute = Ember.Route.extend({
titleToken: 'Popular'<|fim▁hole|>export default PluginsPopularRoute;<|fim▁end|> | });
|
<|file_name|>mergeIntervals.js<|end_file_name|><|fim▁begin|>// Given a collection of intervals, merge all overlapping intervals.
// For example,
// Given [1,3],[2,6],[8,10],[15,18],
// return [1,6],[8,10],[15,18].
/**
* Definition for an interval.
* function Interval(start, end) {
* this.start = start;
* this.end = end;
* }
*/
/**
* @param {Interval[]} intervals
* @return {Interval[]}
*/
function merge(intervals) { // 148 ms runtime
if (!intervals.length) return intervals;
intervals.sort((a, b) => a.start !== b.start ? a.start - b.start : a.end - b.end);
var prev = intervals[0],
res = [prev];
for (var curr of intervals) {
if (curr.start <= prev.end) {
prev.end = Math.max(prev.end, curr.end);
} else {
res.push(curr);
prev = curr;
}
}
return res;
}
//---------------------------------------------------------
var merge = function(intervals) {
if( intervals.length < 1 ) return intervals;
var start = [],
end = [];
for( var i=0; i<intervals.length; i++ ){
start.push( intervals[i].start );
end.push( intervals[i].end );
}
// if end[i] < start[j]
// store end
// j++
// else
// store start
// i++
var result = [],
tempStart = start[0];
for( var j=0, k=0; j< start.length || k<end.length; ){
if( start[j] < end[k] ){
tempStart = Math.min(tempStart, start[j]);
j++;
} else {
result.push( [tempStart, end[k+1]] );
tempStart = start[j];
k++;
j++;
}
}
// [ 1, 2, 8, 15 ]
// [ 3, 6, 10, 18 ]
return result;<|fim▁hole|><|fim▁end|> |
}; |
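Both snippets above implement interval merging; the first is the classic approach of sorting by start and folding overlaps. For reference, that standard algorithm restated compactly in Python:

```python
def merge_intervals(intervals):
    """Sort by start, then extend or emit while sweeping left to right."""
    merged = []
    for start, end in sorted(intervals):
        if merged and start <= merged[-1][1]:
            merged[-1][1] = max(merged[-1][1], end)  # overlap: extend
        else:
            merged.append([start, end])              # gap: start a new interval
    return merged

assert merge_intervals([[1, 3], [2, 6], [8, 10], [15, 18]]) == [[1, 6], [8, 10], [15, 18]]
```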
<|file_name|>base.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Codegen the completed AST to the LLVM IR.
//!
//! Some functions here, such as codegen_block and codegen_expr, return a value --
//! the result of the codegen to LLVM -- while others, such as codegen_fn
//! and mono_item, are called only for the side effect of adding a
//! particular definition to the LLVM IR output we're producing.
//!
//! Hopefully useful general knowledge about codegen:
//!
//! * There's no way to find out the Ty type of a Value. Doing so
//! would be "trying to get the eggs out of an omelette" (credit:
//! pcwalton). You can, instead, find out its llvm::Type by calling val_ty,
//! but one llvm::Type corresponds to many `Ty`s; for instance, tup(int, int,
//! int) and rec(x=int, y=int, z=int) will have the same llvm::Type.
use super::ModuleLlvm;
use rustc_codegen_ssa::{ModuleCodegen, ModuleKind};
use rustc_codegen_ssa::base::maybe_create_entry_wrapper;
use super::LlvmCodegenBackend;
use llvm;
use metadata;
use rustc::mir::mono::{Linkage, Visibility, Stats};
use rustc::middle::cstore::{EncodedMetadata};
use rustc::ty::TyCtxt;
use rustc::middle::exported_symbols;
use rustc::session::config::{self, DebugInfo};
use builder::Builder;
use common;
use context::CodegenCx;
use monomorphize::partitioning::CodegenUnitExt;
use rustc_codegen_ssa::mono_item::MonoItemExt;
use rustc_data_structures::small_c_str::SmallCStr;
use rustc_codegen_ssa::traits::*;
use rustc_codegen_ssa::back::write::submit_codegened_module_to_llvm;
use std::ffi::CString;
use std::time::Instant;
use syntax_pos::symbol::InternedString;
use rustc::hir::CodegenFnAttrs;
use value::Value;
pub fn write_metadata<'a, 'gcx>(
tcx: TyCtxt<'a, 'gcx, 'gcx>,
llvm_module: &ModuleLlvm
) -> EncodedMetadata {
use std::io::Write;
use flate2::Compression;
use flate2::write::DeflateEncoder;
let (metadata_llcx, metadata_llmod) = (&*llvm_module.llcx, llvm_module.llmod());
#[derive(PartialEq, Eq, PartialOrd, Ord)]
enum MetadataKind {
None,
Uncompressed,
Compressed
}
let kind = tcx.sess.crate_types.borrow().iter().map(|ty| {
match *ty {
config::CrateType::Executable |
config::CrateType::Staticlib |
config::CrateType::Cdylib => MetadataKind::None,
config::CrateType::Rlib => MetadataKind::Uncompressed,
config::CrateType::Dylib |
config::CrateType::ProcMacro => MetadataKind::Compressed,
}
}).max().unwrap_or(MetadataKind::None);
if kind == MetadataKind::None {
return EncodedMetadata::new();
}
let metadata = tcx.encode_metadata();
if kind == MetadataKind::Uncompressed {
return metadata;
}
assert!(kind == MetadataKind::Compressed);
let mut compressed = tcx.metadata_encoding_version();
DeflateEncoder::new(&mut compressed, Compression::fast())
.write_all(&metadata.raw_data).unwrap();
let llmeta = common::bytes_in_context(metadata_llcx, &compressed);
let llconst = common::struct_in_context(metadata_llcx, &[llmeta], false);
let name = exported_symbols::metadata_symbol_name(tcx);
let buf = CString::new(name).unwrap();
let llglobal = unsafe {
llvm::LLVMAddGlobal(metadata_llmod, common::val_ty(llconst), buf.as_ptr())
};
unsafe {
llvm::LLVMSetInitializer(llglobal, llconst);
let section_name = metadata::metadata_section_name(&tcx.sess.target.target);
let name = SmallCStr::new(section_name);
llvm::LLVMSetSection(llglobal, name.as_ptr());
// Also generate a .section directive to force no
// flags, at least for ELF outputs, so that the
// metadata doesn't get loaded into memory.
let directive = format!(".section {}", section_name);
let directive = CString::new(directive).unwrap();
llvm::LLVMSetModuleInlineAsm(metadata_llmod, directive.as_ptr())
}
return metadata;
}
pub struct ValueIter<'ll> {
cur: Option<&'ll Value>,
step: unsafe extern "C" fn(&'ll Value) -> Option<&'ll Value>,
}
impl Iterator for ValueIter<'ll> {
type Item = &'ll Value;
fn next(&mut self) -> Option<&'ll Value> {
let old = self.cur;
if let Some(old) = old {
self.cur = unsafe { (self.step)(old) };
}
old
}
}
pub fn iter_globals(llmod: &'ll llvm::Module) -> ValueIter<'ll> {
unsafe {
ValueIter {
cur: llvm::LLVMGetFirstGlobal(llmod),
step: llvm::LLVMGetNextGlobal,
}
}
}
pub fn compile_codegen_unit<'ll, 'tcx>(tcx: TyCtxt<'ll, 'tcx, 'tcx>,
cgu_name: InternedString)
-> Stats {
let start_time = Instant::now();
let dep_node = tcx.codegen_unit(cgu_name).codegen_dep_node(tcx);
let ((stats, module), _) = tcx.dep_graph.with_task(dep_node,
tcx,
cgu_name,
module_codegen);
let time_to_codegen = start_time.elapsed();
// We assume that the cost to run LLVM on a CGU is proportional to
// the time we needed for codegenning it.
let cost = time_to_codegen.as_secs() * 1_000_000_000 +
time_to_codegen.subsec_nanos() as u64;
submit_codegened_module_to_llvm(&LlvmCodegenBackend(()), tcx, module, cost);
return stats;
fn module_codegen<'ll, 'tcx>(
tcx: TyCtxt<'ll, 'tcx, 'tcx>,
cgu_name: InternedString)
-> (Stats, ModuleCodegen<ModuleLlvm>)
{
let backend = LlvmCodegenBackend(());
let cgu = tcx.codegen_unit(cgu_name);
// Instantiate monomorphizations without filling out definitions yet...
let llvm_module = backend.new_metadata(tcx.sess, &cgu_name.as_str());
let stats = {
let cx = CodegenCx::new(tcx, cgu, &llvm_module);
let mono_items = cx.codegen_unit
.items_in_deterministic_order(cx.tcx);
for &(mono_item, (linkage, visibility)) in &mono_items {
mono_item.predefine::<Builder>(&cx, linkage, visibility);
}
// ... and now that we have everything pre-defined, fill out those definitions.
for &(mono_item, _) in &mono_items {
mono_item.define::<Builder>(&cx);
}
// If this codegen unit contains the main function, also create the
// wrapper here
maybe_create_entry_wrapper::<Builder>(&cx);
// Run replace-all-uses-with for statics that need it
for &(old_g, new_g) in cx.statics_to_rauw().borrow().iter() {
unsafe {
let bitcast = llvm::LLVMConstPointerCast(new_g, cx.val_ty(old_g));
llvm::LLVMReplaceAllUsesWith(old_g, bitcast);
llvm::LLVMDeleteGlobal(old_g);
}
}
// Create the llvm.used variable
// This variable has type [N x i8*] and is stored in the llvm.metadata section
if !cx.used_statics().borrow().is_empty() {
cx.create_used_variable()
}
// Finalize debuginfo
if cx.sess().opts.debuginfo != DebugInfo::None {
cx.debuginfo_finalize();
}
cx.consume_stats().into_inner()
};
(stats, ModuleCodegen {
name: cgu_name.to_string(),
module_llvm: llvm_module,
kind: ModuleKind::Regular,
})
}
}
pub fn set_link_section(llval: &Value, attrs: &CodegenFnAttrs) {
let sect = match attrs.link_section {
Some(name) => name,
None => return,
};
unsafe {
let buf = SmallCStr::new(&sect.as_str());
llvm::LLVMSetSection(llval, buf.as_ptr());
}
}
pub fn linkage_to_llvm(linkage: Linkage) -> llvm::Linkage {
match linkage {
Linkage::External => llvm::Linkage::ExternalLinkage,
Linkage::AvailableExternally => llvm::Linkage::AvailableExternallyLinkage,
Linkage::LinkOnceAny => llvm::Linkage::LinkOnceAnyLinkage,
Linkage::LinkOnceODR => llvm::Linkage::LinkOnceODRLinkage,
Linkage::WeakAny => llvm::Linkage::WeakAnyLinkage,
Linkage::WeakODR => llvm::Linkage::WeakODRLinkage,
Linkage::Appending => llvm::Linkage::AppendingLinkage,
Linkage::Internal => llvm::Linkage::InternalLinkage,
Linkage::Private => llvm::Linkage::PrivateLinkage,
Linkage::ExternalWeak => llvm::Linkage::ExternalWeakLinkage,
Linkage::Common => llvm::Linkage::CommonLinkage,
}
}
pub fn visibility_to_llvm(linkage: Visibility) -> llvm::Visibility {
match linkage {
Visibility::Default => llvm::Visibility::Default,
Visibility::Hidden => llvm::Visibility::Hidden,<|fim▁hole|> Visibility::Protected => llvm::Visibility::Protected,
}
}<|fim▁end|> | |
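The scheduling cost above is just the codegen wall time flattened to nanoseconds, used as a proxy for how expensive LLVM will be on that codegen unit. The same arithmetic, spelled out in Python for clarity:

```python
def codegen_cost(secs: int, subsec_nanos: int) -> int:
    """Flatten a duration into total nanoseconds."""
    return secs * 1_000_000_000 + subsec_nanos

assert codegen_cost(2, 500) == 2_000_000_500
```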
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Utilities to throw exceptions from Rust bindings.
use dom::bindings::codegen::PrototypeList::proto_id_to_name;
use dom::bindings::conversions::ToJSValConvertible;
use dom::bindings::global::GlobalRef;
use dom::domexception::{DOMException, DOMErrorName};
use util::mem::HeapSizeOf;
use util::str::DOMString;
use js::jsapi::JSAutoCompartment;
use js::jsapi::{JSContext, JSObject, RootedValue};
use js::jsapi::{JS_IsExceptionPending, JS_SetPendingException, JS_ReportPendingException};
use js::jsapi::{JS_ReportErrorNumber1, JSErrorFormatString, JSExnType};
use js::jsapi::{JS_SaveFrameChain, JS_RestoreFrameChain};
use js::jsval::UndefinedValue;
use libc;
use std::ffi::CString;
use std::mem;
use std::ptr;
/// DOM exceptions that can be thrown by a native DOM method.
#[derive(Debug, Clone, HeapSizeOf)]
pub enum Error {
/// IndexSizeError DOMException
IndexSize,
/// NotFoundError DOMException
NotFound,
/// HierarchyRequestError DOMException
HierarchyRequest,
/// WrongDocumentError DOMException
WrongDocument,
/// InvalidCharacterError DOMException
InvalidCharacter,
/// NotSupportedError DOMException
NotSupported,
/// InUseAttributeError DOMException
InUseAttribute,
/// InvalidStateError DOMException
InvalidState,
/// SyntaxError DOMException
Syntax,
/// NamespaceError DOMException
Namespace,
/// InvalidAccessError DOMException
InvalidAccess,
/// SecurityError DOMException
Security,
/// NetworkError DOMException
Network,
/// AbortError DOMException
Abort,
/// TimeoutError DOMException
Timeout,
/// InvalidNodeTypeError DOMException
InvalidNodeType,
/// DataCloneError DOMException
DataClone,
/// NoModificationAllowedError DOMException
NoModificationAllowed,
/// QuotaExceededError DOMException
QuotaExceeded,
/// TypeMismatchError DOMException
TypeMismatch,
/// TypeError JavaScript Error
Type(DOMString),
/// RangeError JavaScript Error
Range(DOMString),
/// A JavaScript exception is already pending.
JSFailed,
}
/// The return type for IDL operations that can throw DOM exceptions.
pub type Fallible<T> = Result<T, Error>;
/// The return type for IDL operations that can throw DOM exceptions and
/// return `()`.
pub type ErrorResult = Fallible<()>;
/// Set a pending exception for the given `result` on `cx`.
pub fn throw_dom_exception(cx: *mut JSContext, global: GlobalRef,
result: Error) {
let code = match result {
Error::IndexSize => DOMErrorName::IndexSizeError,
Error::NotFound => DOMErrorName::NotFoundError,
Error::HierarchyRequest => DOMErrorName::HierarchyRequestError,
Error::WrongDocument => DOMErrorName::WrongDocumentError,
Error::InvalidCharacter => DOMErrorName::InvalidCharacterError,
Error::NotSupported => DOMErrorName::NotSupportedError,
Error::InUseAttribute => DOMErrorName::InUseAttributeError,
Error::InvalidState => DOMErrorName::InvalidStateError,
Error::Syntax => DOMErrorName::SyntaxError,
Error::Namespace => DOMErrorName::NamespaceError,
Error::InvalidAccess => DOMErrorName::InvalidAccessError,
Error::Security => DOMErrorName::SecurityError,
Error::Network => DOMErrorName::NetworkError,
Error::Abort => DOMErrorName::AbortError,
Error::Timeout => DOMErrorName::TimeoutError,
Error::InvalidNodeType => DOMErrorName::InvalidNodeTypeError,
Error::DataClone => DOMErrorName::DataCloneError,
Error::NoModificationAllowed => DOMErrorName::NoModificationAllowedError,
Error::QuotaExceeded => DOMErrorName::QuotaExceededError,
Error::TypeMismatch => DOMErrorName::TypeMismatchError,
Error::Type(message) => {
assert!(unsafe { JS_IsExceptionPending(cx) } == 0);
throw_type_error(cx, &message);
return;
},
Error::Range(message) => {
assert!(unsafe { JS_IsExceptionPending(cx) } == 0);
throw_range_error(cx, &message);
return;
},
Error::JSFailed => {
assert!(unsafe { JS_IsExceptionPending(cx) } == 1);
return;
}
};
assert!(unsafe { JS_IsExceptionPending(cx) } == 0);
let exception = DOMException::new(global, code);
let mut thrown = RootedValue::new(cx, UndefinedValue());
exception.to_jsval(cx, thrown.handle_mut());
unsafe {
JS_SetPendingException(cx, thrown.handle());
}
}
/// Report a pending exception, thereby clearing it.
pub fn report_pending_exception(cx: *mut JSContext, obj: *mut JSObject) {
unsafe {
if JS_IsExceptionPending(cx) != 0 {
let saved = JS_SaveFrameChain(cx);
{
let _ac = JSAutoCompartment::new(cx, obj);
JS_ReportPendingException(cx);
}
if saved != 0 {
JS_RestoreFrameChain(cx);
}
}
}
}
/// Throw an exception to signal that a `JSVal` can not be converted to any of
/// the types in an IDL union type.
pub fn throw_not_in_union(cx: *mut JSContext, names: &'static str) {
assert!(unsafe { JS_IsExceptionPending(cx) } == 0);
let error = format!("argument could not be converted to any of: {}", names);
throw_type_error(cx, &error);
}
/// Throw an exception to signal that a `JSObject` can not be converted to a
/// given DOM type.
pub fn throw_invalid_this(cx: *mut JSContext, proto_id: u16) {
debug_assert!(unsafe { JS_IsExceptionPending(cx) } == 0);
let error = format!("\"this\" object does not implement interface {}.",
proto_id_to_name(proto_id));
throw_type_error(cx, &error);
}<|fim▁hole|> '0' as libc::c_char,
'}' as libc::c_char,
0 as libc::c_char,
];
/// Format string struct used to throw `TypeError`s.
static mut TYPE_ERROR_FORMAT_STRING: JSErrorFormatString = JSErrorFormatString {
format: &ERROR_FORMAT_STRING_STRING as *const libc::c_char,
argCount: 1,
exnType: JSExnType::JSEXN_TYPEERR as i16,
};
/// Format string struct used to throw `RangeError`s.
static mut RANGE_ERROR_FORMAT_STRING: JSErrorFormatString = JSErrorFormatString {
format: &ERROR_FORMAT_STRING_STRING as *const libc::c_char,
argCount: 1,
exnType: JSExnType::JSEXN_RANGEERR as i16,
};
/// Callback used to throw javascript errors.
/// See throw_js_error for info about error_number.
unsafe extern fn get_error_message(_user_ref: *mut libc::c_void,
error_number: libc::c_uint)
-> *const JSErrorFormatString
{
let num: JSExnType = mem::transmute(error_number);
match num {
JSExnType::JSEXN_TYPEERR => &TYPE_ERROR_FORMAT_STRING as *const JSErrorFormatString,
JSExnType::JSEXN_RANGEERR => &RANGE_ERROR_FORMAT_STRING as *const JSErrorFormatString,
_ => panic!("Bad js error number given to get_error_message: {}", error_number)
}
}
/// Helper fn to throw a javascript error with the given message and number.
/// Reuse the jsapi error codes to distinguish the error_number
/// passed back to the get_error_message callback.
/// c_uint is u32, so this cast is safe, as is casting to/from i32 from there.
fn throw_js_error(cx: *mut JSContext, error: &str, error_number: u32) {
let error = CString::new(error).unwrap();
unsafe {
JS_ReportErrorNumber1(cx,
Some(get_error_message),
ptr::null_mut(), error_number, error.as_ptr());
}
}
/// Throw a `TypeError` with the given message.
pub fn throw_type_error(cx: *mut JSContext, error: &str) {
throw_js_error(cx, error, JSExnType::JSEXN_TYPEERR as u32);
}
/// Throw a `RangeError` with the given message.
pub fn throw_range_error(cx: *mut JSContext, error: &str) {
throw_js_error(cx, error, JSExnType::JSEXN_RANGEERR as u32);
}<|fim▁end|> |
/// Format string used to throw javascript errors.
static ERROR_FORMAT_STRING_STRING: [libc::c_char; 4] = [
'{' as libc::c_char, |
<|file_name|>prune.go<|end_file_name|><|fim▁begin|>package catalog
import (
"fmt"
"poule/operations"
"strings"
"time"
"poule/configuration"
"poule/gh"
"poule/operations/settings"
"github.com/google/go-github/github"
"github.com/mitchellh/mapstructure"
"github.com/pkg/errors"
"github.com/urfave/cli"
)
func init() {
registerOperation(&pruneDescriptor{})
}
type pruneDescriptor struct{}
type pruneConfig struct {
Action string `mapstructure:"action"`
GracePeriod string `mapstructure:"grace-period"`
OutdatedThreshold string `mapstructure:"outdated-threshold"`
}
func (d *pruneDescriptor) CommandLineDescription() CommandLineDescription {
return CommandLineDescription{
Name: "prune",
Description: "Prune outdated issues",
Flags: []cli.Flag{
cli.StringFlag{
Name: "action",
Usage: "action to take for outdated issues",
Value: "ping",
},
cli.StringFlag{
Name: "grace-period",
Usage: "grace period before closing",
Value: "2w",
},
cli.StringFlag{
Name: "threshold",
Usage: "threshold in days, weeks, months, or years",
Value: "6m",
},
},
}
}
func (d *pruneDescriptor) OperationFromCli(c *cli.Context) (operations.Operation, error) {
pruneConfig := &pruneConfig{
Action: c.String("action"),
GracePeriod: c.String("grace-period"),
OutdatedThreshold: c.String("threshold"),
}
return d.makeOperation(pruneConfig)
}
func (d *pruneDescriptor) OperationFromConfig(c operations.Configuration) (operations.Operation, error) {
pruneConfig := &pruneConfig{}
if err := mapstructure.Decode(c, &pruneConfig); err != nil {
return nil, errors.Wrap(err, "decoding configuration")
}
return d.makeOperation(pruneConfig)
}
func (d *pruneDescriptor) makeOperation(config *pruneConfig) (operations.Operation, error) {
var (
err error
operation pruneOperation
)
if operation.action, err = parseAction(config.Action); err != nil {
return nil, err
}
if operation.gracePeriod, err = settings.ParseExtDuration(config.GracePeriod); err != nil {
return nil, err
}
if operation.outdatedThreshold, err = settings.ParseExtDuration(config.OutdatedThreshold); err != nil {
return nil, err
}
return &operation, nil
}
type pruneOperation struct {
action string
gracePeriod settings.ExtDuration
outdatedThreshold settings.ExtDuration
}
func (o *pruneOperation) Accepts() operations.AcceptedType {
return operations.Issues
}
func (o *pruneOperation) Apply(c *operations.Context, item gh.Item, userData interface{}) error {
issue := item.Issue
switch o.action {
case "close":
// TODO Find the last ping/warn message, and take the grace period into account.
break
case "force-close":
state := "closed"
_, _, err := c.Client.Issues().Edit(c.Username, c.Repository, *issue.Number, &github.IssueRequest{
State: &state,
})
return err
case "ping":
body := formatPingComment(issue, o)
_, _, err := c.Client.Issues().CreateComment(c.Username, c.Repository, *issue.Number, &github.IssueComment{
Body: &body,
})
return err
case "warn":
body := formatWarnComment(issue, o)
_, _, err := c.Client.Issues().CreateComment(c.Username, c.Repository, *issue.Number, &github.IssueComment{
Body: &body,
})
return err
}
return nil
}
func (o *pruneOperation) Describe(c *operations.Context, item gh.Item, userData interface{}) string {
issue := item.Issue
return fmt.Sprintf("Execute %s action on issue #%d (last commented on %s)",
o.action, *issue.Number, userData.(time.Time).Format(time.RFC3339))
}
func (o *pruneOperation) Filter(c *operations.Context, item gh.Item) (operations.FilterResult, interface{}, error) {
// Retrieve comments for that issue since our threshold plus our grace
// period plus one day.<|fim▁hole|> comments, _, err := c.Client.Issues().ListComments(c.Username, c.Repository, *issue.Number, &github.IssueListCommentsOptions{
Since: time.Now().Add(-1*o.outdatedThreshold.Duration()).Add(-1*o.gracePeriod.Duration()).AddDate(0, 0, -1),
ListOptions: github.ListOptions{
PerPage: 200,
},
})
if err != nil {
return operations.Reject, nil, errors.Wrapf(err, "failed to retrieve comments for issue #%d", *issue.Number)
}
// Figure out the last time the issue was commented on.
lastCommented := *issue.UpdatedAt
for size := len(comments); size > 0; size-- {
// Skip all comments produced by the tool itself (as indicated by the
// presence of the PouleToken).
if strings.Contains(*comments[size-1].Body, configuration.PouleToken) {
comments = comments[0 : size-1]
continue
}
lastCommented = *comments[size-1].UpdatedAt
break
}
// Filter out issues which last commented date is under our threshold. We
// retrieve the issues in ascending update order: no more issues will be
// accepted after that.
if !lastCommented.Add(o.outdatedThreshold.Duration()).Before(time.Now()) {
return operations.Terminal, nil, nil
}
return operations.Accept, lastCommented, nil
}
func (o *pruneOperation) IssueListOptions(c *operations.Context) *github.IssueListByRepoOptions {
return &github.IssueListByRepoOptions{
State: "open",
Sort: "updated",
Direction: "asc",
ListOptions: github.ListOptions{
PerPage: 200,
},
}
}
func (o *pruneOperation) PullRequestListOptions(c *operations.Context) *github.PullRequestListOptions {
// pruneOperation doesn't apply to GitHub pull requests.
return nil
}
func formatPingComment(issue *github.Issue, o *pruneOperation) string {
comment := `<!-- %s:%s:%d%c -->
@%s It has been detected that this issue has not received any activity in over %s. Can you please let us know if it is still relevant:
- For a bug: do you still experience the issue with the latest version?
- For a feature request: was your request appropriately answered in a later version?
Thank you!`
return fmt.Sprintf(comment,
configuration.PouleToken,
o.action,
o.outdatedThreshold.Quantity,
o.outdatedThreshold.Unit,
*issue.User.Login,
o.outdatedThreshold.String(),
)
}
func formatWarnComment(issue *github.Issue, o *pruneOperation) string {
comment := `%s
This issue will be **automatically closed in %s** unless it is commented on.
`
base := formatPingComment(issue, o)
return fmt.Sprintf(comment, base, o.gracePeriod.String())
}
func parseAction(action string) (string, error) {
switch action {
case "close", "force-close", "ping", "warn":
break
default:
return "", fmt.Errorf("Invalid action %q", action)
}
return action, nil
}<|fim▁end|> | issue := item.Issue |
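The `Filter` step accepts an issue only when its last genuine comment is older than the outdated threshold, after skipping the tool's own ping/warn comments (tagged with `PouleToken`). A condensed Python sketch of the cutoff test; the duration mirrors the `6m` default above and the helper name is hypothetical:

```python
from datetime import datetime, timedelta, timezone

OUTDATED_THRESHOLD = timedelta(days=180)  # "6m" default, approximated

def is_outdated(last_commented: datetime) -> bool:
    """True when the last real comment predates the outdated threshold.

    `last_commented` must be timezone-aware (UTC).
    """
    return last_commented + OUTDATED_THRESHOLD < datetime.now(timezone.utc)
```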
<|file_name|>physics.ts<|end_file_name|><|fim▁begin|>/// <reference path="../../typings/index.d.ts" />
/// <reference path="../../node_modules/excalibur/dist/excalibur.d.ts" />
import * as ex from "excalibur";
import { Actor, Engine, IEngineOptions, Sprite, Texture, Vector } from "excalibur";
import * as p2 from "p2";
import { Body, Shape } from "p2";
export class PhysicsWorld {
public world:p2.World;
public game:ex.Engine;
public bodiesByActorId:Map<number, Body> = new Map<number, Body>();
constructor(game: ex.Engine) {
this.game = game;
this.world = new p2.World({
gravity:[0, 10] // negative values cause things to fall upwards
});<|fim▁hole|> // Add the ground
var groundBody = new Body({
mass: 0, // Setting mass to 0 makes it static
position: [0, game.canvasHeight],
angle: Math.PI
});
groundBody.addShape(new p2.Plane());
this.world.addBody(groundBody);
// Setup simulation
this.game.on("postupdate", (evt:ex.PostUpdateEvent) => {
// Step physics simulation
this.world.step(fixedTimeStep, evt.delta, maxSubSteps);
// Update actors in world
for (let actor of this.game.currentScene.children) {
// p2 position => ex position
if (this.bodiesByActorId.has(actor.id)) {
let actorPhysicsBody:Body = this.bodiesByActorId.get(actor.id);
actor.pos.setTo(actorPhysicsBody.interpolatedPosition[0], actorPhysicsBody.interpolatedPosition[1]);
actor.rotation = actorPhysicsBody.interpolatedAngle;
// Do not use Excalibur delta-pos or delta-rotation
actor.vel.setTo(0, 0);
actor.rx = 0;
}
}
});
}
}<|fim▁end|> |
var fixedTimeStep = 1 / 60; // seconds
var maxSubSteps = 10; // Max sub steps to catch up with the wall clock
|
<|file_name|>main.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import os
import sys
import commentjson
import time
import json
import datetime
from parse_input import check_config_structure
from build_nova import build_infrastructure, wait_for_spawn
from url_requests import test_infrastructure
import subprocess
### MAIN
if __name__ == "__main__":
if len(sys.argv) > 4:
configFile = sys.argv[4]
else:
print "No config file specified. Using 'config_example.json'"
configFile = 'config_example.json'
try:
with open(sys.argv[3] + "/config_files/" + configFile) as json_data_file:
try:
configData = commentjson.load(json_data_file)
except ValueError:
print "Wrong data format. Should be json."
exit(1)
except commentjson.JSONLibraryException:
print "Wrong data format. Should be json."
exit(1)
except IOError:<|fim▁hole|>
configData['creds']['os_password'] = sys.argv[1]
configData['framework_dir'] = sys.argv[3]
print "Checking JSON structure..."
if check_config_structure(configData) == -1:
print "problem reading config file"
exit(1)
configData['launch_time'] = datetime.datetime.now().strftime('%Y/%m/%d-%H:%M:%S')
print "Building the infrastructure..."
if build_infrastructure(configData) == -1:
print "problem building the infrastructure"
exit(1)
if wait_for_spawn(configData) == -1:
print "machines didn't spawn properly"
exit(1)
raw_input("Press Enter once the HA installation is ready:")
print "Sending test request to ensure the operability."
if test_infrastructure(configData) == -1:
print "Infrastructure not built properly"
#erase built VMs
configData['creds']['os_password'] = ""
with open(sys.argv[2], 'w') as outfile:
json.dump(configData, outfile)
exit(1)
print " Request received."
print "---"
time.sleep(5)
#configData['test_url']['full_url'] = "87.190.239.41" #TODO comment out
configData['creds']['os_password'] = ""
#TODO perform always, even after an exception
with open( sys.argv[2], 'w') as outfile:
json.dump(configData, outfile)
print "Testing availability of a service " + configData['test_url']['full_url']
exit(10) # OK<|fim▁end|> | print "File not found/permission was denied."
exit(1) |
<|file_name|>fields.py<|end_file_name|><|fim▁begin|>import pickle
from base64 import b64encode
from pulsar.utils.html import UnicodeMixin
class FieldError(RuntimeError):
pass
def get_field_type(field):
return getattr(field, 'repr_type', 'text')
class Field(UnicodeMixin):
'''Base class of all :mod:`.odm` Fields.
Each field is specified as a :class:`.Model` class attribute.
.. attribute:: index
Probably the most important field attribute, it establish if
the field creates indexes for queries.
If you don't need to query the field you should set this value to
``False``, it will save you memory.
.. note:: if ``index`` is set to ``False`` executing queries
against the field will
throw a :class:`stdnet.QuerySetError` exception.
No database queries are allowed for non indexed fields
as a design decision (explicit better than implicit).
Default ``True``.
.. attribute:: unique
If ``True``, the field must be unique throughout the model.
In this case :attr:`Field.index` is also ``True``.
Enforced at :class:`stdnet.BackendDataServer` level.
Default ``False``.
.. attribute:: primary_key
If ``True``, this field is the primary key for the model.
A primary key field has the following properties:
* :attr:`Field.unique` is also ``True``.
* There can be only one in a model.
* Its attribute name in the model must be **id**.
* If not specified a :class:`AutoIdField` will be added.
Default ``False``.
.. attribute:: required
If ``False``, the field is allowed to be null.
Default ``True``.
.. attribute:: default
Default value for this field. It can be a callable attribute with
arity 0.
Default ``None``.
.. attribute:: name
Field name, created by the ``odm`` at runtime.
.. attribute:: attname
The attribute name for the field, created by the :meth:`get_attname`
method at runtime. For most field, its value is the same as the
:attr:`name`. It is the field sorted in the backend database.
.. attribute:: model
The :class:`.Model` holding the field.
Created by the ``odm`` at runtime.
.. attribute:: charset
The charset used for encoding decoding text.
.. attribute:: hidden
If ``True`` the field will be hidden from search algorithms.
Default ``False``.
.. attribute:: python_type
The python ``type`` for the :class:`Field`.
.. attribute:: as_cache
If ``True`` the field contains data which is considered cache and
therefore always reproducible. Field marked as cache, have
:attr:`required` always ``False``.
This attribute is used by the :class:`.Model.fieldvalue_pairs` method
which returns a dictionary of field names and values.
Default ``False``.
'''
to_python = None
to_store = None
index = False
_default = None
creation_counter = 0
def __init__(self, unique=False, primary_key=False, required=True,
index=None, hidden=None, as_cache=False, **extras):
self.foreign_keys = ()
self.primary_key = primary_key
index = index if index is not None else self.index
if primary_key:
self.unique = True
self.required = True
self.index = True
self.as_cache = False
extras['default'] = None
else:
self.unique = unique
self.required = required
self.as_cache = as_cache
self.index = True if unique else index
if self.as_cache:
self.required = False
self.unique = False
self.index = False
self._meta = None
self.name = None
self.model = None
self._default = extras.pop('default', self._default)
self._handle_extras(**extras)
self.creation_counter = Field.creation_counter
Field.creation_counter += 1
def register_with_model(self, name, model):
'''Called during the creation of the :class:`StdModel`
class when :class:`Metaclass` is initialised. It fills
:attr:`Field.name` and :attr:`Field.model`. This is an internal
function users should never call.'''
assert not self.name, 'Field %s is already registered' % self
self.name = name
self.attname = self.get_attname()
self.model = model
self._meta = meta = model._meta
meta.dfields[name] = self
if self.to_python:
meta.converters[name] = self.to_python
if self.primary_key:
meta.pk = self
self.add_to_fields()
def add_to_fields(self):
'''Add this :class:`Field` to the fields of :attr:`model`.
'''
self._meta.scalarfields.append(self)
if self.index:
self._meta.indices.append(self)
def get_attname(self):
'''Generate the :attr:`attname` at runtime'''
return self.name
def to_json(self, value, store=None):
return value
def _handle_extras(self, **extras):
'''Callback to handle extra arguments during initialization.'''
self.error_extras(extras)
def error_extras(self, extras):
keys = list(extras)
if keys:
raise TypeError(("__init__() got an unexepcted keyword argument "
"'{0}'".format(keys[0])))
class CharField(Field):
def to_python(self, value, store=None):
if isinstance(value, bytes):
return value.decode('utf-8', 'ignore')
elif value is not None:
return str(value)
to_store = to_python
to_json = to_python
class AutoIdField(Field):
pass
class IntegerField(Field):
repr_type = 'numeric'
def to_python(self, value, store=None):
try:
return int(value)
except Exception:
return None
to_store = to_python
to_json = to_python
class BooleanField(Field):
repr_type = 'bool'
def to_python(self, value, store=None):
try:
return bool(int(value))
except Exception:
return None
to_json = to_python
def to_store(self, value, store=None):
try:
return 1 if value else 0
except Exception:
return None<|fim▁hole|> repr_type = 'numeric'
def to_python(self, value, store=None):
try:
return float(value)
except Exception:
return None
to_store = to_python
to_json = to_python
class PickleField(Field):
def to_python(self, value, store=None):
if value is not None:
try:
return pickle.loads(value)
except Exception:
return None
def to_store(self, value, store=None):
if value is not None:
try:
return pickle.dumps(value, protocol=2)
except Exception:
return None
def to_json(self, value, store=None):
if isinstance(value, (int, float, str, tuple, list, dict)):
return value
else:
value = self.to_store(value)
if value is not None:
return b64encode(value).decode('utf-8')
class JSONField(CharField):
'''A JSON field which implements automatic conversion to
and from an object and a JSON string. It is the responsibility of the
user to make sure the object is JSON serializable.
There are few extra parameters which can be used to customize the
behaviour and how the field is stored in the back-end server.
:parameter encoder_class: The JSON class used for encoding.
Default: :class:`stdnet.utils.jsontools.JSONDateDecimalEncoder`.
:parameter decoder_hook: A JSON decoder function.
Default: :class:`stdnet.utils.jsontools.date_decimal_hook`.
:parameter as_string: Set the :attr:`as_string` attribute.
Default ``True``.
.. attribute:: as_string
A boolean indicating if data should be serialized
into a single JSON string or it should be used to create several
fields prefixed with the field name and the double underscore ``__``.
Default ``True``.
Effectively, a :class:`JSONField` with ``as_string`` attribute set to
``False`` is a multifield, in the sense that it generates several
field-value pairs. For example, lets consider the following::
class MyModel(odm.StdModel):
name = odm.SymbolField()
data = odm.JSONField(as_string=False)
And::
>>> m = MyModel(name='bla',
... data={'pv': {'': 0.5, 'mean': 1, 'std': 3.5}})
>>> m.cleaned_data
{'name': 'bla', 'data__pv': 0.5, 'data__pv__mean': '1',
'data__pv__std': '3.5', 'data': '""'}
>>>
The reason for setting ``as_string`` to ``False`` is to allow
the :class:`JSONField` to define several fields at runtime,
without introducing new :class:`Field` in your model class.
These fields behave exactly like standard fields and therefore you
can, for example, sort queries with respect to them::
>>> MyModel.objects.query().sort_by('data__pv__std')
>>> MyModel.objects.query().sort_by('-data__pv')
which can be rather useful feature.
'''
_default = {}
def to_python(self, value, backend=None):
if value is None:
return self.get_default()
try:
return self.encoder.loads(value)
except TypeError:
return value
def serialise(self, value, lookup=None):
if lookup:
value = range_lookups[lookup](value)
return self.encoder.dumps(value)
def value_from_data(self, instance, data):
if self.as_string:
return data.pop(self.attname, None)
else:
return flat_to_nested(data, instance=instance,
attname=self.attname,
loads=self.encoder.loads)
def get_sorting(self, name, errorClass):
pass
def get_lookup(self, name, errorClass):
if self.as_string:
return super(JSONField, self).get_lookup(name, errorClass)
else:
if name:
name = JSPLITTER.join((self.attname, name))
return (name, None)
class CompositeIdField(Field):
'''This field can be used when an instance of a model is uniquely
identified by a combination of two or more :class:`Field` in the model
itself. It requires a number of positional arguments greater than or equal to 2.
These arguments must be fields names in the model where the
:class:`CompositeIdField` is defined.
.. attribute:: fields
list of :class:`Field` names which are used to uniquely identify a
model instance
Check the :ref:`composite id tutorial <tutorial-compositeid>` for more
information and tips on how to use it.
'''
type = 'composite'
def __init__(self, *fields, **kwargs):
super(CompositeIdField, self).__init__(**kwargs)
self.fields = fields
if len(self.fields) < 2:
raise FieldError('At least two fields are required by composite '
'CompositeIdField')
def get_value(self, instance, *bits):
if bits:
raise AttributeError
values = tuple((getattr(instance, f.attname) for f in self.fields))
return hash(values)
def register_with_model(self, name, model):
fields = []
for field in self.fields:
if field not in model._meta.dfields:
raise FieldError('Composite id field "%s" not in "%s" model.' %
(field, model._meta))
field = model._meta.dfields[field]
fields.append(field)
self.fields = tuple(fields)
return super(CompositeIdField, self).register_with_model(name, model)<|fim▁end|> |
class FloatField(Field): |
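Each field type above pairs a `to_python` decoder with a `to_store` encoder, and both converters swallow bad input by returning `None`. A quick illustration of that contract, assuming the classes above are importable as shown:

```python
bf = BooleanField()
assert bf.to_store(True) == 1               # Python value -> backend representation
assert bf.to_python("1") is True            # backend value -> Python value
assert bf.to_python("not-a-bool") is None   # converters swallow bad input
```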
<|file_name|>select-demo-module.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {CommonModule} from '@angular/common';
import {NgModule} from '@angular/core';
import {FormsModule, ReactiveFormsModule} from '@angular/forms';
import {MatButtonModule} from '@angular/material/button';
import {MatCardModule} from '@angular/material/card';
import {MatFormFieldModule} from '@angular/material/form-field';
import {MatIconModule} from '@angular/material/icon';
import {MatInputModule} from '@angular/material/input';
import {MatSelectModule} from '@angular/material/select';
import {MatToolbarModule} from '@angular/material/toolbar';
import {RouterModule} from '@angular/router';
import {SelectDemo} from './select-demo';
@NgModule({
imports: [
CommonModule,
FormsModule,
MatButtonModule,
MatCardModule,<|fim▁hole|> MatSelectModule,
MatToolbarModule,
ReactiveFormsModule,
RouterModule.forChild([{path: '', component: SelectDemo}]),
],
declarations: [SelectDemo],
})
export class SelectDemoModule {
}<|fim▁end|> | MatFormFieldModule,
MatIconModule,
MatInputModule, |
<|file_name|>run_suite.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import datetime
from run_utils import *
class TestSuite(object):
def __init__(self, options, cache):
self.options = options
self.cache = cache
self.nameprefix = "opencv_" + self.options.mode + "_"
self.tests = self.cache.gatherTests(self.nameprefix + "*", self.isTest)
def getOS(self):
return getPlatformVersion() or self.cache.getOS()
def getHardware(self):
res = []
if self.cache.getArch() in ["x86", "x64"] and self.cache.withCuda():
res.append("CUDA")
return res
def getLogName(self, app, timestamp):
app = self.getAlias(app)
rev = self.cache.getGitVersion()
if isinstance(timestamp, datetime.datetime):
timestamp = timestamp.strftime("%Y%m%d-%H%M%S")
if self.options.longname:
small_pieces = [self.getOS(), self.cache.getArch()] + self.cache.getDependencies() + self.getHardware() + [self.cache.getSIMDFeatures()]
big_pieces = [app, str(rev), timestamp, "_".join([p for p in small_pieces if p])]
l = "__".join(big_pieces)
else:
pieces = [app, self.cache.getOS(), self.cache.getArch()] + self.getHardware() + [rev, timestamp]
lname = "_".join([p for p in pieces if p])
lname = re.sub(r'[\(\)\[\]\s,]', '_', lname)
l = re.sub(r'_+', '_', lname)
return l + ".xml"
def listTests(self, short = False, main = False):
if len(self.tests) == 0:
raise Err("No tests found")
for t in self.tests:
if short:
t = self.getAlias(t)
if not main or self.cache.isMainModule(t):
log.info("%s", t)
def getAlias(self, fname):
return sorted(self.getAliases(fname), key = len)[0]
def getAliases(self, fname):
def getCuts(fname, prefix):
# filename w/o extension (opencv_test_core)
noext = re.sub(r"\.(exe|apk)$", '', fname)
# filename w/o prefix (core.exe)
nopref = fname
if fname.startswith(prefix):
nopref = fname[len(prefix):]
# filename w/o prefix and extension (core)
noprefext = noext
if noext.startswith(prefix):
noprefext = noext[len(prefix):]
return noext, nopref, noprefext
# input is full path ('/home/.../bin/opencv_test_core') or 'java'
res = [fname]
fname = os.path.basename(fname)
res.append(fname) # filename (opencv_test_core.exe)
for s in getCuts(fname, self.nameprefix):
res.append(s)
if self.cache.build_type == "Debug" and "Visual Studio" in self.cache.cmake_generator:
res.append(re.sub(r"d$", '', s)) # MSVC debug config, remove 'd' suffix
log.debug("Aliases: %s", set(res))
return set(res)
def getTest(self, name):
# return stored test name by provided alias
for t in self.tests:
if name in self.getAliases(t):
return t
raise Err("Can not find test: %s", name)
def getTestList(self, white, black):
res = [t for t in white or self.tests if self.getAlias(t) not in black]
if len(res) == 0:
raise Err("No tests found")
return set(res)
def isTest(self, fullpath):
if fullpath == "java":
return True
if not os.path.isfile(fullpath):
return False
if self.cache.getOS() == "nt" and not fullpath.endswith(".exe"):
return False
return os.access(fullpath, os.X_OK)
def wrapInValgrind(self, cmd = []):
if self.options.valgrind:
res = ['valgrind']
if self.options.valgrind_supp:
res.append("--suppressions=%s" % self.options.valgrind_supp)
res.extend(self.options.valgrind_opt)
return res + cmd
return cmd
def runTest(self, path, logfile, workingDir, args = []):
args = args[:]
exe = os.path.abspath(path)
if path == "java":
cmd = [self.cache.ant_executable, "-Dopencv.build.type=%s" % self.cache.build_type, "buildAndTest"]
ret = execute(cmd, cwd = self.cache.java_test_binary_dir + "/.build")
return None, ret
else:
if isColorEnabled(args):
args.append("--gtest_color=yes")
cmd = self.wrapInValgrind([exe] + args)
tempDir = TempEnvDir('OPENCV_TEMP_PATH', "__opencv_temp.")
tempDir.init()
log.warning("Run: %s" % " ".join(cmd))
ret = execute(cmd, cwd = workingDir)
tempDir.clean()
hostlogpath = os.path.join(workingDir, logfile)
if os.path.isfile(hostlogpath):
return hostlogpath, ret
return None, ret
def checkPrerequisites(self):
if self.cache.getArch() == "x64" and hostmachine == "x86":
raise Err("Target architecture is incompatible with current platform")
def runTests(self, tests, black, workingDir, args = []):
self.checkPrerequisites()
args = args[:]
logs = []
test_list = self.getTestList(tests, black)
date = datetime.datetime.now()
if len(test_list) != 1:
args = [a for a in args if not a.startswith("--gtest_output=")]
ret = 0
for test in test_list:
more_args = []
exe = self.getTest(test)
userlog = [a for a in args if a.startswith("--gtest_output=")]
if len(userlog) == 0:
logname = self.getLogName(exe, date)
more_args.append("--gtest_output=xml:" + logname)
else:
logname = userlog[0][userlog[0].find(":")+1:]
log.debug("Running the test: %s (%s) ==> %s in %s", exe, args + more_args, logname, workingDir)
if self.options.dry_run:
logfile, r = None, 0
else:
logfile, r = self.runTest(exe, logname, workingDir, args + more_args)
log.debug("Test returned: %s ==> %s", r, logfile)
if r != 0:
ret = r
if logfile:
logs.append(os.path.relpath(logfile, workingDir))
return logs, ret<|fim▁hole|>
#===================================================================================================
if __name__ == "__main__":
log.error("This is utility file, please execute run.py script")<|fim▁end|> | |
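`getAliases` derives several shortened names ("cuts") from a binary name so a test can be referred to by any of them. The same cuts as a standalone Python illustration, with a fixed prefix chosen for the example:

```python
import re

def get_cuts(fname, prefix="opencv_perf_"):
    """Return (no-extension, no-prefix, no-prefix-and-extension) variants."""
    noext = re.sub(r"\.(exe|apk)$", "", fname)
    nopref = fname[len(prefix):] if fname.startswith(prefix) else fname
    noprefext = noext[len(prefix):] if noext.startswith(prefix) else noext
    return noext, nopref, noprefext

assert get_cuts("opencv_perf_core.exe") == ("opencv_perf_core", "core.exe", "core")
```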
<|file_name|>profile.module.ts<|end_file_name|><|fim▁begin|>import { AskUsernameComponent } from './ask-username/ask-username.component';
import { BsDropdownModule } from 'ngx-bootstrap';
import { CoreModule } from '../core/core.module';
import { EditProfileComponent } from './edit-profile/edit-profile.component';
import { InputCounterModule } from 'ng4-input-counter';
import { ModuleWithProviders, NgModule } from '@angular/core';
import { ProfileCompletedGuard } from './guards/profile-completed.guard';
import { ProfileComponent } from './profile/profile.component';
import { ProfileRoutingModule } from './profile-routing.module';
import { RouterModule } from '@angular/router';
import { SharedModule } from '../shared/shared.module';
import { SnippetModule } from '../snippet/snippet.module';
import { TooltipModule } from 'ngx-bootstrap/tooltip/tooltip.module';
@NgModule({
imports: [
SharedModule,
BsDropdownModule,
TooltipModule,<|fim▁hole|> RouterModule,
CoreModule,
ProfileRoutingModule,
InputCounterModule,
SnippetModule
],
declarations: [
ProfileComponent,
EditProfileComponent,
AskUsernameComponent
]
})
export class ProfileModule {
static forRoot(): ModuleWithProviders {
return {
ngModule: ProfileModule,
providers: [
ProfileCompletedGuard
]
}
}
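  // Typical usage (assumed consumer code, not part of this module): the root
  // AppModule imports ProfileModule.forRoot() once so ProfileCompletedGuard
  // is provided app-wide, while feature modules import ProfileModule plainly.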
}<|fim▁end|> | |
<|file_name|>allowed_object_naming.ts<|end_file_name|><|fim▁begin|>import {Issue} from "../issue";
import {IObject} from "../objects/_iobject";
import {IRule, IRuleMetadata, RuleTag} from "./_irule";
import {BasicRuleConfig} from "./_basic_rule_config";
import {IRegistry} from "../_iregistry";
export class AllowedObjectNamingConf extends BasicRuleConfig {
}
export class AllowedObjectNaming implements IRule {
private conf = new AllowedObjectNamingConf();
public getMetadata(): IRuleMetadata {
return {
key: "allowed_object_naming",
title: "Allowed object naming",
shortDescription: `Enforces basic name length and namespace restrictions, see note SAP 104010`,
tags: [RuleTag.Naming, RuleTag.SingleFile],
};
}
public initialize(_reg: IRegistry) {
return this;
}
public getConfig(): AllowedObjectNamingConf {
return this.conf;
}
public setConfig(conf: AllowedObjectNamingConf) {
this.conf = conf;
}
public run(obj: IObject): Issue[] {
const allowed = obj.getAllowedNaming();
const name = obj.getName();
let message = "";
if (name.length > allowed.maxLength) {
message = "Name exceeds max length";
} else if (allowed.allowNamespace === false && name.indexOf("/") >= 0) {
message = "Namespace not allowed for object type";
} else if (allowed.customRegex !== undefined) {
if (name.match(allowed.customRegex) === null) {
message = "Name not allowed";
}
} else if (name.match(/^(\/[A-Z_\d]{3,8}\/)?[A-Z_\d<> ]+$/i) === null) {
message = "Name not allowed";
}
if (message.length > 0) {
return [Issue.atRow(obj.getFiles()[0], 1, message, this.getMetadata().key, this.conf.severity)];
}
return [];
}
<|fim▁hole|><|fim▁end|> | } |
<|file_name|>test_geocoder.py<|end_file_name|><|fim▁begin|># coding=utf-8
import json
import re
import responses
import pytest
import mapbox
def test_geocoder_default_name():
"""Default name is set"""
geocoder = mapbox.Geocoder()
assert geocoder.name == 'mapbox.places'
def test_geocoder_name():
"""Named dataset name is set"""
geocoder = mapbox.Geocoder('mapbox.places-permanent')
assert geocoder.name == 'mapbox.places-permanent'
def _check_coordinate_precision(coord, precision):
"""Coordinate precision is <= specified number of digits"""
if '.' not in coord:
return True
else:
return len(coord.split('.')[-1]) <= precision
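# Illustrative behavior of the helper above (inputs assumed, not taken from
# the tests below):
#   _check_coordinate_precision('12.345', 3)  -> True   ('345' has 3 digits)
#   _check_coordinate_precision('12.3456', 3) -> False
#   _check_coordinate_precision('12', 3)      -> True   (no fractional part)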
@responses.activate
def test_geocoder_forward():
"""Forward geocoding works"""
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/1600%20pennsylvania%20ave%20nw.json?access_token=pk.test',
match_querystring=True,
body='{"query": ["1600", "pennsylvania", "ave", "nw"]}', status=200,
content_type='application/json')
response = mapbox.Geocoder(access_token='pk.test').forward('1600 pennsylvania ave nw')
assert response.status_code == 200
assert response.json()['query'] == ["1600", "pennsylvania", "ave", "nw"]
@responses.activate
def test_geocoder_forward_geojson():
"""Forward geocoding .geojson method works"""
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/1600%20pennsylvania%20ave%20nw.json?access_token=pk.test',
match_querystring=True,
body='{"query": ["1600", "pennsylvania", "ave", "nw"]}', status=200,
content_type='application/json')
response = mapbox.Geocoder(access_token='pk.test').forward('1600 pennsylvania ave nw')
assert response.status_code == 200
assert response.geojson() == response.json()
@responses.activate
def test_geocoder_reverse():
"""Reverse geocoding works"""
lon, lat = -77.4371, 37.5227
body = json.dumps({"query": [lon, lat]})
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/{0},{1}.json?access_token=pk.test'.format(lon, lat),
match_querystring=True,
body=body,
status=200,
content_type='application/json')
response = mapbox.Geocoder(access_token='pk.test').reverse(lon=lon, lat=lat)
assert response.status_code == 200
assert response.json()['query'] == [lon, lat]
@responses.activate
def test_geocoder_reverse_geojson():
"""Reverse geocoding geojson works"""
lon, lat = -77.4371, 37.5227
body = json.dumps({"query": [lon, lat]})
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/{0},{1}.json?access_token=pk.test'.format(lon, lat),
match_querystring=True,
body=body,
status=200,
content_type='application/json')
response = mapbox.Geocoder(access_token='pk.test').reverse(lon=lon, lat=lat)
assert response.status_code == 200
assert response.geojson() == response.json()
def test_geocoder_place_types():
"""Place types are enumerated"""
assert sorted(mapbox.Geocoder().place_types.items()) == [
('address', "A street address with house number. Examples: 1600 Pennsylvania Ave NW, 1051 Market St, Oberbaumstrasse 7."),
('country', "Sovereign states and other political entities. Examples: United States, France, China, Russia."),
('district', "Second order administrative division. Only used when necessary. Examples: Tianjin, Beijing"),
('locality', "A smaller area within a place that possesses official status and boundaries. Examples: Oakleigh (Melbourne)"),
('neighborhood', 'A smaller area within a place, often without formal boundaries. Examples: Montparnasse, Downtown, Haight-Ashbury.'),
('place', "City, town, village or other municipality relevant to a country's address or postal system. Examples: Cleveland, Saratoga Springs, Berlin, Paris."),
('poi', "Places of interest including commercial venues, major landmarks, parks, and other features. Examples: Subway Restaurant, Yosemite National Park, Statue of Liberty."),
('poi.landmark', "Places of interest that are particularly notable or long-lived like parks, places of worship and museums. A strict subset of the poi place type. Examples: Yosemite National Park, Statue of Liberty."),
('postcode', "Postal code, varies by a country's postal system. Examples: 20009, CR0 3RL."),
('region', "First order administrative divisions within a country, usually provinces or states. Examples: California, Ontario, Essonne.")]
def test_validate_country_codes_err():
try:
mapbox.Geocoder()._validate_country_codes(('us', 'bogus'))
except mapbox.InvalidCountryCodeError as err:
assert str(err) == "bogus"
def test_validate_country():
assert mapbox.Geocoder()._validate_country_codes(
('us', 'br')) == {'country': 'us,br'}
def test_validate_place_types_err():
try:
mapbox.Geocoder()._validate_place_types(('address', 'bogus'))
except mapbox.InvalidPlaceTypeError as err:
assert str(err) == "bogus"
def test_validate_place_types():
assert mapbox.Geocoder()._validate_place_types(
('address', 'poi')) == {'types': 'address,poi'}
@responses.activate
def test_geocoder_forward_types():
"""Type filtering of forward geocoding works"""
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/1600%20pennsylvania%20ave%20nw.json?types=address,country,place,poi.landmark,postcode,region&access_token=pk.test',
match_querystring=True,
body='{"query": ["1600", "pennsylvania", "ave", "nw"]}', status=200,
content_type='application/json')
response = mapbox.Geocoder(
access_token='pk.test').forward(
'1600 pennsylvania ave nw',
types=('address', 'country', 'place', 'poi.landmark', 'postcode', 'region'))
assert response.status_code == 200
assert response.json()['query'] == ["1600", "pennsylvania", "ave", "nw"]
@responses.activate
def test_geocoder_reverse_types():
"""Type filtering of reverse geocoding works"""
lon, lat = -77.4371, 37.5227
body = json.dumps({"query": [lon, lat]})
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/{0},{1}.json?types=address,country,place,poi.landmark,postcode,region&access_token=pk.test'.format(lon, lat),
match_querystring=True,
body=body,
status=200,
content_type='application/json')
response = mapbox.Geocoder(
access_token='pk.test').reverse(
lon=lon, lat=lat,
types=('address', 'country', 'place', 'poi.landmark', 'postcode', 'region'))
assert response.status_code == 200
assert response.json()['query'] == [lon, lat]
@responses.activate
def test_geocoder_forward_proximity():
"""Proximity parameter works"""
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/1600%20pennsylvania%20ave%20nw.json?proximity=0.0,0.0&access_token=pk.test',
match_querystring=True,
body='{"query": ["1600", "pennsylvania", "ave", "nw"]}', status=200,
content_type='application/json')
response = mapbox.Geocoder(
access_token='pk.test').forward(
'1600 pennsylvania ave nw', lon=0, lat=0)
assert response.status_code == 200
assert response.json()['query'] == ["1600", "pennsylvania", "ave", "nw"]
@responses.activate
def test_geocoder_proximity_rounding():
"""Proximity parameter is rounded to 3 decimal places"""
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/1600%20pennsylvania%20ave%20nw.json',
match_querystring=False,
body='{"query": ["1600", "pennsylvania", "ave", "nw"]}', status=200,
content_type='application/json')
response = mapbox.Geocoder(
access_token='pk.test').forward(
'1600 pennsylvania ave nw', lon=0.123456, lat=0.987654)
# check coordinate precision for proximity flag
match = re.search(r'[&\?]proximity=([^&$]+)', response.url)
assert match is not None
for coord in re.split(r'(%2C|,)', match.group(1)):
assert _check_coordinate_precision(coord, 3)
@responses.activate
def test_geocoder_forward_bbox():
"""Bbox parameter works"""
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/washington.json?bbox=-78.3284%2C38.6039%2C-78.0428%2C38.7841&access_token=pk.test',
match_querystring=True,
body='{"query": ["washington"]}', status=200,
content_type='application/json')
response = mapbox.Geocoder(
access_token='pk.test').forward(
'washington', bbox=(-78.3284,38.6039,-78.0428,38.7841))
assert response.status_code == 200
assert response.json()['query'] == ["washington"]
@responses.activate
def test_geocoder_forward_limit():
"""Limit parameter works"""
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/washington.json?limit=3&access_token=pk.test',
match_querystring=True,
body='{"query": ["washington"], "features": [1, 2, 3]}', status=200,
content_type='application/json')
response = mapbox.Geocoder(
access_token='pk.test').forward(
'washington', limit=3)
assert response.status_code == 200
assert len(response.json()['features']) == 3
@responses.activate
def test_geocoder_reverse_limit():
"""Limit parameter works"""
lon, lat = -77.4371, 37.5227
body = json.dumps({"query": [lon, lat],
"features": [{'name': 'place'}]})
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/{0},{1}.json?access_token=pk.test&limit=1&types=place'.format(lon, lat),
match_querystring=True,
body=body,
status=200,
content_type='application/json')
service = mapbox.Geocoder(access_token='pk.test')
response = service.reverse(lon=lon, lat=lat, limit=1, types=['place'])
assert response.status_code == 200
assert len(response.json()['features']) == 1
@responses.activate
def test_geocoder_reverse_limit_requires_onetype():
"""Limit requires a single type"""
lon, lat = -77.123456789, 37.987654321
service = mapbox.Geocoder(access_token='pk.test')
with pytest.raises(mapbox.InvalidPlaceTypeError):
service.reverse(lon=lon, lat=lat, limit=1)
with pytest.raises(mapbox.InvalidPlaceTypeError):
service.reverse(lon=lon, lat=lat, limit=1, types=['places', 'country'])
@responses.activate
def test_geocoder_reverse_rounding():
"""Reverse geocoding parameters are rounded to 5 decimal places"""
lon, lat = -77.123456789, 37.987654321
body = json.dumps({"query": [lon, lat]})
responses.add(
responses.GET,
        re.compile(r'https://api\.mapbox\.com/geocoding/v5/mapbox\.places/.+\.json'),
match_querystring=False,
body=body,
status=200,
content_type='application/json')
response = mapbox.Geocoder(
access_token='pk.test').reverse(
lon=lon, lat=lat)<|fim▁hole|> match = re.search(r'\/([\-\d\.\,]+)\.json', response.url)
assert match is not None
for coord in re.split(r'(%2C|,)', match.group(1)):
assert _check_coordinate_precision(coord, 5)
@responses.activate
def test_geocoder_unicode():
"""Forward geocoding works with non-ascii inputs
Specifically, the URITemplate needs to utf-8 encode all inputs
"""
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/Florian%C3%B3polis%2C%20Brazil.json?access_token=pk.test',
match_querystring=True,
body='{}', status=200,
content_type='application/json')
query = "Florianópolis, Brazil"
try:
query = query.decode('utf-8') # Python 2
except:
pass # Python 3
response = mapbox.Geocoder(access_token='pk.test').forward(query)
assert response.status_code == 200
@responses.activate
def test_geocoder_forward_country():
"""Country parameter of forward geocoding works"""
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/1600%20pennsylvania%20ave%20nw.json?country=us&access_token=pk.test',
match_querystring=True,
body='{"query": ["1600", "pennsylvania", "ave", "nw"]}', status=200,
content_type='application/json')
response = mapbox.Geocoder(
access_token='pk.test').forward('1600 pennsylvania ave nw', country=['us'])
assert response.status_code == 200<|fim▁end|> |
# check coordinate precision for reverse geocoding coordinates |
<|file_name|>speaking.py<|end_file_name|><|fim▁begin|># Copyright (C) 2015 Ross D Milligan
# GNU GENERAL PUBLIC LICENSE Version 3 (full notice can be found at https://github.com/rdmilligan/SaltwashAR)
class Speaking:
# initialize speaking
def __init__(self, text_to_speech):
self.is_speaking = False
self.text_to_speech = text_to_speech
# text to speech
def _text_to_speech(self, text):<|fim▁hole|><|fim▁end|> | self.is_speaking = True
self.text_to_speech.convert(text)
self.is_speaking = False |
<|file_name|>FilterGroup.js<|end_file_name|><|fim▁begin|>"use strict";
var __extends = (this && this.__extends) || function (d, b) {
for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
var React = require('react');
var _1 = require("../../../");
var bemBlock = require('bem-cn');
var size = require('lodash/size');
var toArray = require('lodash/toArray');
var map = require('lodash/map');
var FilterGroupItem = (function (_super) {
__extends(FilterGroupItem, _super);
function FilterGroupItem(props) {
_super.call(this, props);
this.removeFilter = this.removeFilter.bind(this);
}
FilterGroupItem.prototype.removeFilter = function () {
var _a = this.props, removeFilter = _a.removeFilter, filter = _a.filter;
if (removeFilter) {
removeFilter(filter);
}
};
FilterGroupItem.prototype.render = function () {
var _a = this.props, bemBlocks = _a.bemBlocks, label = _a.label, itemKey = _a.itemKey;
return (React.createElement(_1.FastClick, {handler: this.removeFilter}, React.createElement("div", {className: bemBlocks.items("value"), "data-key": itemKey}, label)));
};
FilterGroupItem = __decorate([
_1.PureRender,
__metadata('design:paramtypes', [Object])
], FilterGroupItem);
return FilterGroupItem;
}(React.Component));
exports.FilterGroupItem = FilterGroupItem;
var FilterGroup = (function (_super) {
__extends(FilterGroup, _super);
function FilterGroup(props) {
_super.call(this, props);
this.removeFilters = this.removeFilters.bind(this);
}
FilterGroup.prototype.removeFilters = function () {
var _a = this.props, removeFilters = _a.removeFilters, filters = _a.filters;
if (removeFilters) {
removeFilters(filters);
}
};
FilterGroup.prototype.render = function () {
var _this = this;
var _a = this.props, mod = _a.mod, className = _a.className, title = _a.title, filters = _a.filters, removeFilters = _a.removeFilters, removeFilter = _a.removeFilter;
var bemBlocks = {
container: bemBlock(mod),
items: bemBlock(mod + "-items")
};
return (React.createElement("div", {key: title, className: bemBlocks.container().mix(className)}, React.createElement("div", {className: bemBlocks.items()}, React.createElement("div", {className: bemBlocks.items("title")}, title), React.createElement("div", {className: bemBlocks.items("list")}, map(filters, function (filter) { return _this.renderFilter(filter, bemBlocks); }))), this.renderRemove(bemBlocks)));
};
FilterGroup.prototype.renderFilter = function (filter, bemBlocks) {
var _a = this.props, translate = _a.translate, removeFilter = _a.removeFilter;
return (React.createElement(FilterGroupItem, {key: filter.value, itemKey: filter.value, bemBlocks: bemBlocks, filter: filter, label: translate(filter.value), removeFilter: removeFilter}));
};
FilterGroup.prototype.renderRemove = function (bemBlocks) {
if (!this.props.removeFilters)
return null;
return (React.createElement(_1.FastClick, {handler: this.removeFilters}, React.createElement("div", {className: bemBlocks.container("remove-action"), onClick: this.removeFilters}, "X")));
};<|fim▁hole|> return FilterGroup;
}(React.Component));
exports.FilterGroup = FilterGroup;
//# sourceMappingURL=FilterGroup.js.map<|fim▁end|> | FilterGroup.defaultProps = {
mod: "sk-filter-group",
translate: function (str) { return str; }
}; |
<|file_name|>ConvertibleStrings.cpp<|end_file_name|><|fim▁begin|>#include <vector>
#include <list>
#include <map>
#include <set>
#include <queue>
#include <deque>
#include <stack>
#include <bitset>
#include <algorithm>
#include <functional>
#include <numeric>
#include <utility>
#include <sstream>
#include <iostream>
#include <iomanip>
#include <cstdio>
#include <cmath>
#include <cstdlib>
#include <ctime>
using namespace std;
#define INF 2000000000
class ConvertibleStrings {
public:
int leastRemovals(string a, string b) {
vector<string> V;
vector<string> W;
int f = 362880;
string s = "ABCDEFGHI";
for(int i=0; i<f; i++) {
W.push_back(s);
next_permutation(s.begin(), s.end());
}
int c = INF;
for(int i=0; i<W.size(); i++) {
string t = "";
for(int j=0; j<a.length(); j++) {
t += W[i][(a[j]-'A') % 9];
}
int cp = 0;
for(int j=0; j<b.length(); j++) {
if(t[j] != b[j])
cp++;
}
c = min(c, cp);
}
return c;
}
};
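// Worked example (a reasoning sketch, mirroring leastRemovals above): each of
// the 9! = 362880 strings in W relabels the letters 'A'-'I' of a; for
// a = "AAAA", b = "ABCD" any mapping sends 'A' to a single letter, so at most
// one position can match b and the minimum removal count is 3, as test 1
// below expects.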
// BEGIN KAWIGIEDIT TESTING
// Generated by KawigiEdit 2.1.4 (beta) modified by pivanof
bool KawigiEdit_RunTest(int testNum, string p0, string p1, bool hasAnswer, int p2) {
cout << "Test " << testNum << ": [" << "\"" << p0 << "\"" << "," << "\"" << p1 << "\"";
cout << "]" << endl;
ConvertibleStrings *obj;
int answer;
obj = new ConvertibleStrings();
clock_t startTime = clock();
answer = obj->leastRemovals(p0, p1);
clock_t endTime = clock();
delete obj;
bool res;
res = true;
cout << "Time: " << double(endTime - startTime) / CLOCKS_PER_SEC << " seconds" << endl;
if (hasAnswer) {
cout << "Desired answer:" << endl;
cout << "\t" << p2 << endl;
}
cout << "Your answer:" << endl;
cout << "\t" << answer << endl;
if (hasAnswer) {
res = answer == p2;
}
if (!res) {
cout << "DOESN'T MATCH!!!!" << endl;
} else if (double(endTime - startTime) / CLOCKS_PER_SEC >= 2) {
cout << "FAIL the timeout" << endl;
res = false;
} else if (hasAnswer) {
cout << "Match :-)" << endl;
} else {
cout << "OK, but is it right?" << endl;
}
cout << "" << endl;
return res;
}
int main() {
bool all_right;
all_right = true;
string p0;
string p1;
int p2;
{
// ----- test 0 -----
p0 = "DD";
p1 = "FF";
p2 = 0;
all_right = KawigiEdit_RunTest(0, p0, p1, true, p2) && all_right;
// ------------------
}
{
// ----- test 1 -----
p0 = "AAAA";
p1 = "ABCD";<|fim▁hole|> all_right = KawigiEdit_RunTest(1, p0, p1, true, p2) && all_right;
// ------------------
}
{
// ----- test 2 -----
p0 = "AAIAIA";
p1 = "BCDBEE";
p2 = 3;
all_right = KawigiEdit_RunTest(2, p0, p1, true, p2) && all_right;
// ------------------
}
{
// ----- test 3 -----
p0 = "ABACDCECDCDAAABBFBEHBDFDDHHD";
p1 = "GBGCDCECDCHAAIBBFHEBBDFHHHHE";
p2 = 9;
all_right = KawigiEdit_RunTest(3, p0, p1, true, p2) && all_right;
// ------------------
}
{
// ----- test 4 -----
p0 = "CADEFIECDHEDCDIDGFCAEFFBGIHIIHGIBIH";
p1 = "DGBIHADCBDAHEBFGHFAHAIDCDACFAAADBEI";
p2 = 22;
all_right = KawigiEdit_RunTest(4, p0, p1, true, p2) && all_right;
// ------------------
}
if (all_right) {
cout << "You're a stud (at least on the example cases)!" << endl;
} else {
cout << "Some of the test cases had errors." << endl;
}
return 0;
}
// END KAWIGIEDIT TESTING
//Powered by KawigiEdit 2.1.4 (beta) modified by pivanof!<|fim▁end|> | p2 = 3; |
<|file_name|>test_copy_package.py<|end_file_name|><|fim▁begin|>"""
test package copying
"""
import shutil
import time
import os.path
import os
from rez.system import system
from rez.build_process_ import create_build_process
from rez.build_system import create_build_system
from rez.resolved_context import ResolvedContext
from rez.packages_ import get_latest_package
from rez.package_copy import copy_package
from rez.vendor.version.version import VersionRange
from rez.tests.util import TestBase, TempdirMixin
class TestCopyPackage(TestBase, TempdirMixin):
@classmethod
def setUpClass(cls):
TempdirMixin.setUpClass()
path = os.path.dirname(__file__)
packages_path = os.path.join(path, "data", "builds", "packages")
cls.src_root = os.path.join(cls.root, "src", "packages")
cls.install_root = os.path.join(cls.root, "packages")
shutil.copytree(packages_path, cls.src_root)
# repo we will copy packages into
cls.dest_install_root = os.path.join(cls.root, "dest_packages")
# include modules
pypath = os.path.join(path, "data", "python", "late_bind")
cls.settings = dict(
packages_path=[cls.install_root],
package_filter=None,
package_definition_python_path=pypath,
resolve_caching=False,
warn_untimestamped=False,
warn_old_commands=False,
implicit_packages=[])
@classmethod
def tearDownClass(cls):
TempdirMixin.tearDownClass()
def setup_once(self):
# build packages used by this test
self._build_package("build_util", "1")
self._build_package("floob")
self._build_package("foo", "1.0.0")
self._build_package("foo", "1.1.0")
self._build_package("bah", "2.1")
@classmethod
def _create_builder(cls, working_dir):
buildsys = create_build_system(working_dir)
return create_build_process(process_type="local",
working_dir=working_dir,
build_system=buildsys)
@classmethod
def _build_package(cls, name, version=None):
# create the builder
working_dir = os.path.join(cls.src_root, name)
if version:
working_dir = os.path.join(working_dir, version)
builder = cls._create_builder(working_dir)
builder.build(install_path=cls.install_root, install=True, clean=True)
def _reset_dest_repository(self):
system.clear_caches()
if os.path.exists(self.dest_install_root):
shutil.rmtree(self.dest_install_root)
os.makedirs(self.dest_install_root)
def _get_src_pkg(self, name, version):
return get_latest_package(
name,
range_=VersionRange("==" + version),
paths=[self.install_root],
error=True
)
def _get_dest_pkg(self, name, version):
return get_latest_package(
name,
range_=VersionRange("==" + version),
paths=[self.dest_install_root],
error=True
)
def _assert_copied(self, result, copied, skipped):
self.assertEqual(len(result["copied"]), copied)
self.assertEqual(len(result["skipped"]), skipped)
def test_1(self):
"""Simple package copy, no variants, no overwrite."""
self._reset_dest_repository()
# make a copy of a package
src_pkg = self._get_src_pkg("floob", "1.2.0")
result = copy_package(
package=src_pkg,
dest_repository=self.dest_install_root
)
self._assert_copied(result, 1, 0)
# check the copied package exists and matches
dest_pkg = self._get_dest_pkg("floob", "1.2.0")
result_variant = result["copied"][0][1]
dest_variant = dest_pkg.iter_variants().next()
self.assertEqual(dest_variant.handle, result_variant.handle)
pyfile = os.path.join(dest_pkg.base, "python")
ctime = os.stat(pyfile).st_ctime
# copy again but with overwrite=False; should do nothing
result = copy_package(
package=src_pkg,
dest_repository=self.dest_install_root
)
self._assert_copied(result, 0, 1)
# check that package payload wasn't overwritten
self.assertEqual(os.stat(pyfile).st_ctime, ctime)
def test_2(self):
"""Package copy, no variants, overwrite."""
self._reset_dest_repository()
# make a copy of a package
src_pkg = self._get_src_pkg("floob", "1.2.0")
copy_package(
package=src_pkg,
dest_repository=self.dest_install_root
)
dest_pkg = self._get_dest_pkg("floob", "1.2.0")
pyfile = os.path.join(dest_pkg.base, "python")
ctime = os.stat(pyfile).st_ctime
# overwrite same package copy
result = copy_package(
package=src_pkg,
dest_repository=self.dest_install_root,
overwrite=True
)
self._assert_copied(result, 1, 0)
# check that package payload was overwritten
self.assertNotEqual(os.stat(pyfile).st_ctime, ctime)
def test_3(self):
"""Package copy, variants, overwrite and non-overwrite."""
self._reset_dest_repository()
# make a copy of a varianted package
src_pkg = self._get_src_pkg("bah", "2.1")
result = copy_package(
package=src_pkg,
dest_repository=self.dest_install_root
)
self._assert_copied(result, 2, 0) # 2 variants
# check the copied variants exist and match
dest_pkg = self._get_dest_pkg("bah", "2.1")
ctimes = []
for index in (0, 1):
result_variant = result["copied"][index][1]
dest_variant = dest_pkg.get_variant(index)
self.assertEqual(dest_variant.handle, result_variant.handle)
pyfile = os.path.join(dest_variant.root, "python")
ctime = os.stat(pyfile).st_ctime
ctimes.append(ctime)
# copy variant with no overwrite, should do nothing
result = copy_package(
package=src_pkg,
dest_repository=self.dest_install_root,
variants=[1]
)
self._assert_copied(result, 0, 1)
# copy variant with overwrite
result = copy_package(
package=src_pkg,
dest_repository=self.dest_install_root,
variants=[1],
overwrite=True
)
self._assert_copied(result, 1, 0)
# check copied variant is the one we expect
dest_pkg = self._get_dest_pkg("bah", "2.1")
result_variant = result["copied"][0][1]
dest_variant = dest_pkg.get_variant(1)
self.assertEqual(dest_variant.handle, result_variant.handle)
# check copied variant payload was overwritten
pyfile = os.path.join(dest_variant.root, "python")
self.assertNotEqual(os.stat(pyfile).st_ctime, ctimes[1])
# check non-copied variant payload was not written
skipped_variant = dest_pkg.get_variant(0)
pyfile = os.path.join(skipped_variant.root, "python")
self.assertEqual(os.stat(pyfile).st_ctime, ctimes[0])
def test_4(self):
"""Package copy with rename, reversion."""
self._reset_dest_repository()
# copy a package to a different name and version
src_pkg = self._get_src_pkg("floob", "1.2.0")
result = copy_package(
package=src_pkg,
dest_repository=self.dest_install_root,
dest_name="flaab",
dest_version="5.4.1"
)
self._assert_copied(result, 1, 0)
# check copied variant is the one we expect
dest_pkg = self._get_dest_pkg("flaab", "5.4.1")
result_variant = result["copied"][0][1]
dest_variant = dest_pkg.iter_variants().next()
self.assertEqual(dest_variant.handle, result_variant.handle)
def test_5(self):
"""Package copy with standard, new timestamp."""
self._reset_dest_repository()
# wait 1 second to guarantee newer timestamp in copied pkg
time.sleep(1)
# copy package and overwrite timestamp
src_pkg = self._get_src_pkg("floob", "1.2.0")
copy_package(
package=src_pkg,
dest_repository=self.dest_install_root
)
# check copied variant contains expected timestamp
dest_pkg = self._get_dest_pkg("floob", "1.2.0")
self.assertTrue(dest_pkg.timestamp > src_pkg.timestamp)
def test_6(self):
"""Package copy with keep_timestamp."""
self._reset_dest_repository()
# wait 1 second to ensure we don't just accidentally get same timestamp
time.sleep(1)
# copy package and overwrite timestamp
src_pkg = self._get_src_pkg("floob", "1.2.0")
copy_package(
package=src_pkg,
dest_repository=self.dest_install_root,
keep_timestamp=True
)
# check copied variant contains expected timestamp
dest_pkg = self._get_dest_pkg("floob", "1.2.0")
self.assertEqual(dest_pkg.timestamp, src_pkg.timestamp)
def test_7(self):
"""Package copy with overrides."""
self._reset_dest_repository()
overrides = {
"timestamp": 10000,
"description": "this is a copy",
"some_extra_key": True
}
# copy package and overwrite timestamp
src_pkg = self._get_src_pkg("floob", "1.2.0")
copy_package(
package=src_pkg,
dest_repository=self.dest_install_root,
overrides=overrides
)
# check copied variant contains expected timestamp
dest_pkg = self._get_dest_pkg("floob", "1.2.0")
for k, v in overrides.iteritems():
self.assertEqual(getattr(dest_pkg, k), v)
def test_8(self):
"""Ensure that include modules are copied."""
self._reset_dest_repository()
src_pkg = self._get_src_pkg("foo", "1.1.0")<|fim▁hole|> )
dest_pkg = self._get_dest_pkg("foo", "1.1.0")
dest_variant = dest_pkg.iter_variants().next()
# do a resolve
ctxt = ResolvedContext(
["foo==1.1.0"],
package_paths=[self.dest_install_root, self.install_root]
)
resolved_variant = ctxt.get_resolved_package("foo")
self.assertEqual(dest_variant.handle, resolved_variant.handle)
# this can only match if the include module was copied with the package
environ = ctxt.get_environ(parent_environ={})
self.assertEqual(environ.get("EEK"), "2")<|fim▁end|> | copy_package(
package=src_pkg,
dest_repository=self.dest_install_root, |
<|file_name|>triggers.go<|end_file_name|><|fim▁begin|>// Copyright 2015 Canonical Ltd.
// Licensed under the AGPLv3, see LICENCE file for details.
package meterstatus
import (
"time"
)
type TriggerCreator func(WorkerState, string, time.Time, Clock, time.Duration, time.Duration) (<-chan time.Time, <-chan time.Time)
// GetTriggers returns the signal channels for state transitions based on the current state.
// It controls the transitions of the inactive meter status worker.
//
// In a simple case, the transitions are trivial:
//<|fim▁hole|>// D------------------A----------------------R--------------------->
//
// D - disconnect time
// A - amber status triggered
// R - red status triggered
//
// The problem arises from the fact that the lifetime of the worker can
// be interrupted, possibly with significant portions of the duration missing.
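//
// A minimal usage sketch (the clock and grace periods are assumed values,
// not taken from the worker's configuration):
//
//	amber, red := GetTriggers(Uninitialized, "GREEN", disconnectedAt,
//		clk, time.Hour, 24*time.Hour)
//	select {
//	case <-amber:
//		// escalate the meter status to AMBER
//	case <-red:
//		// escalate the meter status to RED
//	}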
func GetTriggers(
wst WorkerState,
status string,
disconnectedAt time.Time,
clk Clock,
amberGracePeriod time.Duration,
redGracePeriod time.Duration) (<-chan time.Time, <-chan time.Time) {
now := clk.Now()
if wst == Done {
return nil, nil
}
if wst <= WaitingAmber && status == "RED" {
// If the current status is already RED, we don't want to deescalate.
wst = WaitingRed
// } else if wst <= WaitingAmber && now.Sub(disconnectedAt) >= amberGracePeriod {
// If we missed the transition to amber, activate it.
// wst = WaitingRed
} else if wst < Done && now.Sub(disconnectedAt) >= redGracePeriod {
// If we missed the transition to amber and it's time to transition to RED, go straight to RED.
wst = WaitingRed
}
if wst == WaitingRed {
redSignal := clk.After(redGracePeriod - now.Sub(disconnectedAt))
return nil, redSignal
}
if wst == WaitingAmber || wst == Uninitialized {
amberSignal := clk.After(amberGracePeriod - now.Sub(disconnectedAt))
redSignal := clk.After(redGracePeriod - now.Sub(disconnectedAt))
return amberSignal, redSignal
}
return nil, nil
}<|fim▁end|> | |
<|file_name|>clientabc.py<|end_file_name|><|fim▁begin|>"""Abstract base class for kernel clients"""
#-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
import abc
#-----------------------------------------------------------------------------
# Main kernel client class
#-----------------------------------------------------------------------------
class KernelClientABC(object):
"""KernelManager ABC.
The docstrings for this class can be found in the base implementation:
`IPython.kernel.client.KernelClient`
"""
__metaclass__ = abc.ABCMeta
@abc.abstractproperty
def kernel(self):
pass
@abc.abstractproperty
def shell_channel_class(self):
pass
@abc.abstractproperty
def iopub_channel_class(self):
pass
@abc.abstractproperty
def hb_channel_class(self):
pass
@abc.abstractproperty
def stdin_channel_class(self):
pass
#--------------------------------------------------------------------------
# Channel management methods
#--------------------------------------------------------------------------
@abc.abstractmethod
def start_channels(self, shell=True, iopub=True, stdin=True, hb=True):
pass
@abc.abstractmethod
def stop_channels(self):
pass
<|fim▁hole|> @abc.abstractproperty
def shell_channel(self):
pass
@abc.abstractproperty
def iopub_channel(self):
pass
@abc.abstractproperty
def stdin_channel(self):
pass
@abc.abstractproperty
def hb_channel(self):
pass<|fim▁end|> | @abc.abstractproperty
def channels_running(self):
pass
|
<|file_name|>parallax.js<|end_file_name|><|fim▁begin|>$(function () {
var controller = new ScrollMagic.Controller({
globalSceneOptions: {
triggerHook: 'onLeave',
reverse: true
}
});
// $('.homepage .panel').each(function () {
// var element = $(this);
// console.log(element);
// new ScrollMagic.Scene({triggerElement: element})
// .setPin(element)
// .setClassToggle(element,'active')
// .addTo(controller)
// })
// var actual_positions = [0];
// var mid_points = [];
// var all_scenes = [];
// $('.services .panel').each(function (index) {
// if($(this).hasClass('main')) {
// new ScrollMagic.Scene({triggerElement: '.services .main'})
// .setPin('.services .main')
// .setClassToggle('.services .main','active')
// .addTo(controller)
// }
// else {
// var element_id = $(this).attr('id');
// var element_id_with_hash = '#' + $(this).attr('id');<|fim▁hole|> // .setClassToggle(element_id_with_hash,'show-bottom-nav')
// .addTo(controller)
// all_scenes.push({
// id: element_id,
// scene: scene
// });
// actual_positions.push(Math.ceil(scene.triggerPosition()));
// if(actual_positions.length > 1) {
// mid_points.push((actual_positions[index] + actual_positions [index-1]) / 2)
// }
// }
// })
// $('a[href*=#]:not([href=#])').click(function () {
// var id = $(this).attr('href').replace('#','');
// if($(this).parent().parent().parent().hasClass('bottom-nav')) {
// var index = $('.bottom-nav ul li a').index($(this));
// }else {
// var index = $('ul.wrap li a').index($(this));
// }
// if(id == 'down') {
// setTimeout(function () {
// $('.bottom-nav').addClass('fixed')
// },1100)
// }
// else {
// var targetted_scene = all_scenes[index];
// if(targetted_scene.id == id) {
// $('html,body').animate({scrollTop: targetted_scene.scene.scrollOffset()},1000);
// return false;
// }
// }
// })
// var add_and_remove_active_class = function (index) {
// $('.bottom-nav').addClass('fixed')
// $('.bottom-nav ul li').removeClass('active');
// $('.bottom-nav ul li:nth-child(' + index + ')').children('a').parent().last().addClass('active');
// }
// $(window).scroll(function () {
// if ($(".show-bottom-nav")[0]){
// $('.bottom-nav').addClass('fixed')
// }else{
// $('.bottom-nav').removeClass('fixed')
// }
// for(var index=0; index<mid_points.length; index++) {
// var next_index = index+1;
// var last_index = mid_points.length-1
// /* check between mid point ranges and set active class to the respective nav item. */
// if($(window).scrollTop() > mid_points[index] && $(window).scrollTop() < mid_points[next_index]) {
// add_and_remove_active_class(next_index);
// break;
// /* if nothing matches and reaches to last index then set active active to last nav item. */
// }else if ($(window).scrollTop() > mid_points[last_index]) {
// add_and_remove_active_class(mid_points.length);
// /* remove from all if its rolled back to the top*/
// }else {
// $('.bottom-nav ul li').removeClass('active');
// }
// }
// });
});
//change navigation color on scroll
/*
var offset_top_news_section = $('.color-light').offset().top;
var offset_top_contact_section = $('.pattern').offset().top;
console.log(offset_top_contact_section, offset_top_news_section);
$(window).scroll(function (event) {
var scroll = $(window).scrollTop();
console.log(scroll);
if(scroll < offset_top_contact_section && scroll >= offset_top_news_section) {
$('.homepage nav').addClass('change-color');
} else {
$('.homepage nav').removeClass('change-color');
}
});
*/<|fim▁end|> |
// var scene = new ScrollMagic.Scene({triggerElement: element_id_with_hash})
// .setPin(element_id_with_hash) |
<|file_name|>NotificationDetail.js<|end_file_name|><|fim▁begin|>import React from "react";
import { Link } from "react-router";
import Loader from "../../core/Loader";
import DataComponent from "../base/DataComponent";
import DataError from "../base/DataError";
import ResourceAction from "../base/ResourceAction";
import history from "../../history";
/**
* @author Niklas Keller
*/
class NotificationDetail extends DataComponent {
getDataUri() {
return "notifications/" + this.props.params.id;
}
componentWillReceiveProps(next) {<|fim▁hole|> if (oldId !== newId) {
this.fetchData();
}
}
render() {
let content = null;
if (this.state.loaded) {
if (this.state.failed) {
content = (
<DataError />
);
} else {
let emails = this.state.data.emails.map((item) => (
<li><code>{item}</code></li>
));
emails = (
<ul>
{emails}
</ul>
);
content = (
<div>
<div className="actions">
<Link to={this.props.location.pathname + "/edit"} className="action">
<i className="fa fa-pencil icon"/>
Edit
</Link>
<ResourceAction icon="trash" method="DELETE" uri={"notifications/" + this.props.params.id}
onClick={() => window.confirm("Do you really want to delete this notification?")}
onSuccess={() => history.replaceState(null, "/notifications")}
onError={(e) => {
let error = typeof e === "object" && "data" in e ? e.data.detail : "Unknown error.";
window.alert("Deletion failed. " + error);
}}
backend={this.props.backend}>
Delete
</ResourceAction>
</div>
<h1>Notification: {this.state.data.name}</h1>
<label className="input-label">Description</label>
<pre>{this.state.data.description}</pre>
<label className="input-label">SQL Query</label>
<pre>{this.state.data.query}</pre>
<label className="input-label">Check Period</label>
<div>Checked every <code>{this.state.data.checkPeriod}</code> seconds.</div>
<label className="input-label">E-Mails</label>
<div>{emails}</div>
<label className="input-label">Send Once</label>
<div>{this.state.data.sendOnce.value ? "Yes, will be sent once and then be deleted." : "No, will be sent on every change."}</div>
</div>
);
}
}
return (
<Loader loaded={this.state.loaded} className="loader">
{content}
</Loader>
);
}
}
export default NotificationDetail;<|fim▁end|> | let oldId = this.props.params.id;
let newId = next.params.id;
|
<|file_name|>lxqt-panel_eu.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="eu">
<context>
<name>AddPluginDialog</name>
<message>
<location filename="../../../config/addplugindialog.ui" line="14"/>
<source>Add Plugins</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/addplugindialog.ui" line="22"/>
<source>Search:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/addplugindialog.ui" line="98"/>
<source>Add Widget</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/addplugindialog.ui" line="105"/>
<source>Close</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/addplugindialog.cpp" line="115"/>
<source>(only one instance can run at a time)</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>ConfigPanelDialog</name>
<message>
<source>Configure panel</source>
<translation type="vanished">Konfiguratu panela</translation>
</message>
<message>
<source>Panel size</source>
<translation type="vanished">Panelaren tamaina</translation>
</message>
<message>
<source>Size:</source>
<translation type="vanished">Tamaina:</translation>
</message>
<message>
<source>px</source>
<translation type="vanished">px</translation>
</message>
<message>
<source>Use automatic sizing</source>
<translation type="vanished">Erabili tamaina automatikoa</translation>
</message>
<message>
<source>Panel length && position</source>
<translation type="vanished">Panelaren luzera eta posizioa</translation>
</message>
<message>
<source>Left</source>
<translation type="vanished">Ezkerra</translation>
</message>
<message>
<source>Center</source>
<translation type="vanished">Erdia</translation>
</message>
<message>
<source>Right</source>
<translation type="vanished">Eskuina</translation>
</message>
<message>
<source>%</source>
<translation type="vanished">%</translation>
</message>
<message>
<source>Alignment:</source>
<translation type="vanished">Lerrokatzea:</translation>
</message>
<message>
<source>Length:</source>
<translation type="vanished">Luzera:</translation>
</message>
<message>
<source>Position:</source>
<translation type="vanished">Posizioa:</translation>
</message>
<message>
<source>Top of desktop</source>
<translation type="vanished">Mahaigainaren goialdea</translation>
</message>
<message>
<source>Left of desktop</source>
<translation type="vanished">Mahaigainaren ezkerraldea</translation>
</message>
<message>
<source>Right of desktop</source>
<translation type="vanished">Mahaigainaren eskuinaldea</translation>
</message>
<message>
<source>Bottom of desktop</source>
<translation type="vanished">Mahaigainaren behealdea</translation>
</message>
<message>
<source>Top of desktop %1</source>
<translation type="vanished">%1 mahaigainaren goialdea</translation>
</message>
<message>
<source>Left of desktop %1</source>
<translation type="vanished">%1 mahaigainaren ezkerraldea</translation>
</message>
<message>
<source>Right of desktop %1</source>
<translation type="vanished">%1 mahaigainaren eskuinaldea</translation>
</message>
<message>
<source>Bottom of desktop %1</source>
<translation type="vanished">%1 mahaigainaren behealdea</translation>
</message>
<message>
<location filename="../../../config/configpaneldialog.cpp" line="31"/>
<source>Configure Panel</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpaneldialog.cpp" line="38"/><|fim▁hole|> </message>
<message>
<location filename="../../../config/configpaneldialog.cpp" line="42"/>
<source>Widgets</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>ConfigPanelWidget</name>
<message>
<location filename="../../../config/configpanelwidget.ui" line="20"/>
<source>Configure panel</source>
<translation type="unfinished">Konfiguratu panela</translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="44"/>
<source>Size</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="81"/>
<source>Size:</source>
<translation type="unfinished">Tamaina:</translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="109"/>
<location filename="../../../config/configpanelwidget.ui" line="153"/>
<source> px</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="166"/>
<source>Icon size:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="88"/>
<source>Length:</source>
<translation type="unfinished">Luzera:</translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="68"/>
<source><p>Negative pixel value sets the panel length to that many pixels less than available screen space.</p><p/><p><i>E.g. "Length" set to -100px, screen size is 1000px, then real panel length will be 900 px.</i></p></source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="96"/>
<source>%</source>
<translation type="unfinished">%</translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="101"/>
<source>px</source>
<translation type="unfinished">px</translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="173"/>
<source>Rows count:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="202"/>
<source>Alignment && position</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="262"/>
<location filename="../../../config/configpanelwidget.ui" line="285"/>
<source>Zero means no animation</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="288"/>
<location filename="../../../config/configpanelwidget.ui" line="314"/>
<source> ms</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="265"/>
<source>Animation duration:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="219"/>
<location filename="../../../config/configpanelwidget.cpp" line="202"/>
<source>Left</source>
<translation type="unfinished">Ezkerra</translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="224"/>
<location filename="../../../config/configpanelwidget.cpp" line="203"/>
<location filename="../../../config/configpanelwidget.cpp" line="209"/>
<source>Center</source>
<translation type="unfinished">Erdia</translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="229"/>
<location filename="../../../config/configpanelwidget.cpp" line="204"/>
<source>Right</source>
<translation type="unfinished">Eskuina</translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="250"/>
<source>A&uto-hide</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="301"/>
<location filename="../../../config/configpanelwidget.ui" line="311"/>
<source>Zero means no delay</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="304"/>
<source>Show with delay:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="330"/>
<source>Don't allow maximized windows go under the panel window</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="333"/>
<source>Reserve space on display</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="349"/>
<source>Custom styling</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="370"/>
<source>Font color:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="410"/>
<source>Background color:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="449"/>
<source>Background opacity:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="481"/>
<source><small>Compositing is required for panel transparency.</small></source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="509"/>
<source>Background image:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="208"/>
<source>Alignment:</source>
<translation type="unfinished">Lerrokatzea:</translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.ui" line="237"/>
<source>Position:</source>
<translation type="unfinished">Posizioa:</translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.cpp" line="168"/>
<source>Top of desktop</source>
<translation type="unfinished">Mahaigainaren goialdea</translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.cpp" line="169"/>
<source>Left of desktop</source>
<translation type="unfinished">Mahaigainaren ezkerraldea</translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.cpp" line="170"/>
<source>Right of desktop</source>
<translation type="unfinished">Mahaigainaren eskuinaldea</translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.cpp" line="171"/>
<source>Bottom of desktop</source>
<translation type="unfinished">Mahaigainaren behealdea</translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.cpp" line="180"/>
<source>Top of desktop %1</source>
<translation type="unfinished">%1 mahaigainaren goialdea</translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.cpp" line="181"/>
<source>Left of desktop %1</source>
<translation type="unfinished">%1 mahaigainaren ezkerraldea</translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.cpp" line="182"/>
<source>Right of desktop %1</source>
<translation type="unfinished">%1 mahaigainaren eskuinaldea</translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.cpp" line="183"/>
<source>Bottom of desktop %1</source>
<translation type="unfinished">%1 mahaigainaren behealdea</translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.cpp" line="208"/>
<source>Top</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.cpp" line="210"/>
<source>Bottom</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.cpp" line="368"/>
<location filename="../../../config/configpanelwidget.cpp" line="384"/>
<source>Pick color</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.cpp" line="402"/>
<source>Images (*.png *.gif *.jpg)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpanelwidget.cpp" line="402"/>
<source>Pick image</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>ConfigPluginsWidget</name>
<message>
<location filename="../../../config/configpluginswidget.ui" line="14"/>
<source>Configure Plugins</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpluginswidget.ui" line="84"/>
<source>Note: changes made in this page cannot be reset.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpluginswidget.ui" line="112"/>
<source>Move up</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpluginswidget.ui" line="115"/>
<location filename="../../../config/configpluginswidget.ui" line="129"/>
<location filename="../../../config/configpluginswidget.ui" line="150"/>
<location filename="../../../config/configpluginswidget.ui" line="164"/>
<location filename="../../../config/configpluginswidget.ui" line="185"/>
<source>...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpluginswidget.ui" line="126"/>
<source>Move down</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpluginswidget.ui" line="147"/>
<source>Add</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../config/configpluginswidget.ui" line="161"/>
<source>Remove</source>
<translation type="unfinished">Kendu</translation>
</message>
<message>
<location filename="../../../config/configpluginswidget.ui" line="182"/>
<source>Configure</source>
<translation type="unfinished">Konfiguratu</translation>
</message>
</context>
<context>
<name>LXQtPanel</name>
<message>
<location filename="../../../lxqtpanel.cpp" line="1068"/>
<location filename="../../../lxqtpanel.cpp" line="1094"/>
<source>Panel</source>
<translation>Panela</translation>
</message>
<message>
<location filename="../../../lxqtpanel.cpp" line="1097"/>
<source>Configure Panel</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../lxqtpanel.cpp" line="1102"/>
<source>Manage Widgets</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../lxqtpanel.cpp" line="1108"/>
<source>Add New Panel</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../lxqtpanel.cpp" line="1115"/>
<source>Remove Panel</source>
<comment>Menu Item</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../lxqtpanel.cpp" line="1120"/>
<source>Lock This Panel</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../lxqtpanel.cpp" line="1265"/>
<source>Remove Panel</source>
<comment>Dialog Title</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../lxqtpanel.cpp" line="1266"/>
<source>Removing a panel can not be undone.
Do you want to remove this panel?</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Configure panel...</source>
<translation type="vanished">Konfiguratu panela...</translation>
</message>
<message>
<source>Add plugins ...</source>
<translation type="vanished">Gehitu pluginak...</translation>
</message>
</context>
<context>
<name>LXQtPanelPlugin</name>
<message>
<source>Configure</source>
<translation type="vanished">Konfiguratu</translation>
</message>
<message>
<source>Move</source>
<translation type="vanished">Mugitu</translation>
</message>
<message>
<source>Remove</source>
<translation type="vanished">Kendu</translation>
</message>
</context>
<context>
<name>LXQtPanelPrivate</name>
<message>
<source>Configure panel</source>
<translation type="vanished">Konfiguratu panela</translation>
</message>
</context>
<context>
<name>Plugin</name>
<message>
<location filename="../../../plugin.cpp" line="416"/>
<source>Configure "%1"</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../plugin.cpp" line="421"/>
<source>Move "%1"</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../plugin.cpp" line="429"/>
<source>Remove "%1"</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>main</name>
<message>
<location filename="../../../lxqtpanelapplication.cpp" line="95"/>
<source>Use alternate configuration file.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../lxqtpanelapplication.cpp" line="96"/>
<source>Configuration file</source>
<translation type="unfinished"></translation>
</message>
</context>
</TS><|fim▁end|> | <source>Panel</source>
<translation type="unfinished">Panela</translation> |
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django.db import models<|fim▁hole|>class Post(models.Model):
	title = models.TextField(null=True, blank=True) # Title
	content = models.TextField(null=True, blank=True) # Body of the post
url = models.URLField(max_length=200, null=True, blank=True)
hijo = models.ForeignKey(Hijo, blank=True, null=True)
date_creation = models.DateTimeField(auto_now_add=True)
published_by = models.ForeignKey(User, blank=True, null=True)<|fim▁end|> | from django.contrib.auth.models import User
from users.models import Hijo
|
<|file_name|>x2search4porn.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from Plugins.Extensions.MediaPortal.plugin import _
from Plugins.Extensions.MediaPortal.resources.imports import *
from Plugins.Extensions.MediaPortal.resources.keyboardext import VirtualKeyBoardExt
CONFIG = "/usr/lib/enigma2/python/Plugins/Extensions/MediaPortal/additions/additions.xml"
class toSearchForPorn(MPScreen):
def __init__(self, session):
self.plugin_path = mp_globals.pluginPath
self.skin_path = mp_globals.pluginPath + mp_globals.skinsPath
path = "%s/%s/defaultGenreScreenCover.xml" % (self.skin_path, config.mediaportal.skin.value)
if not fileExists(path):
path = self.skin_path + mp_globals.skinFallback + "/defaultGenreScreenCover.xml"
with open(path, "r") as f:
self.skin = f.read()
f.close()
MPScreen.__init__(self, session)
self["actions"] = ActionMap(["MP_Actions"], {
"ok" : self.keyOK,
"cancel" : self.keyCancel,
"up" : self.keyUp,
"down" : self.keyDown,
"right" : self.keyRight,
"left" : self.keyLeft,
"red" : self.keyRed,
"green" : self.keyGreen,
"yellow" : self.keyYellow
}, -1)
self['title'] = Label("2Search4Porn")
self['name'] = Label("Your Search Requests")
self['ContentTitle'] = Label("Annoyed, typing in your search-words for each Porn-Site again and again?")
self['F1'] = Label(_("Delete"))
self['F2'] = Label(_("Add"))
self['F3'] = Label(_("Edit"))
self.keyLocked = True
self.suchString = ''
self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)
self['liste'] = self.ml
self.onLayoutFinish.append(self.Searches)
def Searches(self):<|fim▁hole|> if fileExists(config.mediaportal.watchlistpath.value+"mp_2s4p"):
fobj = open(config.mediaportal.watchlistpath.value+"mp_2s4p","r")
for line in fobj:
self.genreliste.append((line, None))
fobj.close()
self.ml.setList(map(self._defaultlistcenter, self.genreliste))
self.keyLocked = False
def SearchAdd(self):
suchString = ""
self.session.openWithCallback(self.SearchAdd1, VirtualKeyBoardExt, title = (_("Enter Search")), text = suchString, is_dialog=True)
def SearchAdd1(self, suchString):
if suchString is not None and suchString != "":
self.genreliste.append((suchString,None))
self.ml.setList(map(self._defaultlistcenter, self.genreliste))
def SearchEdit(self):
if len(self.genreliste) > 0:
suchString = self['liste'].getCurrent()[0][0].rstrip()
self.session.openWithCallback(self.SearchEdit1, VirtualKeyBoardExt, title = (_("Enter Search")), text = suchString, is_dialog=True)
def SearchEdit1(self, suchString):
if suchString is not None and suchString != "":
pos = self['liste'].getSelectedIndex()
self.genreliste.pop(pos)
self.genreliste.insert(pos,(suchString,None))
self.ml.setList(map(self._defaultlistcenter, self.genreliste))
def SearchCallback(self, suchString):
if suchString is not None and suchString != "":
self.session.open(toSearchForPornBrowse,suchString)
def keyOK(self):
if self.keyLocked:
return
if len(self.genreliste) > 0:
self.SearchCallback(self['liste'].getCurrent()[0][0].rstrip())
def keyRed(self):
if self.keyLocked:
return
if len(self.genreliste) > 0:
self.genreliste.pop(self['liste'].getSelectedIndex())
self.ml.setList(map(self._defaultlistcenter, self.genreliste))
def keyGreen(self):
if self.keyLocked:
return
self.SearchAdd()
def keyYellow(self):
if self.keyLocked:
return
self.SearchEdit()
def keyCancel(self):
if self.keyLocked:
return
self.genreliste.sort(key=lambda t : t[0].lower())
fobj_out = open(config.mediaportal.watchlistpath.value+"mp_2s4p","w")
x = len(self.genreliste)
if x > 0:
for c in range(x):
writeback = self.genreliste[c][0].rstrip()+"\n"
fobj_out.write(writeback)
fobj_out.close()
else:
os.remove(config.mediaportal.watchlistpath.value+"mp_2s4p")
self.close()
class toSearchForPornBrowse(MPScreen):
def __init__(self, session, suchString):
self.suchString = suchString
self.plugin_path = mp_globals.pluginPath
self.skin_path = mp_globals.pluginPath + mp_globals.skinsPath
path = "%s/%s/defaultGenreScreen.xml" % (self.skin_path, config.mediaportal.skin.value)
if not fileExists(path):
path = self.skin_path + mp_globals.skinFallback + "/defaultGenreScreen.xml"
with open(path, "r") as f:
self.skin = f.read()
f.close()
MPScreen.__init__(self, session)
self["actions"] = ActionMap(["MP_Actions"], {
"ok" : self.keyOK,
"cancel" : self.keyCancel,
"up" : self.keyUp,
"down" : self.keyDown,
"right" : self.keyRight,
"left" : self.keyLeft
}, -1)
self['title'] = Label("2Search4Porn")
self['ContentTitle'] = Label("Select Site")
self['name'] = Label(_("Selection:"))
self.keyLocked = True
self.pornscreen = None
self.genreliste = []
self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)
self['liste'] = self.ml
self.onLayoutFinish.append(self.loadsites)
def loadsites(self):
conf = xml.etree.cElementTree.parse(CONFIG)
for x in conf.getroot():
if x.tag == "set" and x.get("name") == 'additions':
root = x
for x in root:
if x.tag == "plugin":
if x.get("type") == "mod":
if x.get("confcat") == "porn" and x.get("search") == "1":
gz = x.get("gz")
if not config.mediaportal.showgrauzone.value and gz == "1":
pass
else:
mod = eval("config.mediaportal." + x.get("confopt") + ".value")
if mod:
exec("self.genreliste.append((\""+x.get("name").replace("&","&")+"\", None))")
self.genreliste.sort(key=lambda t : t[0].lower())
self.keyLocked = False
self.ml.setList(map(self._defaultlistcenter, self.genreliste))
def keyOK(self):
if self.keyLocked:
return
auswahl = self['liste'].getCurrent()[0][0]
self.suchString = self.suchString.rstrip()
		conf = xml.etree.cElementTree.parse(CONFIG)
for x in conf.getroot():
if x.tag == "set" and x.get("name") == 'additions':
root = x
for x in root:
if x.tag == "plugin":
if x.get("type") == "mod":
if x.get("confcat") == "porn" and x.get("search") == "1":
						if auswahl == x.get("name").replace("&amp;","&"):
modfile = x.get("modfile")
modfile = "Plugins.Extensions.MediaPortal.additions.%s.%s" % (modfile.split(".")[0], modfile.split(".")[1])
exec("from "+modfile+" import *")
exec("self.suchString = self.suchString.replace(\" \",\""+x.get("delim")+"\")")
exec("Name = \"2Search4Porn - %s\" % (self.suchString)")
exec("Link = \""+x.get("searchurl").replace("&","&")+"\" % (self.suchString)")
print "Name: "+ Name
print "Link: "+ Link
exec("self.session.open("+x.get("searchscreen")+", Link, Name"+x.get("searchparam").replace(""","\"")+")")<|fim▁end|> | self.genreliste = []
self['liste'] = self.ml
if not fileExists(config.mediaportal.watchlistpath.value+"mp_2s4p"):
open(config.mediaportal.watchlistpath.value+"mp_2s4p","w").close() |
<|file_name|>category.rs<|end_file_name|><|fim▁begin|>use std::collections::HashSet;
use time::Timespec;
use conduit::{Request, Response};
use conduit_router::RequestParams;
use pg::GenericConnection;
use pg::rows::Row;
use {Model, Crate};
use db::RequestTransaction;
use util::{RequestUtils, CargoResult, ChainError};
use util::errors::NotFound;
#[derive(Clone)]
pub struct Category {
pub id: i32,
pub category: String,
pub slug: String,
pub description: String,
pub created_at: Timespec,
pub crates_cnt: i32,
}
#[derive(RustcEncodable, RustcDecodable)]
pub struct EncodableCategory {
pub id: String,
pub category: String,
pub slug: String,
pub description: String,
pub created_at: String,
pub crates_cnt: i32,
}
#[derive(RustcEncodable, RustcDecodable)]
pub struct EncodableCategoryWithSubcategories {
pub id: String,
pub category: String,
pub slug: String,
pub description: String,
pub created_at: String,
pub crates_cnt: i32,
pub subcategories: Vec<EncodableCategory>,
}
impl Category {
pub fn find_by_category(conn: &GenericConnection, name: &str)
-> CargoResult<Category> {
let stmt = try!(conn.prepare("SELECT * FROM categories \
WHERE category = $1"));
let rows = try!(stmt.query(&[&name]));
rows.iter().next()
.chain_error(|| NotFound)
.map(|row| Model::from_row(&row))
}
pub fn find_by_slug(conn: &GenericConnection, slug: &str)
-> CargoResult<Category> {
let stmt = try!(conn.prepare("SELECT * FROM categories \
WHERE slug = LOWER($1)"));
let rows = try!(stmt.query(&[&slug]));
rows.iter().next()
.chain_error(|| NotFound)
.map(|row| Model::from_row(&row))
}
pub fn encodable(self) -> EncodableCategory {
let Category {
id: _, crates_cnt, category, slug, description, created_at
} = self;
EncodableCategory {
id: slug.clone(),
slug: slug.clone(),
description: description.clone(),
created_at: ::encode_time(created_at),
crates_cnt: crates_cnt,
category: category,
}
}
pub fn update_crate(conn: &GenericConnection,
krate: &Crate,
categories: &[String]) -> CargoResult<Vec<String>> {
let old_categories = try!(krate.categories(conn));
let old_categories_ids: HashSet<_> = old_categories.iter().map(|cat| {
cat.id
}).collect();
// If a new category specified is not in the database, filter
// it out and don't add it. Return it to be able to warn about it.
let mut invalid_categories = vec![];
let new_categories: Vec<Category> = categories.iter().flat_map(|c| {
match Category::find_by_slug(conn, &c) {
Ok(cat) => Some(cat),
Err(_) => {
invalid_categories.push(c.to_string());
None
},
}
}).collect();
let new_categories_ids: HashSet<_> = new_categories.iter().map(|cat| {
cat.id
}).collect();
let to_rm: Vec<_> = old_categories_ids
.difference(&new_categories_ids)
.cloned()
.collect();
let to_add: Vec<_> = new_categories_ids
.difference(&old_categories_ids)
.cloned()
.collect();
if !to_rm.is_empty() {
try!(conn.execute("DELETE FROM crates_categories \
WHERE category_id = ANY($1) \
AND crate_id = $2",
&[&to_rm, &krate.id]));
}
if !to_add.is_empty() {
let insert: Vec<_> = to_add.into_iter().map(|id| {
format!("({}, {})", krate.id, id)
}).collect();
let insert = insert.join(", ");
try!(conn.execute(&format!("INSERT INTO crates_categories \
(crate_id, category_id) VALUES {}",
insert),
&[]));
}
Ok(invalid_categories)
}
pub fn count_toplevel(conn: &GenericConnection) -> CargoResult<i64> {
let sql = format!("\
SELECT COUNT(*) \
FROM {} \
WHERE category NOT LIKE '%::%'",
Model::table_name(None::<Self>
));
let stmt = try!(conn.prepare(&sql));
let rows = try!(stmt.query(&[]));
Ok(rows.iter().next().unwrap().get("count"))
}
pub fn toplevel(conn: &GenericConnection,
sort: &str,
limit: i64,
offset: i64) -> CargoResult<Vec<Category>> {
let sort_sql = match sort {
"crates" => "ORDER BY crates_cnt DESC",
_ => "ORDER BY category ASC",
};
// Collect all the top-level categories and sum up the crates_cnt of
// the crates in all subcategories
let stmt = try!(conn.prepare(&format!(
"SELECT c.id, c.category, c.slug, c.description, c.created_at, \
COALESCE (( \
SELECT sum(c2.crates_cnt)::int \
FROM categories as c2 \
WHERE c2.slug = c.slug \
OR c2.slug LIKE c.slug || '::%' \
), 0) as crates_cnt \
FROM categories as c \
WHERE c.category NOT LIKE '%::%' {} \
LIMIT $1 OFFSET $2",
sort_sql
)));
let categories: Vec<_> = try!(stmt.query(&[&limit, &offset]))
.iter()
.map(|row| Model::from_row(&row))
.collect();
Ok(categories)
}
pub fn subcategories(&self, conn: &GenericConnection)
-> CargoResult<Vec<Category>> {
let stmt = try!(conn.prepare("\
SELECT c.id, c.category, c.slug, c.description, c.created_at, \
COALESCE (( \
SELECT sum(c2.crates_cnt)::int \
FROM categories as c2 \
WHERE c2.slug = c.slug \
OR c2.slug LIKE c.slug || '::%' \
), 0) as crates_cnt \
FROM categories as c \<|fim▁hole|> WHERE c.category ILIKE $1 || '::%' \
AND c.category NOT ILIKE $1 || '::%::%'"));
let rows = try!(stmt.query(&[&self.category]));
Ok(rows.iter().map(|r| Model::from_row(&r)).collect())
}
}
impl Model for Category {
fn from_row(row: &Row) -> Category {
Category {
id: row.get("id"),
created_at: row.get("created_at"),
crates_cnt: row.get("crates_cnt"),
category: row.get("category"),
slug: row.get("slug"),
description: row.get("description"),
}
}
fn table_name(_: Option<Category>) -> &'static str { "categories" }
}
/// Handles the `GET /categories` route.
pub fn index(req: &mut Request) -> CargoResult<Response> {
let conn = try!(req.tx());
let (offset, limit) = try!(req.pagination(10, 100));
let query = req.query();
let sort = query.get("sort").map_or("alpha", String::as_str);
let categories = try!(Category::toplevel(conn, sort, limit, offset));
let categories = categories.into_iter().map(Category::encodable).collect();
// Query for the total count of categories
let total = try!(Category::count_toplevel(conn));
#[derive(RustcEncodable)]
struct R { categories: Vec<EncodableCategory>, meta: Meta }
#[derive(RustcEncodable)]
struct Meta { total: i64 }
Ok(req.json(&R {
categories: categories,
meta: Meta { total: total },
}))
}
/// Handles the `GET /categories/:category_id` route.
pub fn show(req: &mut Request) -> CargoResult<Response> {
let slug = &req.params()["category_id"];
let conn = try!(req.tx());
let cat = try!(Category::find_by_slug(&*conn, &slug));
let subcats = try!(cat.subcategories(&*conn)).into_iter().map(|s| {
s.encodable()
}).collect();
let cat = cat.encodable();
let cat_with_subcats = EncodableCategoryWithSubcategories {
id: cat.id,
category: cat.category,
slug: cat.slug,
description: cat.description,
created_at: cat.created_at,
crates_cnt: cat.crates_cnt,
subcategories: subcats,
};
#[derive(RustcEncodable)]
struct R { category: EncodableCategoryWithSubcategories}
Ok(req.json(&R { category: cat_with_subcats }))
}
/// Handles the `GET /category_slugs` route.
pub fn slugs(req: &mut Request) -> CargoResult<Response> {
let conn = try!(req.tx());
let stmt = try!(conn.prepare("SELECT slug FROM categories \
ORDER BY slug"));
let rows = try!(stmt.query(&[]));
#[derive(RustcEncodable)]
struct Slug { id: String, slug: String }
let slugs: Vec<Slug> = rows.iter().map(|r| {
let slug: String = r.get("slug");
Slug { id: slug.clone(), slug: slug }
}).collect();
#[derive(RustcEncodable)]
struct R { category_slugs: Vec<Slug> }
Ok(req.json(&R { category_slugs: slugs }))
}<|fim▁end|> | |
<|file_name|>simple-model-test.js<|end_file_name|><|fim▁begin|>import {
moduleForModel,
test
} from 'ember-qunit';
moduleForModel('simple-model', 'SimpleModel', {
// Specify the other units that are required for this test.
needs: []
});
test('it exists', function() {
var model = this.subject();<|fim▁hole|>});<|fim▁end|> | // var store = this.store();
ok(!!model); |
<|file_name|>service_v4.py<|end_file_name|><|fim▁begin|>import shelve
import os
import re
from resource_api.interfaces import Resource as BaseResource, Link as BaseLink, AbstractUriPolicy
from resource_api.schema import StringField, DateTimeField, IntegerField
from resource_api.service import Service
from resource_api.errors import ValidationError
RE_SHA1 = re.compile("^[a-f0-9]{40}$")
SHELVE_PATH = "/tmp/school.shelve.db"
class ShelveService(Service):
def __init__(self):
super(ShelveService, self).__init__()
self._storage = shelve.open(SHELVE_PATH, writeback=True)
def _get_context(self):
return {"storage": self._storage}
def _get_user(self, data):
return None
def __del__(self):
self._storage.close()
class Resource(BaseResource):
def __init__(self, context):
super(Resource, self).__init__(context)
self._storage = context["storage"]
def exists(self, user, pk):
return pk in self._storage.get(self.get_name(), {})
def get_data(self, user, pk):
return self._storage.get(self.get_name(), {}).get(pk)
def delete(self, user, pk):
self._storage.get(self.get_name(), {}).pop(pk)
self._storage.sync()
def create(self, user, pk, data):
if self.get_name() not in self._storage:
self._storage[self.get_name()] = {}
self._storage[self.get_name()][pk] = data
self._storage.sync()
def update(self, user, pk, data):
self._storage[self.get_name()][pk].update(data)
self._storage.sync()
def get_uris(self, user, params=None):
return self._storage.get(self.get_name(), {}).keys()
def get_count(self, user, params=None):
        return len(self.get_uris(user, params))
class Link(BaseLink):
def __init__(self, context):
super(Link, self).__init__(context)<|fim▁hole|>
def get_data(self, user, pk, rel_pk):
return self._storage.get((pk, self.get_name()), {}).get(rel_pk)
def create(self, user, pk, rel_pk, data=None):
key = (pk, self.get_name())
if key not in self._storage:
self._storage[key] = {}
self._storage[key][rel_pk] = data
self._storage.sync()
def update(self, user, pk, rel_pk, data):
        key = (pk, self.get_name())
        self._storage[key][rel_pk].update(data)
self._storage.sync()
def delete(self, user, pk, rel_pk):
self._storage.get((pk, self.get_name()), {}).pop(rel_pk)
self._storage.sync()
def get_uris(self, user, pk, params=None):
return self._storage.get((pk, self.get_name()), {}).keys()
def get_count(self, user, pk, params=None):
        return len(self.get_uris(user, pk, params))
class Student(Resource):
""" A pupil """
class Schema:
email = StringField(regex="[^@]+@[^@]+\.[^@]+", pk=True,
description="Addess to which the notifications shall be sent")
first_name = StringField(description="Given name(s)")
last_name = StringField(description="Family name(s)")
birthday = DateTimeField()
class Links:
class courses(Link):
""" Courses the student has ever attended """
class Schema:
grade = IntegerField(min_val=1, max_val=5)
target = "Course"
related_name = "students"
master = True
class comments(Link):
""" Comments made by the student """
target = "Comment"
related_name = "student"
class ratings(Link):
""" Ratings given by the student """
target = "TeacherRating"
related_name = "student"
class Teacher(Resource):
""" A lecturer """
class Schema:
email = StringField(regex="[^@]+@[^@]+\.[^@]+", pk=True,
description="Addess to which the notifications shall be sent")
first_name = StringField(description="Given name(s)")
last_name = StringField(description="Family name(s)")
category = StringField(description="TQS Category", choices=["four", "five", "five plus", "six"])
class Links:
class ratings(Link):
""" Ratings given to the teacher """
target = "TeacherRating"
related_name = "teacher"
class courses(Link):
""" Courses the teacher is responsible for """
target = "Course"
related_name = "teacher"
class Course(Resource):
""" An educational unit represinting the lessons for a specific set of topics """
class Schema:
name = StringField(pk=True, description="Name of the course. E.g. physics, maths.")
duration = IntegerField(description="Length of the course in weeks")
class Links:
class teacher(Link):
""" The lecturer of the course """
target = "Teacher"
related_name = "courses"
cardinality = Link.cardinalities.ONE
master = True
required = True
class comments(Link):
""" All comments made about the course """
target = "Comment"
related_name = "course"
class ratings(Link):
""" All ratings that were given to the teachers of the specific course """
target = "TeacherRating"
related_name = "course"
class students(Link):
""" All pupils who attend the course """
target = "Student"
related_name = "courses"
class AutoGenSha1UriPolicy(AbstractUriPolicy):
""" Uses a randomly generated sha1 as a primary key """
@property
def type(self):
return "autogen_policy"
def generate_pk(self, data):
        # 20 random bytes hex-encode to the 40 characters RE_SHA1 expects
        return os.urandom(20).encode('hex')
def serialize(self, pk):
return pk
def deserialize(self, pk):
if not isinstance(pk, basestring):
raise ValidationError("Has to be string")
        if not RE_SHA1.match(pk):
raise ValidationError("PK is not a valid SHA1")
return pk
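# Usage note for AutoGenSha1UriPolicy: generate_pk mints identifiers
# server-side from os.urandom, so clients of the Comment/TeacherRating
# resources below never supply "pk" themselves; deserialize is the guard that
# any identifier coming back in still matches the 40-hex-character shape this
# policy produces.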
class Comment(Resource):
""" Student's comment about the course """
UriPolicy = AutoGenSha1UriPolicy
class Schema:
pk = StringField(pk=True, description="Identifier of the resource")
value = StringField(description="Text of the comment")
creation_time = DateTimeField(description="Time when the comment was added (for sorting purpose)")
class Links:
class student(Link):
""" The pupil who made the comment """
target = "Student"
related_name = "comments"
cardinality = Link.cardinalities.ONE
master = True
required = True
class course(Link):
""" The subject the comment was made about """
target = "Course"
related_name = "comments"
cardinality = Link.cardinalities.ONE
master = True
required = True
class TeacherRating(Resource):
""" Student's rating about teacher's performance """
UriPolicy = AutoGenSha1UriPolicy
class Schema:
pk = StringField(pk=True, description="Identifier of the resource")
value = IntegerField(min_val=0, max_val=100, description="Lecturer's performance identifier ")
creation_time = DateTimeField(description="Time when the rating was added (for sorting purpose)")
class Links:
class student(Link):
""" The pupil who gave the rating to the teacher """
target = "Student"
related_name = "ratings"
cardinality = Link.cardinalities.ONE
master = True
required = True
class course(Link):
""" The subject with respect to which the rating was given """
target = "Course"
related_name = "ratings"
cardinality = Link.cardinalities.ONE
master = True
required = True
class teacher(Link):
""" The lecturer to whom the rating is related """
target = "Teacher"
related_name = "ratings"
cardinality = Link.cardinalities.ONE
master = True
required = True
srv = ShelveService()
srv.register(Student)
srv.register(Teacher)
srv.register(Course)
srv.register(Comment)
srv.register(TeacherRating)
srv.setup()<|fim▁end|> | self._storage = context["storage"]
def exists(self, user, pk, rel_pk):
return rel_pk in self._storage.get((pk, self.get_name()), {}) |
<|file_name|>setuphandlers.py<|end_file_name|><|fim▁begin|>from plone import api
from plone.app.controlpanel.security import ISecuritySchema
def setup_workspaces(portal):
mp = api.portal.get_tool(name='portal_membership')
# set type to custom member type
mp.setMemberAreaType('wigo.workspaces.workspace')<|fim▁hole|>def setup_security(portal):
""" Add security controlpanel settings.
"""
site = api.portal.get()
#site security setup!
security = ISecuritySchema(site)
security.set_enable_user_folders(True)
security.use_uuid_as_userid(True)
def setupVarious(context):
if context.readDataFile('wigo.statusapp-various.txt') is None:
return
portal = api.portal.get()
setup_workspaces(portal)
# call update security
setup_security(portal)<|fim▁end|> | # set member folder name
mp.setMembersFolderById('sqa')
|
<|file_name|>sensorCount.py<|end_file_name|><|fim▁begin|>from .AlFeatureTemplate import AlFeatureTemplate
from .sensorCountRoutine import AlFeatureSensorCountRoutine
import numpy as np
class AlFeatureSensorCount(AlFeatureTemplate):
def __init__(self, normalize=False):
"""
        Initialize the sensor count feature
        :param normalize: if True, scale the count by (window_size * 2) so it stays within [0, 1]
"""
AlFeatureTemplate.__init__(self,
name='sensorCount',
description='Number of Events in the window related to the sensor',
per_sensor=True,
enabled=True,
routine=AlFeatureSensorCountRoutine())
# Normalize the number between 0 to 1
self.normalize = normalize
def get_feature_value(self, data_list, cur_index, window_size, sensor_name=None):
"""
Counts the number of occurrence of the sensor specified in current window.
:param data_list: list of sensor data
:param cur_index: current data record index<|fim▁hole|> if self.normalize:
return np.float(self.routine.sensor_count[sensor_name])/(window_size * 2)
else:
return np.float(self.routine.sensor_count[sensor_name])<|fim▁end|> | :param window_size: window size
:param sensor_name: name of sensor
:return: a double value
""" |
<|file_name|>RestApplication.java<|end_file_name|><|fim▁begin|>package org.marsik.bugautomation.rest;
import javax.ws.rs.ApplicationPath;
import javax.ws.rs.core.Application;
import java.util.HashSet;
import java.util.Set;
@ApplicationPath("/")
public class RestApplication extends Application {
@Override
public Set<Class<?>> getClasses() {
final HashSet<Class<?>> classes = new HashSet<>();
classes.add(MetricsEndpoint.class);
classes.add(InfoEndpoint.class);<|fim▁hole|><|fim▁end|> | return classes;
}
} |
<|file_name|>model.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Borders, padding, and margins.
#![deny(unsafe_code)]
use app_units::Au;
use euclid::{SideOffsets2D, Size2D};
use fragment::Fragment;
use std::cmp::{max, min};
use std::fmt;
use style::logical_geometry::{LogicalMargin, WritingMode};
use style::properties::ComputedValues;
use style::values::computed::{BorderCornerRadius, LengthOrPercentageOrAuto};
use style::values::computed::{LengthOrPercentage, LengthOrPercentageOrNone};
/// A collapsible margin. See CSS 2.1 § 8.3.1.
#[derive(Clone, Copy, Debug)]
pub struct AdjoiningMargins {
/// The value of the greatest positive margin.
pub most_positive: Au,
/// The actual value (not the absolute value) of the negative margin with the largest absolute
/// value. Since this is not the absolute value, this is always zero or negative.
pub most_negative: Au,
}
impl AdjoiningMargins {
pub fn new() -> AdjoiningMargins {
AdjoiningMargins {
most_positive: Au(0),
most_negative: Au(0),
}
}
pub fn from_margin(margin_value: Au) -> AdjoiningMargins {
if margin_value >= Au(0) {
AdjoiningMargins {
most_positive: margin_value,
most_negative: Au(0),
}
} else {
AdjoiningMargins {
most_positive: Au(0),
most_negative: margin_value,
}
}
}
pub fn union(&mut self, other: AdjoiningMargins) {
self.most_positive = max(self.most_positive, other.most_positive);
self.most_negative = min(self.most_negative, other.most_negative)
}
pub fn collapse(&self) -> Au {
self.most_positive + self.most_negative
}
}
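// Worked example (illustrative numbers): margins of 20px and -8px adjoin as
// `AdjoiningMargins { most_positive: Au::from_px(20), most_negative: Au::from_px(-8) }`,
// and `collapse()` then yields 12px — the CSS 2.1 § 8.3.1 rule of summing the
// greatest positive margin with the most negative one.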
/// Represents the block-start and block-end margins of a flow with collapsible margins. See CSS 2.1 § 8.3.1.
#[derive(Clone, Copy, Debug)]
pub enum CollapsibleMargins {
/// Margins may not collapse with this flow.
None(Au, Au),
/// Both the block-start and block-end margins (specified here in that order) may collapse, but the
/// margins do not collapse through this flow.
Collapse(AdjoiningMargins, AdjoiningMargins),
/// Margins collapse *through* this flow. This means, essentially, that the flow doesn’t
/// have any border, padding, or out-of-flow (floating or positioned) content
CollapseThrough(AdjoiningMargins),
}
impl CollapsibleMargins {
pub fn new() -> CollapsibleMargins {
CollapsibleMargins::None(Au(0), Au(0))
}
/// Returns the amount of margin that should be applied in a noncollapsible context. This is
/// currently used to apply block-start margin for hypothetical boxes, since we do not collapse
/// margins of hypothetical boxes.
pub fn block_start_margin_for_noncollapsible_context(&self) -> Au {
match *self {
CollapsibleMargins::None(block_start, _) => block_start,
CollapsibleMargins::Collapse(ref block_start, _) |
CollapsibleMargins::CollapseThrough(ref block_start) => block_start.collapse(),
}
}
pub fn block_end_margin_for_noncollapsible_context(&self) -> Au {
match *self {
CollapsibleMargins::None(_, block_end) => block_end,
CollapsibleMargins::Collapse(_, ref block_end) |
CollapsibleMargins::CollapseThrough(ref block_end) => block_end.collapse(),
}
}
}
enum FinalMarginState {
MarginsCollapseThrough,
BottomMarginCollapses,
}
pub struct MarginCollapseInfo {
pub state: MarginCollapseState,
pub block_start_margin: AdjoiningMargins,
pub margin_in: AdjoiningMargins,
}
impl MarginCollapseInfo {
pub fn initialize_block_start_margin(
fragment: &Fragment,
can_collapse_block_start_margin_with_kids: bool,
) -> MarginCollapseInfo {
MarginCollapseInfo {
state: if can_collapse_block_start_margin_with_kids {
MarginCollapseState::AccumulatingCollapsibleTopMargin
} else {
MarginCollapseState::AccumulatingMarginIn
},
block_start_margin: AdjoiningMargins::from_margin(fragment.margin.block_start),
margin_in: AdjoiningMargins::new(),
}
}
pub fn finish_and_compute_collapsible_margins(mut self,
fragment: &Fragment,
containing_block_size: Option<Au>,
can_collapse_block_end_margin_with_kids: bool,
mut may_collapse_through: bool)
-> (CollapsibleMargins, Au) {
let state = match self.state {
MarginCollapseState::AccumulatingCollapsibleTopMargin => {
may_collapse_through = may_collapse_through &&
match fragment.style().content_block_size() {
LengthOrPercentageOrAuto::Auto => true,
LengthOrPercentageOrAuto::Length(l) => l.px() == 0.,
LengthOrPercentageOrAuto::Percentage(v) => {
v.0 == 0. || containing_block_size.is_none()
}
LengthOrPercentageOrAuto::Calc(_) => false,
};
if may_collapse_through {
match fragment.style().min_block_size() {
LengthOrPercentage::Length(l) if l.px() == 0. => {
FinalMarginState::MarginsCollapseThrough
},
LengthOrPercentage::Percentage(v) if v.0 == 0. => {
FinalMarginState::MarginsCollapseThrough
},
_ => {
// If the fragment has non-zero min-block-size, margins may not
// collapse through it.
FinalMarginState::BottomMarginCollapses
}
}
} else {
// If the fragment has an explicitly specified block-size, margins may not
// collapse through it.
FinalMarginState::BottomMarginCollapses
}
}
MarginCollapseState::AccumulatingMarginIn => FinalMarginState::BottomMarginCollapses,
};
// Different logic is needed here depending on whether this flow can collapse its block-end
// margin with its children.
let block_end_margin = fragment.margin.block_end;
if !can_collapse_block_end_margin_with_kids {
match state {
FinalMarginState::MarginsCollapseThrough => {
let advance = self.block_start_margin.collapse();
self.margin_in.union(AdjoiningMargins::from_margin(block_end_margin));
(CollapsibleMargins::Collapse(self.block_start_margin, self.margin_in),
advance)
}
FinalMarginState::BottomMarginCollapses => {
let advance = self.margin_in.collapse();
self.margin_in.union(AdjoiningMargins::from_margin(block_end_margin));
(CollapsibleMargins::Collapse(self.block_start_margin, self.margin_in),
advance)
}
}
} else {
match state {
FinalMarginState::MarginsCollapseThrough => {
self.block_start_margin.union(AdjoiningMargins::from_margin(block_end_margin));
(CollapsibleMargins::CollapseThrough(self.block_start_margin), Au(0))
}
FinalMarginState::BottomMarginCollapses => {
self.margin_in.union(AdjoiningMargins::from_margin(block_end_margin));
(CollapsibleMargins::Collapse(self.block_start_margin, self.margin_in), Au(0))
}
}
}
}
pub fn current_float_ceiling(&mut self) -> Au {
match self.state {
MarginCollapseState::AccumulatingCollapsibleTopMargin => {
// We do not include the top margin in the float ceiling, because the float flow
// needs to be positioned relative to our *border box*, not our margin box. See
// `tests/ref/float_under_top_margin_a.html`.
Au(0)
}
MarginCollapseState::AccumulatingMarginIn => self.margin_in.collapse(),
}
}
/// Adds the child's potentially collapsible block-start margin to the current margin state and
/// advances the Y offset by the appropriate amount to handle that margin. Returns the amount
/// that should be added to the Y offset during block layout.
pub fn advance_block_start_margin(&mut self,
child_collapsible_margins: &CollapsibleMargins,
can_collapse_block_start_margin: bool)
-> Au {
if !can_collapse_block_start_margin {
self.state = MarginCollapseState::AccumulatingMarginIn
}
match (self.state, *child_collapsible_margins) {
(MarginCollapseState::AccumulatingCollapsibleTopMargin,
CollapsibleMargins::None(block_start, _)) => {
self.state = MarginCollapseState::AccumulatingMarginIn;
block_start
}
(MarginCollapseState::AccumulatingCollapsibleTopMargin,
CollapsibleMargins::Collapse(block_start, _)) => {
self.block_start_margin.union(block_start);
self.state = MarginCollapseState::AccumulatingMarginIn;
Au(0)
}
(MarginCollapseState::AccumulatingMarginIn,
CollapsibleMargins::None(block_start, _)) => {
let previous_margin_value = self.margin_in.collapse();
self.margin_in = AdjoiningMargins::new();
previous_margin_value + block_start
}
(MarginCollapseState::AccumulatingMarginIn,
CollapsibleMargins::Collapse(block_start, _)) => {
self.margin_in.union(block_start);
let margin_value = self.margin_in.collapse();
self.margin_in = AdjoiningMargins::new();
margin_value
}
(_, CollapsibleMargins::CollapseThrough(_)) => {
// For now, we ignore this; this will be handled by `advance_block_end_margin`
// below.
Au(0)
}
}
}
/// Adds the child's potentially collapsible block-end margin to the current margin state and
/// advances the Y offset by the appropriate amount to handle that margin. Returns the amount
/// that should be added to the Y offset during block layout.
pub fn advance_block_end_margin(&mut self, child_collapsible_margins: &CollapsibleMargins)
-> Au {
match (self.state, *child_collapsible_margins) {
(MarginCollapseState::AccumulatingCollapsibleTopMargin, CollapsibleMargins::None(..)) |
(MarginCollapseState::AccumulatingCollapsibleTopMargin,
CollapsibleMargins::Collapse(..)) => {
// Can't happen because the state will have been replaced with
// `MarginCollapseState::AccumulatingMarginIn` above.
panic!("should not be accumulating collapsible block_start margins anymore!")
}
(MarginCollapseState::AccumulatingCollapsibleTopMargin,
CollapsibleMargins::CollapseThrough(margin)) => {
self.block_start_margin.union(margin);
Au(0)
}
(MarginCollapseState::AccumulatingMarginIn,
CollapsibleMargins::None(_, block_end)) => {
assert_eq!(self.margin_in.most_positive, Au(0));
assert_eq!(self.margin_in.most_negative, Au(0));
block_end
}
(MarginCollapseState::AccumulatingMarginIn,
CollapsibleMargins::Collapse(_, block_end)) |
(MarginCollapseState::AccumulatingMarginIn,
CollapsibleMargins::CollapseThrough(block_end)) => {
self.margin_in.union(block_end);
Au(0)
}
}
}
}
#[derive(Clone, Copy, Debug)]
pub enum MarginCollapseState {
/// We are accumulating margin on the logical top of this flow.
AccumulatingCollapsibleTopMargin,
/// We are accumulating margin between two blocks.
AccumulatingMarginIn,
}
/// Intrinsic inline-sizes, which consist of minimum and preferred.
#[derive(Clone, Copy, Serialize)]
pub struct IntrinsicISizes {
/// The *minimum inline-size* of the content.
pub minimum_inline_size: Au,
/// The *preferred inline-size* of the content.
pub preferred_inline_size: Au,
}
impl fmt::Debug for IntrinsicISizes {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "min={:?}, pref={:?}", self.minimum_inline_size, self.preferred_inline_size)
}
}
impl IntrinsicISizes {
pub fn new() -> IntrinsicISizes {
IntrinsicISizes {
minimum_inline_size: Au(0),
preferred_inline_size: Au(0),
}
}
}
/// The temporary result of the computation of intrinsic inline-sizes.
#[derive(Debug)]
pub struct IntrinsicISizesContribution {<|fim▁hole|>}
impl IntrinsicISizesContribution {
/// Creates and initializes an inline size computation with all sizes set to zero.
pub fn new() -> IntrinsicISizesContribution {
IntrinsicISizesContribution {
content_intrinsic_sizes: IntrinsicISizes::new(),
surrounding_size: Au(0),
}
}
/// Adds the content intrinsic sizes and the surrounding size together to yield the final
/// intrinsic size computation.
pub fn finish(self) -> IntrinsicISizes {
IntrinsicISizes {
minimum_inline_size: self.content_intrinsic_sizes.minimum_inline_size +
self.surrounding_size,
preferred_inline_size: self.content_intrinsic_sizes.preferred_inline_size +
self.surrounding_size,
}
}
/// Updates the computation so that the minimum is the maximum of the current minimum and the
/// given minimum and the preferred is the sum of the current preferred and the given
/// preferred. This is used when laying out fragments in the inline direction.
///
/// FIXME(pcwalton): This is incorrect when the inline fragment contains forced line breaks
/// (e.g. `<br>` or `white-space: pre`).
pub fn union_inline(&mut self, sizes: &IntrinsicISizes) {
self.content_intrinsic_sizes.minimum_inline_size =
max(self.content_intrinsic_sizes.minimum_inline_size, sizes.minimum_inline_size);
self.content_intrinsic_sizes.preferred_inline_size =
self.content_intrinsic_sizes.preferred_inline_size + sizes.preferred_inline_size
}
/// Updates the computation so that the minimum is the sum of the current minimum and the
/// given minimum and the preferred is the sum of the current preferred and the given
/// preferred. This is used when laying out fragments in the inline direction when
/// `white-space` is `pre` or `nowrap`.
pub fn union_nonbreaking_inline(&mut self, sizes: &IntrinsicISizes) {
self.content_intrinsic_sizes.minimum_inline_size =
self.content_intrinsic_sizes.minimum_inline_size + sizes.minimum_inline_size;
self.content_intrinsic_sizes.preferred_inline_size =
self.content_intrinsic_sizes.preferred_inline_size + sizes.preferred_inline_size
}
/// Updates the computation so that the minimum is the maximum of the current minimum and the
/// given minimum and the preferred is the maximum of the current preferred and the given
/// preferred. This can be useful when laying out fragments in the block direction (but note
/// that it does not take floats into account, so `BlockFlow` does not use it).
///
/// This is used when contributing the intrinsic sizes for individual fragments.
pub fn union_block(&mut self, sizes: &IntrinsicISizes) {
self.content_intrinsic_sizes.minimum_inline_size =
max(self.content_intrinsic_sizes.minimum_inline_size, sizes.minimum_inline_size);
self.content_intrinsic_sizes.preferred_inline_size =
max(self.content_intrinsic_sizes.preferred_inline_size, sizes.preferred_inline_size)
}
}
/// Useful helper data type when computing values for blocks and positioned elements.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum MaybeAuto {
Auto,
Specified(Au),
}
impl MaybeAuto {
#[inline]
pub fn from_style(length: LengthOrPercentageOrAuto, containing_length: Au)
-> MaybeAuto {
match length {
LengthOrPercentageOrAuto::Auto => MaybeAuto::Auto,
LengthOrPercentageOrAuto::Percentage(percent) => {
MaybeAuto::Specified(containing_length.scale_by(percent.0))
}
LengthOrPercentageOrAuto::Calc(calc) => {
MaybeAuto::from_option(calc.to_used_value(Some(containing_length)))
}
LengthOrPercentageOrAuto::Length(length) => MaybeAuto::Specified(Au::from(length))
}
}
#[inline]
pub fn from_option(au: Option<Au>) -> MaybeAuto {
match au {
Some(l) => MaybeAuto::Specified(l),
_ => MaybeAuto::Auto,
}
}
#[inline]
pub fn to_option(&self) -> Option<Au> {
match *self {
MaybeAuto::Specified(value) => Some(value),
MaybeAuto::Auto => None,
}
}
#[inline]
pub fn specified_or_default(&self, default: Au) -> Au {
match *self {
MaybeAuto::Auto => default,
MaybeAuto::Specified(value) => value,
}
}
#[inline]
pub fn specified_or_zero(&self) -> Au {
self.specified_or_default(Au::new(0))
}
#[inline]
pub fn map<F>(&self, mapper: F) -> MaybeAuto where F: FnOnce(Au) -> Au {
match *self {
MaybeAuto::Auto => MaybeAuto::Auto,
MaybeAuto::Specified(value) => MaybeAuto::Specified(mapper(value)),
}
}
}
/// Receive an optional container size and return used value for width or height.
///
/// `style_length`: content size as given in the CSS.
pub fn style_length(style_length: LengthOrPercentageOrAuto,
container_size: Option<Au>) -> MaybeAuto {
match container_size {
Some(length) => MaybeAuto::from_style(style_length, length),
None => if let LengthOrPercentageOrAuto::Length(length) = style_length {
MaybeAuto::Specified(Au::from(length))
} else {
MaybeAuto::Auto
}
}
}
/// Computes a border radius size against the containing size.
///
/// Note that percentages in `border-radius` are resolved against the relevant
/// box dimension instead of only against the width per [1]:
///
/// > Percentages: Refer to corresponding dimension of the border box.
///
/// [1]: https://drafts.csswg.org/css-backgrounds-3/#border-radius
pub fn specified_border_radius(
radius: BorderCornerRadius,
containing_size: Size2D<Au>)
-> Size2D<Au>
{
let w = radius.0.width().to_used_value(containing_size.width);
let h = radius.0.height().to_used_value(containing_size.height);
Size2D::new(w, h)
}
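// Illustration (hypothetical values): `border-radius: 50%` on a 200px × 100px
// border box resolves the horizontal radius against the width (giving 100px)
// and the vertical radius against the height (giving 50px), per the spec note
// above.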
#[inline]
pub fn padding_from_style(style: &ComputedValues,
containing_block_inline_size: Au,
writing_mode: WritingMode)
-> LogicalMargin<Au> {
let padding_style = style.get_padding();
LogicalMargin::from_physical(writing_mode, SideOffsets2D::new(
padding_style.padding_top.to_used_value(containing_block_inline_size),
padding_style.padding_right.to_used_value(containing_block_inline_size),
padding_style.padding_bottom.to_used_value(containing_block_inline_size),
padding_style.padding_left.to_used_value(containing_block_inline_size)))
}
/// Returns the explicitly-specified margin lengths from the given style. Percentage and auto
/// margins are returned as zero.
///
/// This is used when calculating intrinsic inline sizes.
#[inline]
pub fn specified_margin_from_style(style: &ComputedValues,
writing_mode: WritingMode) -> LogicalMargin<Au> {
let margin_style = style.get_margin();
LogicalMargin::from_physical(writing_mode, SideOffsets2D::new(
MaybeAuto::from_style(margin_style.margin_top, Au(0)).specified_or_zero(),
MaybeAuto::from_style(margin_style.margin_right, Au(0)).specified_or_zero(),
MaybeAuto::from_style(margin_style.margin_bottom, Au(0)).specified_or_zero(),
MaybeAuto::from_style(margin_style.margin_left, Au(0)).specified_or_zero()))
}
/// A min-size and max-size constraint. The constructor has a optional `border`
/// parameter, and when it is present the constraint will be subtracted. This is
/// used to adjust the constraint for `box-sizing: border-box`, and when you do so
/// make sure the size you want to clamp is intended to be used for content box.
#[derive(Clone, Copy, Debug, Serialize)]
pub struct SizeConstraint {
min_size: Au,
max_size: Option<Au>,
}
impl SizeConstraint {
/// Create a `SizeConstraint` for an axis.
pub fn new(container_size: Option<Au>,
min_size: LengthOrPercentage,
max_size: LengthOrPercentageOrNone,
border: Option<Au>) -> SizeConstraint {
let mut min_size = match container_size {
Some(container_size) => min_size.to_used_value(container_size),
None => if let LengthOrPercentage::Length(length) = min_size {
Au::from(length)
} else {
Au(0)
}
};
let mut max_size = match container_size {
Some(container_size) => max_size.to_used_value(container_size),
None => if let LengthOrPercentageOrNone::Length(length) = max_size {
Some(Au::from(length))
} else {
None
}
};
// Make sure max size is not smaller than min size.
max_size = max_size.map(|x| max(x, min_size));
if let Some(border) = border {
min_size = max(min_size - border, Au(0));
max_size = max_size.map(|x| max(x - border, Au(0)));
}
SizeConstraint {
min_size: min_size,
max_size: max_size
}
}
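    // Illustration (hypothetical values): a min of 100px with a max of 50px
    // bumps the max up to 100px, and a `border` of Some(10px) then shrinks
    // both by 10px (flooring at zero) — the `box-sizing: border-box`
    // adjustment the doc comment above describes.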
/// Clamp the given size by the given min size and max size constraint.
pub fn clamp(&self, other: Au) -> Au {
if other < self.min_size {
self.min_size
} else {
match self.max_size {
Some(max_size) if max_size < other => max_size,
_ => other
}
}
}
}<|fim▁end|> | /// Intrinsic sizes for the content only (not counting borders, padding, or margins).
pub content_intrinsic_sizes: IntrinsicISizes,
/// The inline size of borders and padding, as well as margins if appropriate.
pub surrounding_size: Au, |
<|file_name|>resource_aws_servicecatalog_portfolio_test.go<|end_file_name|><|fim▁begin|>package aws
import (
"fmt"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/servicecatalog"
"github.com/hashicorp/terraform/helper/acctest"
"github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/terraform"
"testing"
)
func TestAccAWSServiceCatalogPortfolioBasic(t *testing.T) {
name := acctest.RandString(5)
var dpo servicecatalog.DescribePortfolioOutput
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
		CheckDestroy: testAccCheckServiceCatalogPortfolioDestroy,
Steps: []resource.TestStep{
resource.TestStep{
Config: testAccCheckAwsServiceCatalogPortfolioResourceConfigBasic1(name),
Check: resource.ComposeTestCheckFunc(
testAccCheckPortfolio("aws_servicecatalog_portfolio.test", &dpo),
resource.TestCheckResourceAttrSet("aws_servicecatalog_portfolio.test", "arn"),
resource.TestCheckResourceAttrSet("aws_servicecatalog_portfolio.test", "created_time"),
resource.TestCheckResourceAttr("aws_servicecatalog_portfolio.test", "name", name),
resource.TestCheckResourceAttr("aws_servicecatalog_portfolio.test", "description", "test-2"),
resource.TestCheckResourceAttr("aws_servicecatalog_portfolio.test", "provider_name", "test-3"),
resource.TestCheckResourceAttr("aws_servicecatalog_portfolio.test", "tags.%", "1"),
resource.TestCheckResourceAttr("aws_servicecatalog_portfolio.test", "tags.Key1", "Value One"),
),
},
resource.TestStep{
Config: testAccCheckAwsServiceCatalogPortfolioResourceConfigBasic2(name),
Check: resource.ComposeTestCheckFunc(
testAccCheckPortfolio("aws_servicecatalog_portfolio.test", &dpo),
resource.TestCheckResourceAttrSet("aws_servicecatalog_portfolio.test", "arn"),
resource.TestCheckResourceAttrSet("aws_servicecatalog_portfolio.test", "created_time"),
resource.TestCheckResourceAttr("aws_servicecatalog_portfolio.test", "name", name),
resource.TestCheckResourceAttr("aws_servicecatalog_portfolio.test", "description", "test-b"),
resource.TestCheckResourceAttr("aws_servicecatalog_portfolio.test", "provider_name", "test-c"),
resource.TestCheckResourceAttr("aws_servicecatalog_portfolio.test", "tags.%", "2"),
resource.TestCheckResourceAttr("aws_servicecatalog_portfolio.test", "tags.Key1", "Value 1"),
resource.TestCheckResourceAttr("aws_servicecatalog_portfolio.test", "tags.Key2", "Value Two"),
),
},
resource.TestStep{
Config: testAccCheckAwsServiceCatalogPortfolioResourceConfigBasic3(name),
Check: resource.ComposeTestCheckFunc(
testAccCheckPortfolio("aws_servicecatalog_portfolio.test", &dpo),
resource.TestCheckResourceAttrSet("aws_servicecatalog_portfolio.test", "arn"),
resource.TestCheckResourceAttrSet("aws_servicecatalog_portfolio.test", "created_time"),
resource.TestCheckResourceAttr("aws_servicecatalog_portfolio.test", "name", name),
resource.TestCheckResourceAttr("aws_servicecatalog_portfolio.test", "description", "test-only-change-me"),
resource.TestCheckResourceAttr("aws_servicecatalog_portfolio.test", "provider_name", "test-c"),
resource.TestCheckResourceAttr("aws_servicecatalog_portfolio.test", "tags.%", "1"),
resource.TestCheckResourceAttr("aws_servicecatalog_portfolio.test", "tags.Key3", "Value Three"),
),
},
},
})
}
func TestAccAWSServiceCatalogPortfolioDisappears(t *testing.T) {
name := acctest.RandString(5)
var dpo servicecatalog.DescribePortfolioOutput
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
		CheckDestroy: testAccCheckServiceCatalogPortfolioDestroy,
Steps: []resource.TestStep{
resource.TestStep{
Config: testAccCheckAwsServiceCatalogPortfolioResourceConfigBasic1(name),
Check: resource.ComposeTestCheckFunc(
testAccCheckPortfolio("aws_servicecatalog_portfolio.test", &dpo),
testAccCheckServiceCatlaogPortfolioDisappears(&dpo),
),
ExpectNonEmptyPlan: true,
},
},
})
}
func TestAccAWSServiceCatalogPortfolioImport(t *testing.T) {
resourceName := "aws_servicecatalog_portfolio.test"
name := acctest.RandString(5)
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
		CheckDestroy: testAccCheckServiceCatalogPortfolioDestroy,
Steps: []resource.TestStep{
resource.TestStep{
Config: testAccCheckAwsServiceCatalogPortfolioResourceConfigBasic1(name),
},
resource.TestStep{
ResourceName: resourceName,
ImportState: true,
ImportStateVerify: true,
},
},
})
}
func testAccCheckPortfolio(pr string, dpo *servicecatalog.DescribePortfolioOutput) resource.TestCheckFunc {
return func(s *terraform.State) error {
conn := testAccProvider.Meta().(*AWSClient).scconn
rs, ok := s.RootModule().Resources[pr]
if !ok {
return fmt.Errorf("Not found: %s", pr)
}
if rs.Primary.ID == "" {
return fmt.Errorf("No ID is set")
}
input := servicecatalog.DescribePortfolioInput{}
input.Id = aws.String(rs.Primary.ID)
resp, err := conn.DescribePortfolio(&input)
if err != nil {
return err
}
*dpo = *resp
return nil
}
}
func testAccCheckServiceCatalogPortfolioDisappears(dpo *servicecatalog.DescribePortfolioOutput) resource.TestCheckFunc {
return func(s *terraform.State) error {
conn := testAccProvider.Meta().(*AWSClient).scconn
input := servicecatalog.DeletePortfolioInput{}
input.Id = dpo.PortfolioDetail.Id
_, err := conn.DeletePortfolio(&input)
if err != nil {
return err
}
return nil
}
}
func testAccCheckServiceCatalogPortfolioDestroy(s *terraform.State) error {
conn := testAccProvider.Meta().(*AWSClient).scconn
for _, rs := range s.RootModule().Resources {
if rs.Type != "aws_servicecatalog_portfolio" {
continue
}
input := servicecatalog.DescribePortfolioInput{}
input.Id = aws.String(rs.Primary.ID)
_, err := conn.DescribePortfolio(&input)
if err == nil {
return fmt.Errorf("Portfolio still exists")
}
}
return nil
}
func testAccCheckAwsServiceCatalogPortfolioResourceConfigBasic1(name string) string {
return fmt.Sprintf(`
resource "aws_servicecatalog_portfolio" "test" {
name = "%s"
description = "test-2"
provider_name = "test-3"
tags {
Key1 = "Value One"
}
}
`, name)
}<|fim▁hole|>
func testAccCheckAwsServiceCatalogPortfolioResourceConfigBasic2(name string) string {
return fmt.Sprintf(`
resource "aws_servicecatalog_portfolio" "test" {
name = "%s"
description = "test-b"
provider_name = "test-c"
tags {
Key1 = "Value 1"
Key2 = "Value Two"
}
}
`, name)
}
func testAccCheckAwsServiceCatalogPortfolioResourceConfigBasic3(name string) string {
return fmt.Sprintf(`
resource "aws_servicecatalog_portfolio" "test" {
name = "%s"
description = "test-only-change-me"
provider_name = "test-c"
tags {
Key3 = "Value Three"
}
}
`, name)
}<|fim▁end|> | |
<|file_name|>ecommerce.go<|end_file_name|><|fim▁begin|>package ecommerce
import (
"net/http"
"github.com/arvindkandhare/goamz/aws"
)
// ProductAdvertising provides methods for querying the product advertising API
type ProductAdvertising struct {
service aws.Service
associateTag string
}
// New creates a new ProductAdvertising client
func New(auth aws.Auth, associateTag string) (p *ProductAdvertising, err error) {
serviceInfo := aws.ServiceInfo{Endpoint: "https://webservices.amazon.com", Signer: aws.V2Signature}
	// declare at function scope so the named return value err is not shadowed
	service, err := aws.NewService(auth, serviceInfo)
	if err == nil {
		p = &ProductAdvertising{*service, associateTag}
	}
return
}
// PerformOperation is the main method used for interacting with the product advertising API
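// A call might look like this (operation and parameter names follow the
// public Product Advertising API docs and are shown only as an illustrative
// sketch):
//
//	resp, err := p.PerformOperation("ItemSearch", map[string]string{
//		"SearchIndex": "Books",
//		"Keywords":    "golang",
//	})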
func (p *ProductAdvertising) PerformOperation(operation string, params map[string]string) (resp *http.Response, err error) {
params["Operation"] = operation
return p.query(params)
}
func (p *ProductAdvertising) query(params map[string]string) (resp *http.Response, err error) {
params["Service"] = "AWSECommerceService"
params["AssociateTag"] = p.associateTag<|fim▁hole|>}<|fim▁end|> | return p.service.Query("GET", "/onca/xml", params) |
<|file_name|>record.cpp<|end_file_name|><|fim▁begin|>#include "stdafx.h"
#include "record.hpp"
namespace tc { namespace log {
const char* record::type_acronym( void ) const {
switch( type ) {
case log::trace: return "T";
case log::debug: return "D";
case log::info: return "I";
case log::warn: return "W";
case log::error: return "E";
case log::fatal: return "F";
case log::all: return "A";
default:
return "!";<|fim▁hole|>
record::record( tc::log::type type , const tc::log::tag& tag )
: type(type)
, tag(tag)
, ts(tc::timestamp::now())
, tid(tc::threading::current_thread_id())
{
}
record::~record( void ) {
}
}}<|fim▁end|> | }
return "?";
} |
<|file_name|>bird.cpp<|end_file_name|><|fim▁begin|>/*
* bird.cpp
*
* Created on: Apr 7, 2015
* Author: torch2424
*/
<|fim▁hole|>
bird::bird()
{
//animalType = &type;
}
bird::~bird()
{
// TODO Auto-generated destructor stub
}<|fim▁end|> | #include "bird.h"
using namespace std; |
<|file_name|>latlng.js<|end_file_name|><|fim▁begin|>import util from './util'
import LatLngBounds from './latlngbounds'
const {abs, max, min, PI, sin, cos, acos} = Math
const rad = PI / 180
// distance between two geographical points using spherical law of cosines approximation
function distance (latlng1, latlng2) {
const lat1 = latlng1.lat * rad
const lat2 = latlng2.lat * rad
const a = sin(lat1) * sin(lat2) + cos(lat1) * cos(lat2) * cos((latlng2.lng - latlng1.lng) * rad)
return 6371000 * acos(min(a, 1));
}
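// Rough sanity check (illustrative coordinates): Berlin {lat: 52.52, lng: 13.405}
// to Hamburg {lat: 53.551, lng: 9.994} comes out near 255 km (~255000 m).
// The min(a, 1) clamp keeps acos from seeing values just above 1 due to
// floating-point error when the two points nearly coincide.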
class LatLng {
constructor(a, b, c) {
if (a instanceof LatLng) {
return a;
}
if (Array.isArray(a) && typeof a[0] !== 'object') {
if (a.length === 3) {
return this._constructor(a[0], a[1], a[2])
}
if (a.length === 2) {
return this._constructor(a[0], a[1])
}
return null;
}
if (a === undefined || a === null) {
return a
}
if (typeof a === 'object' && 'lat' in a) {
return this._constructor(a.lat, 'lng' in a ? a.lng : a.lon, a.alt);
}
if (b === undefined) {
return null;
}
return this._constructor(a, b, c)
}
_constructor(lat, lng, alt) {
if (isNaN(lat) || isNaN(lng)) {
throw new Error('Invalid LatLng object: (' + lat + ', ' + lng + ')');
}
// @property lat: Number
// Latitude in degrees
this.lat = +lat
// @property lng: Number
// Longitude in degrees
this.lng = +lng
// @property alt: Number
// Altitude in meters (optional)
if (alt !== undefined) {
this.alt = +alt
}
}
// @method equals(otherLatLng: LatLng, maxMargin?: Number): Boolean
// Returns `true` if the given `LatLng` point is at the same position (within a small margin of error). The margin of error can be overriden by setting `maxMargin` to a small number.
equals(obj, maxMargin) {
if (!obj) { return false }
obj = new LatLng(obj);
const margin = max(abs(this.lat - obj.lat), abs(this.lng - obj.lng))
return margin <= (maxMargin === undefined ? 1.0E-9 : maxMargin);
}
// @method toString(): String
// Returns a string representation of the point (for debugging purposes).
toString(precision) {
return `LatLng(${this.lat.toFixed(precision)}, ${this.lng.toFixed(precision)})`
}
// @method distanceTo(otherLatLng: LatLng): Number
  // Returns the distance (in meters) to the given `LatLng`, calculated with the spherical law of cosines approximation used by `distance` above.
distanceTo(other) {
return distance(this, new LatLng(other))
}
// @method wrap(): LatLng
// Returns a new `LatLng` object with the longitude wrapped so it's always between -180 and +180 degrees.
wrap(latlng) {
const lng = util.wrapNum(latlng.lng, [-180, 180], true)
return new LatLng(latlng.lat, lng, latlng.alt)<|fim▁hole|> toBounds(sizeInMeters) {
const latAccuracy = 180 * sizeInMeters / 40075017
const lngAccuracy = latAccuracy / Math.cos((Math.PI / 180) * this.lat)
    return new LatLngBounds(
[this.lat - latAccuracy, this.lng - lngAccuracy],
[this.lat + latAccuracy, this.lng + lngAccuracy]
)
}
clone() {
return new LatLng(this.lat, this.lng, this.alt)
}
}
module.exports = LatLng<|fim▁end|> | }
// @method toBounds(sizeInMeters: Number): LatLngBounds
// Returns a new `LatLngBounds` object in which each boundary is `sizeInMeters` meters apart from the `LatLng`. |
<|file_name|>journey-of-h5.js<|end_file_name|><|fim▁begin|>function drawCanvas() {
var canvas = document.getElementById('canvas-container');
var context = canvas.getContext("2d");
var grd = context.createLinearGradient(0,0,170,0);
grd.addColorStop(0,"red");
grd.addColorStop(0.5,"blue");
grd.addColorStop(1,"green");
context.fillStyle = grd;
context.lineWidth = 50;
context.lineJoin="round";
context.beginPath();
context.arc(100,100,50,0,1.5*Math.PI);
context.closePath();
context.stroke();
context.fill();
}
$(function() {<|fim▁hole|><|fim▁end|> | drawCanvas();
}); |
<|file_name|>test_getter.py<|end_file_name|><|fim▁begin|>from uber.tests import *
@pytest.fixture
def attendee_id():
with Session() as session:
return session.query(Attendee).filter_by(first_name='Regular', last_name='Attendee').one().id
@pytest.fixture(autouse=True)
def mock_apply(monkeypatch):
monkeypatch.setattr(Attendee, 'apply', Mock())
return Attendee.apply
def test_invalid_gets():
with Session() as session:
pytest.raises(Exception, session.attendee)
pytest.raises(Exception, session.attendee, '')
pytest.raises(Exception, session.attendee, [])<|fim▁hole|>
def test_basic_get(attendee_id, mock_apply):
with Session() as session:
assert session.attendee(attendee_id).first_name == 'Regular'
assert not mock_apply.called
assert session.attendee(id=attendee_id).first_name == 'Regular'
assert not mock_apply.called
assert session.attendee({'id': attendee_id}).first_name == 'Regular'
assert mock_apply.called
def test_empty_get(mock_apply):
with Session() as session:
assert session.attendee({}).paid == NOT_PAID # basic sanity check
assert mock_apply.called
def test_ignore_csrf(request):
with Session() as session:
pytest.raises(Exception, session.attendee, {'paid': NEED_NOT_PAY})
session.attendee({'paid': NEED_NOT_PAY}, ignore_csrf=True)
session.attendee({'paid': NEED_NOT_PAY}, allowed=['paid'])
request.addfinalizer(lambda: setattr(cherrypy.request, 'method', 'GET'))
cherrypy.request.method = 'POST'
session.attendee({'paid': NEED_NOT_PAY})<|fim▁end|> | pytest.raises(Exception, session.attendee, None)
pytest.raises(Exception, session.attendee, str(uuid4()))
pytest.raises(Exception, session.attendee, {'id': str(uuid4())}) |
<|file_name|>DatabasePersistenceProvider.java<|end_file_name|><|fim▁begin|>package br.com.trustsystems.demo.provider;
import br.com.trustsystems.persistence.Persistent;
import br.com.trustsystems.persistence.dao.IUnitOfWork;
import br.com.trustsystems.persistence.provider.jpa.JpaPersistenceProvider;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;<|fim▁hole|>import java.util.Collection;
import java.util.List;
@Service
public class DatabasePersistenceProvider extends JpaPersistenceProvider
{
@PersistenceContext
private EntityManager em;
@Override
public EntityManager entityManager()
{
return em;
}
@Transactional(propagation = Propagation.REQUIRED)
public <D extends Persistent<?>> D persist(Class<D> domainClass, D domainObject)
{
return super.persist(domainClass, domainObject);
}
@Transactional(propagation = Propagation.REQUIRED)
public <D extends Persistent<?>> List<D> persistAll(Class<D> domainClass, List<D> domainObjects)
{
return super.persistAll(domainClass, domainObjects);
}
@Transactional(propagation = Propagation.REQUIRED)
public <D extends Persistent<?>> boolean deleteAll(Class<D> domainClass, Collection<D> domainObjects)
{
return super.deleteAll(domainClass, domainObjects);
}
@Transactional(propagation = Propagation.REQUIRED)
public <D extends Persistent<?>> boolean delete(Class<D> domainClass, D domainObject)
{
return super.delete(domainClass, domainObject);
}
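    // REQUIRES_NEW (below) suspends any caller's transaction so this unit of
    // work commits or rolls back independently.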
@Transactional(propagation = Propagation.REQUIRES_NEW)
public void runInTransaction(IUnitOfWork t)
{
super.runInTransaction(t);
}
}<|fim▁end|> | |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var htmlparser = require('htmlparser2');
var _ = require('lodash');
var ent = require('ent');
module.exports = sanitizeHtml;
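// Example usage (illustrative):
//   sanitizeHtml('<script>evil()</script><p onclick="x()">hi</p>');
//   // => '<p>hi</p>' (the script tag and its contents are dropped, and the
//   //    onclick attribute is stripped, under the defaults at the bottom)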
function sanitizeHtml(html, options) {
var result = '';
if (!options) {
options = sanitizeHtml.defaults;
} else {
_.defaults(options, sanitizeHtml.defaults);
}
// Tags that contain something other than HTML. If we are not allowing
// these tags, we should drop their content too. For other tags you would
// drop the tag but keep its content.
var nonTextTagsMap = {
script: true,
style: true
};
var allowedTagsMap = {};
_.each(options.allowedTags, function(tag) {
allowedTagsMap[tag] = true;
});
var selfClosingMap = {};
_.each(options.selfClosing, function(tag) {
selfClosingMap[tag] = true;
});
var allowedAttributesMap = {};
_.each(options.allowedAttributes, function(attributes, tag) {
allowedAttributesMap[tag] = {};
_.each(attributes, function(name) {
allowedAttributesMap[tag][name] = true;
});
});
var depth = 0;
var skipMap = {};
var skipText = false;
var parser = new htmlparser.Parser({
onopentag: function(name, attribs) {
var skip = false;
if (!_.has(allowedTagsMap, name)) {
skip = true;
if (_.has(nonTextTagsMap, name)) {
skipText = true;
}
skipMap[depth] = true;
}
depth++;
if (skip) {
// We want the contents but not this tag
return;
}
result += '<' + name;
if (_.has(allowedAttributesMap, name)) {
_.each(attribs, function(value, a) {
if (_.has(allowedAttributesMap[name], a)) {
result += ' ' + a;
if ((a === 'href') || (a === 'src')) {
if (naughtyHref(value)) {
return;
}
}
if (value.length) {
// Values are ALREADY escaped, calling escapeHtml here
// results in double escapes
result += '="' + value + '"';
}
}
});
}
if (_.has(selfClosingMap, name)) {
result += " />";
} else {
result += ">";
}
},
ontext: function(text) {
if (skipText) {
return;
}
// It is NOT actually raw text, entities are already escaped.
// If we call escapeHtml here we wind up double-escaping.
result += text;
},
onclosetag: function(name) {
skipText = false;
depth--;
if (skipMap[depth]) {
delete skipMap[depth];
return;
}
if (_.has(selfClosingMap, name)) {
// Already output />
return;
}
result += "</" + name + ">";
}
});
parser.write(html);
parser.end();
return result;
function escapeHtml(s) {
    if (s === undefined) {
s = '';
}
if (typeof(s) !== 'string') {
s = s + '';
}
    return s.replace(/\&/g, '&amp;').replace(/</g, '&lt;').replace(/\>/g, '&gt;').replace(/\"/g, '&quot;');
}
function naughtyHref(href) {
// So we don't get faked out by a hex or decimal escaped javascript URL #1
href = ent.decode(href);
// Browsers ignore character codes of 32 (space) and below in a surprising
// number of situations. Start reading here:
// https://www.owasp.org/index.php/XSS_Filter_Evasion_Cheat_Sheet#Embedded_tab
    href = href.replace(/[\x00-\x20]+/g, '');
// Case insensitive so we don't get faked out by JAVASCRIPT #1
var matches = href.match(/^([a-zA-Z]+)\:/);
if (!matches) {
// No scheme = no way to inject js (right?)
return false;
}
var scheme = matches[1].toLowerCase();
return (!_.contains(['http', 'https', 'ftp', 'mailto' ], scheme));
}
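    // e.g. naughtyHref('javascript:alert(1)') is true (rejected), while
    // naughtyHref('https://example.com/') is false (its scheme is whitelisted).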
}<|fim▁hole|>sanitizeHtml.defaults = {
allowedTags: [ 'h3', 'h4', 'h5', 'h6', 'blockquote', 'p', 'a', 'ul', 'ol', 'nl', 'li', 'b', 'i', 'strong', 'em', 'strike', 'code', 'hr', 'br', 'div', 'table', 'thead', 'caption', 'tbody', 'tr', 'th', 'td', 'pre' ],
allowedAttributes: {
a: [ 'href', 'name', 'target' ],
// We don't currently allow img itself by default, but this
// would make sense if we did
img: [ 'src' ]
},
// Lots of these won't come up by default because we don't allow them
selfClosing: [ 'img', 'br', 'hr', 'area', 'base', 'basefont', 'input', 'link', 'meta' ]
};<|fim▁end|> |
// Defaults are accessible to you so that you can use them as a starting point
// programmatically if you wish
|
<|file_name|>cythonize.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
""" cythonize.py
Cythonize pyx files into C++ files as needed.
Usage: cythonize.py [root]
Checks pyx files to see if they have been changed relative to their
corresponding C++ files. If they have, then runs cython on these files to
recreate the C++ files.
Additionally, checks pxd files and setup.py to see if they have been changed. If
they have, rebuilds everything.
Change detection based on file hashes stored in JSON format.
For now, this script should be run by developers when changing Cython files
and the resulting C++ files checked in, so that end-users (and Python-only
developers) do not get the Cython dependencies.
Based upon:
https://raw.github.com/dagss/private-scipy-refactor/cythonize/cythonize.py
https://raw.githubusercontent.com/numpy/numpy/master/tools/cythonize.py
Note: this script does not check any of the dependent C++ libraries.
"""
from __future__ import print_function
import os
import sys
import json
import hashlib
import subprocess
import argparse
HASH_FILE = 'cythonize.json'
def process_pyx(fromfile, tofile):
print('Processing %s' % fromfile)
try:
from Cython.Compiler.Version import version as cython_version
from distutils.version import LooseVersion
if LooseVersion(cython_version) < LooseVersion('0.19'):
raise Exception('Require Cython >= 0.19')
except ImportError:
pass
flags = ['--fast-fail']
if tofile.endswith('.cpp'):
flags += ['--cplus']
try:
try:
r = subprocess.call(['cython'] + flags + ['-o', tofile, fromfile])
if r != 0:
raise Exception('Cython failed')<|fim▁hole|> # There are ways of installing Cython that don't result in a cython
# executable on the path, see gh-2397.
r = subprocess.call([sys.executable, '-c',
'import sys; from Cython.Compiler.Main import '
'setuptools_main as main; sys.exit(main())'] + flags +
['-o', tofile, fromfile])
if r != 0:
raise Exception('Cython failed')
except OSError:
raise OSError('Cython needs to be installed')
def preserve_cwd(path, func, *args):
orig_cwd = os.getcwd()
try:
os.chdir(path)
func(*args)
finally:
os.chdir(orig_cwd)
def load_hashes(filename):
try:
return json.load(open(filename))
except (ValueError, IOError):
return {}
def save_hashes(hash_db, filename):
with open(filename, 'w') as f:
f.write(json.dumps(hash_db))
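# md5 is used purely for cheap change detection here, not for security.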
def get_hash(path):
return hashlib.md5(open(path, 'rb').read()).hexdigest()
def hash_changed(base, path, db):
full_path = os.path.normpath(os.path.join(base, path))
return not get_hash(full_path) == db.get(full_path)
def hash_add(base, path, db):
full_path = os.path.normpath(os.path.join(base, path))
db[full_path] = get_hash(full_path)
def process(base, filename, db):
root, ext = os.path.splitext(filename)
if ext in ['.pyx', '.cpp']:
if hash_changed(base, filename, db) or not os.path.isfile(os.path.join(base, root + '.cpp')):
preserve_cwd(base, process_pyx, root + '.pyx', root + '.cpp')
hash_add(base, root + '.cpp', db)
hash_add(base, root + '.pyx', db)
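# A .pyx file is re-cythonized when it or its generated .cpp file is missing
# from the hash database or has changed on disk.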
def check_changes(root, db):
res = False
new_db = {}
setup_filename = 'setup.py'
hash_add('.', setup_filename, new_db)
if hash_changed('.', setup_filename, db):
res = True
for base, _, files in os.walk(root):
for filename in files:
if filename.endswith('.pxd'):
hash_add(base, filename, new_db)
if hash_changed(base, filename, db):
res = True
if res:
db.clear()
db.update(new_db)
return res
def run(root):
db = load_hashes(HASH_FILE)
try:
check_changes(root, db)
for base, _, files in os.walk(root):
for filename in files:
process(base, filename, db)
finally:
save_hashes(db, HASH_FILE)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Cythonize pyx files into C++ files as needed')
parser.add_argument('root', help='root directory')
args = parser.parse_args()
run(args.root)<|fim▁end|> | except OSError: |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![feature(if_let)]
use std::os;
use std::io::Command;
use std::io::process::InheritFd;
use std::default::Default;
/// Extra configuration to pass to gcc.
pub struct Config {
/// Directories where gcc will look for header files.
pub include_directories: Vec<Path>,
/// Additional definitions (`-DKEY` or `-DKEY=VALUE`).
pub definitions: Vec<(String, Option<String>)>,
/// Additional object files to link into the final archive
pub objects: Vec<Path>,
}
impl Default for Config {
fn default() -> Config {
Config {
include_directories: Vec::new(),
definitions: Vec::new(),
objects: Vec::new(),
}
}
}
/// Compile a library from the given set of input C files.
///
/// This will simply compile all files into object files and then assemble them
/// into the output. This will read the standard environment variables to detect
/// cross compilations and such.
///
/// # Example
///
/// ```no_run
/// use std::default::Default;
/// gcc::compile_library("libfoo.a", &Default::default(), &[
/// "foo.c",
/// "bar.c",
/// ]);
/// ```
pub fn compile_library(output: &str, config: &Config, files: &[&str]) {
assert!(output.starts_with("lib"));
assert!(output.ends_with(".a"));
let target = os::getenv("TARGET").unwrap();
let opt_level = os::getenv("OPT_LEVEL").unwrap();
let mut cmd = Command::new(gcc(target.as_slice()));
cmd.arg(format!("-O{}", opt_level));
cmd.arg("-c");
cmd.arg("-ffunction-sections").arg("-fdata-sections");
cmd.args(cflags().as_slice());
if target.as_slice().contains("-ios") {
cmd.args(ios_flags(target.as_slice()).as_slice());
} else {
if target.as_slice().contains("i686") {
cmd.arg("-m32");
} else if target.as_slice().contains("x86_64") {
cmd.arg("-m64");
}
if !target.as_slice().contains("i686") {
cmd.arg("-fPIC");
}
}
for directory in config.include_directories.iter() {
cmd.arg("-I").arg(directory);
}
for &(ref key, ref value) in config.definitions.iter() {
if let &Some(ref value) = value {
cmd.arg(format!("-D{}={}", key, value));
} else {
cmd.arg(format!("-D{}", key));
}
}
let src = Path::new(os::getenv("CARGO_MANIFEST_DIR").unwrap());
let dst = Path::new(os::getenv("OUT_DIR").unwrap());
let mut objects = Vec::new();
for file in files.iter() {
let obj = dst.join(*file).with_extension("o");
std::io::fs::mkdir_recursive(&obj.dir_path(), std::io::USER_RWX).unwrap();
run(cmd.clone().arg(src.join(*file)).arg("-o").arg(&obj));
objects.push(obj);
}
run(Command::new(ar(target.as_slice())).arg("crus")
.arg(dst.join(output))
.args(objects.as_slice())
.args(config.objects.as_slice()));
println!("cargo:rustc-flags=-L {} -l {}:static",
dst.display(), output.slice(3, output.len() - 2));
}
fn run(cmd: &mut Command) {
println!("running: {}", cmd);
assert!(cmd.stdout(InheritFd(1))
.stderr(InheritFd(2))
.status()
.unwrap()
.success());
}
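/// Returns the C compiler to invoke, honoring the `CC` environment variable,
/// with platform- and target-specific fallbacks (`gcc` on Windows,
/// `<target>-gcc` for Android, `cc` elsewhere).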
fn gcc(target: &str) -> String {
let is_android = target.find_str("android").is_some();
os::getenv("CC").unwrap_or(if cfg!(windows) {
"gcc".to_string()
} else if is_android {
format!("{}-gcc", target)
} else {
"cc".to_string()
})
}
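/// Returns the archiver to invoke, honoring the `AR` environment variable
/// (`<target>-ar` for Android, plain `ar` otherwise).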
fn ar(target: &str) -> String {
let is_android = target.find_str("android").is_some();
os::getenv("AR").unwrap_or(if is_android {
format!("{}-ar", target)
} else {<|fim▁hole|> "ar".to_string()
})
}
fn cflags() -> Vec<String> {
os::getenv("CFLAGS").unwrap_or(String::new())
.as_slice().words().map(|s| s.to_string())
.collect()
}
fn ios_flags(target: &str) -> Vec<String> {
let mut is_device_arch = false;
let mut res = Vec::new();
if target.starts_with("arm-") {
res.push("-arch");
res.push("armv7");
is_device_arch = true;
} else if target.starts_with("arm64-") {
res.push("-arch");
res.push("arm64");
is_device_arch = true;
} else if target.starts_with("i386-") {
res.push("-m32");
} else if target.starts_with("x86_64-") {
res.push("-m64");
}
let sdk = if is_device_arch {"iphoneos"} else {"iphonesimulator"};
println!("Detecting iOS SDK path for {}", sdk);
let sdk_path = Command::new("xcrun")
.arg("--show-sdk-path")
.arg("--sdk")
.arg(sdk)
.stderr(InheritFd(2))
.output()
.unwrap()
.output;
let sdk_path = String::from_utf8(sdk_path).unwrap();
res.push("-isysroot");
res.push(sdk_path.as_slice().trim());
res.iter().map(|s| s.to_string()).collect::<Vec<_>>()
}<|fim▁end|> | |
<|file_name|>flow.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Servo's experimental layout system builds a tree of `Flow` and `Fragment` objects and solves
//! layout constraints to obtain positions and display attributes of tree nodes. Positions are
//! computed in several tree traversals driven by the fundamental data dependencies required by
//! inline and block layout.
//!
//! Flows are interior nodes in the layout tree and correspond closely to *flow contexts* in the
//! CSS specification. Flows are responsible for positioning their child flow contexts and
//! fragments. Flows have purpose-specific fields, such as auxiliary line structs, out-of-flow
//! child lists, and so on.
//!
//! Currently, the important types of flows are:
//!
//! * `BlockFlow`: A flow that establishes a block context. It has several child flows, each of
//!   which are positioned according to block formatting context rules (CSS block boxes). Block
//!   flows also contain a single box to represent their rendered borders, padding, etc.
//!   The BlockFlow at the root of the tree has special behavior: it stretches to the boundaries of
//!   the viewport.
//!
//! * `InlineFlow`: A flow that establishes an inline context. It has a flat list of child
//!   fragments/flows that are subject to inline layout and line breaking and structs to represent
//!   line breaks and mapping to CSS boxes, for the purpose of handling `getClientRects()` and
//!   similar methods.
use css::node_style::StyledNode;
use block::BlockFlow;
use context::LayoutContext;
use display_list_builder::DisplayListBuildingResult;
use floats::Floats;
use flow_list::{FlowList, FlowListIterator, MutFlowListIterator};
use flow_ref::FlowRef;
use fragment::{Fragment, FragmentBorderBoxIterator, SpecificFragmentInfo};
use incremental::{RECONSTRUCT_FLOW, REFLOW, REFLOW_OUT_OF_FLOW, RestyleDamage};
use inline::InlineFlow;
use model::{CollapsibleMargins, IntrinsicISizes};
use parallel::FlowParallelInfo;
use table::{ColumnComputedInlineSize, ColumnIntrinsicInlineSize, TableFlow};
use table_caption::TableCaptionFlow;
use table_cell::TableCellFlow;
use table_colgroup::TableColGroupFlow;
use table_row::TableRowFlow;
use table_rowgroup::TableRowGroupFlow;
use table_wrapper::TableWrapperFlow;
use wrapper::ThreadSafeLayoutNode;
use geom::{Point2D, Rect, Size2D};
use gfx::display_list::ClippingRegion;
use serialize::{Encoder, Encodable};
use msg::compositor_msg::LayerId;
use servo_util::geometry::{Au, ZERO_RECT};
use servo_util::logical_geometry::{LogicalRect, LogicalSize, WritingMode};
use std::mem;
use std::fmt;
use std::iter::Zip;
use std::num::FromPrimitive;
use std::raw;
use std::sync::atomic::{AtomicUint, Ordering};
use std::slice::IterMut;
use style::computed_values::{clear, empty_cells, float, position, text_align};
use style::properties::ComputedValues;
use std::sync::Arc;
/// Virtual methods that make up a float context.
///
/// Note that virtual methods have a cost; we should not overuse them in Servo. Consider adding
/// methods to `ImmutableFlowUtils` or `MutableFlowUtils` before adding more methods here.
pub trait Flow: fmt::Debug + Sync {
// RTTI
//
// TODO(pcwalton): Use Rust's RTTI, once that works.
/// Returns the class of flow that this is.
fn class(&self) -> FlowClass;
/// If this is a block flow, returns the underlying object, borrowed immutably. Fails
/// otherwise.
fn as_immutable_block<'a>(&'a self) -> &'a BlockFlow {
panic!("called as_immutable_block() on a non-block flow")
}
/// If this is a block flow, returns the underlying object. Fails otherwise.
fn as_block<'a>(&'a mut self) -> &'a mut BlockFlow {
debug!("called as_block() on a flow of type {:?}", self.class());
panic!("called as_block() on a non-block flow")
}
/// If this is an inline flow, returns the underlying object, borrowed immutably. Fails
/// otherwise.
fn as_immutable_inline<'a>(&'a self) -> &'a InlineFlow {
panic!("called as_immutable_inline() on a non-inline flow")
}
/// If this is an inline flow, returns the underlying object. Fails otherwise.
fn as_inline<'a>(&'a mut self) -> &'a mut InlineFlow {
panic!("called as_inline() on a non-inline flow")
}
/// If this is a table wrapper flow, returns the underlying object. Fails otherwise.
fn as_table_wrapper<'a>(&'a mut self) -> &'a mut TableWrapperFlow {
panic!("called as_table_wrapper() on a non-tablewrapper flow")
}
/// If this is a table wrapper flow, returns the underlying object, borrowed immutably. Fails
/// otherwise.
fn as_immutable_table_wrapper<'a>(&'a self) -> &'a TableWrapperFlow {
panic!("called as_immutable_table_wrapper() on a non-tablewrapper flow")
}
/// If this is a table flow, returns the underlying object. Fails otherwise.
fn as_table<'a>(&'a mut self) -> &'a mut TableFlow {
panic!("called as_table() on a non-table flow")
}
/// If this is a table flow, returns the underlying object, borrowed immutably. Fails otherwise.
fn as_immutable_table<'a>(&'a self) -> &'a TableFlow {
panic!("called as_table() on a non-table flow")
}
/// If this is a table colgroup flow, returns the underlying object. Fails otherwise.
fn as_table_colgroup<'a>(&'a mut self) -> &'a mut TableColGroupFlow {
panic!("called as_table_colgroup() on a non-tablecolgroup flow")
}
/// If this is a table rowgroup flow, returns the underlying object. Fails otherwise.
fn as_table_rowgroup<'a>(&'a mut self) -> &'a mut TableRowGroupFlow {
panic!("called as_table_rowgroup() on a non-tablerowgroup flow")
}
/// If this is a table rowgroup flow, returns the underlying object, borrowed immutably. Fails
/// otherwise.
fn as_immutable_table_rowgroup<'a>(&'a self) -> &'a TableRowGroupFlow {
panic!("called as_table_rowgroup() on a non-tablerowgroup flow")
}
/// If this is a table row flow, returns the underlying object. Fails otherwise.
fn as_table_row<'a>(&'a mut self) -> &'a mut TableRowFlow {
panic!("called as_table_row() on a non-tablerow flow")
}
/// If this is a table row flow, returns the underlying object, borrowed immutably. Fails
/// otherwise.
fn as_immutable_table_row<'a>(&'a self) -> &'a TableRowFlow {
panic!("called as_table_row() on a non-tablerow flow")
}
    /// If this is a table caption flow, returns the underlying object. Fails otherwise.
fn as_table_caption<'a>(&'a mut self) -> &'a mut TableCaptionFlow {
panic!("called as_table_caption() on a non-tablecaption flow")
}
/// If this is a table cell flow, returns the underlying object. Fails otherwise.
fn as_table_cell<'a>(&'a mut self) -> &'a mut TableCellFlow {
panic!("called as_table_cell() on a non-tablecell flow")
}
/// If this is a table cell flow, returns the underlying object, borrowed immutably. Fails
/// otherwise.
fn as_immutable_table_cell<'a>(&'a self) -> &'a TableCellFlow {
panic!("called as_table_cell() on a non-tablecell flow")
}
/// If this is a table row, table rowgroup, or table flow, returns column intrinsic
/// inline-sizes. Fails otherwise.
fn column_intrinsic_inline_sizes<'a>(&'a mut self) -> &'a mut Vec<ColumnIntrinsicInlineSize> {
panic!("called column_intrinsic_inline_sizes() on non-table flow")
}
/// If this is a table row, table rowgroup, or table flow, returns column computed
/// inline-sizes. Fails otherwise.
fn column_computed_inline_sizes<'a>(&'a mut self) -> &'a mut Vec<ColumnComputedInlineSize> {
panic!("called column_intrinsic_inline_sizes() on non-table flow")
}
// Main methods
/// Pass 1 of reflow: computes minimum and preferred inline-sizes.
///
/// Recursively (bottom-up) determine the flow's minimum and preferred inline-sizes. When
/// called on this flow, all child flows have had their minimum and preferred inline-sizes set.
/// This function must decide minimum/preferred inline-sizes based on its children's inline-
/// sizes and the dimensions of any boxes it is responsible for flowing.
fn bubble_inline_sizes(&mut self) {
panic!("bubble_inline_sizes not yet implemented")
}
/// Pass 2 of reflow: computes inline-size.
fn assign_inline_sizes(&mut self, _ctx: &LayoutContext) {
panic!("assign_inline_sizes not yet implemented")
}
/// Pass 3a of reflow: computes block-size.
fn assign_block_size<'a>(&mut self, _ctx: &'a LayoutContext<'a>) {
panic!("assign_block_size not yet implemented")
}
/// If this is a float, places it. The default implementation does nothing.
fn place_float_if_applicable<'a>(&mut self, _: &'a LayoutContext<'a>) {}
/// Assigns block-sizes in-order; or, if this is a float, places the float. The default
/// implementation simply assigns block-sizes if this flow is impacted by floats. Returns true
/// if this child was impacted by floats or false otherwise.
fn assign_block_size_for_inorder_child_if_necessary<'a>(&mut self,
layout_context: &'a LayoutContext<'a>)
-> bool {
let impacted = base(self).flags.impacted_by_floats();
if impacted {
self.assign_block_size(layout_context);
mut_base(self).restyle_damage.remove(REFLOW_OUT_OF_FLOW | REFLOW);
}
impacted
}
/// Phase 4 of reflow: computes absolute positions.
fn compute_absolute_position(&mut self) {
// The default implementation is a no-op.
}
/// Phase 5 of reflow: builds display lists.
fn build_display_list(&mut self, layout_context: &LayoutContext);
/// Returns the union of all overflow rects of all of this flow's fragments.
fn compute_overflow(&self) -> Rect<Au>;
/// Iterates through border boxes of all of this flow's fragments.
fn iterate_through_fragment_border_boxes(&self,
iterator: &mut FragmentBorderBoxIterator,
stacking_context_position: &Point2D<Au>);
/// Marks this flow as the root flow. The default implementation is a no-op.
fn mark_as_root(&mut self) {}
// Note that the following functions are mostly called using static method
// dispatch, so it's ok to have them in this trait. Plus, they have
// different behaviour for different types of Flow, so they can't go into
// the Immutable / Mutable Flow Utils traits without additional casts.
/// Return true if store overflow is delayed for this flow.
///
/// Currently happens only for absolutely positioned flows.
fn is_store_overflow_delayed(&mut self) -> bool {
false
}
fn is_root(&self) -> bool {
false
}
/// The 'position' property of this flow.
fn positioning(&self) -> position::T {
position::T::static_
}
/// Return true if this flow has position 'fixed'.
fn is_fixed(&self) -> bool {
self.positioning() == position::T::fixed
}
fn is_positioned(&self) -> bool {
self.is_relatively_positioned() || base(self).flags.contains(IS_ABSOLUTELY_POSITIONED)
}
fn is_relatively_positioned(&self) -> bool {
self.positioning() == position::T::relative
}
/// Return true if this is the root of an absolute flow tree.
fn is_root_of_absolute_flow_tree(&self) -> bool {
false
}
/// Returns true if this is an absolute containing block.
fn is_absolute_containing_block(&self) -> bool {
false
}
/// Updates the inline position of a child flow during the assign-height traversal. At present,
/// this is only used for absolutely-positioned inline-blocks.
fn update_late_computed_inline_position_if_necessary(&mut self, inline_position: Au);
/// Updates the block position of a child flow during the assign-height traversal. At present,
/// this is only used for absolutely-positioned inline-blocks.
fn update_late_computed_block_position_if_necessary(&mut self, block_position: Au);
/// Return the dimensions of the containing block generated by this flow for absolutely-
/// positioned descendants. For block flows, this is the padding box.
///
/// NB: Do not change this `&self` to `&mut self` under any circumstances! It has security
/// implications because this can be called on parents concurrently from descendants!
fn generated_containing_block_rect(&self) -> LogicalRect<Au> {
panic!("generated_containing_block_rect not yet implemented for this flow")
}
/// Returns a layer ID for the given fragment.
#[allow(unsafe_blocks)]
fn layer_id(&self, fragment_id: uint) -> LayerId {
unsafe {
let obj = mem::transmute::<&&Self, &raw::TraitObject>(&self);
let pointer: uint = mem::transmute(obj.data);
LayerId(pointer, fragment_id)
}
}
/// Attempts to perform incremental fixup of this flow by replacing its fragment's style with
/// the new style. This can only succeed if the flow has exactly one fragment.
fn repair_style(&mut self, new_style: &Arc<ComputedValues>);
}
// Base access
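/// Returns the `BaseFlow` data for this flow. The transmute below is sound
/// only because every concrete flow type stores its `BaseFlow` as its first
/// field, so the trait object's data pointer also points at the `BaseFlow`.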
#[inline(always)]
#[allow(unsafe_blocks)]
pub fn base<'a, T: ?Sized + Flow>(this: &'a T) -> &'a BaseFlow {
unsafe {
let obj = mem::transmute::<&&'a T, &'a raw::TraitObject>(&this);
mem::transmute::<*mut (), &'a BaseFlow>(obj.data)
}
}
/// Iterates over the children of this immutable flow.
pub fn imm_child_iter<'a>(flow: &'a Flow) -> FlowListIterator<'a> {
base(flow).children.iter()
}
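/// Mutable counterpart of `base`; the same first-field layout assumption
/// applies.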
#[inline(always)]
#[allow(unsafe_blocks)]
pub fn mut_base<'a, T: ?Sized + Flow>(this: &'a mut T) -> &'a mut BaseFlow {
unsafe {
let obj = mem::transmute::<&&'a mut T, &'a raw::TraitObject>(&this);
mem::transmute::<*mut (), &'a mut BaseFlow>(obj.data)
}
}
/// Iterates over the children of this flow.
pub fn child_iter<'a>(flow: &'a mut Flow) -> MutFlowListIterator<'a> {
mut_base(flow).children.iter_mut()
}
pub trait ImmutableFlowUtils {
// Convenience functions
/// Returns true if this flow is a block or a float flow.
fn is_block_like(self) -> bool;
/// Returns true if this flow is a table flow.
fn is_table(self) -> bool;
/// Returns true if this flow is a table caption flow.
fn is_table_caption(self) -> bool;
/// Returns true if this flow is a proper table child.
fn is_proper_table_child(self) -> bool;
/// Returns true if this flow is a table row flow.
fn is_table_row(self) -> bool;
/// Returns true if this flow is a table cell flow.
fn is_table_cell(self) -> bool;
/// Returns true if this flow is a table colgroup flow.
fn is_table_colgroup(self) -> bool;
/// Returns true if this flow is a table rowgroup flow.
fn is_table_rowgroup(self) -> bool;
/// Returns true if this flow is one of table-related flows.
fn is_table_kind(self) -> bool;
/// Returns true if anonymous flow is needed between this flow and child flow.
fn need_anonymous_flow(self, child: &Flow) -> bool;
/// Generates missing child flow of this flow.
fn generate_missing_child_flow(self, node: &ThreadSafeLayoutNode) -> FlowRef;
/// Returns true if this flow has no children.
fn is_leaf(self) -> bool;
/// Returns the number of children that this flow possesses.
fn child_count(self) -> uint;
/// Return true if this flow is a Block Container.
fn is_block_container(self) -> bool;
/// Returns true if this flow is a block flow.
fn is_block_flow(self) -> bool;
/// Returns true if this flow is an inline flow.
fn is_inline_flow(self) -> bool;
/// Dumps the flow tree for debugging.
fn dump(self);
/// Dumps the flow tree for debugging, with a prefix to indicate that we're at the given level.
fn dump_with_level(self, level: uint);
}
pub trait MutableFlowUtils {
// Traversals
/// Traverses the tree in preorder.
fn traverse_preorder<T:PreorderFlowTraversal>(self, traversal: &T);
/// Traverses the tree in postorder.
fn traverse_postorder<T:PostorderFlowTraversal>(self, traversal: &T);
// Mutators
/// Computes the overflow region for this flow.
fn store_overflow(self, _: &LayoutContext);
/// Gathers static block-offsets bubbled up by kids.
///
/// This essentially gives us offsets of all absolutely positioned direct descendants and all
/// fixed descendants, in tree order.
///
/// This is called in a bottom-up traversal (specifically, the assign-block-size traversal).
/// So, kids have their flow origin already set. In the case of absolute flow kids, they have
/// their hypothetical box position already set.
fn collect_static_block_offsets_from_children(self);
}
pub trait MutableOwnedFlowUtils {
/// Set absolute descendants for this flow.
///
/// Set this flow as the Containing Block for all the absolute descendants.
fn set_absolute_descendants(&mut self, abs_descendants: AbsDescendants);
}
#[derive(RustcEncodable, PartialEq, Debug)]
pub enum FlowClass {
Block,
Inline,
ListItem,
TableWrapper,
Table,
TableColGroup,
TableRowGroup,
TableRow,
TableCaption,
TableCell,
}
/// A top-down traversal.
pub trait PreorderFlowTraversal {
    /// The operation to perform on each flow.
fn process(&self, flow: &mut Flow);
    /// Returns true if this node should be processed. If this returns false,
    /// we skip the operation for this node but still process its descendants.
    /// This is called *after* parent nodes are visited.
fn should_process(&self, _flow: &mut Flow) -> bool {
true
}
}
/// A bottom-up traversal, with an optional in-order pass.
pub trait PostorderFlowTraversal {
    /// The operation to perform on each flow.
fn process(&self, flow: &mut Flow);
    /// Returns true if this node should be processed. If this returns false, we skip the
    /// operation for this node but continue processing its ancestors. This is called *after*
    /// child nodes are visited.
fn should_process(&self, _flow: &mut Flow) -> bool {
true
}
}
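// Example (illustrative, not part of the original source): a postorder
// traversal that bubbles up intrinsic inline sizes could look like this:
//
//     struct BubbleISizes;
//     impl PostorderFlowTraversal for BubbleISizes {
//         fn process(&self, flow: &mut Flow) {
//             flow.bubble_inline_sizes();
//         }
//     }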
bitflags! {
#[doc = "Flags used in flows."]
flags FlowFlags: u16 {
// floated descendants flags
#[doc = "Whether this flow has descendants that float left in the same block formatting"]
#[doc = "context."]
const HAS_LEFT_FLOATED_DESCENDANTS = 0b0000_0000_0000_0001,
#[doc = "Whether this flow has descendants that float right in the same block formatting"]
#[doc = "context."]
const HAS_RIGHT_FLOATED_DESCENDANTS = 0b0000_0000_0000_0010,
#[doc = "Whether this flow is impacted by floats to the left in the same block formatting"]
#[doc = "context (i.e. its height depends on some prior flows with `float: left`)."]
const IMPACTED_BY_LEFT_FLOATS = 0b0000_0000_0000_0100,
#[doc = "Whether this flow is impacted by floats to the right in the same block"]
#[doc = "formatting context (i.e. its height depends on some prior flows with `float:"]
#[doc = "right`)."]
const IMPACTED_BY_RIGHT_FLOATS = 0b0000_0000_0000_1000,
// text align flags
#[doc = "Whether this flow contains a flow that has its own layer within the same absolute"]
#[doc = "containing block."]
const LAYERS_NEEDED_FOR_DESCENDANTS = 0b0000_0000_0001_0000,
#[doc = "Whether this flow must have its own layer. Even if this flag is not set, it might"]
#[doc = "get its own layer if it's deemed to be likely to overlap flows with their own"]
#[doc = "layer."]
const NEEDS_LAYER = 0b0000_0000_0010_0000,
#[doc = "Whether this flow is absolutely positioned. This is checked all over layout, so a"]
#[doc = "virtual call is too expensive."]
const IS_ABSOLUTELY_POSITIONED = 0b0000_0000_0100_0000,
#[doc = "Whether this flow clears to the left. This is checked all over layout, so a"]
#[doc = "virtual call is too expensive."]
const CLEARS_LEFT = 0b0000_0000_1000_0000,
#[doc = "Whether this flow clears to the right. This is checked all over layout, so a"]
#[doc = "virtual call is too expensive."]
const CLEARS_RIGHT = 0b0000_0001_0000_0000,
#[doc = "Whether this flow is left-floated. This is checked all over layout, so a"]
#[doc = "virtual call is too expensive."]
const FLOATS_LEFT = 0b0000_0010_0000_0000,
#[doc = "Whether this flow is right-floated. This is checked all over layout, so a"]
#[doc = "virtual call is too expensive."]
const FLOATS_RIGHT = 0b0000_0100_0000_0000,
#[doc = "Text alignment. \
NB: If you update this, update `TEXT_ALIGN_SHIFT` below."]
const TEXT_ALIGN = 0b0111_1000_0000_0000,
}
}
// NB: If you update this field, you must update the floated descendants flags.
/// The bitmask covering the `HAS_LEFT_FLOATED_DESCENDANTS` and
/// `HAS_RIGHT_FLOATED_DESCENDANTS` flags.
static HAS_FLOATED_DESCENDANTS_BITMASK: FlowFlags = FlowFlags { bits: 0b0000_0011 };
/// The number of bits we must shift off to handle the text alignment field.
///
/// NB: If you update this, update `TEXT_ALIGN` above.
static TEXT_ALIGN_SHIFT: uint = 11;
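// The `TEXT_ALIGN` mask 0b0111_1000_0000_0000 covers bits 11 through 14, so
// packed values are shifted by 11 when read from or written to the flags word.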
impl FlowFlags {
/// Propagates text alignment flags from an appropriate parent flow per CSS 2.1.
///
/// FIXME(#2265, pcwalton): It would be cleaner and faster to make this a derived CSS property
/// `-servo-text-align-in-effect`.
pub fn propagate_text_alignment_from_parent(&mut self, parent_flags: FlowFlags) {
self.set_text_align_override(parent_flags);
}
#[inline]
pub fn text_align(self) -> text_align::T {
FromPrimitive::from_u16((self & TEXT_ALIGN).bits() >> TEXT_ALIGN_SHIFT).unwrap()
}
#[inline]
pub fn set_text_align(&mut self, value: text_align::T) {
*self = (*self & !TEXT_ALIGN) |
FlowFlags::from_bits((value as u16) << TEXT_ALIGN_SHIFT).unwrap();
}
#[inline]
pub fn set_text_align_override(&mut self, parent: FlowFlags) {
self.insert(parent & TEXT_ALIGN);
}
#[inline]
pub fn union_floated_descendants_flags(&mut self, other: FlowFlags) {
self.insert(other & HAS_FLOATED_DESCENDANTS_BITMASK);
}
#[inline]
pub fn impacted_by_floats(&self) -> bool {
self.contains(IMPACTED_BY_LEFT_FLOATS) || self.contains(IMPACTED_BY_RIGHT_FLOATS)
}
#[inline]
pub fn set(&mut self, flags: FlowFlags, value: bool) {
if value {
self.insert(flags);
} else {
self.remove(flags);
}
}
#[inline]
pub fn float_kind(&self) -> float::T {
if self.contains(FLOATS_LEFT) {
float::T::left
} else if self.contains(FLOATS_RIGHT) {
float::T::right
} else {
float::T::none
}
}
#[inline]
pub fn is_float(&self) -> bool {
self.contains(FLOATS_LEFT) || self.contains(FLOATS_RIGHT)
}
#[inline]
pub fn clears_floats(&self) -> bool {
self.contains(CLEARS_LEFT) || self.contains(CLEARS_RIGHT)
}
}
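// Example (illustrative): reading and writing the packed text-align bits.
//
//     let mut flags = FlowFlags::empty();
//     flags.set_text_align(text_align::T::center);
//     assert_eq!(flags.text_align(), text_align::T::center);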
/// The Descendants of a flow.
///
/// Also, details about their position wrt this flow.
#[derive(Clone)]
pub struct Descendants {
/// Links to every descendant. This must be private because it is unsafe to leak `FlowRef`s to
/// layout.
descendant_links: Vec<FlowRef>,
/// Static block-direction offsets of all descendants from the start of this flow box.
pub static_block_offsets: Vec<Au>,
}
impl Descendants {
pub fn new() -> Descendants {
Descendants {
descendant_links: Vec::new(),
static_block_offsets: Vec::new(),
}
}
pub fn len(&self) -> uint {
self.descendant_links.len()
}
pub fn is_empty(&self) -> bool {
self.descendant_links.is_empty()
}
pub fn push(&mut self, given_descendant: FlowRef) {
self.descendant_links.push(given_descendant);
}
/// Push the given descendants on to the existing descendants.
///
/// Ignore any static y offsets, because they are None before layout.
pub fn push_descendants(&mut self, given_descendants: Descendants) {
for elem in given_descendants.descendant_links.into_iter() {
self.descendant_links.push(elem);
}
}
/// Return an iterator over the descendant flows.
pub fn iter<'a>(&'a mut self) -> DescendantIter<'a> {
DescendantIter {
iter: self.descendant_links.iter_mut(),
}
}
/// Return an iterator over (descendant, static y offset).
pub fn iter_with_offset<'a>(&'a mut self) -> DescendantOffsetIter<'a> {
let descendant_iter = DescendantIter {
iter: self.descendant_links.iter_mut(),
};
descendant_iter.zip(self.static_block_offsets.iter_mut())
}
}
pub type AbsDescendants = Descendants;
pub struct DescendantIter<'a> {
iter: IterMut<'a, FlowRef>,
}
impl<'a> Iterator for DescendantIter<'a> {
type Item = &'a mut (Flow + 'a);
fn next(&mut self) -> Option<&'a mut (Flow + 'a)> {
self.iter.next().map(|flow| &mut **flow)
}
}
pub type DescendantOffsetIter<'a> = Zip<DescendantIter<'a>, IterMut<'a, Au>>;
/// Information needed to compute absolute (i.e. viewport-relative) flow positions (not to be
/// confused with absolutely-positioned flows).
#[derive(RustcEncodable, Copy)]
pub struct AbsolutePositionInfo {
/// The size of the containing block for relatively-positioned descendants.
pub relative_containing_block_size: LogicalSize<Au>,
/// The position of the absolute containing block relative to the nearest ancestor stacking
/// context. If the absolute containing block establishes the stacking context for this flow,
/// and this flow is not itself absolutely-positioned, then this is (0, 0).
pub stacking_relative_position_of_absolute_containing_block: Point2D<Au>,
/// Whether the absolute containing block forces positioned descendants to be layerized.
///
/// FIXME(pcwalton): Move into `FlowFlags`.
pub layers_needed_for_positioned_flows: bool,
}
impl AbsolutePositionInfo {
pub fn new(writing_mode: WritingMode) -> AbsolutePositionInfo {
// FIXME(pcwalton): The initial relative containing block-size should be equal to the size
// of the root layer.
AbsolutePositionInfo {
relative_containing_block_size: LogicalSize::zero(writing_mode),
stacking_relative_position_of_absolute_containing_block: Point2D::zero(),
layers_needed_for_positioned_flows: false,
}
}
}
/// Data common to all flows.
pub struct BaseFlow {
/// NB: Must be the first element.
///
/// The necessity of this will disappear once we have dynamically-sized types.
ref_count: AtomicUint,
pub restyle_damage: RestyleDamage,
/// The children of this flow.
pub children: FlowList,
/// Intrinsic inline sizes for this flow.
pub intrinsic_inline_sizes: IntrinsicISizes,
/// The upper left corner of the box representing this flow, relative to the box representing
/// its parent flow.
///
/// For absolute flows, this represents the position with respect to its *containing block*.
///
/// This does not include margins in the block flow direction, because those can collapse. So
/// for the block direction (usually vertical), this represents the *border box*. For the
/// inline direction (usually horizontal), this represents the *margin box*.
pub position: LogicalRect<Au>,
/// The amount of overflow of this flow, relative to the containing block. Must include all the
/// pixels of all the display list items for correct invalidation.
pub overflow: Rect<Au>,
/// Data used during parallel traversals.
///
/// TODO(pcwalton): Group with other transient data to save space.
pub parallel: FlowParallelInfo,
/// The floats next to this flow.
pub floats: Floats,
/// The collapsible margins for this flow, if any.
pub collapsible_margins: CollapsibleMargins,
/// The position of this flow relative to the start of the nearest ancestor stacking context.
/// This is computed during the top-down pass of display list construction.
pub stacking_relative_position: Point2D<Au>,
/// Details about descendants with position 'absolute' or 'fixed' for which we are the
/// containing block. This is in tree order. This includes any direct children.
pub abs_descendants: AbsDescendants,
/// The inline-size of the block container of this flow. Used for computing percentage and
/// automatic values for `width`.
pub block_container_inline_size: Au,
/// The block-size of the block container of this flow, if it is an explicit size (does not
/// depend on content heights). Used for computing percentage values for `height`.
pub block_container_explicit_block_size: Option<Au>,
/// Offset wrt the nearest positioned ancestor - aka the Containing Block
/// for any absolutely positioned elements.
pub absolute_static_i_offset: Au,
/// Offset wrt the Initial Containing Block.
pub fixed_static_i_offset: Au,
/// Reference to the Containing Block, if this flow is absolutely positioned.
pub absolute_cb: ContainingBlockLink,
/// Information needed to compute absolute (i.e. viewport-relative) flow positions (not to be
/// confused with absolutely-positioned flows).
///
/// FIXME(pcwalton): Merge with `absolute_static_i_offset` and `fixed_static_i_offset` above?
pub absolute_position_info: AbsolutePositionInfo,
/// The clipping region for this flow and its descendants, in layer coordinates.
pub clip: ClippingRegion,
/// The results of display list building for this flow.
pub display_list_building_result: DisplayListBuildingResult,
/// The writing mode for this flow.
pub writing_mode: WritingMode,
/// Various flags for flows, tightly packed to save space.
pub flags: FlowFlags,
}
unsafe impl Send for BaseFlow {}
unsafe impl Sync for BaseFlow {}
<|fim▁hole|> write!(f,
"@ {:?}, CC {}, ADC {}",
self.position,
self.parallel.children_count.load(Ordering::SeqCst),
self.abs_descendants.len())
}
}
impl Encodable for BaseFlow {
fn encode<S: Encoder>(&self, e: &mut S) -> Result<(), S::Error> {
e.emit_struct("base", 0, |e| {
try!(e.emit_struct_field("id", 0, |e| self.debug_id().encode(e)));
try!(e.emit_struct_field("stacking_relative_position",
1,
|e| self.stacking_relative_position.encode(e)));
try!(e.emit_struct_field("intrinsic_inline_sizes",
2,
|e| self.intrinsic_inline_sizes.encode(e)));
try!(e.emit_struct_field("position", 3, |e| self.position.encode(e)));
e.emit_struct_field("children", 4, |e| {
e.emit_seq(self.children.len(), |e| {
for (i, c) in self.children.iter().enumerate() {
try!(e.emit_seq_elt(i, |e| {
try!(e.emit_struct("flow", 0, |e| {
try!(e.emit_struct_field("class", 0, |e| c.class().encode(e)));
e.emit_struct_field("data", 1, |e| {
match c.class() {
FlowClass::Block => c.as_immutable_block().encode(e),
FlowClass::Inline => c.as_immutable_inline().encode(e),
FlowClass::Table => c.as_immutable_table().encode(e),
FlowClass::TableWrapper => c.as_immutable_table_wrapper().encode(e),
FlowClass::TableRowGroup => c.as_immutable_table_rowgroup().encode(e),
FlowClass::TableRow => c.as_immutable_table_row().encode(e),
FlowClass::TableCell => c.as_immutable_table_cell().encode(e),
_ => { Ok(()) } // TODO: Support captions
}
})
}));
Ok(())
}));
}
Ok(())
})
})
})
}
}
#[unsafe_destructor]
impl Drop for BaseFlow {
fn drop(&mut self) {
if self.ref_count.load(Ordering::SeqCst) != 0 {
panic!("Flow destroyed before its ref count hit zero—this is unsafe!")
}
}
}
/// Whether a base flow should be forced to be nonfloated. This can affect e.g. `TableFlow`, which
/// is never floated because the table wrapper flow is the floated one.
#[derive(Clone, PartialEq)]
pub enum ForceNonfloatedFlag {
/// The flow should be floated if the node has a `float` property.
FloatIfNecessary,
/// The flow should be forced to be nonfloated.
ForceNonfloated,
}
impl BaseFlow {
#[inline]
pub fn new(node: Option<ThreadSafeLayoutNode>,
writing_mode: WritingMode,
force_nonfloated: ForceNonfloatedFlag)
-> BaseFlow {
let mut flags = FlowFlags::empty();
match node {
None => {}
Some(node) => {
let node_style = node.style();
match node_style.get_box().position {
position::T::absolute | position::T::fixed => {
flags.insert(IS_ABSOLUTELY_POSITIONED)
}
_ => {}
}
if force_nonfloated == ForceNonfloatedFlag::FloatIfNecessary {
match node_style.get_box().float {
float::T::none => {}
float::T::left => flags.insert(FLOATS_LEFT),
float::T::right => flags.insert(FLOATS_RIGHT),
}
}
match node_style.get_box().clear {
clear::T::none => {}
clear::T::left => flags.insert(CLEARS_LEFT),
clear::T::right => flags.insert(CLEARS_RIGHT),
clear::T::both => {
flags.insert(CLEARS_LEFT);
flags.insert(CLEARS_RIGHT);
}
}
}
}
// New flows start out as fully damaged.
let mut damage = RestyleDamage::all();
damage.remove(RECONSTRUCT_FLOW);
BaseFlow {
ref_count: AtomicUint::new(1),
restyle_damage: damage,
children: FlowList::new(),
intrinsic_inline_sizes: IntrinsicISizes::new(),
position: LogicalRect::zero(writing_mode),
overflow: ZERO_RECT,
parallel: FlowParallelInfo::new(),
floats: Floats::new(writing_mode),
collapsible_margins: CollapsibleMargins::new(),
stacking_relative_position: Point2D::zero(),
abs_descendants: Descendants::new(),
absolute_static_i_offset: Au(0),
fixed_static_i_offset: Au(0),
block_container_inline_size: Au(0),
block_container_explicit_block_size: None,
absolute_cb: ContainingBlockLink::new(),
display_list_building_result: DisplayListBuildingResult::None,
absolute_position_info: AbsolutePositionInfo::new(writing_mode),
clip: ClippingRegion::max(),
flags: flags,
writing_mode: writing_mode,
}
}
pub fn child_iter<'a>(&'a mut self) -> MutFlowListIterator<'a> {
self.children.iter_mut()
}
pub unsafe fn ref_count<'a>(&'a self) -> &'a AtomicUint {
&self.ref_count
}
pub fn debug_id(&self) -> uint {
let p = self as *const _;
p as uint
}
/// Ensures that all display list items generated by this flow are within the flow's overflow
/// rect. This should only be used for debugging.
pub fn validate_display_list_geometry(&self) {
// FIXME(pcwalton, #2795): Get the real container size.
let container_size = Size2D::zero();
let position_with_overflow = self.position
.to_physical(self.writing_mode, container_size)
.union(&self.overflow);
let bounds = Rect(self.stacking_relative_position, position_with_overflow.size);
let all_items = match self.display_list_building_result {
DisplayListBuildingResult::None => Vec::new(),
DisplayListBuildingResult::StackingContext(ref stacking_context) => {
stacking_context.display_list.all_display_items()
}
DisplayListBuildingResult::Normal(ref display_list) => display_list.all_display_items(),
};
for item in all_items.iter() {
let paint_bounds = item.base().clip.clone().intersect_rect(&item.base().bounds);
if !paint_bounds.might_be_nonempty() {
continue;
}
if bounds.union(&paint_bounds.bounding_rect()) != bounds {
error!("DisplayList item {:?} outside of Flow overflow ({:?})", item, paint_bounds);
}
}
}
}
impl<'a> ImmutableFlowUtils for &'a (Flow + 'a) {
    /// Returns true if this flow is a block or a float flow.
fn is_block_like(self) -> bool {
match self.class() {
FlowClass::Block => true,
_ => false,
}
}
/// Returns true if this flow is a proper table child.
/// 'Proper table child' is defined as table-row flow, table-rowgroup flow,
/// table-column-group flow, or table-caption flow.
fn is_proper_table_child(self) -> bool {
match self.class() {
FlowClass::TableRow | FlowClass::TableRowGroup |
FlowClass::TableColGroup | FlowClass::TableCaption => true,
_ => false,
}
}
/// Returns true if this flow is a table row flow.
fn is_table_row(self) -> bool {
match self.class() {
FlowClass::TableRow => true,
_ => false,
}
}
/// Returns true if this flow is a table cell flow.
fn is_table_cell(self) -> bool {
match self.class() {
FlowClass::TableCell => true,
_ => false,
}
}
/// Returns true if this flow is a table colgroup flow.
fn is_table_colgroup(self) -> bool {
match self.class() {
FlowClass::TableColGroup => true,
_ => false,
}
}
/// Returns true if this flow is a table flow.
fn is_table(self) -> bool {
match self.class() {
FlowClass::Table => true,
_ => false,
}
}
/// Returns true if this flow is a table caption flow.
fn is_table_caption(self) -> bool {
match self.class() {
FlowClass::TableCaption => true,
_ => false,
}
}
/// Returns true if this flow is a table rowgroup flow.
fn is_table_rowgroup(self) -> bool {
match self.class() {
FlowClass::TableRowGroup => true,
_ => false,
}
}
/// Returns true if this flow is one of table-related flows.
fn is_table_kind(self) -> bool {
match self.class() {
FlowClass::TableWrapper | FlowClass::Table |
FlowClass::TableColGroup | FlowClass::TableRowGroup |
FlowClass::TableRow | FlowClass::TableCaption | FlowClass::TableCell => true,
_ => false,
}
}
    /// Returns true if an anonymous flow is needed between this flow and the given child flow.
/// Spec: http://www.w3.org/TR/CSS21/tables.html#anonymous-boxes
fn need_anonymous_flow(self, child: &Flow) -> bool {
match self.class() {
FlowClass::Table => !child.is_proper_table_child(),
FlowClass::TableRowGroup => !child.is_table_row(),
FlowClass::TableRow => !child.is_table_cell(),
_ => false
}
}
/// Generates missing child flow of this flow.
fn generate_missing_child_flow(self, node: &ThreadSafeLayoutNode) -> FlowRef {
let flow = match self.class() {
FlowClass::Table | FlowClass::TableRowGroup => {
let fragment =
Fragment::new_anonymous_from_specific_info(node,
SpecificFragmentInfo::TableRow);
box TableRowFlow::from_node_and_fragment(node, fragment) as Box<Flow>
},
FlowClass::TableRow => {
let fragment =
Fragment::new_anonymous_from_specific_info(node,
SpecificFragmentInfo::TableCell);
let hide = node.style().get_inheritedtable().empty_cells == empty_cells::T::hide;
box TableCellFlow::from_node_fragment_and_visibility_flag(node, fragment, !hide) as
Box<Flow>
},
_ => {
panic!("no need to generate a missing child")
}
};
FlowRef::new(flow)
}
/// Returns true if this flow has no children.
fn is_leaf(self) -> bool {
base(self).children.len() == 0
}
/// Returns the number of children that this flow possesses.
fn child_count(self) -> uint {
base(self).children.len()
}
/// Return true if this flow is a Block Container.
///
/// Except for table fragments and replaced elements, block-level fragments (`BlockFlow`) are
/// also block container fragments.
/// Non-replaced inline blocks and non-replaced table cells are also block
/// containers.
fn is_block_container(self) -> bool {
match self.class() {
// TODO: Change this when inline-blocks are supported.
FlowClass::Block | FlowClass::TableCaption | FlowClass::TableCell => {
// FIXME: Actually check the type of the node
self.child_count() != 0
}
_ => false,
}
}
/// Returns true if this flow is a block flow.
fn is_block_flow(self) -> bool {
match self.class() {
FlowClass::Block => true,
_ => false,
}
}
/// Returns true if this flow is an inline flow.
fn is_inline_flow(self) -> bool {
match self.class() {
FlowClass::Inline => true,
_ => false,
}
}
/// Dumps the flow tree for debugging.
fn dump(self) {
self.dump_with_level(0)
}
/// Dumps the flow tree for debugging, with a prefix to indicate that we're at the given level.
fn dump_with_level(self, level: uint) {
let mut indent = String::new();
for _ in range(0, level) {
indent.push_str("| ")
}
// TODO: ICE, already fixed in rustc.
//println!("{}+ {:?}", indent, self);
for kid in imm_child_iter(self) {
kid.dump_with_level(level + 1)
}
}
}
impl<'a> MutableFlowUtils for &'a mut (Flow + 'a) {
/// Traverses the tree in preorder.
fn traverse_preorder<T:PreorderFlowTraversal>(self, traversal: &T) {
if traversal.should_process(self) {
traversal.process(self);
}
for kid in child_iter(self) {
kid.traverse_preorder(traversal);
}
}
/// Traverses the tree in postorder.
fn traverse_postorder<T:PostorderFlowTraversal>(self, traversal: &T) {
for kid in child_iter(self) {
kid.traverse_postorder(traversal);
}
if traversal.should_process(self) {
traversal.process(self)
}
}
/// Calculate and set overflow for current flow.
///
/// CSS Section 11.1
/// This is the union of rectangles of the flows for which we define the
/// Containing Block.
///
/// Assumption: This is called in a bottom-up traversal, so kids' overflows have
/// already been set.
/// Assumption: Absolute descendants have had their overflow calculated.
fn store_overflow(self, _: &LayoutContext) {
// Calculate overflow on a per-fragment basis.
let mut overflow = self.compute_overflow();
if self.is_block_container() {
// FIXME(#2795): Get the real container size.
let container_size = Size2D::zero();
for kid in child_iter(self) {
if base(kid).flags.contains(IS_ABSOLUTELY_POSITIONED) {
continue
}
let kid_overflow = base(kid).overflow;
let kid_position = base(kid).position.to_physical(base(kid).writing_mode,
container_size);
overflow = overflow.union(&kid_overflow.translate(&kid_position.origin))
}
for kid in mut_base(self).abs_descendants.iter() {
let kid_overflow = base(kid).overflow;
let kid_position = base(kid).position.to_physical(base(kid).writing_mode,
container_size);
overflow = overflow.union(&kid_overflow.translate(&kid_position.origin))
}
}
mut_base(self).overflow = overflow;
}
/// Collect and update static y-offsets bubbled up by kids.
///
/// This would essentially give us offsets of all absolutely positioned
/// direct descendants and all fixed descendants, in tree order.
///
/// Assume that this is called in a bottom-up traversal (specifically, the
/// assign-block-size traversal). So, kids have their flow origin already set.
/// In the case of absolute flow kids, they have their hypothetical box
/// position already set.
fn collect_static_block_offsets_from_children(self) {
let mut absolute_descendant_block_offsets = Vec::new();
for kid in mut_base(self).child_iter() {
let mut gives_absolute_offsets = true;
if kid.is_block_like() {
let kid_block = kid.as_block();
if kid_block.is_fixed() || kid_block.base.flags.contains(IS_ABSOLUTELY_POSITIONED) {
// It won't contribute any offsets for descendants because it would be the
// containing block for them.
gives_absolute_offsets = false;
// Give the offset for the current absolute flow alone.
absolute_descendant_block_offsets.push(
kid_block.get_hypothetical_block_start_edge());
} else if kid_block.is_positioned() {
// It won't contribute any offsets because it would be the containing block
// for the descendants.
gives_absolute_offsets = false;
}
}
if gives_absolute_offsets {
let kid_base = mut_base(kid);
// Avoid copying the offset vector.
let offsets = mem::replace(&mut kid_base.abs_descendants.static_block_offsets,
Vec::new());
// Consume all the static block-offsets bubbled up by kids.
for block_offset in offsets.into_iter() {
// The offsets are with respect to the kid flow's fragment. Translate them to
// that of the current flow.
absolute_descendant_block_offsets.push(
block_offset + kid_base.position.start.b);
}
}
}
mut_base(self).abs_descendants.static_block_offsets = absolute_descendant_block_offsets
}
}
impl MutableOwnedFlowUtils for FlowRef {
/// Set absolute descendants for this flow.
///
/// Set yourself as the Containing Block for all the absolute descendants.
///
/// This is called during flow construction, so nothing else can be accessing the descendant
/// flows. This is enforced by the fact that we have a mutable `FlowRef`, which only flow
/// construction is allowed to possess.
fn set_absolute_descendants(&mut self, abs_descendants: AbsDescendants) {
let this = self.clone();
let block = self.as_block();
block.base.abs_descendants = abs_descendants;
for descendant_link in block.base.abs_descendants.iter() {
let base = mut_base(descendant_link);
base.absolute_cb.set(this.clone());
}
}
}
/// A link to a flow's containing block.
///
/// This cannot safely be a `Flow` pointer because this is a pointer *up* the tree, not *down* the
/// tree. A pointer up the tree is unsafe during layout because it can be used to access a node
/// with an immutable reference while that same node is being laid out, causing possible iterator
/// invalidation and use-after-free.
///
/// FIXME(pcwalton): I think this would be better with a borrow flag instead of `unsafe`.
pub struct ContainingBlockLink {
/// The pointer up to the containing block.
link: Option<FlowRef>,
}
impl ContainingBlockLink {
fn new() -> ContainingBlockLink {
ContainingBlockLink {
link: None,
}
}
fn set(&mut self, link: FlowRef) {
self.link = Some(link)
}
pub unsafe fn get<'a>(&'a mut self) -> &'a mut Option<FlowRef> {
&mut self.link
}
#[inline]
pub fn generated_containing_block_rect(&mut self) -> LogicalRect<Au> {
match self.link {
None => panic!("haven't done it"),
Some(ref mut link) => link.generated_containing_block_rect(),
}
}
}<|fim▁end|> | impl fmt::Debug for BaseFlow {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
<|file_name|>example.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
import time
from flask import Flask, session, url_for
from flask_debugtoolbar import DebugToolbarExtension
from weblablib import WebLab, requires_active, weblab_user, poll
app = Flask(__name__)
# XXX: IMPORTANT SETTINGS TO CHANGE
app.config['SECRET_KEY'] = 'something random' # e.g., run: os.urandom(32) and put the output here
app.config['WEBLAB_USERNAME'] = 'weblabdeusto' # This is the http_username you put in WebLab-Deusto
app.config['WEBLAB_PASSWORD'] = 'password' # This is the http_password you put in WebLab-Deusto
# XXX You should change...
# Use different cookie names for different labs
app.config['SESSION_COOKIE_NAME'] = 'lab'
# app.config['WEBLAB_UNAUTHORIZED_LINK'] = 'https://weblab.deusto.es/weblab/' # Your own WebLab-Deusto URL
# The URL for this lab (e.g., you might have two labs, /lab1 and /lab2 in the same server)
app.config['SESSION_COOKIE_PATH'] = '/lab'
# The session_id is stored in the Flask session. You might also use a different name
app.config['WEBLAB_SESSION_ID_NAME'] = 'lab_session_id'
# These are optional parameters
# Flask-Debug: don't intercept redirects (go directly)
app.config['DEBUG_TB_INTERCEPT_REDIRECTS'] = False
# app.config['WEBLAB_BASE_URL'] = '' # If you want the weblab path to start by /foo/weblab, you can put '/foo'
# app.config['WEBLAB_REDIS_URL'] = 'redis://localhost:6379/0' # default value
# app.config['WEBLAB_REDIS_BASE'] = 'lab1' # If you have more than one lab in the same redis database
# app.config['WEBLAB_CALLBACK_URL'] = '/lab/public' # If you don't pass it in the creator
# app.config['WEBLAB_TIMEOUT'] = 15 # in seconds, default value
# app.config['WEBLAB_SCHEME'] = 'https'
weblab = WebLab(app, callback_url='/lab/public')
toolbar = DebugToolbarExtension(app)
@weblab.initial_url
def initial_url():
"""
This returns the landing URL (e.g., where the user will be forwarded).
"""
return url_for('.lab')
@weblab.on_start
def on_start(client_data, server_data):
"""
In this code, you can do something to setup the experiment. It is
called for every user, before they start using it.
"""
print("New user!")
print(weblab_user)
@weblab.on_dispose
def on_stop():
"""
In this code, you can do something to clean up the experiment. It is
guaranteed to be run.
"""
print("User expired. Here you should clean resources")
print(weblab_user)
@app.route('/lab/')
@requires_active
def lab():
"""
This is your code. If you provide @requires_active to any other URL, it is secured.
"""
user = weblab_user
return "Hello %s. You didn't poll in %.2f seconds (timeout configured to %s). Total time left: %s" % (user.username, user.time_without_polling, weblab.timeout, user.time_left)
@app.route("/")
def index():
return "<html><head></head><body><a href='{}'>Access to the lab</a></body></html>".format(url_for('.lab'))
if __name__ == '__main__':
print("Run the following:")
print()<|fim▁hole|> print(" $ flask run")
print()<|fim▁end|> | print(" (optionally) $ export FLASK_DEBUG=1")
print(" $ export FLASK_APP={}".format(__file__)) |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from django.contrib.auth.models import User
class OrganisationType(models.Model):
type_desc = models.CharField(max_length=200)
def __unicode__(self):
return self.type_desc
class Address(models.Model):
street_address = models.CharField(max_length=100)
city = models.CharField(max_length=100)
pin = models.CharField(max_length=10)
province = models.CharField(max_length=100)
nationality = models.CharField(max_length=100)
def __unicode__(self):
return self.street_address + ',' + self.city
class HattiUser(models.Model):<|fim▁hole|> fax = models.CharField(max_length=100)
avatar = models.CharField(max_length=100, null=True, blank=True)
tagline = models.CharField(max_length=140)
class Meta:
abstract = True
class AdminOrganisations(HattiUser):
title = models.CharField(max_length=200)
organisation_type = models.ForeignKey(OrganisationType)
def __unicode__(self):
return self.title
class Customer(HattiUser):
title = models.CharField(max_length=200, blank=True, null=True)
is_org = models.BooleanField()
org_type = models.ForeignKey(OrganisationType)
company = models.CharField(max_length=200)
def __unicode__(self):
return unicode(self.user)<|fim▁end|> | user = models.OneToOneField(User)
address = models.ForeignKey(Address)
telephone = models.CharField(max_length=500)
date_joined = models.DateTimeField(auto_now_add=True) |
<|file_name|>frontEndRoute.js<|end_file_name|><|fim▁begin|>var express = require('express');
var soapSave = require('../utils/soa_save_esig')('http://192.168.0.6:8001/soa-infra/services/default/SignatureService/SignatureService_ep?WSDL');
var router = express.Router();
/* GET users listing. */
router.get('/', function(req, res) {
var sig = req.query;
console.log(sig);
if(!sig.userRoleType){
res.render('front_end', { title: 'MPSTD CAC Signature', cacContent: "need to supply userRoleType" });
return;
}
if (sig.req_type && sig.req_type == 'r'){
var inputData = {
applicantId: sig.applicantId,
formId: sig.formId,
userRoleType: sig.userRoleType
};
soapSave.retrieve(inputData,function (result ){
var b64string = result.signatureImage;
if(b64string == null){
res.render('front_end', { title: 'MPSTD CAC Signature', cacContent: "no sig found" });<|fim▁hole|> var cacObj =JSON.parse( buf.toString('ascii') );
res.render('response',
{ title: 'MPSTD CAC Signature response', cacContent: JSON.stringify(cacObj.subject),
cacSignature: JSON.stringify(cacObj.fingerprint),
timeStamp: JSON.stringify(result.signatureDateTime)
});
// console.log("base64 buffer length = " +buf.length);
}
} );
}
else{//default Esing sign pad
res.render('front_end', { title: 'MPSTD CAC Signature', cacContent: "no sig found" });
}
});
module.exports = router;<|fim▁end|> | }
else{
var buf = new Buffer(b64string, 'base64'); |
<|file_name|>extension_uninstall_dialog_gtk.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Currently this file is only used for the uninstall prompt. The install prompt
// code is in extension_install_prompt2_gtk.cc.
#include "chrome/browser/extensions/extension_uninstall_dialog.h"
#include <gtk/gtk.h>
#include "base/string_util.h"
#include "base/utf_string_conversions.h"
#include "chrome/browser/ui/browser.h"
#include "chrome/browser/ui/browser_window.h"
#include "chrome/browser/ui/gtk/browser_window_gtk.h"
#include "chrome/common/extensions/extension.h"
#include "grit/generated_resources.h"
#include "ui/base/gtk/gtk_hig_constants.h"<|fim▁hole|>#include "ui/base/l10n/l10n_util.h"
#include "ui/gfx/gtk_util.h"
namespace {
// Left or right margin.
const int kPanelHorizMargin = 13;
// GTK implementation of the uninstall dialog.
class ExtensionUninstallDialogGtk : public ExtensionUninstallDialog {
public:
ExtensionUninstallDialogGtk(Browser* browser, Delegate* delegate);
virtual ~ExtensionUninstallDialogGtk() OVERRIDE;
private:
virtual void Show() OVERRIDE;
CHROMEGTK_CALLBACK_1(ExtensionUninstallDialogGtk, void, OnResponse, int);
GtkWidget* dialog_;
};
ExtensionUninstallDialogGtk::ExtensionUninstallDialogGtk(
Browser* browser, ExtensionUninstallDialog::Delegate* delegate)
: ExtensionUninstallDialog(browser, delegate),
dialog_(NULL) {}
void ExtensionUninstallDialogGtk::Show() {
BrowserWindow* browser_window = browser_->window();
if (!browser_window) {
delegate_->ExtensionUninstallCanceled();
return;
}
// Build the dialog.
dialog_ = gtk_dialog_new_with_buttons(
l10n_util::GetStringUTF8(IDS_EXTENSION_UNINSTALL_PROMPT_TITLE).c_str(),
browser_window->GetNativeWindow(),
GTK_DIALOG_MODAL,
GTK_STOCK_CANCEL,
GTK_RESPONSE_CLOSE,
l10n_util::GetStringUTF8(IDS_EXTENSION_PROMPT_UNINSTALL_BUTTON).c_str(),
GTK_RESPONSE_ACCEPT,
NULL);
#if !GTK_CHECK_VERSION(2, 22, 0)
gtk_dialog_set_has_separator(GTK_DIALOG(dialog_), FALSE);
#endif
// Create a two column layout.
GtkWidget* content_area = gtk_dialog_get_content_area(GTK_DIALOG(dialog_));
gtk_box_set_spacing(GTK_BOX(content_area), ui::kContentAreaSpacing);
GtkWidget* icon_hbox = gtk_hbox_new(FALSE, ui::kContentAreaSpacing);
gtk_box_pack_start(GTK_BOX(content_area), icon_hbox, TRUE, TRUE, 0);
// Put Icon in the left column.
GdkPixbuf* pixbuf = gfx::GdkPixbufFromSkBitmap(*icon_.bitmap());
GtkWidget* icon = gtk_image_new_from_pixbuf(pixbuf);
g_object_unref(pixbuf);
gtk_box_pack_start(GTK_BOX(icon_hbox), icon, TRUE, TRUE, 0);
// Create a new vbox for the right column.
GtkWidget* right_column_area = gtk_vbox_new(FALSE, 0);
gtk_box_pack_start(GTK_BOX(icon_hbox), right_column_area, TRUE, TRUE, 0);
std::string heading_text = l10n_util::GetStringFUTF8(
IDS_EXTENSION_UNINSTALL_PROMPT_HEADING, UTF8ToUTF16(extension_->name()));
GtkWidget* heading_label = gtk_label_new(heading_text.c_str());
gtk_misc_set_alignment(GTK_MISC(heading_label), 0.0, 0.5);
gtk_box_pack_start(GTK_BOX(right_column_area), heading_label, TRUE, TRUE, 0);
g_signal_connect(dialog_, "response", G_CALLBACK(OnResponseThunk), this);
gtk_window_set_resizable(GTK_WINDOW(dialog_), FALSE);
gtk_widget_show_all(dialog_);
}
ExtensionUninstallDialogGtk::~ExtensionUninstallDialogGtk() {
delegate_ = NULL;
if (dialog_) {
gtk_widget_destroy(dialog_);
dialog_ = NULL;
}
}
void ExtensionUninstallDialogGtk::OnResponse(
GtkWidget* dialog, int response_id) {
CHECK_EQ(dialog_, dialog);
gtk_widget_destroy(dialog_);
dialog_ = NULL;
if (delegate_) {
if (response_id == GTK_RESPONSE_ACCEPT)
delegate_->ExtensionUninstallAccepted();
else
delegate_->ExtensionUninstallCanceled();
}
}
} // namespace
// static
// Platform specific implementation of the uninstall dialog show method.
ExtensionUninstallDialog* ExtensionUninstallDialog::Create(
Browser* browser, Delegate* delegate) {
return new ExtensionUninstallDialogGtk(browser, delegate);
}<|fim▁end|> | |
<|file_name|>world.rs<|end_file_name|><|fim▁begin|>use crate::{
ai, animations, components, desc, flags::Flags, item, spatial::Spatial, spec::EntitySpawn,<|fim▁hole|>use std::collections::HashSet;
pub const GAME_VERSION: &str = "0.1.0";
calx_ecs::build_ecs! {
anim: animations::Anim,
brain: ai::Brain,
desc: desc::Desc,
health: stats::Health,
item: item::Item,
map_memory: components::MapMemory,
stacking: item::Stacking,
stats: stats::StatsComponent,
status: stats::Statuses,
}
#[derive(Serialize, Deserialize)]
pub struct WorldSeed {
pub rng_seed: u32,
pub world_skeleton: WorldSkeleton,
pub player_character: ExternalEntity,
}
/// Toplevel game state object.
#[derive(Serialize, Deserialize)]
pub struct World {
/// Game version. Not mutable in the slightest, but the simplest way to
/// get versioned save files is to just drop it here.
pub(crate) version: String,
/// Entity component system.
pub(crate) ecs: Ecs,
/// Static startup game world
pub(crate) world_cache: WorldCache,
/// Spawns from worldgen that have already been generated in the world.
generated_spawns: HashSet<(Location, EntitySpawn)>,
/// Spatial index for game entities.
pub(crate) spatial: Spatial,
/// Global gamestate flags.
pub(crate) flags: Flags,
/// Persistent random number generator.
pub(crate) rng: Rng,
}
impl World {
pub fn new(world_seed: &WorldSeed) -> World {
let mut ret = World {
version: GAME_VERSION.to_string(),
ecs: Default::default(),
world_cache: WorldCache::new(world_seed.rng_seed, world_seed.world_skeleton.clone()),
generated_spawns: Default::default(),
spatial: Default::default(),
flags: Default::default(),
rng: seeded_rng(&world_seed.rng_seed),
};
ret.spawn_player(
ret.world_cache.player_entrance(),
&world_seed.player_character,
);
ret.generate_world_spawns();
ret
}
pub(crate) fn generate_world_spawns(&mut self) {
let mut spawns = self.world_cache.drain_spawns();
spawns.retain(|s| !self.generated_spawns.contains(s));
let seed = self.rng_seed();
for (loc, s) in &spawns {
// Create one-off RNG from just the spawn info, will always run the same for same info.
let mut rng = calx::seeded_rng(&(seed, loc, s));
// Construct loadout from the spawn info and generate it in world.
self.spawn(&s.sample(&mut rng), *loc);
self.generated_spawns.insert((*loc, s.clone()));
}
}
}<|fim▁end|> | stats, world_cache::WorldCache, Distribution, ExternalEntity, Location, Rng, WorldSkeleton,
};
use calx::seeded_rng;
use serde::{Deserialize, Serialize}; |
<|file_name|>knowledged.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8-*-
import random
import re
import wolframalpha
import time
import sys
from sys import maxint
from client import jarvispath
WORDS = ["WHO", "WHAT", "WHERE", "HOW MUCH"]
def handle(text, mic, profile):
app_id = profile['keys']['WOLFRAMALPHA']
client = wolframalpha.Client(app_id)
query = client.query(text)
if len(query.pods) > 0:
texts = ""
pod = query.pods[1]
if pod.text:
texts = pod.text
else:
texts = "I can not find anything"
<|fim▁hole|>
def isValid(text):
if re.search(r'\bwho\b', text, re.IGNORECASE):
return True
elif re.search(r'\bwhat\b', text, re.IGNORECASE):
return True
elif re.search(r'\bwhere\b', text, re.IGNORECASE):
return True
elif re.search(r'\bhow much\b', text, re.IGNORECASE):
return True
else:
return False<|fim▁end|> | mic.say(texts.replace("|",""))
else:
mic.say("Sorry, Could you be more specific?.") |
<|file_name|>time.rs<|end_file_name|><|fim▁begin|>//! Utilities for mapping between human-usable time units and BAPS3's
//! preferred time units.
/// Enum of available time units.
///
/// This does not contain every possible time unit anyone may want to use with
/// a BAPS3 client, but covers the main possibilities.
///
/// Each unit specified in terms of its equivalent in microseconds, which is
/// the unit used 'over the wire' when talking to BAPS3.
#[derive(Clone, Copy)]
pub enum TimeUnit {
/// Hours (1 hour = 60 minutes)
Hours,
/// Minutes (1 minute = 60 seconds).
Minutes,
/// Seconds (1 second = 1,000 milliseconds).
Seconds,
/// Milliseconds (1 millisecond = 1,000 microseconds).
Milliseconds,
/// Microseconds (the BAPS3 base unit).
Microseconds
}
impl TimeUnit {
/// Returns the suffix of the given unit.
///
/// This is mainly for use in human-readable times.
pub fn suffix(&self) -> &'static str {
match *self {
TimeUnit::Hours => "h",
TimeUnit::Minutes => "m",
TimeUnit::Seconds => "s",
TimeUnit::Milliseconds => "ms",
TimeUnit::Microseconds => "us"
}
}
/// Returns the equivalent of `n` of the given unit in microseconds.
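///
/// For example (a small check in the style of the doc tests below):
///
/// ```rust
/// use baps3_cli::time::TimeUnit;
/// assert_eq!(TimeUnit::Seconds.as_micros(2), 2000000)
/// ```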
pub fn as_micros(&self, n: u64) -> u64 {
match *self {
TimeUnit::Hours => n * 1000 * 1000 * 60 * 60,
TimeUnit::Minutes => n * 1000 * 1000 * 60,
TimeUnit::Seconds => n * 1000 * 1000,
TimeUnit::Milliseconds => n * 1000,
TimeUnit::Microseconds => n
}
}
/// Returns the equivalent of `n` microseconds in the given unit.<|fim▁hole|> /// As the return value is an integer, there may be some rounding down.
///
/// # Examples
///
/// 1 million microseconds is equivalent to 1 second:
///
/// ```rust
/// use baps3_cli::time::TimeUnit;
/// assert_eq!(TimeUnit::Seconds.from_micros(1000000), 1)
/// ```
///
/// Translating one hour of time to microseconds and back is the identity:
///
/// ```rust
/// use baps3_cli::time::TimeUnit;
/// let hour_us = TimeUnit::Hours.as_micros(1);
/// assert_eq!(TimeUnit::Hours.from_micros(hour_us), 1)
/// ```
pub fn from_micros(&self, n: u64) -> u64 {
match *self {
TimeUnit::Hours => n / 1000 / 1000 / 60 / 60,
TimeUnit::Minutes => n / 1000 / 1000 / 60,
TimeUnit::Seconds => n / 1000 / 1000,
TimeUnit::Milliseconds => n / 1000,
TimeUnit::Microseconds => n
}
}
/// Multiplexes a series of unit flags into a TimeUnit.
/// Larger units take precedence.
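///
/// For example (an illustrative check, mirroring the doc tests above):
///
/// ```rust
/// use baps3_cli::time::TimeUnit;
/// // Minutes and seconds both requested: the larger unit, minutes, wins.
/// assert_eq!(TimeUnit::from_flags(false, true, true, false).suffix(), "m")
/// ```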
pub fn from_flags(h: bool, m: bool, s: bool, ms: bool) -> TimeUnit {
if h { TimeUnit::Hours }
else if m { TimeUnit::Minutes }
else if s { TimeUnit::Seconds }
else if ms { TimeUnit::Milliseconds }
else { TimeUnit::Microseconds }
}
}<|fim▁end|> | /// |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>//! # Beamium.
//!
//! Beamium scrap Prometheus endpoint and forward metrics to Warp10.
extern crate backoff;
extern crate bytes;
extern crate cast;
extern crate clap;
extern crate core;
extern crate ctrlc;
extern crate flate2;
extern crate futures;
extern crate humantime;
extern crate hyper;
extern crate hyper_timeout;
extern crate hyper_tls;
extern crate nix;
extern crate regex;
#[macro_use]
extern crate slog;
extern crate slog_async;
#[macro_use]
extern crate slog_scope;
extern crate slog_stream;
extern crate slog_syslog;
extern crate slog_term;
extern crate time;
extern crate tokio_core;
extern crate tokio_timer;
extern crate yaml_rust;
use clap::App;
use std::fs;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;
use std::time::Duration;
mod config;
mod lib;
mod log;
mod router;
mod scraper;
mod sink;
include!("version.rs");
/// Main loop.
fn main() {
// Setup a bare logger
log::bootstrap();
let matches = App::new("beamium")
.version(&*format!(
"{} ({}#{})",
env!("CARGO_PKG_VERSION"),
COMMIT,
PROFILE,
))
.author("d33d33 <[email protected]>")
.about("Send Prometheus metrics to Warp10")
.args_from_usage(
"-c, --config=[FILE] 'Sets a custom config file'
\
-v... 'Increase verbosity level (console only)'
-t 'Test config'",
)
.get_matches();
// Bootstrap config
let config_path = matches.value_of("config").unwrap_or("");
let config = match config::load_config(&config_path) {
Ok(config) => config,
Err(err) => {
crit!("Fail to load config {}: {}", &config_path, err);
std::process::abort();
}
};
if matches.is_present("t") {
info!("config ok");
std::process::exit(0);
}
info!("starting");
// Setup logging
match log::log(&config.parameters, matches.occurrences_of("v")) {
Ok(()) => {}
Err(err) => {
crit!("Log setup failure: {}", err);
std::process::abort();
}
}
// Ensure dirs
match fs::create_dir_all(&config.parameters.source_dir) {
Ok(()) => {}
Err(err) => {
crit!(<|fim▁hole|> std::process::abort();
}
};
match fs::create_dir_all(&config.parameters.sink_dir) {
Ok(()) => {}
Err(err) => {
crit!(
"Fail to create sink directory {}: {}",
&config.parameters.source_dir,
err
);
std::process::abort();
}
};
// Synchronisation stuff
let sigint = Arc::new(AtomicBool::new(false));
let mut handles = Vec::with_capacity(config.scrapers.len());
// Sigint handling
let r = sigint.clone();
ctrlc::set_handler(move || {
r.store(true, Ordering::SeqCst);
}).expect("Error setting sigint handler");
// Spawn scrapers
info!("spawning scrapers");
for scraper in config.scrapers.clone() {
let (parameters, sigint) = (config.parameters.clone(), sigint.clone());
handles.push(thread::spawn(move || {
slog_scope::scope(
&slog_scope::logger().new(o!("scraper" => scraper.name.clone())),
|| scraper::scraper(&scraper, ¶meters, &sigint),
);
}));
}
// Spawn router
info!("spawning router");
let mut router = router::Router::new(&config.sinks, &config.parameters, &config.labels);
router.start();
// Spawn sinks
info!("spawning sinks");
let mut sinks: Vec<sink::Sink> = config
.sinks
.iter()
.map(|sink| sink::Sink::new(&sink, &config.parameters))
.collect();
sinks.iter_mut().for_each(|s| s.start());
info!("started");
// Wait for sigint
loop {
thread::sleep(Duration::from_millis(10));
if sigint.load(Ordering::Relaxed) {
break;
}
}
info!("shutting down");
for handle in handles {
handle.join().unwrap();
}
router.stop();
for s in sinks {
s.stop();
}
info!("halted");
}<|fim▁end|> | "Fail to create source directory {}: {}",
&config.parameters.source_dir,
err
); |
<|file_name|>stock_picking.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2019 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models
class StockPicking(models.Model):
_inherit = 'stock.picking'
default_tracking_url = fields.Char(
related='carrier_id.default_tracking_url',<|fim▁hole|> )<|fim▁end|> | readonly=True, |
<|file_name|>polynomials.rs<|end_file_name|><|fim▁begin|>use std::cmp::{max, min};
use super::range::*;
/**
A polynomial pseudo-solver, using Range.
Returns a collection of ranges that include the exact roots. False positives are possible.
TODO: instead of Vec<Range>, these should return a stack-allocated type.
*/
pub fn roots_linear(coefficients: [Range; 2], min_input: i64, max_input: i64) -> Vec<Range> {
if coefficients[1] == Range::exactly(0) && !coefficients[0].includes_0() {
return Vec::new();
}
if let Some(result) = ((-coefficients[0]) / coefficients[1]).clamp_to_0_exponent() {
if result.max() >= min_input && result.min() <= max_input {
return vec![result];
}
}
Vec::new()
}
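// A minimal sketch of the pseudo-solver contract described above (added for
// illustration; `includes` is the Range containment check used elsewhere in
// this file). For p(x) = x - 3, i.e. coefficients [constant, linear]:
//
//     let hits = roots_linear([Range::exactly(-3), Range::exactly(1)], -100, 100);
//     assert!(hits.iter().any(|r| r.includes(&Range::exactly(3))));
//
// False positives would only add extra ranges; the exact root 3 must be covered.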
pub fn roots_quadratic(terms: [Range; 3], min_input: i64, max_input: i64) -> Vec<Range> {
let a = terms[2];
let b = terms[1];
let c = terms[0];
let discriminant = b.squared() - a * c * Range::exactly(4);
// printlnerr!(" discriminant {:?}", discriminant);
// printlnerr!("confirm results: {:?}", roots_derivative_based (& terms));
if discriminant < 0 {
return Vec::new();
}
let sqrt = discriminant
.sqrt()
.expect("I thought we just ruled out the case where the square root would be nonexistent");
// printlnerr!(" sqrt {:?}", sqrt);
let result_0 = (-b - sqrt) / (a * 2);
let result_1 = (-b + sqrt) / (a * 2);
// printlnerr!(" result 0 {:?}", result_0);
// printlnerr!(" result 1 {:?}", result_1);
let mut results = Vec::new();
if let Some(result) = result_0.clamp_to_0_exponent() {
if result.max() >= min_input && result.min() <= max_input {
results.push(result);
}
}
if let Some(result) = result_1.clamp_to_0_exponent() {
if result.max() >= min_input && result.min() <= max_input {
if results
.last()
.map_or(false, |whatever| result.min() < whatever.min())
{
results.insert(0, result);
} else {
results.push(result);
}
}
}
// printlnerr!("My results: {:?}", results);
return results;
// if result_0.max >= result_1.min {
// vec![Range {
// min: result_0.min,
// max: result_1.max,
// exponent: 0,
// }]
// } else {
// vec![result_0, result_1]
// }
}
fn find_root_search<
Metadata: Copy,
InputStrategy: Fn(i64, i64, Range, Range, Metadata) -> i64,
MetadataGenerator: Fn(Range) -> Metadata,
MetadataTransformer: Fn(Metadata, Range, Range) -> Metadata,
>(
terms: &[Range],
min_only: bool,
max_only: bool,
input_1: i64,
input_2: i64,
value_1: Range,
value_2: Range,
value_1_metadata: Metadata,
input_strategy: &InputStrategy,
metadata_generator: &MetadataGenerator,
metadata_transformer: &MetadataTransformer,
) -> (i64, i64) {
assert!(!(value_1.includes_0() && value_2.includes_0()));
if !min_only {
assert!((value_1 < 0) != (value_2 < 0));
}
if !max_only {
assert!((value_1 > 0) != (value_2 > 0));
}
let mut input_1: i64 = input_1;
let mut input_2: i64 = input_2;
let mut value_1: Range = value_1;
let mut value_1_metadata = value_1_metadata;
let mut value_2: Range = value_2;
let mut result_for_other: i64 = 0;
let mut min_only = min_only;
loop {
let input = input_strategy(input_1, input_2, value_1, value_2, value_1_metadata);
if input == input_1 || input == input_2 {
break;
}
let value = evaluate(terms, input);
let closer_to_1;
if min_only {
closer_to_1 = (value > 0) != (value_2 > 0);
} else if max_only {
closer_to_1 = (value < 0) != (value_2 < 0);
} else {
closer_to_1 = (value > 0) != (value_2 > 0);
let other_closer_to_1 = (value < 0) != (value_2 < 0);
if closer_to_1 != other_closer_to_1 {
min_only = true;
if other_closer_to_1 {
result_for_other = find_root_search(
terms,
false,
true,
input_2,
input,
value_2,
value,
metadata_generator(value_2),
input_strategy,
metadata_generator,
metadata_transformer,
)
.0;
} else {
// possible optimization: use a better factor, referring to "A Family of Regula Falsi Methods", Galdino
result_for_other = find_root_search(
terms,
false,
true,
input_1,
input,
value_1,
value,
metadata_transformer(value_1_metadata, value_2, value),
input_strategy,
metadata_generator,
metadata_transformer,
)
.0;
}
}
}
if closer_to_1 {
input_1 = input_2;
value_1 = value_2;
value_1_metadata = metadata_generator(value_2);
} else {
value_1_metadata = metadata_transformer(value_1_metadata, value_2, value);
}
input_2 = input;
value_2 = value;
}
if max_only {
assert!((value_1 < 0) != (value_2 < 0));
(
if value_1 < 0 { input_1 } else { input_2 },
result_for_other,
)
} else {
assert!((value_1 > 0) != (value_2 > 0));
(
if value_1 > 0 { input_1 } else { input_2 },
result_for_other,
)
}
}
fn find_root_search_default(
terms: &[Range],
min_only: bool,
max_only: bool,
input_1: i64,
input_2: i64,
value_1: Range,
value_2: Range,
) -> (i64, i64) {
let floating =
|whatever: Range| (whatever.internal_min() as f64) * (2f64.powi(whatever.exponent() as i32));
let relaxed_result = find_root_search(
terms,
min_only,
max_only,
input_1,
input_2,
value_1,
value_2,
floating(value_1),
&|input_1, input_2, _, value_2, value_1_metadata| {
let mut input;
let value_2 = floating(value_2);
let denominator = value_2 - value_1_metadata;
input =
((input_2 as f64) - value_2 * ((input_2 as f64) - (input_1 as f64)) / denominator) as i64;
if input.cmp(&input_2) != input.cmp(&input_1).reverse() {
input = average_round_towards_neginf(input_1, input_2);
}
input
},
&floating,
&|fa, _, _| {
// refer to "A Family of Regula Falsi Methods", Galdino
// this method is written down as generally slightly the best in number of function evaluations,
// but it profiled slightly worse, probably just because it uses more operations.
// let mut m = 1f64 - floating (fx)/floating (fb);
// if m <= 0f64 {m = 0.5;}
// fa*m
fa * 0.5
},
);
if cfg!(debug_assertions) {
let strict_result = find_root_search(
terms,
min_only,
max_only,
input_1,
input_2,
value_1,
value_2,
value_1,
&|input_1, input_2, _, value_2, value_1_metadata| {
let mut input;
let denominator = (value_2 - value_1_metadata).rounded_to_middle_towards_neginf();
if denominator.includes_0() {
input = average_round_towards_neginf(input_1, input_2);
} else {
input = (Range::exactly(input_2)
- value_2 * (Range::exactly(input_2) - Range::exactly(input_1)) / denominator)
.clamp_to_0_exponent()
.unwrap()
.min();
if input.cmp(&input_2) != input.cmp(&input_1).reverse() {
input = average_round_towards_neginf(input_1, input_2);
}
}
input
},
&|whatever: Range| whatever,
&|value, _, _| value >> 1,
);
assert!(relaxed_result == strict_result);
}
relaxed_result
}
fn find_root(terms: &[Range], min: i64, max: i64) -> Option<Range> {
if min >= max {
return None;
}
let min_value = evaluate(terms, min);
let max_value = evaluate(terms, max);
// printlnerr!(" Values {:?}:{:?}, {:?}:{:?}", min, min_value, max, max_value);
if min_value.includes_0() {
if max_value.includes_0() {
Some(Range::new(min, max))
} else {
let search_by_min = max_value > 0;
Some(Range::new(
min,
find_root_search_default(
terms,
search_by_min,
!search_by_min,
min,
max,
min_value,
max_value,
)
.0,
))
}
} else if max_value.includes_0() {
let search_by_min = min_value > 0;
Some(Range::new(
find_root_search_default(
terms,
search_by_min,
!search_by_min,
min,
max,
min_value,
max_value,
)
.0,
max,
))
} else if max_value.min_signum() == min_value.min_signum() {<|fim▁hole|> None
} else {
let (result_for_min, result_for_max) =
find_root_search_default(terms, false, false, min, max, min_value, max_value);
Some(Range::new_either_order(result_for_min, result_for_max))
}
// return find_root_search (terms, false, min, max,
// let mut lower_bound = min;
// let mut upper_bound = max;
// hack: use a negative number for move_size so that it can store a slightly larger value
// let mut move_size = -1i64 << 63;
// while min.checked_sub(max).is_some() && move_size < min - max {
// move_size /= 2;
// }
// while move_size < 0 {
// printlnerr!(" Next values {:?}:{:?}, {:?}:{:?}", lower_bound, evaluate (terms, lower_bound ), upper_bound, evaluate (terms, upper_bound ));
//
// if lower_bound - move_size <= max &&
// (evaluate(terms, lower_bound - move_size) * direction).max <= 0 {
// lower_bound -= move_size;
// }
// if upper_bound + move_size >= min &&
// (evaluate(terms, upper_bound + move_size) * direction).min >= 0 {
// upper_bound += move_size;
// }
// move_size /= 2;
// }
// Some(Range::new(lower_bound, upper_bound))
}
fn collect_root(terms: &[Range], min: i64, max: i64, bucket: &mut Vec<Option<Range>>) {
bucket.push(find_root(terms, min, max));
}
fn roots_derivative_based(terms: &[Range], min_input: i64, max_input: i64) -> Vec<Range> {
let derivative: Vec<Range> = terms[1..]
.iter()
.enumerate()
.map(|(which, term)| term * (which as i64 + 1))
.collect();
// printlnerr!(" Derivative {:?}", derivative);
let extrema = roots(derivative.as_slice(), min_input, max_input);
// printlnerr!("extrema {:?}", extrema);
let mut bucket = Vec::new();
let mut results = Vec::new();
if extrema.is_empty() {
collect_root(terms, min_input, max_input, &mut bucket);
} else {
collect_root(terms, min_input, extrema[0].min(), &mut bucket);
for which in 0..(extrema.len() - 1) {
collect_root(
terms,
max(extrema[which].max(), min_input),
min(extrema[which + 1].min(), max_input),
&mut bucket,
);
}
collect_root(terms, extrema.last().unwrap().max(), max_input, &mut bucket);
}
// if we found a root on both sides of a derivative-root, we know that the derivative-root is bounded away from 0
for which in 0..extrema.len() {
let me = extrema[which];
if let Some(lower) = bucket[which] {
results.push(lower);
if let Some(higher) = bucket[which + 1] {
if lower < me && me < higher {
continue;
}
}
}
results.push(me);
}
if let Some(lower) = bucket[extrema.len()] {
results.push(lower);
}
results
}
pub fn roots(terms: &[Range], min: i64, max: i64) -> Vec<Range> {
let mut terms = terms;
while terms
.last()
.map_or(false, |term| term == &Range::exactly(0))
{
terms = &terms[..terms.len() - 1]
}
match terms.len() {
0 => vec![Range::new(min, max)],
1 => {
if terms[0].internal_min() <= 0 && terms[0].internal_max() >= 0 {
vec![Range::new(min, max)]
} else {
Vec::new()
}
}
2 => roots_linear([terms[0], terms[1]], min, max),
3 => roots_quadratic([terms[0], terms[1], terms[2]], min, max),
_ => roots_derivative_based(terms, min, max),
}
}
pub fn evaluate(terms: &[Range], input: i64) -> Range {
let mut factor = Range::exactly(1);
let mut result = Range::exactly(0);
for term in terms.iter() {
result = result + (term * factor);
factor = factor * input;
}
result
}
pub fn multiply_polynomials(terms_0: &[Range], terms_1: &[Range]) -> Vec<Range> {
(0..terms_0.len() + terms_1.len() - 1)
.map(|new_index| {
(max(terms_1.len(), new_index + 1) - terms_1.len()..min(terms_0.len(), new_index + 1))
.map(|view| terms_0[view] * terms_1[new_index - view])
.sum()
})
.collect()
}
use rand;
use rand::Rng;
// when coercing an update to land on an integer value, we obviously have a possible rounding error of up to 2 units (one from dividing the velocity, one from dividing the acceleration).
// But that's not all. The multiplications also have rounding error if they have to prevent overflows.
// we are only guaranteed to keep the top 31 bits of each factor, so that's a possible error factor of just below 1+2^{-30} for each of them.
// Square that error because there are 2 inputs in each multiplication,
// and square it again because we do 2 multiplications in a row for the acceleration.
// (1+2^{-30})^4 is a little bit above 1+2^{-28}.
// That error factor is multiplied specifically with the *distance traveled*
// or rather, it's multiplied with the absolute values of the quadratic term of the distance traveled,
// and then added to the error of the linear term, which is a little bit above 1+2^{-29}.
// The relationship between this error and the ACTUAL distance traveled is a little more complicated,
// since the 2 terms can point in opposite directions. In the worst case, the error can get up to
// more than 8 times the 1+2^{-28} figure for the same actual distance. Less than 16, though.
// So chopping off another 4 bits will be enough: 1+2^{-24}.
// So any constant error term is associated with a maximum distance traveled that will have no more than that much error.
pub fn max_error_for_distance_traveled(distance: i64) -> i64 {
right_shift_round_up(distance, 24)
}
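// Sanity sketch of the bound above (illustrative figures, assuming
// right_shift_round_up rounds any nonzero remainder up to 1):
//
//     max_error_for_distance_traveled(1 << 24) == 1
//     max_error_for_distance_traveled(1) == 1
//     max_error_for_distance_traveled(3 << 24) == 3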
const DO_TESTS: bool = cfg!(debug_assertions);
// We require the user to pass in a max error value – specifically, the one that they use with
// quadratic_trajectories_possible_distance_crossing_intervals –
// so that we can check to make sure they didn't go beyond the bounds of what they tested for.
pub fn quadratic_move_origin_rounding_change_towards_0(
terms: &mut [i64],
origin: i64,
input_scale_shift: u32,
max_error: i64,
) -> bool {
let distance_traveled = ((Range::exactly(terms[1]) * origin) >> input_scale_shift)
+ ((Range::exactly(terms[2]) * origin * origin) >> (input_scale_shift * 2));
if distance_traveled.max() - distance_traveled.min() > max_error * 2 {
printlnerr!(
"overflow-ish in quadratic_move_origin_rounding_change_towards_0; error size \
exceeded the given max error"
);
return false;
}
let mut between_time = 0;
let mut confirm = [Range::exactly(0); 3];
if DO_TESTS {
between_time = rand::thread_rng().gen_range(0, origin + 1);
confirm =
quadratic_future_proxy_minimizing_error(terms, between_time, input_scale_shift, max_error);
}
terms[0] += distance_traveled.rounded_towards_0();
terms[1] += ((Range::exactly(terms[2]) * origin) >> (input_scale_shift - 1)).rounded_towards_0();
if DO_TESTS {
let experimented = evaluate(&confirm, origin - between_time) >> (input_scale_shift * 2);
// printlnerr!("experimented {}, actually {}", experimented, terms [0]);
assert!(experimented.includes(&Range::exactly(terms[0])));
}
true
}
pub fn quadratic_future_proxy_minimizing_error(
terms: &[i64],
origin: i64,
input_scale_shift: u32,
max_error: i64,
) -> [Range; 3] {
// in the constant term, preserve the error of 2 units noted above.
// Multiplication error term is about (term 1*time since original origin) >> 30+shift + (term 2*time since original origin squared) >> 29+shift*2
// but time since original origin is actually "origin" + the input of the quadratic we're creating,
// this error is actually quadratic.
[
(Range::new(terms[0] - 2 - max_error, terms[0] + 2 + max_error) << (input_scale_shift * 2))
+ ((Range::exactly(terms[1]) * origin) << input_scale_shift)
+ (Range::exactly(terms[2]) * origin * origin),
(Range::exactly(terms[1]) << input_scale_shift) + ((Range::exactly(terms[2]) * origin) << 1),
Range::exactly(terms[2]),
]
}
pub fn time_until_which_quadratic_trajectory_may_remain_in_bounds(
start_time: i64,
trajectory: &[[i64; 3]],
bounds: &[[i64; 2]],
input_scale_shift: u32,
max_error: i64,
) -> Option<i64> {
assert!(trajectory.len() == bounds.len());
assert!(trajectory.len() > 0);
let mut min_input = start_time;
let mut max_input = i64::max_value() - max(0, start_time);
// printlnerr!("begin {:?} {:?} {:?}", start_time, trajectory, bounds);
for (third, more) in trajectory.iter().zip(bounds.iter()) {
let mut rubble =
quadratic_future_proxy_minimizing_error(third, 0, input_scale_shift, max_error);
rubble[0] = rubble[0] - (Range::new(more[0], more[1]) << (input_scale_shift * 2));
let possible_overlap_times = roots(&rubble, min_input, max_input);
// printlnerr!("roots {:?} {:?}", rubble, possible_overlap_times);
if let Some((this_min, this_max)) = if possible_overlap_times.is_empty() {
None
} else if possible_overlap_times.len() == 2
&& possible_overlap_times[0].max() >= possible_overlap_times[1].min() - 1
{
if possible_overlap_times[0].min() <= start_time
&& possible_overlap_times[1].max() >= start_time
{
Some((
possible_overlap_times[0].min(),
possible_overlap_times[1].max(),
))
} else {
None
}
} else {
possible_overlap_times
.iter()
.find(|root| root.min() <= start_time && root.max() >= start_time)
.map(|root| (root.min(), root.max()))
} {
min_input = max(min_input, this_min);
max_input = min(max_input, this_max);
assert!(
min_input <= max_input,
"an interval containing start_time should never exclude it"
);
} else {
return None;
}
}
// printlnerr!("end {} {}", min_input, max_input);
Some(max_input)
}
pub fn quadratic_trajectories_possible_distance_crossing_intervals(
distance: i64,
first: (i64, &[[i64; 3]]),
second: (i64, &[[i64; 3]]),
input_scale_shift: u32,
max_error: i64,
) -> Vec<Range> {
assert!(first.1.len() == second.1.len());
assert!(first.1.len() > 0);
let base = max(first.0, second.0);
let mut proxy = [
Range::exactly(0),
Range::exactly(0),
Range::exactly(0),
Range::exactly(0),
Range::exactly(0),
];
let mut min_input = 0;
let mut max_input = i64::max_value() - max(0, base);
for (third, more) in first.1.iter().zip(second.1.iter()) {
let mut rubble = quadratic_future_proxy_minimizing_error(
third.as_ref(),
base - first.0,
input_scale_shift,
max_error,
);
let bravo = quadratic_future_proxy_minimizing_error(
more.as_ref(),
base - second.0,
input_scale_shift,
max_error,
);
for index in 0..3 {
rubble[index] = rubble[index] - bravo[index];
}
let this_dimension_tester = [
rubble[0] + (Range::error_sized(distance) << (input_scale_shift * 2)),
rubble[1],
rubble[2],
];
let possible_overlap_times = roots(&this_dimension_tester, min_input, max_input);
// printlnerr!("one-dimensional proxy: {:?} {:?} {:?} {:?}", min_input, max_input, this_dimension_tester, possible_overlap_times );
if possible_overlap_times.is_empty() {
return Vec::new();
} else {
min_input = max(min_input, possible_overlap_times[0].min());
max_input = min(max_input, possible_overlap_times.last().unwrap().max());
if min_input > max_input {
return Vec::new();
}
}
for (which, value) in multiply_polynomials(&rubble, &rubble)
.into_iter()
.enumerate()
{
proxy[which] = proxy[which] + value
}
}
proxy[0] = proxy[0] - (Range::exactly(distance).squared() << (input_scale_shift * 4));
let real_distance_squared = |input| {
let mut result = 0i64;
for (third, more) in first.1.iter().zip(second.1.iter()) {
let mut rubble = third.clone();
if !quadratic_move_origin_rounding_change_towards_0(
&mut rubble,
input - first.0,
input_scale_shift,
max_error,
) {
return None;
}
let mut bravo = more.clone();
if !quadratic_move_origin_rounding_change_towards_0(
&mut bravo,
input - second.0,
input_scale_shift,
max_error,
) {
return None;
}
for index in 0..3 {
rubble[index] = rubble[index] - bravo[index];
}
if let Some(term) = rubble[0].checked_mul(rubble[0]) {
if let Some(res) = result.checked_add(term) {
result = res;
} else {
return None;
}
} else {
return None;
}
}
Some(result)
};
let test = |input| {
let evaluated = evaluate(&proxy, input);
// printlnerr!("input: {}, base: {}, evaluated: {}", input, base, evaluated);
if input < 0 || input > 1i64 << 32 {
return evaluated;
}
if let Some(distance_squared) = real_distance_squared(input + base) {
let real = distance_squared - distance * distance;
// printlnerr!("real: {}", real);
assert!((evaluated >> (input_scale_shift * 4)).includes(&Range::exactly(real)));
}
evaluated
};
let test_empty_interval = |start, stop| {
// Currently, evaluate() is more permissive than it theoretically needs to be.
// It could include 0 even if the polynomial couldn't actually emit 0 from that input.
// roots_derivative_based() uses evaluate() directly, so it's fine to assume that evaluate() is correct.
// However, roots_quadratic() might return a slightly tighter result.
// So we can't test quadratics in quite the same way.
if proxy[3] == Range::exactly(0) && proxy[4] == Range::exactly(0) {
return;
}
if start >= stop {
return;
}
let sample_points: Vec<i64> = vec![
start,
stop,
rand::thread_rng().gen_range(start, stop),
rand::thread_rng().gen_range(start, stop),
rand::thread_rng().gen_range(start, stop),
];
let sample_values: Vec<Range> = sample_points
.iter()
.map(|input| test(input.clone()))
.collect();
let signum = sample_values[0].internal_min().signum();
for value in sample_values.iter() {
if value.includes_0_strictly() || value.internal_min().signum() == -signum {
printlnerr!(" Proxy: {:?}", proxy);
printlnerr!(
"fail points: {:?}\n values: {:?}",
sample_points,
sample_values
);
panic!()
}
}
};
// printlnerr!(" Proxy: {:?}", proxy);
let mut result = roots(proxy.as_ref(), min_input, max_input);
// printlnerr!(" Proxy: {:?}\n Roots: {:?}", proxy, result);
if DO_TESTS {
test(0);
test(1000);
test(base);
for (which, root) in result.iter().enumerate() {
test((root.max() - root.min()) / 2);
if which == 0 {
test_empty_interval(min_input, root.min() - 1);
}
if which < result.len() - 1 {
test_empty_interval(root.max() + 1, result[which + 1].min() - 1);
} else {
test_empty_interval(root.max() + 1, max_input);
}
// printlnerr!("root check: {}: {} and then {} and then {}", root, evaluate (& proxy, root.max - 1), evaluate (& proxy, root.max()), evaluate (& proxy, root.max() + 1));
}
}
for root in result.iter_mut() {
*root = *root + Range::exactly(base);
}
result
}
#[cfg(test)]
mod tests {
use super::super::*;
fn test_roots(given_roots: Vec<Range>) {
let mut polynomial = vec![Range::exactly(1)];
for root in given_roots.iter() {
polynomial = multiply_polynomials(polynomial.as_slice(), &[-root, Range::exactly(1)])
}
let computed = roots(polynomial.as_slice(), -i64::max_value(), i64::max_value());
println!(
"\nFor roots {:?}\n Computed polynomial {:?}\n And roots {:?}\n Evaluated root \
minima: {:?}",
given_roots,
polynomial,
computed,
given_roots
.iter()
.map(|root| evaluate(polynomial.as_slice(), root.min()))
.collect::<Vec<Range>>()
);
}
// quickcheck! {
// fn automatic_roots(given_roots: Vec<Range>)
// }
#[test]
fn explicit_roots() {
test_roots(vec![Range::exactly(0)]);
test_roots(vec![Range::exactly(55)]);
test_roots(vec![Range::exactly(0), Range::exactly(55)]);
test_roots(vec![Range::exactly(-8), Range::exactly(55)]);
test_roots(vec![
Range::exactly(-8),
Range::exactly(55),
Range::exactly(999),
]);
test_roots(vec![
Range::exactly(-8),
Range::exactly(55),
Range::exactly(999),
Range::exactly(-84),
]);
test_roots(vec![
Range::exactly(-8),
Range::exactly(55),
Range::exactly(999),
Range::exactly(-84),
Range::exactly(-1967),
]);
test_roots(vec![Range::new(-1, 1), Range::new(54, 56)]);
test_roots(vec![Range::new(-9, -7), Range::new(50, 60)]);
test_roots(vec![
Range::new(-9, -7),
Range::new(54, 56),
Range::exactly(999),
]);
test_roots(vec![
Range::new(-9, -7),
Range::new(54, 56),
Range::new(950, 1050),
Range::new(-90, -80),
]);
test_roots(vec![
Range::new(-9, -7),
Range::new(54, 56),
Range::new(950, 1050),
Range::new(-90, -80),
Range::new(-1967, -1940),
]);
println!(
" {:?}",
roots(
&[
Range::new(-900, -800),
Range::new(500, 501),
Range::exactly(50)
],
-i64::max_value(),
i64::max_value()
)
);
println!(
" {:?}",
roots(
&[
Range::new(-900, -800),
Range::new(500, 501),
Range::exactly(50),
Range::exactly(1)
],
-i64::max_value(),
i64::max_value()
)
);
}
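// A small extra sanity check (added for illustration, using the same scope as
// the tests above): evaluating a hand-built linear polynomial at its exact
// root must produce a range that includes 0.
#[test]
fn evaluate_at_known_root() {
// p(x) = x - 3, with coefficients ordered [constant, linear].
let terms = [Range::exactly(-3), Range::exactly(1)];
assert!(evaluate(&terms, 3).includes_0());
}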
}<|fim▁end|> | |
<|file_name|>SelectByAttribute.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
***************************************************************************
SelectByAttribute.py
---------------------
Date : May 2010
Copyright : (C) 2010 by Michael Minn
Email : pyqgis at michaelminn dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Michael Minn'
__date__ = 'May 2010'
__copyright__ = '(C) 2010, Michael Minn'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from qgis.PyQt.QtCore import QVariant
from qgis.core import (QgsExpression,
QgsProcessingException,
QgsProcessingParameterVectorLayer,
QgsProcessingParameterField,
QgsProcessingParameterEnum,
QgsProcessingParameterString,
QgsProcessingOutputVectorLayer)
from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm
class SelectByAttribute(QgisAlgorithm):
INPUT = 'INPUT'
FIELD = 'FIELD'
OPERATOR = 'OPERATOR'
VALUE = 'VALUE'
OUTPUT = 'OUTPUT'
OPERATORS = ['=',
'!=',
'>',
'>=',
'<',
'<=',
'begins with',
'contains',
'is null',
'is not null',
'does not contain'
]
STRING_OPERATORS = ['begins with',
'contains',
'does not contain']
def tags(self):
return self.tr('select,attribute,value,contains,null,field').split(',')
def group(self):
return self.tr('Vector selection')
def __init__(self):
super().__init__()
def initAlgorithm(self, config=None):
self.i18n_operators = ['=',
'!=',
'>',
'>=',
'<',
'<=',
self.tr('begins with'),
self.tr('contains'),
self.tr('is null'),
self.tr('is not null'),
self.tr('does not contain')
]
self.addParameter(QgsProcessingParameterVectorLayer(self.INPUT, self.tr('Input layer')))
self.addParameter(QgsProcessingParameterField(self.FIELD,
self.tr('Selection attribute'), parentLayerParameterName=self.INPUT))
self.addParameter(QgsProcessingParameterEnum(self.OPERATOR,
self.tr('Operator'), self.i18n_operators))
self.addParameter(QgsProcessingParameterString(self.VALUE, self.tr('Value')))
self.addOutput(QgsProcessingOutputVectorLayer(self.OUTPUT, self.tr('Selected (attribute)')))
def name(self):
return 'selectbyattribute'
<|fim▁hole|> def processAlgorithm(self, parameters, context, feedback):
layer = self.parameterAsVectorLayer(parameters, self.INPUT, context)
fieldName = self.parameterAsString(parameters, self.FIELD, context)
operator = self.OPERATORS[self.parameterAsEnum(parameters, self.OPERATOR, context)]
value = self.parameterAsString(parameters, self.VALUE, context)
fields = layer.fields()
idx = layer.fields().lookupField(fieldName)
fieldType = fields[idx].type()
if fieldType != QVariant.String and operator in self.STRING_OPERATORS:
op = ''.join(['"%s", ' % o for o in self.STRING_OPERATORS])
raise QgsProcessingException(
self.tr('Operators {0} can be used only with string fields.').format(op))
field_ref = QgsExpression.quotedColumnRef(fieldName)
quoted_val = QgsExpression.quotedValue(value)
if operator == 'is null':
expression_string = '{} IS NULL'.format(field_ref)
elif operator == 'is not null':
expression_string = '{} IS NOT NULL'.format(field_ref)
elif operator == 'begins with':
expression_string = """%s LIKE '%s%%'""" % (field_ref, value)
elif operator == 'contains':
expression_string = """%s LIKE '%%%s%%'""" % (field_ref, value)
elif operator == 'does not contain':
expression_string = """%s NOT LIKE '%%%s%%'""" % (field_ref, value)
else:
expression_string = '{} {} {}'.format(field_ref, operator, quoted_val)
expression = QgsExpression(expression_string)
if expression.hasParserError():
raise QgsProcessingException(expression.parserErrorString())
layer.selectByExpression(expression_string)
return {self.OUTPUT: parameters[self.INPUT]}<|fim▁end|> | def displayName(self):
return self.tr('Select by attribute')
|
<|file_name|>FiledropperCom.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import re
import urlparse
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class FiledropperCom(SimpleHoster):
__name__ = "FiledropperCom"
__type__ = "hoster"
__version__ = "0.01"
__pattern__ = r'https?://(?:www\.)?filedropper\.com/\w+'
__description__ = """Filedropper.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("zapp-brannigan", "[email protected]")]
NAME_PATTERN = r'Filename: (?P<N>.+?) <'
SIZE_PATTERN = r'Size: (?P<S>[\d.,]+) (?P<U>[\w^_]+),' #@NOTE: Website says always 0 KB
OFFLINE_PATTERN = r'value="a\.swf"'
def setup(self):
self.multiDL = False
self.chunkLimit = 1
<|fim▁hole|> m = re.search(r'img id="img" src="(.+?)"', self.html)
if m is None:
self.fail("Captcha not found")
captcha_code = self.decryptCaptcha("http://www.filedropper.com/%s" % m.group(1))
m = re.search(r'method="post" action="(.+?)"', self.html)
if m is None:
self.fail("Download link not found")
self.download(urlparse.urljoin("http://www.filedropper.com/", m.group(1)),
post={'code': captcha_code})
getInfo = create_getInfo(FiledropperCom)<|fim▁end|> |
def handleFree(self, pyfile): |
<|file_name|>ImageViewHolder.java<|end_file_name|><|fim▁begin|>package com.lzy.imagepicker.adapter;
import android.support.annotation.IdRes;
import android.support.v7.widget.RecyclerView;
import android.view.View;
/**
* Copyright (C) 2016,深圳市红鸟网络科技股份有限公司 All rights reserved.
* Project:
* Description:
* Created by: Robi
* Created on: 2017/02/21 15:07
* Modified by: Robi
* Modified on: 2017/02/21 15:07
* Revision notes:
* Version: 1.0.0
*/
public class ImageViewHolder extends RecyclerView.ViewHolder {
public ImageViewHolder(View itemView) {
super(itemView);
}
public <T extends View> T v(@IdRes int resId) {
return (T) itemView.findViewById(resId);<|fim▁hole|><|fim▁end|> | }
} |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
class RiverOutfall(models.Model):
name = models.TextField()
lat = models.FloatField(null=True)
lon = models.FloatField(null=True)
class RiverCso(models.Model):
river_outfall = models.ForeignKey("RiverOutfall")
open_time = models.DateTimeField()
close_time = models.DateTimeField()
class LakeOutfall(models.Model):
name = models.TextField()
lat = models.FloatField(null=True)
lon = models.FloatField(null=True)<|fim▁hole|> open_date = models.DateTimeField()
close_date = models.DateTimeField()
millions_of_gallons = models.FloatField()<|fim▁end|> |
class LakeReversal(models.Model):
lake_outfall = models.ForeignKey("LakeOutfall") |
<|file_name|>tokens.py<|end_file_name|><|fim▁begin|>"""
Based on :mod:`django.contrib.auth.tokens`. Supports the following settings:
:setting:`WALDO_REGISTRATION_TIMEOUT_DAYS`
The number of days a registration link will be valid before expiring. Default: 1.
:setting:`WALDO_EMAIL_TIMEOUT_DAYS`
The number of days an email change link will be valid before expiring. Default: 1.
"""
from hashlib import sha1
from datetime import date
from django.conf import settings
from django.utils.http import int_to_base36, base36_to_int
from django.contrib.auth.tokens import PasswordResetTokenGenerator
REGISTRATION_TIMEOUT_DAYS = getattr(settings, 'WALDO_REGISTRATION_TIMEOUT_DAYS', 1)
EMAIL_TIMEOUT_DAYS = getattr(settings, 'WALDO_EMAIL_TIMEOUT_DAYS', 1)
class RegistrationTokenGenerator(PasswordResetTokenGenerator):
"""Strategy object used to generate and check tokens for the user registration mechanism."""
def check_token(self, user, token):
"""Check that a registration token is correct for a given user."""
# If the user is active, the hash can't be valid.
if user.is_active:
return False
# Parse the token
try:
ts_b36, hash = token.split('-')
except ValueError:
return False
try:
ts = base36_to_int(ts_b36)
except ValueError:
return False
# Check that the timestamp and uid have not been tampered with.
if self._make_token_with_timestamp(user, ts) != token:
return False
# Check that the timestamp is within limit
if (self._num_days(self._today()) - ts) > REGISTRATION_TIMEOUT_DAYS:
return False
return True
def _make_token_with_timestamp(self, user, timestamp):
ts_b36 = int_to_base36(timestamp)
# By hashing on the internal state of the user and using state that is
# sure to change, we produce a hash that will be invalid as soon as it
# is used.
hash = sha1(settings.SECRET_KEY + unicode(user.id) + unicode(user.is_active) + user.last_login.strftime('%Y-%m-%d %H:%M:%S') + unicode(timestamp)).hexdigest()[::2]
return '%s-%s' % (ts_b36, hash)
registration_token_generator = RegistrationTokenGenerator()
class EmailTokenGenerator(PasswordResetTokenGenerator):<|fim▁hole|> """Strategy object used to generate and check tokens for a user email change mechanism."""
def make_token(self, user, email):
"""Returns a token that can be used once to do an email change for the given user and email."""
return self._make_token_with_timestamp(user, email, self._num_days(self._today()))
def check_token(self, user, email, token):
if email == user.email:
return False
# Parse the token
try:
ts_b36, hash = token.split('-')
except ValueError:
return False
try:
ts = base36_to_int(ts_b36)
except ValueError:
return False
# Check that the timestamp and uid have not been tampered with.
if self._make_token_with_timestamp(user, email, ts) != token:
return False
# Check that the timestamp is within limit
if (self._num_days(self._today()) - ts) > EMAIL_TIMEOUT_DAYS:
return False
return True
def _make_token_with_timestamp(self, user, email, timestamp):
ts_b36 = int_to_base36(timestamp)
hash = sha1(settings.SECRET_KEY + unicode(user.id) + user.email + email + unicode(timestamp)).hexdigest()[::2]
return '%s-%s' % (ts_b36, hash)
email_token_generator = EmailTokenGenerator()<|fim▁end|> | |
<|file_name|>uievent.py<|end_file_name|><|fim▁begin|>#
# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
# Copyright (C) 2006 - 2007 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""
Use this class to fork off a thread to receive event callbacks from the bitbake
server and queue them for the UI to process. This process must be used to avoid
client/server deadlocks.
"""
import socket, threading, pickle, collections.abc
from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
class BBUIEventQueue:
def __init__(self, BBServer, clientinfo=("localhost, 0")):
self.eventQueue = []
self.eventQueueLock = threading.Lock()
self.eventQueueNotify = threading.Event()
self.BBServer = BBServer
self.clientinfo = clientinfo
server = UIXMLRPCServer(self.clientinfo)
self.host, self.port = server.socket.getsockname()
server.register_function( self.system_quit, "event.quit" )
server.register_function( self.send_event, "event.sendpickle" )
server.socket.settimeout(1)
self.EventHandle = None
# the event handler registration may fail here due to cooker being in invalid state
# this is a transient situation, and we should retry a couple of times before
# giving up
for count_tries in range(5):
ret = self.BBServer.registerEventHandler(self.host, self.port)
if isinstance(ret, collections.abc.Iterable):
self.EventHandle, error = ret
else:
self.EventHandle = ret
error = ""
if self.EventHandle != None:
break
errmsg = "Could not register UI event handler. Error: %s, host %s, "\
"port %d" % (error, self.host, self.port)
bb.warn("%s, retry" % errmsg)
import time
time.sleep(1)
else:
raise Exception(errmsg)
self.server = server
self.t = threading.Thread()
self.t.setDaemon(True)
self.t.run = self.startCallbackHandler
self.t.start()
def getEvent(self):
self.eventQueueLock.acquire()
if len(self.eventQueue) == 0:
self.eventQueueLock.release()
return None
item = self.eventQueue.pop(0)
if len(self.eventQueue) == 0:
self.eventQueueNotify.clear()
self.eventQueueLock.release()
return item
def waitEvent(self, delay):
self.eventQueueNotify.wait(delay)
return self.getEvent()
def queue_event(self, event):
self.eventQueueLock.acquire()
self.eventQueue.append(event)
self.eventQueueNotify.set()
self.eventQueueLock.release()
def send_event(self, event):
self.queue_event(pickle.loads(event))
def startCallbackHandler(self):
<|fim▁hole|> bb.utils.set_process_name("UIEventQueue")
while not self.server.quit:
try:
self.server.handle_request()
except Exception as e:
import traceback
logger.error("BBUIEventQueue.startCallbackHandler: Exception while trying to handle request: %s\n%s" % (e, traceback.format_exc()))
self.server.server_close()
def system_quit( self ):
"""
Shut down the callback thread
"""
try:
self.BBServer.unregisterEventHandler(self.EventHandle)
except:
pass
self.server.quit = True
class UIXMLRPCServer (SimpleXMLRPCServer):
def __init__( self, interface ):
self.quit = False
SimpleXMLRPCServer.__init__( self,
interface,
requestHandler=SimpleXMLRPCRequestHandler,
logRequests=False, allow_none=True, use_builtin_types=True)
def get_request(self):
while not self.quit:
try:
sock, addr = self.socket.accept()
sock.settimeout(1)
return (sock, addr)
except socket.timeout:
pass
return (None, None)
def close_request(self, request):
if request is None:
return
SimpleXMLRPCServer.close_request(self, request)
def process_request(self, request, client_address):
if request is None:
return
SimpleXMLRPCServer.process_request(self, request, client_address)<|fim▁end|> | self.server.timeout = 1 |
<|file_name|>ScanResults.java<|end_file_name|><|fim▁begin|>/**
* Copyright 2017 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package com.github.ambry.store;
import com.github.ambry.utils.Pair;
import java.util.HashMap;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;
/**
* Hold the data structures needed by {@link BlobStoreStats} to serve requests. The class also exposes helper methods
* used to modify and access the stored data structures.
*/
class ScanResults {
// A NavigableMap that stores buckets for container valid data size. The key of the map is the end time of each
// bucket and the value is the corresponding valid data size map. For example, there are two buckets with end time
// t1 and t2. Bucket with end time t2 includes all events whose operation time is greater than or equal to t1 but
// strictly less than t2.
// Each bucket except for the very first one contains the delta in valid data size that occurred prior to the bucket
// end time. The very first bucket's end time is the forecast start time for containers and it contains the valid data
// size map at the forecast start time. The very first bucket is used as a base value, requested valid data size is
// computed by applying the deltas from appropriate buckets on the base value.
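// Illustrative sketch (added for clarity, not from the original source): with a
// forecast start time t0 and bucket span s, buckets are keyed t0, t0+s, t0+2s, ...
// The bucket keyed t0 holds the absolute valid-size map at t0, while each later
// bucket holds only a delta, so a query at reference time t aggregates bucket(t0)
// plus every delta bucket keyed <= t, e.g.
//   validSize(t0 + 1.5 * s) = bucket(t0) + bucket(t0 + s)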
private final NavigableMap<Long, Map<String, Map<String, Long>>> containerBuckets = new TreeMap<>();
// A NavigableMap that stores buckets for log segment valid data size. The rest of the structure is similar
// to containerBuckets.
private final NavigableMap<Long, NavigableMap<String, Long>> logSegmentBuckets = new TreeMap<>();
final long containerForecastStartTimeMs;
final long containerLastBucketTimeMs;
final long containerForecastEndTimeMs;
final long logSegmentForecastStartTimeMs;
final long logSegmentLastBucketTimeMs;
final long logSegmentForecastEndTimeMs;
Offset scannedEndOffset = null;
/**
* Create the bucket data structures in advance based on the given scanStartTime and segmentScanTimeOffset.
*/
ScanResults(long startTimeInMs, long logSegmentForecastOffsetMs, int bucketCount, long bucketSpanInMs) {
long containerBucketTimeMs = startTimeInMs;
long logSegmentBucketTimeMs = startTimeInMs - logSegmentForecastOffsetMs;
for (int i = 0; i < bucketCount; i++) {
containerBuckets.put(containerBucketTimeMs, new HashMap<>());
logSegmentBuckets.put(logSegmentBucketTimeMs, new TreeMap<>(LogSegmentNameHelper.COMPARATOR));
containerBucketTimeMs += bucketSpanInMs;
logSegmentBucketTimeMs += bucketSpanInMs;
}
containerForecastStartTimeMs = containerBuckets.firstKey();
containerLastBucketTimeMs = containerBuckets.lastKey();
containerForecastEndTimeMs = containerLastBucketTimeMs + bucketSpanInMs;
logSegmentForecastStartTimeMs = logSegmentBuckets.firstKey();<|fim▁hole|>
/**
* Given a reference time, return the key of the appropriate container bucket whose end time is strictly greater than
* the reference time.
* @param referenceTimeInMs the reference time or operation time of an event.
* @return the appropriate bucket key (bucket end time) to indicate which bucket will an event with
* the given reference time as operation time belong to.
*/
Long getContainerBucketKey(long referenceTimeInMs) {
return containerBuckets.higherKey(referenceTimeInMs);
}
/**
* Given a reference time, return the key of the appropriate log segment bucket whose end time is strictly greater
* than the reference time.
* @param referenceTimeInMs the reference time or operation time of an event.
* @return the appropriate bucket key (bucket end time) to indicate which bucket will an event with
* the given reference time as operation time belong to.
*/
Long getLogSegmentBucketKey(long referenceTimeInMs) {
return logSegmentBuckets.higherKey(referenceTimeInMs);
}
/**
* Helper function to update the container base value bucket with the given value.
* @param serviceId the serviceId of the map entry to be updated
* @param containerId the containerId of the map entry to be updated
* @param value the value to be added
*/
void updateContainerBaseBucket(String serviceId, String containerId, long value) {
updateContainerBucket(containerBuckets.firstKey(), serviceId, containerId, value);
}
/**
* Helper function to update the log segment base value bucket with the given value.
* @param logSegmentName the log segment name of the map entry to be updated
* @param value the value to be added
*/
void updateLogSegmentBaseBucket(String logSegmentName, long value) {
updateLogSegmentBucket(logSegmentBuckets.firstKey(), logSegmentName, value);
}
/**
* Helper function to update a container bucket with the given value.
* @param bucketKey the bucket key to specify which bucket will be updated
* @param serviceId the serviceId of the map entry to be updated
* @param containerId the containerId of the map entry to be updated
* @param value the value to be added
*/
void updateContainerBucket(Long bucketKey, String serviceId, String containerId, long value) {
if (bucketKey != null && containerBuckets.containsKey(bucketKey)) {
Map<String, Map<String, Long>> existingBucketEntry = containerBuckets.get(bucketKey);
updateNestedMapHelper(existingBucketEntry, serviceId, containerId, value);
}
}
/**
* Helper function to update a log segment bucket with a given value.
* @param bucketKey the bucket key to specify which bucket will be updated
* @param logSegmentName the log segment name of the map entry to be updated
* @param value the value to be added
*/
void updateLogSegmentBucket(Long bucketKey, String logSegmentName, long value) {
if (bucketKey != null && logSegmentBuckets.containsKey(bucketKey)) {
Map<String, Long> existingBucketEntry = logSegmentBuckets.get(bucketKey);
updateMapHelper(existingBucketEntry, logSegmentName, value);
}
}
/**
* Given a reference time in milliseconds return the corresponding valid data size per log segment map by aggregating
* all buckets whose end time is less than or equal to the reference time.
* @param referenceTimeInMS the reference time in ms until which deletes and expiration are relevant
* @return a {@link Pair} whose first element is the end time of the last bucket that was aggregated and whose second
* element is the requested valid data size per log segment {@link NavigableMap}.
*/
Pair<Long, NavigableMap<String, Long>> getValidSizePerLogSegment(Long referenceTimeInMS) {
NavigableMap<String, Long> validSizePerLogSegment = new TreeMap<>(logSegmentBuckets.firstEntry().getValue());
NavigableMap<Long, NavigableMap<String, Long>> subMap =
logSegmentBuckets.subMap(logSegmentBuckets.firstKey(), false, referenceTimeInMS, true);
for (Map.Entry<Long, NavigableMap<String, Long>> bucket : subMap.entrySet()) {
for (Map.Entry<String, Long> bucketEntry : bucket.getValue().entrySet()) {
updateMapHelper(validSizePerLogSegment, bucketEntry.getKey(), bucketEntry.getValue());
}
}
Long lastReferenceBucketTimeInMs = subMap.isEmpty() ? logSegmentBuckets.firstKey() : subMap.lastKey();
return new Pair<>(lastReferenceBucketTimeInMs, validSizePerLogSegment);
}
/**
* Given a reference time in ms return the corresponding valid data size per container map by aggregating all buckets
* whose end time is less than or equal to the reference time.
* @param referenceTimeInMs the reference time in ms until which deletes and expiration are relevant.
* @return a {@link Pair} whose first element is the end time of the last bucket that was aggregated and whose second
* element is the requested valid data size per container {@link Map}.
*/
Map<String, Map<String, Long>> getValidSizePerContainer(Long referenceTimeInMs) {
Map<String, Map<String, Long>> validSizePerContainer = new HashMap<>();
for (Map.Entry<String, Map<String, Long>> accountEntry : containerBuckets.firstEntry().getValue().entrySet()) {
validSizePerContainer.put(accountEntry.getKey(), new HashMap<>(accountEntry.getValue()));
}
NavigableMap<Long, Map<String, Map<String, Long>>> subMap =
containerBuckets.subMap(containerBuckets.firstKey(), false, referenceTimeInMs, true);
for (Map.Entry<Long, Map<String, Map<String, Long>>> bucket : subMap.entrySet()) {
for (Map.Entry<String, Map<String, Long>> accountEntry : bucket.getValue().entrySet()) {
for (Map.Entry<String, Long> containerEntry : accountEntry.getValue().entrySet()) {
updateNestedMapHelper(validSizePerContainer, accountEntry.getKey(), containerEntry.getKey(),
containerEntry.getValue());
}
}
}
return validSizePerContainer;
}
/**
* Helper function to update nested map data structure.
* @param nestedMap nested {@link Map} to be updated
* @param firstKey of the nested map
* @param secondKey of the nested map
* @param value the value to be added at the corresponding entry
*/
private void updateNestedMapHelper(Map<String, Map<String, Long>> nestedMap, String firstKey, String secondKey,
Long value) {
if (!nestedMap.containsKey(firstKey)) {
nestedMap.put(firstKey, new HashMap<String, Long>());
}
updateMapHelper(nestedMap.get(firstKey), secondKey, value);
}
/**
* Helper function to update map data structure.
* @param map {@link Map} to be updated
* @param key of the map
* @param value the value to be added at the corresponding entry
*/
private void updateMapHelper(Map<String, Long> map, String key, Long value) {
Long newValue = map.containsKey(key) ? map.get(key) + value : value;
map.put(key, newValue);
}
}<|fim▁end|> | logSegmentLastBucketTimeMs = logSegmentBuckets.lastKey();
logSegmentForecastEndTimeMs = logSegmentLastBucketTimeMs + bucketSpanInMs;
} |
<|file_name|>extract.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2011 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://babel.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://babel.edgewall.org/log/.
"""Basic infrastructure for extracting localizable messages from source files.
This module defines an extensible system for collecting localizable message
strings from a variety of sources. A native extractor for Python source files
is builtin, extractors for other sources can be added using very simple plugins.
The main entry points into the extraction functionality are the functions
`extract_from_dir` and `extract_from_file`.
"""
import os
import sys
from tokenize import generate_tokens, COMMENT, NAME, OP, STRING
from babel.util import parse_encoding, pathmatch, relpath
from textwrap import dedent
__all__ = ['extract', 'extract_from_dir', 'extract_from_file']
__docformat__ = 'restructuredtext en'
GROUP_NAME = 'babel.extractors'
DEFAULT_KEYWORDS = {
'_': None,
'gettext': None,
'ngettext': (1, 2),
'ugettext': None,
'ungettext': (1, 2),
'dgettext': (2,),
'dngettext': (2, 3),
'N_': None
}
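# Note (illustrative, not part of the original source): a spec of None means the
# first call argument holds the message (extract() below falls back to (1,)); a
# tuple such as (1, 2) marks the singular and plural arguments of an
# ngettext-style call; and (2,) skips a leading non-message argument such as the
# domain in dgettext(domain, message).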
DEFAULT_MAPPING = [('**.py', 'python')]
empty_msgid_warning = (
'%s: warning: Empty msgid. It is reserved by GNU gettext: gettext("") '
'returns the header entry with meta information, not the empty string.')
def _strip_comment_tags(comments, tags):
"""Helper function for `extract` that strips comment tags from strings
in a list of comment lines. This functions operates in-place.
"""
def _strip(line):
for tag in tags:
if line.startswith(tag):
return line[len(tag):].strip()
return line
comments[:] = map(_strip, comments)
def extract_from_dir(dirname=os.getcwd(), method_map=DEFAULT_MAPPING,
options_map=None, keywords=DEFAULT_KEYWORDS,
comment_tags=(), callback=None, strip_comment_tags=False):
"""Extract messages from any source files found in the given directory.
This function generates tuples of the form:
``(filename, lineno, message, comments)``
Which extraction method is used per file is determined by the `method_map`
parameter, which maps extended glob patterns to extraction method names.
For example, the following is the default mapping:
>>> method_map = [<|fim▁hole|> This basically says that files with the filename extension ".py" at any
level inside the directory should be processed by the "python" extraction
method. Files that don't match any of the mapping patterns are ignored. See
the documentation of the `pathmatch` function for details on the pattern
syntax.
The following extended mapping would also use the "genshi" extraction
method on any file in "templates" subdirectory:
>>> method_map = [
... ('**/templates/**.*', 'genshi'),
... ('**.py', 'python')
... ]
The dictionary provided by the optional `options_map` parameter augments
these mappings. It uses extended glob patterns as keys, and the values are
dictionaries mapping options names to option values (both strings).
The glob patterns of the `options_map` do not necessarily need to be the
same as those used in the method mapping. For example, while all files in
the ``templates`` folders in an application may be Genshi applications, the
options for those files may differ based on extension:
>>> options_map = {
... '**/templates/**.txt': {
... 'template_class': 'genshi.template:TextTemplate',
... 'encoding': 'latin-1'
... },
... '**/templates/**.html': {
... 'include_attrs': ''
... }
... }
:param dirname: the path to the directory to extract messages from
:param method_map: a list of ``(pattern, method)`` tuples that maps of
extraction method names to extended glob patterns
:param options_map: a dictionary of additional options (optional)
:param keywords: a dictionary mapping keywords (i.e. names of functions
that should be recognized as translation functions) to
tuples that specify which of their arguments contain
localizable strings
:param comment_tags: a list of tags of translator comments to search for
and include in the results
:param callback: a function that is called for every file that message are
extracted from, just before the extraction itself is
performed; the function is passed the filename, the name
of the extraction method and and the options dictionary as
positional arguments, in that order
:param strip_comment_tags: a flag that if set to `True` causes all comment
tags to be removed from the collected comments.
:return: an iterator over ``(filename, lineno, funcname, message)`` tuples
:rtype: ``iterator``
:see: `pathmatch`
"""
if options_map is None:
options_map = {}
absname = os.path.abspath(dirname)
for root, dirnames, filenames in os.walk(absname):
for subdir in dirnames:
if subdir.startswith('.') or subdir.startswith('_'):
dirnames.remove(subdir)
dirnames.sort()
filenames.sort()
for filename in filenames:
filename = relpath(
os.path.join(root, filename).replace(os.sep, '/'),
dirname
)
for pattern, method in method_map:
if pathmatch(pattern, filename):
filepath = os.path.join(absname, filename)
options = {}
for opattern, odict in options_map.items():
if pathmatch(opattern, filename):
options = odict
if callback:
callback(filename, method, options)
for lineno, message, comments in \
extract_from_file(method, filepath,
keywords=keywords,
comment_tags=comment_tags,
options=options,
strip_comment_tags=
strip_comment_tags):
yield filename, lineno, message, comments
break
def extract_from_file(method, filename, keywords=DEFAULT_KEYWORDS,
comment_tags=(), options=None, strip_comment_tags=False):
"""Extract messages from a specific file.
This function returns a list of tuples of the form:
``(lineno, funcname, message)``
:param filename: the path to the file to extract messages from
:param method: a string specifying the extraction method (e.g. "python")
:param keywords: a dictionary mapping keywords (i.e. names of functions
that should be recognized as translation functions) to
tuples that specify which of their arguments contain
localizable strings
:param comment_tags: a list of translator tags to search for and include
in the results
:param strip_comment_tags: a flag that if set to `True` causes all comment
tags to be removed from the collected comments.
:param options: a dictionary of additional options (optional)
:return: the list of extracted messages
:rtype: `list`
"""
fileobj = open(filename, 'U')
try:
return list(extract(method, fileobj, keywords, comment_tags, options,
strip_comment_tags))
finally:
fileobj.close()
def extract(method, fileobj, keywords=DEFAULT_KEYWORDS, comment_tags=(),
options=None, strip_comment_tags=False):
"""Extract messages from the given file-like object using the specified
extraction method.
This function returns a list of tuples of the form:
``(lineno, message, comments)``
The implementation dispatches the actual extraction to plugins, based on the
value of the ``method`` parameter.
>>> source = '''# foo module
... def run(argv):
... print _('Hello, world!')
... '''
>>> from StringIO import StringIO
>>> for message in extract('python', StringIO(source)):
... print message
(3, u'Hello, world!', [])
:param method: a string specifying the extraction method (e.g. "python");
if this is a simple name, the extraction function will be
looked up by entry point; if it is an explicit reference
to a function (of the form ``package.module:funcname`` or
``package.module.funcname``), the corresponding function
will be imported and used
:param fileobj: the file-like object the messages should be extracted from
:param keywords: a dictionary mapping keywords (i.e. names of functions
that should be recognized as translation functions) to
tuples that specify which of their arguments contain
localizable strings
:param comment_tags: a list of translator tags to search for and include
in the results
:param options: a dictionary of additional options (optional)
:param strip_comment_tags: a flag that if set to `True` causes all comment
tags to be removed from the collected comments.
:return: the list of extracted messages
:rtype: `list`
:raise ValueError: if the extraction method is not registered
"""
func = None
if ':' in method or '.' in method:
if ':' not in method:
lastdot = method.rfind('.')
module, attrname = method[:lastdot], method[lastdot + 1:]
else:
module, attrname = method.split(':', 1)
func = getattr(__import__(module, {}, {}, [attrname]), attrname)
else:
try:
from pkg_resources import working_set
except ImportError:
# pkg_resources is not available, so we resort to looking up the
# builtin extractors directly
builtin = {'ignore': extract_nothing, 'python': extract_python}
func = builtin.get(method)
else:
for entry_point in working_set.iter_entry_points(GROUP_NAME,
method):
func = entry_point.load(require=True)
break
if func is None:
raise ValueError('Unknown extraction method %r' % method)
results = func(fileobj, keywords.keys(), comment_tags,
options=options or {})
for lineno, funcname, messages, comments in results:
if funcname:
spec = keywords[funcname] or (1,)
else:
spec = (1,)
if not isinstance(messages, (list, tuple)):
messages = [messages]
if not messages:
continue
# Validate the messages against the keyword's specification
msgs = []
invalid = False
# last_index is 1 based like the keyword spec
last_index = len(messages)
for index in spec:
if last_index < index:
# Not enough arguments
invalid = True
break
message = messages[index - 1]
if message is None:
invalid = True
break
msgs.append(message)
if invalid:
continue
first_msg_index = spec[0] - 1
if not messages[first_msg_index]:
# An empty string msgid isn't valid, emit a warning
where = '%s:%i' % (hasattr(fileobj, 'name') and \
fileobj.name or '(unknown)', lineno)
print >> sys.stderr, empty_msgid_warning % where
continue
messages = tuple(msgs)
if len(messages) == 1:
messages = messages[0]
if strip_comment_tags:
_strip_comment_tags(comments, comment_tags)
yield lineno, messages, comments
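# Worked example (illustrative, not in the original source): with the default
# keywords, a call such as ngettext('%d apple', '%d apples', n) yields the
# message tuple (u'%d apple', u'%d apples'), because the ngettext spec (1, 2)
# selects the first two arguments.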
def extract_nothing(fileobj, keywords, comment_tags, options):
"""Pseudo extractor that does not actually extract anything, but simply
returns an empty list.
"""
return []
def extract_python(fileobj, keywords, comment_tags, options):
"""Extract messages from Python source code.
:param fileobj: the seekable, file-like object the messages should be
extracted from
:param keywords: a list of keywords (i.e. function names) that should be
recognized as translation functions
:param comment_tags: a list of translator tags to search for and include
in the results
:param options: a dictionary of additional options (optional)
:return: an iterator over ``(lineno, funcname, message, comments)`` tuples
:rtype: ``iterator``
"""
funcname = lineno = message_lineno = None
call_stack = -1
buf = []
messages = []
translator_comments = []
in_def = in_translator_comments = False
comment_tag = None
encoding = parse_encoding(fileobj) or options.get('encoding', 'iso-8859-1')
tokens = generate_tokens(fileobj.readline)
for tok, value, (lineno, _), _, _ in tokens:
if call_stack == -1 and tok == NAME and value in ('def', 'class'):
in_def = True
elif tok == OP and value == '(':
if in_def:
# Avoid false positives for declarations such as:
# def gettext(arg='message'):
in_def = False
continue
if funcname:
message_lineno = lineno
call_stack += 1
elif in_def and tok == OP and value == ':':
# End of a class definition without parens
in_def = False
continue
elif call_stack == -1 and tok == COMMENT:
# Strip the comment token from the line
value = value.decode(encoding)[1:].strip()
if in_translator_comments and \
translator_comments[-1][0] == lineno - 1:
# We're already inside a translator comment, continue appending
translator_comments.append((lineno, value))
continue
# If execution reaches this point, let's see if comment line
# starts with one of the comment tags
for comment_tag in comment_tags:
if value.startswith(comment_tag):
in_translator_comments = True
translator_comments.append((lineno, value))
break
elif funcname and call_stack == 0:
if tok == OP and value == ')':
if buf:
messages.append(''.join(buf))
del buf[:]
else:
messages.append(None)
if len(messages) > 1:
messages = tuple(messages)
else:
messages = messages[0]
# Comments don't apply unless they immediately precede the
# message
if translator_comments and \
translator_comments[-1][0] < message_lineno - 1:
translator_comments = []
yield (message_lineno, funcname, messages,
[comment[1] for comment in translator_comments])
funcname = lineno = message_lineno = None
call_stack = -1
messages = []
translator_comments = []
in_translator_comments = False
elif tok == STRING:
# Unwrap quotes in a safe manner, maintaining the string's
# encoding
# https://sourceforge.net/tracker/?func=detail&atid=355470&
# aid=617979&group_id=5470
value = eval('# coding=%s\n%s' % (encoding, value),
{'__builtins__':{}}, {})
if isinstance(value, str):
value = value.decode(encoding)
buf.append(value)
elif tok == OP and value == ',':
if buf:
messages.append(''.join(buf))
del buf[:]
else:
messages.append(None)
if translator_comments:
# We have translator comments, and since we're on a
# comma (,) the user is allowed to break onto a new line.
# Let's increase the last comment's lineno in order
# for the comment to still be a valid one
old_lineno, old_comment = translator_comments.pop()
translator_comments.append((old_lineno+1, old_comment))
elif call_stack > 0 and tok == OP and value == ')':
call_stack -= 1
elif funcname and call_stack == -1:
funcname = None
elif tok == NAME and value in keywords:
funcname = value
def extract_javascript(fileobj, keywords, comment_tags, options):
"""Extract messages from JavaScript source code.
:param fileobj: the seekable, file-like object the messages should be
extracted from
:param keywords: a list of keywords (i.e. function names) that should be
recognized as translation functions
:param comment_tags: a list of translator tags to search for and include
in the results
:param options: a dictionary of additional options (optional)
:return: an iterator over ``(lineno, funcname, message, comments)`` tuples
:rtype: ``iterator``
"""
from babel.messages.jslexer import tokenize, unquote_string
funcname = message_lineno = None
messages = []
last_argument = None
translator_comments = []
concatenate_next = False
encoding = options.get('encoding', 'utf-8')
last_token = None
call_stack = -1
for token in tokenize(fileobj.read().decode(encoding)):
if token.type == 'operator' and token.value == '(':
if funcname:
message_lineno = token.lineno
call_stack += 1
elif call_stack == -1 and token.type == 'linecomment':
value = token.value[2:].strip()
if translator_comments and \
translator_comments[-1][0] == token.lineno - 1:
translator_comments.append((token.lineno, value))
continue
for comment_tag in comment_tags:
if value.startswith(comment_tag):
translator_comments.append((token.lineno, value.strip()))
break
elif token.type == 'multilinecomment':
# only one multi-line comment may precede a translation
translator_comments = []
value = token.value[2:-2].strip()
for comment_tag in comment_tags:
if value.startswith(comment_tag):
lines = value.splitlines()
if lines:
lines[0] = lines[0].strip()
lines[1:] = dedent('\n'.join(lines[1:])).splitlines()
for offset, line in enumerate(lines):
translator_comments.append((token.lineno + offset,
line))
break
elif funcname and call_stack == 0:
if token.type == 'operator' and token.value == ')':
if last_argument is not None:
messages.append(last_argument)
if len(messages) > 1:
messages = tuple(messages)
elif messages:
messages = messages[0]
else:
messages = None
# Comments don't apply unless they immediately precede the
# message
if translator_comments and \
translator_comments[-1][0] < message_lineno - 1:
translator_comments = []
if messages is not None:
yield (message_lineno, funcname, messages,
[comment[1] for comment in translator_comments])
funcname = message_lineno = last_argument = None
concatenate_next = False
translator_comments = []
messages = []
call_stack = -1
elif token.type == 'string':
new_value = unquote_string(token.value)
if concatenate_next:
last_argument = (last_argument or '') + new_value
concatenate_next = False
else:
last_argument = new_value
elif token.type == 'operator':
if token.value == ',':
if last_argument is not None:
messages.append(last_argument)
last_argument = None
else:
messages.append(None)
concatenate_next = False
elif token.value == '+':
concatenate_next = True
elif call_stack > 0 and token.type == 'operator' \
and token.value == ')':
call_stack -= 1
elif funcname and call_stack == -1:
funcname = None
elif call_stack == -1 and token.type == 'name' and \
token.value in keywords and \
(last_token is None or last_token.type != 'name' or
last_token.value != 'function'):
funcname = token.value
last_token = token<|fim▁end|> | ... ('**.py', 'python')
... ]
|
<|file_name|>FirstPersonControls.js<|end_file_name|><|fim▁begin|>/**
* @author mrdoob / http://mrdoob.com/
* @author alteredq / http://alteredqualia.com/
* @author paulirish / http://paulirish.com/
*/
THREE.FirstPersonControls = function ( object, domElement ) {
if ( domElement === undefined ) {
console.warn( 'THREE.FirstPersonControls: The second parameter "domElement" is now mandatory.' );
domElement = document;
}
this.object = object;
this.domElement = domElement;
// API
this.enabled = true;
this.movementSpeed = 1.0;
this.lookSpeed = 0.005;
this.lookVertical = true;
this.autoForward = false;
this.activeLook = true;
this.heightSpeed = false;
this.heightCoef = 1.0;
this.heightMin = 0.0;<|fim▁hole|> this.verticalMin = 0;
this.verticalMax = Math.PI;
this.mouseDragOn = false;
// internals
this.autoSpeedFactor = 0.0;
this.mouseX = 0;
this.mouseY = 0;
this.moveForward = false;
this.moveBackward = false;
this.moveLeft = false;
this.moveRight = false;
this.viewHalfX = 0;
this.viewHalfY = 0;
// private variables
var lat = 0;
var lon = 0;
var lookDirection = new THREE.Vector3();
var spherical = new THREE.Spherical();
var target = new THREE.Vector3();
//
if ( this.domElement !== document ) {
this.domElement.setAttribute( 'tabindex', - 1 );
}
//
this.handleResize = function () {
if ( this.domElement === document ) {
this.viewHalfX = window.innerWidth / 2;
this.viewHalfY = window.innerHeight / 2;
} else {
this.viewHalfX = this.domElement.offsetWidth / 2;
this.viewHalfY = this.domElement.offsetHeight / 2;
}
};
this.onMouseDown = function ( event ) {
if ( this.domElement !== document ) {
this.domElement.focus();
}
event.preventDefault();
event.stopPropagation();
if ( this.activeLook ) {
switch ( event.button ) {
case 0: this.moveForward = true; break;
case 2: this.moveBackward = true; break;
}
}
this.mouseDragOn = true;
};
this.onMouseUp = function ( event ) {
event.preventDefault();
event.stopPropagation();
if ( this.activeLook ) {
switch ( event.button ) {
case 0: this.moveForward = false; break;
case 2: this.moveBackward = false; break;
}
}
this.mouseDragOn = false;
};
this.onMouseMove = function ( event ) {
if ( this.domElement === document ) {
this.mouseX = event.pageX - this.viewHalfX;
this.mouseY = event.pageY - this.viewHalfY;
} else {
this.mouseX = event.pageX - this.domElement.offsetLeft - this.viewHalfX;
this.mouseY = event.pageY - this.domElement.offsetTop - this.viewHalfY;
}
};
this.onKeyDown = function ( event ) {
//event.preventDefault();
switch ( event.keyCode ) {
case 38: /*up*/
case 87: /*W*/ this.moveForward = true; break;
case 37: /*left*/
case 65: /*A*/ this.moveLeft = true; break;
case 40: /*down*/
case 83: /*S*/ this.moveBackward = true; break;
case 39: /*right*/
case 68: /*D*/ this.moveRight = true; break;
case 82: /*R*/ this.moveUp = true; break;
case 70: /*F*/ this.moveDown = true; break;
}
};
this.onKeyUp = function ( event ) {
switch ( event.keyCode ) {
case 38: /*up*/
case 87: /*W*/ this.moveForward = false; break;
case 37: /*left*/
case 65: /*A*/ this.moveLeft = false; break;
case 40: /*down*/
case 83: /*S*/ this.moveBackward = false; break;
case 39: /*right*/
case 68: /*D*/ this.moveRight = false; break;
case 82: /*R*/ this.moveUp = false; break;
case 70: /*F*/ this.moveDown = false; break;
}
};
this.lookAt = function ( x, y, z ) {
if ( x.isVector3 ) {
target.copy( x );
} else {
target.set( x, y, z );
}
this.object.lookAt( target );
setOrientation( this );
return this;
};
this.update = function () {
var targetPosition = new THREE.Vector3();
return function update( delta ) {
if ( this.enabled === false ) return;
if ( this.heightSpeed ) {
var y = THREE.Math.clamp( this.object.position.y, this.heightMin, this.heightMax );
var heightDelta = y - this.heightMin;
this.autoSpeedFactor = delta * ( heightDelta * this.heightCoef );
} else {
this.autoSpeedFactor = 0.0;
}
var actualMoveSpeed = delta * this.movementSpeed;
if ( this.moveForward || ( this.autoForward && ! this.moveBackward ) ) this.object.translateZ( - ( actualMoveSpeed + this.autoSpeedFactor ) );
if ( this.moveBackward ) this.object.translateZ( actualMoveSpeed );
if ( this.moveLeft ) this.object.translateX( - actualMoveSpeed );
if ( this.moveRight ) this.object.translateX( actualMoveSpeed );
if ( this.moveUp ) this.object.translateY( actualMoveSpeed );
if ( this.moveDown ) this.object.translateY( - actualMoveSpeed );
var actualLookSpeed = delta * this.lookSpeed;
if ( ! this.activeLook ) {
actualLookSpeed = 0;
}
var verticalLookRatio = 1;
if ( this.constrainVertical ) {
verticalLookRatio = Math.PI / ( this.verticalMax - this.verticalMin );
}
lon -= this.mouseX * actualLookSpeed;
if ( this.lookVertical ) lat -= this.mouseY * actualLookSpeed * verticalLookRatio;
lat = Math.max( - 85, Math.min( 85, lat ) );
var phi = THREE.Math.degToRad( 90 - lat );
var theta = THREE.Math.degToRad( lon );
if ( this.constrainVertical ) {
phi = THREE.Math.mapLinear( phi, 0, Math.PI, this.verticalMin, this.verticalMax );
}
var position = this.object.position;
targetPosition.setFromSphericalCoords( 1, phi, theta ).add( position );
this.object.lookAt( targetPosition );
};
}();
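// Hedged usage sketch (illustrative, not part of the original file): the
// controls are normally driven from the render loop with a per-frame delta:
//   var controls = new THREE.FirstPersonControls( camera, renderer.domElement );
//   var clock = new THREE.Clock();
//   function animate() {
//     requestAnimationFrame( animate );
//     controls.update( clock.getDelta() );
//     renderer.render( scene, camera );
//   }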
function contextmenu( event ) {
event.preventDefault();
}
this.dispose = function () {
this.domElement.removeEventListener( 'contextmenu', contextmenu, false );
this.domElement.removeEventListener( 'mousedown', _onMouseDown, false );
this.domElement.removeEventListener( 'mousemove', _onMouseMove, false );
this.domElement.removeEventListener( 'mouseup', _onMouseUp, false );
window.removeEventListener( 'keydown', _onKeyDown, false );
window.removeEventListener( 'keyup', _onKeyUp, false );
};
var _onMouseMove = bind( this, this.onMouseMove );
var _onMouseDown = bind( this, this.onMouseDown );
var _onMouseUp = bind( this, this.onMouseUp );
var _onKeyDown = bind( this, this.onKeyDown );
var _onKeyUp = bind( this, this.onKeyUp );
this.domElement.addEventListener( 'contextmenu', contextmenu, false );
this.domElement.addEventListener( 'mousemove', _onMouseMove, false );
this.domElement.addEventListener( 'mousedown', _onMouseDown, false );
this.domElement.addEventListener( 'mouseup', _onMouseUp, false );
window.addEventListener( 'keydown', _onKeyDown, false );
window.addEventListener( 'keyup', _onKeyUp, false );
function bind( scope, fn ) {
return function () {
fn.apply( scope, arguments );
};
}
function setOrientation( controls ) {
var quaternion = controls.object.quaternion;
lookDirection.set( 0, 0, - 1 ).applyQuaternion( quaternion );
spherical.setFromVector3( lookDirection );
lat = 90 - THREE.Math.radToDeg( spherical.phi );
lon = THREE.Math.radToDeg( spherical.theta );
}
this.handleResize();
setOrientation( this );
};<|fim▁end|> | this.heightMax = 1.0;
this.constrainVertical = false; |
<|file_name|>xccdf2csv_stig_module.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import sys
import csv
import lxml.etree as ET
# This script creates a CSV file from an XCCDF file formatted in the
# structure of a STIG. This should enable its ingestion into VMS,
# as well as its comparison with VMS output.
xccdf_ns = "http://checklists.nist.gov/xccdf/1.1"
disa_cciuri = "http://iase.disa.mil/stigs/cci/Pages/index.aspx"
disa_srguri = "http://iase.disa.mil/stigs/srgs/Pages/index.aspx"
def parse_xml_file(xmlfile):
with open(xmlfile, 'r') as xml_file:
filestring = xml_file.read()
tree = ET.fromstring(filestring)
return tree
def reflist(refs):
refstring = ', '.join(refs)
return refstring
def node_to_text(node):
textslist = node.xpath(".//text()")
return ''.join(textslist)
def main():
if len(sys.argv) < 2:
print "Provide an XCCDF file to convert into a CSV file."
sys.exit(1)
xccdffile = sys.argv[1]
xccdftree = parse_xml_file(xccdffile)
rules = xccdftree.findall(".//{%s}Rule" % xccdf_ns)
rulewriter = csv.writer(sys.stdout, quoting=csv.QUOTE_ALL)
for rule in rules:
cci_refs = [ref.text for ref in rule.findall("{%s}ident[@system='%s']"
% (xccdf_ns, disa_cciuri))]
srg_refs = [ref.text for ref in rule.findall("{%s}ident[@system='%s']"
% (xccdf_ns, disa_srguri))]
title = rule.find("{%s}title" % xccdf_ns).text
description = node_to_text(rule.find("{%s}description" % xccdf_ns))
fixtext = node_to_text(rule.find("{%s}fixtext" % xccdf_ns))
checktext = node_to_text(rule.find(".//{%s}check-content" % xccdf_ns))
row = [reflist(cci_refs), reflist(srg_refs), title, description, fixtext, checktext]
rulewriter.writerow(row)<|fim▁hole|> sys.exit(0)
if __name__ == "__main__":
main()<|fim▁end|> | |
<|file_name|>css_types.py<|end_file_name|><|fim▁begin|>"""CSS selector structure items."""
import copyreg
from collections.abc import Hashable, Mapping
__all__ = (
'Selector',
'SelectorNull',
'SelectorTag',
'SelectorAttribute',
'SelectorContains',
'SelectorNth',
'SelectorLang',
'SelectorList',
'Namespaces',
'CustomSelectors'
)
SEL_EMPTY = 0x1
SEL_ROOT = 0x2
SEL_DEFAULT = 0x4
SEL_INDETERMINATE = 0x8
SEL_SCOPE = 0x10
SEL_DIR_LTR = 0x20
SEL_DIR_RTL = 0x40
SEL_IN_RANGE = 0x80
SEL_OUT_OF_RANGE = 0x100
SEL_DEFINED = 0x200
SEL_PLACEHOLDER_SHOWN = 0x400
class Immutable(object):
"""Immutable."""
__slots__ = ('_hash',)
def __init__(self, **kwargs):
"""Initialize."""
temp = []
for k, v in kwargs.items():
temp.append(type(v))
temp.append(v)
super(Immutable, self).__setattr__(k, v)
super(Immutable, self).__setattr__('_hash', hash(tuple(temp)))
@classmethod
def __base__(cls):
"""Get base class."""
return cls
def __eq__(self, other):
"""Equal."""
return (
isinstance(other, self.__base__()) and
all([getattr(other, key) == getattr(self, key) for key in self.__slots__ if key != '_hash'])
)
def __ne__(self, other):
"""Equal."""
return (
not isinstance(other, self.__base__()) or
any([getattr(other, key) != getattr(self, key) for key in self.__slots__ if key != '_hash'])
)
def __hash__(self):
"""Hash."""
return self._hash
def __setattr__(self, name, value):
"""Prevent mutability."""
raise AttributeError("'{}' is immutable".format(self.__class__.__name__))
def __repr__(self): # pragma: no cover
"""Representation."""
return "{}({})".format(
self.__base__(), ', '.join(["{}={!r}".format(k, getattr(self, k)) for k in self.__slots__[:-1]])
)
__str__ = __repr__
class ImmutableDict(Mapping):
"""Hashable, immutable dictionary."""
def __init__(self, *args, **kwargs):
"""Initialize."""
arg = args[0] if args else kwargs
is_dict = isinstance(arg, dict)
if (
is_dict and not all([isinstance(v, Hashable) for v in arg.values()]) or
not is_dict and not all([isinstance(k, Hashable) and isinstance(v, Hashable) for k, v in arg])
):
raise TypeError('All values must be hashable')
self._d = dict(*args, **kwargs)
self._hash = hash(tuple([(type(x), x, type(y), y) for x, y in sorted(self._d.items())]))
def __iter__(self):
"""Iterator."""
return iter(self._d)
def __len__(self):
"""Length."""
return len(self._d)
def __getitem__(self, key):
"""Get item: `namespace['key']`."""
return self._d[key]
def __hash__(self):
"""Hash."""
return self._hash
def __repr__(self): # pragma: no cover
"""Representation."""
return "{!r}".format(self._d)
__str__ = __repr__
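# Illustrative note (not in the original source): ImmutableDict hashes its
# sorted (type, key, type, value) items, so instances built from equivalent
# inputs compare and hash equal, e.g.
#   ImmutableDict({'a': 1}) == ImmutableDict([('a', 1)])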
class Namespaces(ImmutableDict):
"""Namespaces."""
def __init__(self, *args, **kwargs):
"""Initialize."""
# If there are arguments, check the first index.
# `super` should fail if the user gave multiple arguments,
# so don't bother checking that.
arg = args[0] if args else kwargs
is_dict = isinstance(arg, dict)
if is_dict and not all([isinstance(k, str) and isinstance(v, str) for k, v in arg.items()]):
raise TypeError('Namespace keys and values must be Unicode strings')
elif not is_dict and not all([isinstance(k, str) and isinstance(v, str) for k, v in arg]):
raise TypeError('Namespace keys and values must be Unicode strings')
super(Namespaces, self).__init__(*args, **kwargs)
class CustomSelectors(ImmutableDict):
"""Custom selectors."""
def __init__(self, *args, **kwargs):
"""Initialize."""
# If there are arguments, check the first index.
# `super` should fail if the user gave multiple arguments,
# so don't bother checking that.
arg = args[0] if args else kwargs
is_dict = isinstance(arg, dict)
if is_dict and not all([isinstance(k, str) and isinstance(v, str) for k, v in arg.items()]):
raise TypeError('CustomSelectors keys and values must be Unicode strings')
elif not is_dict and not all([isinstance(k, str) and isinstance(v, str) for k, v in arg]):
raise TypeError('CustomSelectors keys and values must be Unicode strings')
super(CustomSelectors, self).__init__(*args, **kwargs)
class Selector(Immutable):
"""Selector."""
__slots__ = (
'tag', 'ids', 'classes', 'attributes', 'nth', 'selectors',
'relation', 'rel_type', 'contains', 'lang', 'flags', '_hash'
)
def __init__(
self, tag, ids, classes, attributes, nth, selectors,
relation, rel_type, contains, lang, flags
):
"""Initialize."""
super(Selector, self).__init__(
tag=tag,
ids=ids,
classes=classes,
attributes=attributes,
nth=nth,
selectors=selectors,
relation=relation,
rel_type=rel_type,
contains=contains,
lang=lang,
flags=flags
)
class SelectorNull(Immutable):
"""Null Selector."""
def __init__(self):
"""Initialize."""
super(SelectorNull, self).__init__()
class SelectorTag(Immutable):
"""Selector tag."""
__slots__ = ("name", "prefix", "_hash")
def __init__(self, name, prefix):
"""Initialize."""
super(SelectorTag, self).__init__(
name=name,
prefix=prefix
)
class SelectorAttribute(Immutable):
"""Selector attribute rule."""
__slots__ = ("attribute", "prefix", "pattern", "xml_type_pattern", "_hash")
def __init__(self, attribute, prefix, pattern, xml_type_pattern):
"""Initialize."""
super(SelectorAttribute, self).__init__(
attribute=attribute,
prefix=prefix,
pattern=pattern,
xml_type_pattern=xml_type_pattern
)
class SelectorContains(Immutable):
"""Selector contains rule."""
__slots__ = ("text", "_hash")
def __init__(self, text):
"""Initialize."""
super(SelectorContains, self).__init__(
text=text
)
class SelectorNth(Immutable):
"""Selector nth type."""
__slots__ = ("a", "n", "b", "of_type", "last", "selectors", "_hash")
def __init__(self, a, n, b, of_type, last, selectors):
"""Initialize."""
super(SelectorNth, self).__init__(
a=a,
n=n,
b=b,
of_type=of_type,
last=last,
selectors=selectors
)
class SelectorLang(Immutable):
"""Selector language rules."""
__slots__ = ("languages", "_hash",)
def __init__(self, languages):
"""Initialize."""
<|fim▁hole|> )
def __iter__(self):
"""Iterator."""
return iter(self.languages)
def __len__(self): # pragma: no cover
"""Length."""
return len(self.languages)
def __getitem__(self, index): # pragma: no cover
"""Get item."""
return self.languages[index]
class SelectorList(Immutable):
"""Selector list."""
__slots__ = ("selectors", "is_not", "is_html", "_hash")
def __init__(self, selectors=tuple(), is_not=False, is_html=False):
"""Initialize."""
super(SelectorList, self).__init__(
selectors=tuple(selectors),
is_not=is_not,
is_html=is_html
)
def __iter__(self):
"""Iterator."""
return iter(self.selectors)
def __len__(self):
"""Length."""
return len(self.selectors)
def __getitem__(self, index):
"""Get item."""
return self.selectors[index]
def _pickle(p):
return p.__base__(), tuple([getattr(p, s) for s in p.__slots__[:-1]])
def pickle_register(obj):
"""Allow object to be pickled."""
copyreg.pickle(obj, _pickle)
pickle_register(Selector)
pickle_register(SelectorNull)
pickle_register(SelectorTag)
pickle_register(SelectorAttribute)
pickle_register(SelectorContains)
pickle_register(SelectorNth)
pickle_register(SelectorLang)
pickle_register(SelectorList)<|fim▁end|> |
super(SelectorLang, self).__init__(
languages=tuple(languages)
|
<|file_name|>generated_expansion.go<|end_file_name|><|fim▁begin|>/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
<|fim▁hole|><|fim▁end|> | package v1alpha1
type APIServiceExpansion interface{} |
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>use rustc_serialize::json::DecoderError;
use rustc_serialize::base64::FromBase64Error;
use self::VaultError::*;
/// Various errors for vault operations
#[derive(Debug)]
pub enum VaultError {
/// If the vault is corrupted, we may not be able to read the base64 encoded data<|fim▁hole|> BadPasswordError,
/// When you attempt to encrypt a Vault that has no password set
NoPasswordSpecifiedError
}
/// Convenience type for VaultError functions
pub type VResult<T> = Result<T, VaultError>;
impl From<DecoderError> for VaultError {
fn from(e: DecoderError) -> VaultError {
match e {
DecoderError::ParseError(_) => BadPasswordError,
e => VaultEntrySchemaError(e)
}
}
}
impl From<FromBase64Error> for VaultError {
fn from(e: FromBase64Error) -> VaultError {
Base64Error(e)
}
}<|fim▁end|> | Base64Error(FromBase64Error),
/// This happens when the data in the vault is valid, but does not match the vault type
VaultEntrySchemaError(DecoderError),
/// When the decrypted data is not valid JSON |
<|file_name|>webXRInputSource.ts<|end_file_name|><|fim▁begin|>import { Observable } from "../Misc/observable";
import { AbstractMesh } from "../Meshes/abstractMesh";
import { Quaternion, Vector3 } from "../Maths/math.vector";
import { Ray } from "../Culling/ray";
import { Scene } from "../scene";
import { WebXRAbstractMotionController } from "./motionController/webXRAbstractMotionController";
import { WebXRMotionControllerManager } from "./motionController/webXRMotionControllerManager";
import { Tools } from "../Misc/tools";
import { WebXRCamera } from "./webXRCamera";
let idCount = 0;
/**
* Configuration options for the WebXR controller creation
*/
export interface IWebXRControllerOptions {
/**
* Should the controller mesh be animated when a user interacts with it
* The pressed buttons / thumbstick and touchpad animations will be disabled
*/
disableMotionControllerAnimation?: boolean;
/**
* Do not load the controller mesh, in case a different mesh needs to be loaded.
*/
doNotLoadControllerMesh?: boolean;
/**
* Force a specific controller type for this controller.
* This can be used when creating your own profile or when testing different controllers
*/
forceControllerProfile?: string;
/**
* Defines a rendering group ID for meshes that will be loaded.
* This is for the default controllers only.
*/
renderingGroupId?: number;
}
/**
* Represents an XR controller
*/
export class WebXRInputSource {
private _tmpVector = new Vector3();
private _uniqueId: string;
private _disposed = false;
/**
* Represents the part of the controller that is held. This may not exist if the controller is the head mounted display itself; in that case only the pointer from the head will be available
*/
public grip?: AbstractMesh;
/**
* If available, this is the gamepad object related to this controller.
* Using this object it is possible to get click events and trackpad changes of the
* webxr controller that is currently being used.
*/
public motionController?: WebXRAbstractMotionController;
/**
* Event that fires when the controller is removed/disposed.
* The object provided as event data is this controller, after associated assets were disposed.
* uniqueId is still available.
*/
public onDisposeObservable = new Observable<WebXRInputSource>();
/**
* Will be triggered when the mesh associated with the motion controller is done loading.
* It is also possible that this will never trigger (!) if no mesh was loaded, or if the developer decides to load a different mesh
* A shortened version of controller -> motion controller -> on mesh loaded.
*/
public onMeshLoadedObservable = new Observable<AbstractMesh>();
/**
* Observers registered here will trigger when a motion controller profile was assigned to this xr controller
*/
public onMotionControllerInitObservable = new Observable<WebXRAbstractMotionController>();
/**
* Pointer which can be used to select objects or attach a visible laser to
*/
public pointer: AbstractMesh;
/**
* The last XRPose the was calculated on the current XRFrame
* @hidden
*/
public _lastXRPose?: XRPose;
/**
* Creates the input source object
* @see https://doc.babylonjs.com/how_to/webxr_controllers_support
* @param _scene the scene which the controller should be associated to
* @param inputSource the underlying input source for the controller
* @param _options options for this controller creation
*/
constructor(
private _scene: Scene,
/** The underlying input source for the controller */
public inputSource: XRInputSource,
private _options: IWebXRControllerOptions = {}
) {
this._uniqueId = `controller-${idCount++}-${inputSource.targetRayMode}-${inputSource.handedness}`;
this.pointer = new AbstractMesh(`${this._uniqueId}-pointer`, _scene);
this.pointer.rotationQuaternion = new Quaternion();
if (this.inputSource.gripSpace) {
this.grip = new AbstractMesh(`${this._uniqueId}-grip`, this._scene);
this.grip.rotationQuaternion = new Quaternion();
}
this._tmpVector.set(0, 0, this._scene.useRightHandedSystem ? -1.0 : 1.0);
// for now only load motion controllers if gamepad object available
if (this.inputSource.gamepad && this.inputSource.targetRayMode === 'tracked-pointer') {
WebXRMotionControllerManager.GetMotionControllerWithXRInput(inputSource, _scene, this._options.forceControllerProfile).then(
(motionController) => {
this.motionController = motionController;
this.onMotionControllerInitObservable.notifyObservers(motionController);
// should the model be loaded?
if (!this._options.doNotLoadControllerMesh && !this.motionController._doNotLoadControllerMesh) {
this.motionController.loadModel().then((success) => {
if (success && this.motionController && this.motionController.rootMesh) {
if (this._options.renderingGroupId) {
// anything other than 0?
this.motionController.rootMesh.renderingGroupId = this._options.renderingGroupId;
this.motionController.rootMesh.getChildMeshes(false).forEach((mesh) => (mesh.renderingGroupId = this._options.renderingGroupId!));
}
this.onMeshLoadedObservable.notifyObservers(this.motionController.rootMesh);
this.motionController.rootMesh.parent = this.grip || this.pointer;
this.motionController.disableAnimation = !!this._options.disableMotionControllerAnimation;
}
// make sure to dispose if the controller is already disposed
if (this._disposed) {
this.motionController?.dispose();
}
});
}
},
() => {
Tools.Warn(`Could not find a matching motion controller for the registered input source`);
}
);
}
}
/**
* Get this controllers unique id
*/
public get uniqueId() {
return this._uniqueId;
}
/**
* Disposes of the object
*/
public dispose() {
if (this.grip) {
this.grip.dispose(true);
}
if (this.motionController) {
this.motionController.dispose();
}
this.pointer.dispose(true);
this.onMotionControllerInitObservable.clear();
this.onMeshLoadedObservable.clear();
this.onDisposeObservable.notifyObservers(this);
this.onDisposeObservable.clear();
this._disposed = true;
}
/**
* Gets a world space ray coming from the pointer or grip
* @param result the resulting ray
* @param gripIfAvailable use the grip mesh instead of the pointer, if available
*/
public getWorldPointerRayToRef(result: Ray, gripIfAvailable: boolean = false) {
const object = gripIfAvailable && this.grip ? this.grip : this.pointer;
Vector3.TransformNormalToRef(this._tmpVector, object.getWorldMatrix(), result.direction);
result.direction.normalize();
result.origin.copyFrom(object.absolutePosition);
result.length = 1000;
}
/**
* Updates the controller pose based on the given XRFrame
* @param xrFrame xr frame to update the pose with
* @param referenceSpace reference space to use
* @param xrCamera the xr camera, used for parenting
*/
public updateFromXRFrame(xrFrame: XRFrame, referenceSpace: XRReferenceSpace, xrCamera: WebXRCamera) {
const pose = xrFrame.getPose(this.inputSource.targetRaySpace, referenceSpace);
this._lastXRPose = pose;
// Update the pointer mesh
if (pose) {
const pos = pose.transform.position;
this.pointer.position.set(pos.x, pos.y, pos.z);
const orientation = pose.transform.orientation;
this.pointer.rotationQuaternion!.set(orientation.x, orientation.y, orientation.z, orientation.w);
if (!this._scene.useRightHandedSystem) {
this.pointer.position.z *= -1;
this.pointer.rotationQuaternion!.z *= -1;
this.pointer.rotationQuaternion!.w *= -1;
}
this.pointer.parent = xrCamera.parent;
}
// Update the grip mesh if it exists
if (this.inputSource.gripSpace && this.grip) {
let pose = xrFrame.getPose(this.inputSource.gripSpace, referenceSpace);
<|fim▁hole|> const pos = pose.transform.position;
const orientation = pose.transform.orientation;
this.grip.position.set(pos.x, pos.y, pos.z);
this.grip.rotationQuaternion!.set(orientation.x, orientation.y, orientation.z, orientation.w);
if (!this._scene.useRightHandedSystem) {
this.grip.position.z *= -1;
this.grip.rotationQuaternion!.z *= -1;
this.grip.rotationQuaternion!.w *= -1;
}
}
this.grip.parent = xrCamera.parent;
}
if (this.motionController) {
// either update buttons only or also position, if in gamepad mode
this.motionController.updateFromXRFrame(xrFrame);
}
}
}<|fim▁end|> | if (pose) {
|
<|file_name|>ImmuLoaderOfMemberLogin.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2004-2013 the Seasar Foundation and the Others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.docksidestage.mysql.dbflute.immuhama.bsbhv.loader;
import java.util.List;
import org.dbflute.bhv.*;
import org.docksidestage.mysql.dbflute.immuhama.exbhv.*;
import org.docksidestage.mysql.dbflute.immuhama.exentity.*;
/**
* The referrer loader of (会員ログイン情報)MEMBER_LOGIN as TABLE. <br>
* <pre>
* [primary key]
* MEMBER_LOGIN_ID
*
* [column]
* MEMBER_LOGIN_ID, MEMBER_ID, LOGIN_DATETIME, MOBILE_LOGIN_FLG, LOGIN_MEMBER_STATUS_CODE
*
* [sequence]
*
*
* [identity]
* MEMBER_LOGIN_ID
*
* [version-no]
*
*
* [foreign table]
* MEMBER_STATUS, MEMBER
*
* [referrer table]
*
*
* [foreign property]
* memberStatus, member
*
* [referrer property]
*
* </pre>
* @author DBFlute(AutoGenerator)
*/
public class ImmuLoaderOfMemberLogin {
// ===================================================================================
// Attribute
// =========
protected List<ImmuMemberLogin> _selectedList;
protected BehaviorSelector _selector;
protected ImmuMemberLoginBhv _myBhv; // lazy-loaded
// ===================================================================================
// Ready for Loading
// =================
public ImmuLoaderOfMemberLogin ready(List<ImmuMemberLogin> selectedList, BehaviorSelector selector)
{ _selectedList = selectedList; _selector = selector; return this; }
protected ImmuMemberLoginBhv myBhv()
{ if (_myBhv != null) { return _myBhv; } else { _myBhv = _selector.select(ImmuMemberLoginBhv.class); return _myBhv; } }
// ===================================================================================
// Pull out Foreign
<|fim▁hole|> // ================
protected ImmuLoaderOfMemberStatus _foreignMemberStatusLoader;
public ImmuLoaderOfMemberStatus pulloutMemberStatus() {
if (_foreignMemberStatusLoader == null)
{ _foreignMemberStatusLoader = new ImmuLoaderOfMemberStatus().ready(myBhv().pulloutMemberStatus(_selectedList), _selector); }
return _foreignMemberStatusLoader;
}
protected ImmuLoaderOfMember _foreignMemberLoader;
public ImmuLoaderOfMember pulloutMember() {
if (_foreignMemberLoader == null)
{ _foreignMemberLoader = new ImmuLoaderOfMember().ready(myBhv().pulloutMember(_selectedList), _selector); }
return _foreignMemberLoader;
}
// ===================================================================================
// Accessor
// ========
public List<ImmuMemberLogin> getSelectedList() { return _selectedList; }
public BehaviorSelector getSelector() { return _selector; }
}<|fim▁end|> | |
<|file_name|>contrib-sass.js<|end_file_name|><|fim▁begin|>module.exports = function(grunt) {
grunt.config.set('sass', {
options: {
loadPath: 'src/styles'<|fim▁hole|> dev: {
files: {
'build/dist/styles.css': 'build/tmp/styles.scss'
}
},
prod: {
files: {
'build/dist/styles.css': 'build/tmp/styles.scss'
}
}
});
grunt.loadNpmTasks('grunt-contrib-sass');
};<|fim▁end|> | }, |
<|file_name|>test_exceptions.py<|end_file_name|><|fim▁begin|>from django.template import TemplateDoesNotExist, TemplateSyntaxError
from django.test import SimpleTestCase
from ..utils import setup
from .test_extends import inheritance_templates
class ExceptionsTests(SimpleTestCase):
@setup({'exception01': "{% extends 'nonexistent' %}"})
def test_exception01(self):
"""
Raise exception for invalid template name
"""
with self.assertRaises(TemplateDoesNotExist):
self.engine.render_to_string('exception01')
@setup({'exception02': '{% extends nonexistent %}'})
def test_exception02(self):
"""
Raise exception for invalid variable template name
"""
if self.engine.string_if_invalid:
with self.assertRaises(TemplateDoesNotExist):
self.engine.render_to_string('exception02')
else:
with self.assertRaises(TemplateSyntaxError):
self.engine.render_to_string('exception02')
@setup(
{'exception03': "{% extends 'inheritance01' %}"
<|fim▁hole|> """
Raise exception for extra {% extends %} tags
"""
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('exception03')
@setup(
{'exception04': "{% extends 'inheritance17' %}{% block first %}{% echo 400 %}5678{% endblock %}"},
inheritance_templates,
)
def test_exception04(self):
"""
Raise exception for custom tags used in child with {% load %} tag in parent, not in child
"""
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('exception04')
@setup({'exception05': '{% block first %}{{ block.super }}{% endblock %}'})
def test_exception05(self):
"""
Raise exception for block.super used in base template
"""
with self.assertRaises(TemplateSyntaxError):
self.engine.render_to_string('exception05')<|fim▁end|> | "{% block first %}2{% endblock %}{% extends 'inheritance16' %}"},
inheritance_templates,
)
def test_exception03(self):
|
<|file_name|>33.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Title: Large modular exponentiation
# Given two positive integers a (0 < a < 100000) and n (0 <= n <= 100000000000), compute (a^n) % 20132013 and output the result
import math<|fim▁hole|>a,n =10000,10000000
# Answer
ret = 1 #余数
def PowerMod(a, n, ret):
if n == 0:
return ret
if n % 2: # n为奇数
ret = ret * a % 20132013
return PowerMod(a*a%20132013, n/2, ret) #n为偶数。a^n %m = (a^2^n/2)%m = ((a^2%m)^n/2)%m
print PowerMod(a, n, ret)<|fim▁end|> |
# Test |
<|file_name|>react-apollo.js<|end_file_name|><|fim▁begin|>declare module "react-apollo" {
declare function graphql(query: Object, options: Object): Function;<|fim▁hole|><|fim▁end|> | } |
<|file_name|>tree_generator.py<|end_file_name|><|fim▁begin|># tree_generator was written with Python 2.7.4.
# The pickle files it produces should not be read with a version of
# Python less than 2.7.4, as they are not forwards compatible.
from piece_definitions import PIECES
import numpy as np
import sys
import collections
import itertools
import argparse
import multiprocessing
import time
import hashlib
from math import factorial
from rect import Rect
from tree import *
from helper import *
import pickle
WIDTH = 4 # Default width
HEIGHT = 4 # Default height
BOARD = Board(HEIGHT, WIDTH)
PIECES_FIT = (WIDTH * HEIGHT) / 4 # Number of pieces that can fit in board
NUM_PIECES = len(PIECES)
NOTIFY_INTERVAL = 10 # Number of seconds between progress notification
args = None
# The adjacent function returns a 2D-array of all blocks that are vertically adjacent
# to the given 2D-array "a".
# A piece is not hovering in midair if part of it collides with the adjacent matrix.
def adjacent(a):
HEIGHT = a.shape[0]
WIDTH = a.shape[1]
m = np.zeros((HEIGHT, WIDTH), np.bool)
m[-1] = True # Set bottom row
# Set edge values
for x in range(HEIGHT):
for y in range(WIDTH):
if np.all(a[:, y]): # Special case for blocks that take up a whole column
m[:, y] = False
elif a[x, y] and x > 0:
m[x-1, y] = True
    # Remove all but the highest values
for x in range(HEIGHT):
for y in range(WIDTH):
if m[x, y]:
m[x+1:, y] = False
return m
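
# A minimal self-check sketch for adjacent() (an illustrative addition, not part
# of the original module; the helper below is hypothetical and never called).
# A piece "rests" only if it overlaps a marked cell: the cell just above the
# stack in column 0, or the uncovered floor cells in columns 1 and 2.
def _adjacent_example():
    a = np.array([[0, 0, 0],
                  [1, 0, 0],
                  [1, 0, 0]], np.bool)
    expected = np.array([[1, 0, 0],
                         [0, 0, 0],
                         [0, 1, 1]], np.bool)
    assert np.array_equal(adjacent(a), expected)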
# The overhang function returns a 2D-array of all blocks that are empty space, but
# have a piece above them.
# A piece can be successfully dropped from above into its current position if it does
# not collide with the overhang matrix.
def overhang(a):
HEIGHT = a.shape[0]
WIDTH = a.shape[1]
m = np.zeros((HEIGHT, WIDTH), np.bool)
for y in range(WIDTH):
for x in range(1, HEIGHT):
if a[x-1, y] and not a[x, y]:
m[x, y] = True
return m
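
# A companion sketch for overhang() (illustrative addition; hypothetical helper,
# never called). The block at (1, 1) juts out over empty space, so the cell
# below it at (2, 1) is unreachable by a straight vertical drop and is marked.
def _overhang_example():
    a = np.array([[0, 0, 0],
                  [1, 1, 0],
                  [1, 0, 0]], np.bool)
    expected = np.array([[0, 0, 0],
                         [0, 0, 0],
                         [0, 1, 0]], np.bool)
    assert np.array_equal(overhang(a), expected)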
# The possible function returns a value indicating if a piece placement "p" on a given
# Tetris grid "a" would be possible (p does not occupy the same space as a).
def possible(p, a):
# See if the pieces clash
land = np.logical_and(p, a)
if np.any(land):
return False
return True
# The valid function returns a value indicating if a piece placement "p" on a given
# Tetris grid "a" would be valid (p is not in mid-air, and can be dropped vertically
# into its destination position).
def valid(p, a):
# See if the piece is being placed in mid-air
hover = np.logical_and( p, adjacent(a) )
if not np.any(hover):
return False
# See if the piece can be placed when dropped vertically
drop = np.logical_and( p, overhang(a) )
if np.any(drop):
return False
return True
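
# A sketch contrasting possible() and valid() (illustrative addition;
# hypothetical helper, never called). A piece floating at the top of the grid
# overlaps nothing, so possible() accepts it, but it touches neither the floor
# nor the existing stack, so valid() rejects it.
def _possible_vs_valid_example():
    board = np.array([[0, 0, 0],
                      [0, 0, 0],
                      [1, 0, 0]], np.bool)
    piece = np.array([[1, 0, 0],
                      [0, 0, 0],
                      [0, 0, 0]], np.bool)
    assert possible(piece, board)
    assert not valid(piece, board)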
# Calculate every possible position a piece can have on a WIDTH*HEIGHT grid
def calculate_positions():
print 'Computing all possible orientations and positions of given tetrominoes on %dx%d grid.' % (WIDTH, HEIGHT)
possibilities = []
i = 0
for n, p in enumerate(PIECES):
options = []
p_width = len(p[0])
p_height = len(p)
        # Calculate the number of rotations a piece requires, default 4 (all four)
nrot = 4
if rall(p):
if p_width == p_height: # Piece is square, no rotation needed
nrot = 1
else: # Piece is rectangular, one rotation needed
nrot = 2
# Add all rotations to an options list
for r in range(nrot):
p = np.rot90(p, r)
# Remove duplicate rotations
already = False
for p2, r2 in options:
if np.array_equal(p, p2):
already = True
if not already:
options.append((p, r))
# Create all combinations
for _, r in options:
for h in range(HEIGHT):
for w in range(WIDTH):
try:
i += 1
op = DAction(BOARD, n, r, h, w)
possibilities.append(op)
except PieceNotFitError:
pass
print i
lp = len(possibilities)
print "There are %d possible orientations and positions for the given tetrominoes." % lp
calculate_possible(possibilities)
# Simple iterator that outputs the HEIGHT and WIDTH for our multiprocessing functions
def hw_iterator():
while True:
yield (HEIGHT, WIDTH)
# Check possibility
def check_possibility(data):
global PIECES, HEIGHT, WIDTH
hw, cur_pieces = data
height, width = hw
HEIGHT = height
WIDTH = width
board = np.zeros((HEIGHT, WIDTH), np.bool)
indr = [] # List of coordinate pairs of all pieces
lowestc = [HEIGHT, WIDTH] # Lowest coordinate of all pieces: (bottom, left)
highestc = [0, 0] # Highest coordinate of all pieces: (top, right)
boxcalc = False
prev_p = None
prev_bounding = None
for p in cur_pieces:
pheight = len(PIECES[p.piece])
pwidth = len(PIECES[p.piece][0])
coords = [[p.h, p.w], [pheight + p.h, pwidth + p.w]]
max_bounding = Rect(lowestc, highestc)
cur_bounding = Rect(*coords) # (bottom, left), (top, right)
if prev_p is not None and prev_bounding is not None:
board = np.logical_or(prev_p.data, board)
indr.append(prev_bounding)
prev_p = p
prev_bounding = cur_bounding
# We couldn't work out if it collides or not cheaply, so now onto the hard stuff
if not possible(p.data, board):
return None # This seems to have improved performance by like 10000%, very suspicious, keep an eye on it
return cur_pieces
# Input seconds, output H:MM:SS
def time_output(s):
hours, remainder = divmod(s, 3600)
minutes, seconds = divmod(remainder, 60)
return '%.f:%02.f:%02.f' % (hours, minutes, seconds)
# We combine all existing combinations and rotations of pieces to see which
# successfully fit together.
def calculate_possible(positions):
lp = len(positions)
search_space = 0
iterables = []
for i in range(PIECES_FIT):
search_space = search_space + ( factorial(lp) / ( factorial(lp-(PIECES_FIT-i)) * factorial(PIECES_FIT-i) ) )
iterables.append(itertools.combinations(positions, PIECES_FIT-i))
print "Calculating possible combinations of tetrominoes from all placements (%d combinations)." % search_space
start_time = time.time()
combinations = []
timer = time.time()
prev_i = 0
    pool = multiprocessing.Pool() # Use multiple processes to leverage maximum processing power
#for i, res in enumerate( itertools.imap(check_possibility, itertools.combinations(positions, PIECES_FIT)) ):
for i, res in enumerate( pool.imap_unordered(check_possibility, itertools.izip(hw_iterator(), itertools.chain(*iterables)), max(5, search_space/500)) ):
if res:
combinations.append(res)
elapsed = time.time() - timer
        if elapsed > NOTIFY_INTERVAL and i != 0: # If NOTIFY_INTERVAL seconds have elapsed
pps = (i-prev_i)/elapsed
print "Searched %d/%d placements (%.1f%% complete, %.0f pieces/sec, ~%s remaining)" % (i, search_space, (i/float(search_space))*100, pps, time_output((search_space-i)/pps))
prev_i = i
timer = time.time()
pool.terminate()
lc = len(combinations)
print "There are %d possible combinations of a maximum of %d tetrominoes within the %d positions." % (lc, PIECES_FIT, search_space)
print "The calculation took %s." % time_output(time.time() - start_time)
if args.out_p:
pickle.dump(combinations, open(args.out_p,'wb'))
print "Output saved to '%s'." % args.out_p
calculate_valid(combinations)
# Check validity
def check_validity(data):
global HEIGHT, WIDTH
hw, pieces = data
height, width = hw
HEIGHT = height
WIDTH = width
board = np.zeros((HEIGHT, WIDTH), np.bool)
pos = True
for p in pieces:
if valid(p.data, board):
board = np.logical_or(p.data, board)
else:
return None
if pos:
return pieces
# We permute over all possible combinations and rotations of pieces to see which
# are valid tetris plays.
def calculate_valid(possibilities):
lp = len(possibilities)
search_space = lp * factorial(PIECES_FIT)
start_time = time.time()
print "Calculating valid permutations of tetrominoes from all possible (%d permutations)." % search_space
combinations = []
timer = time.time()
prev_i = 0
counter = 0
    pool = multiprocessing.Pool() # Use multiple processes to leverage maximum processing power
for possibility in possibilities:
# We permute every combination to work out the orders in which it would be valid
#for i, res in enumerate( itertools.imap(check_validity, itertools.permutations(possibility, len(possibility))) ):
for i, res in enumerate( pool.imap_unordered(check_validity, itertools.izip(hw_iterator(), itertools.permutations(possibility, len(possibility))), max(5,search_space/20)) ):
if res:
combinations.append(res)
counter += 1
elapsed = time.time() - timer
            if elapsed > NOTIFY_INTERVAL and i != 0: # If NOTIFY_INTERVAL seconds have elapsed
pps = (i-prev_i)/elapsed
print "Searched %d/%d placements (%.1f%% complete, %.0f pieces/sec, ~%s remaining)" % (i, search_space, (i/float(search_space))*100, pps, time_output((search_space-i)/pps))
prev_i = i
timer = time.time()
pool.terminate()
print counter
lc = len(combinations)
print "There are %d valid permutations of a maximum of %d tetrominoes within the %d possibilities." % (lc, PIECES_FIT, search_space)
print "The calculation took %s." % time_output(time.time() - start_time)
if args.out_v:
pickle.dump(combinations, open(args.out_v,'wb'))
print "Output saved to '%s'." % args.out_v
# for c in combinations:
# found = False
# for e in c:
# if e.piece in [5, 6] or found:
# found = True
# break
# if found:
# print_multi_board(to_byte_matrix(c))
# print
combinations.sort()
create_tree(combinations)
# Creates tree from sorted list of tuples of actions
# "permutations" assumes a sorted list of permutations
def create_tree(permutations):
print "Converting %d permutations into decision tree." % len(permutations)
# Create root tree node. It has no parent and maximal utility.
root = State(BOARD, None, np.zeros((HEIGHT,WIDTH), np.bool))
root.utility = float('inf') # Utility of this action
# Terminal nodes are used to reverse traverse the tree to calculate the max_utility
term_nodes = []
print "Calculating utilities."
for nodes in permutations:
actions = []
parents = []
children = []
cur_parent = root
board_state = np.zeros((HEIGHT,WIDTH), np.bool)
for i, p in enumerate(nodes):
board_state = np.logical_or(board_state, p.data)
a = p.get_action()
actions.append(a)
if a not in cur_parent.actions[a.piece].keys(): # Make sure we don't override the state node
s = State(BOARD, cur_parent, board_state)
# print "%s{%s}.actions[%d][%s] = %s{%s}" % (cur_parent, hex(id(cur_parent)), a.piece, a, s, hex(id(s)))
cur_parent.actions[a.piece][a] = s
cur_parent = s
else:
cur_parent = cur_parent.actions[a.piece][a]
# Get list of memory references when traversing downwards
cur_state = root
drilldown = []
for a in actions:
drilldown.append(id(cur_state))
cur_state = cur_state.actions[a.piece][a]
drilldown.append(id(cur_state))
drilldown.reverse()
# Get list of memory references when traversing upwards
cur_state = cur_parent
i = 0
drillup = []
while cur_state.parent is not None and i <= PIECES_FIT:
drillup.append(id(cur_state))
cur_state = cur_state.parent
i += 1
drillup.append(id(cur_state))
# Sanity check to ensure that parent->children == children->parent
if not (drillup == drilldown):
print "Uh oh, something is wrong!"
print drilldown
print drillup
print
# cur_parent is the terminal node (at least currently)
cur_parent.max_utility = cur_parent.utility # The maximum utility of a terminal node is itself
term_nodes.append(cur_parent)
# Reverse traverse the tree to calculate the max_utility
print "Calculating max utilities."
for n in term_nodes:
i = 0
while n.parent is not None:
c = n
n = n.parent
if c.max_utility > n.max_utility:
n.max_utility = c.max_utility
i += 1
if i > PIECES_FIT:
break
print "We seem to be stuck in a loop, exiting"
if n != root:
print "Something is very wrong. The final node isn't the parent."
print "Tree created."
if args.out_t:
pickle.dump(root, open(args.out_t,'wb'))
print "Output saved to '%s'." % args.out_t
# Enter an interactive shell
# import code<|fim▁hole|>
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description='Computes a Tetris decision tree for a NxM sized grid'
)
parser.add_argument('--width', metavar='WIDTH', type=int,
default=WIDTH, help='width of Tetris grid')
parser.add_argument('--height', metavar='HEIGHT', type=int,
default=HEIGHT, help='height of Tetris grid')
pin = parser.add_mutually_exclusive_group()
pin.add_argument('--in-p', metavar='IN_P', type=str,
help='import possibilities and resume program')
pin.add_argument('--in-v', metavar='IN_V', type=str,
help='import valid permutations and resume program')
pout = parser.add_argument_group('output')
pout.add_argument('--out-p', metavar='OUT_P', type=str,
default='possible.p', help='save possible combinations [default: possible.p]')
pout.add_argument('--out-v', metavar='OUT_V', type=str,
default='valid.p', help='save valid permutations [default: valid.p]')
pout.add_argument('--out-t', metavar='OUT_T', type=str,
default='tree.p', help='save generated tree [default: tree.p]')
args = parser.parse_args()
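    # Example invocations (illustrative; the flags are the ones defined above):
    #   python tree_generator.py --width 4 --height 4 --out-t tree.p
    #   python tree_generator.py --in-p possible.p   # resume from saved possibilities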
WIDTH = args.width # Width of board
HEIGHT = args.height # Height of board
BOARD = Board(HEIGHT, WIDTH)
PIECES_FIT = (WIDTH * HEIGHT) / 4
if sys.version_info[:3] != (2, 7, 4):
print "WARNING: This program was designed to work on Python 2.7.4."
print " Not using that version could cause pickle compatibility issues."
if args.in_p:
p = pickle.load( open(args.in_p,'rb') )
calculate_valid(p)
elif args.in_v:
p = pickle.load( open(args.in_v,'rb') )
create_tree(p)
else:
calculate_positions()
print "Program complete."<|fim▁end|> | # vars = globals().copy()
# vars.update(locals())
# shell = code.InteractiveConsole(vars)
# shell.interact() |
<|file_name|>DeinterlacerMode.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.<|fim▁hole|>
import javax.annotation.Generated;
/**
* Use Deinterlacer (DeinterlaceMode) to choose how the service will do deinterlacing. Default is Deinterlace. -
* Deinterlace converts interlaced to progressive. - Inverse telecine converts Hard Telecine 29.97i to progressive
* 23.976p. - Adaptive auto-detects and converts to progressive.
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public enum DeinterlacerMode {
DEINTERLACE("DEINTERLACE"),
INVERSE_TELECINE("INVERSE_TELECINE"),
ADAPTIVE("ADAPTIVE");
private String value;
private DeinterlacerMode(String value) {
this.value = value;
}
@Override
public String toString() {
return this.value;
}
/**
* Use this in place of valueOf.
*
* @param value
* real value
* @return DeinterlacerMode corresponding to the value
*
* @throws IllegalArgumentException
* If the specified value does not map to one of the known values in this enum.
*/
public static DeinterlacerMode fromValue(String value) {
if (value == null || "".equals(value)) {
throw new IllegalArgumentException("Value cannot be null or empty!");
}
for (DeinterlacerMode enumEntry : DeinterlacerMode.values()) {
if (enumEntry.toString().equals(value)) {
return enumEntry;
}
}
throw new IllegalArgumentException("Cannot create enum from " + value + " value!");
}
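
    // Illustrative usage sketch (an editorial addition, not part of the generated SDK source):
    //   DeinterlacerMode mode = DeinterlacerMode.fromValue("ADAPTIVE");
    //   assert mode == DeinterlacerMode.ADAPTIVE;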
}<|fim▁end|> | */
package com.amazonaws.services.mediaconvert.model; |
<|file_name|>config.py<|end_file_name|><|fim▁begin|><|fim▁hole|>##
## This file is part of wfrog
##
## wfrog is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import log
import yaml
import inspect
import sys
import os.path
import copy
from Cheetah.Template import Template
wfrog_version = "0.8.2.99-git"
class Configurer(object):
default_filename = None
module_map = None
log_configurer = log.LogConfigurer()
logger = logging.getLogger('config')
def __init__(self, module_map):
self.module_map = module_map
self.extensions = {}
def add_options(self, opt_parser):
opt_parser.add_option("-f", "--config", dest="config",
help="Configuration file (in yaml)", metavar="CONFIG_FILE")
opt_parser.add_option("-s", "--settings", dest="settings",
help="Settings file (in yaml)", metavar="SETTINGS_FILE")
opt_parser.add_option("-H", action="store_true", dest="help_list", help="Gives help on the configuration file and the list of possible config !elements in the yaml config file")
opt_parser.add_option("-E", dest="help_element", metavar="ELEMENT", help="Gives help about a config !element")
opt_parser.add_option("-e", "--extensions", dest="extension_names", metavar="MODULE1,MODULE2,...", help="Comma-separated list of modules containing custom configuration elements")
self.log_configurer.add_options(opt_parser)
def configure(self, options, component, config_file, settings_file=None, embedded=False):
self.config_file = config_file
self.settings_file = settings_file
if options.extension_names:
for ext in options.extension_names.split(","):
self.logger.debug("Loading extension module '"+ext+"'")
self.extensions[ext]=__import__(ext)
if options.help_list:
if component.__doc__ is not None:
print component.__doc__
for (k,v) in self.module_map:
print k
print "-"*len(k) +"\n"
self.print_help(v)
if options.extension_names:
print "Extensions"
print "----------\n"
for ext in self.extensions:
print "[" + ext + "]"
print
self.print_help(self.extensions[ext])
# Adds logger documentation
print self.log_configurer.__doc__
print " Use option -H ELEMENT for help on a particular !element"
sys.exit()
if options.help_element:
element = options.help_element
            if element[0] != '!':
element = '!' + element
desc = {}
for(k,v) in self.module_map:
desc.update(self.get_help_desc(v))
if len(desc) == 0:
for ext in self.extensions:
desc.update(self.get_help_desc(self.extensions[ext]))
if desc.has_key(element):
print
print element + " [" + desc[element][1] +"]"
print " " + desc[element][0]
print
else:
print "Element "+element+" not found or not documented"
sys.exit()
if not embedded and options.config:
self.config_file = options.config
settings_warning=False
if self.settings_file is None:
if options.settings is not None:
self.settings_file = options.settings
else:
settings_warning=True
self.settings_file = os.path.dirname(self.config_file)+'/../../wfcommon/config/default-settings.yaml'
settings = yaml.load( file(self.settings_file, 'r') )
variables = {}
variables['settings']=settings
config = yaml.load( str(Template(file=file(self.config_file, "r"), searchList=[variables])))
if settings is not None:
context = copy.deepcopy(settings)
else:
context = {}
context['_yaml_config_file'] = self.config_file
context['os']=sys.platform
if not embedded:
self.log_configurer.configure(options, config, context)
self.logger.info("Starting wfrog " + wfrog_version)
if settings_warning:
self.logger.warn('User settings are missing. Loading default ones. Run \'wfrog -S\' for user settings setup.')
self.logger.info("Loaded settings file " + os.path.normpath(self.settings_file))
self.logger.debug('Loaded settings %s', repr(settings))
self.logger.debug("Loaded config file " + os.path.normpath(self.config_file))
if config.has_key('init'):
for k,v in config['init'].iteritems():
self.logger.debug("Initializing "+k)
try:
v.init(context=context)
except AttributeError:
                    pass # In case the element has no init method
return ( config, context )
def print_help(self, module):
desc = self.get_help_desc(module, summary=True)
sorted = desc.keys()
sorted.sort()
for k in sorted:
print k
print " " + desc[k][0]
print
def get_help_desc(self, module, summary=False):
self.logger.debug("Getting info on module '"+module.__name__+"'")
elements = inspect.getmembers(module, lambda l : inspect.isclass(l) and yaml.YAMLObject in inspect.getmro(l))
desc={}
for element in elements:
self.logger.debug("Getting doc of "+element[0])
# Gets the documentation of the first superclass
superclass = inspect.getmro(element[1])[1]
fulldoc=superclass.__doc__
            # Add the doc of the super-super-class if _element_doc is defined on it
if hasattr(inspect.getmro(superclass)[1], "_element_doc") and inspect.getmro(superclass)[1].__doc__ is not None:
fulldoc = fulldoc + inspect.getmro(superclass)[1].__doc__
firstline=fulldoc.split(".")[0]
self.logger.debug(firstline)
module_name = module.__name__.split('.')[-1]
if summary:
desc[element[1].yaml_tag] = [ firstline, module_name ]
else:
desc[element[1].yaml_tag] = [ fulldoc, module_name ]
return desc<|fim▁end|> | ## Copyright 2009 Laurent Bovet <[email protected]>
## Jordi Puigsegur <[email protected]> |
<|file_name|>storage_profile_py3.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class StorageProfile(Model):
"""Specifies the storage settings for the virtual machine disks.
:param image_reference: Specifies information about the image to use. You
can specify information about platform images, marketplace images, or
virtual machine images. This element is required when you want to use a
platform image, marketplace image, or virtual machine image, but is not
used in other creation operations.
:type image_reference:
~azure.mgmt.compute.v2016_03_30.models.ImageReference
:param os_disk: Specifies information about the operating system disk used
by the virtual machine. <br><br> For more information about disks, see
[About disks and VHDs for Azure virtual
machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-about-disks-vhds?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json).
:type os_disk: ~azure.mgmt.compute.v2016_03_30.models.OSDisk
:param data_disks: Specifies the parameters that are used to add a data<|fim▁hole|> """
_attribute_map = {
'image_reference': {'key': 'imageReference', 'type': 'ImageReference'},
'os_disk': {'key': 'osDisk', 'type': 'OSDisk'},
'data_disks': {'key': 'dataDisks', 'type': '[DataDisk]'},
}
def __init__(self, *, image_reference=None, os_disk=None, data_disks=None, **kwargs) -> None:
super(StorageProfile, self).__init__(**kwargs)
self.image_reference = image_reference
self.os_disk = os_disk
self.data_disks = data_disks<|fim▁end|> | disk to a virtual machine. <br><br> For more information about disks, see
[About disks and VHDs for Azure virtual
machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-about-disks-vhds?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json).
:type data_disks: list[~azure.mgmt.compute.v2016_03_30.models.DataDisk] |
<|file_name|>test_merge.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 TiKV Project Authors. Licensed under Apache-2.0.
use std::iter::*;
use std::sync::atomic::Ordering;
use std::sync::*;
use std::thread;
use std::time::*;
use kvproto::kvrpcpb::Context;
use kvproto::raft_cmdpb::CmdType;
use kvproto::raft_serverpb::{PeerState, RegionLocalState};
use raft::eraftpb::MessageType;
use engine_rocks::Compat;
use engine_traits::Peekable;
use engine_traits::{CF_RAFT, CF_WRITE};
use pd_client::PdClient;
use test_raftstore::*;
use tikv::storage::kv::SnapContext;
use tikv_util::config::*;
use tikv_util::HandyRwLock;
/// Test if merge is working as expected in a general condition.
#[test]
fn test_node_base_merge() {
let mut cluster = new_node_cluster(0, 3);
configure_for_merge(&mut cluster);
cluster.run();
cluster.must_put(b"k1", b"v1");
cluster.must_put(b"k3", b"v3");
for i in 0..3 {
must_get_equal(&cluster.get_engine(i + 1), b"k1", b"v1");
must_get_equal(&cluster.get_engine(i + 1), b"k3", b"v3");
}
let pd_client = Arc::clone(&cluster.pd_client);
let region = pd_client.get_region(b"k1").unwrap();
cluster.must_split(®ion, b"k2");
let left = pd_client.get_region(b"k1").unwrap();
let right = pd_client.get_region(b"k2").unwrap();
assert_eq!(region.get_id(), right.get_id());
assert_eq!(left.get_end_key(), right.get_start_key());
assert_eq!(right.get_start_key(), b"k2");
let get = new_request(
right.get_id(),
right.get_region_epoch().clone(),
vec![new_get_cmd(b"k1")],
false,
);
debug!("requesting {:?}", get);
let resp = cluster
.call_command_on_leader(get, Duration::from_secs(5))
.unwrap();
assert!(resp.get_header().has_error(), "{:?}", resp);
assert!(
resp.get_header().get_error().has_key_not_in_region(),
"{:?}",
resp
);
pd_client.must_merge(left.get_id(), right.get_id());
let region = pd_client.get_region(b"k1").unwrap();
assert_eq!(region.get_id(), right.get_id());
assert_eq!(region.get_start_key(), left.get_start_key());
assert_eq!(region.get_end_key(), right.get_end_key());
let origin_epoch = left.get_region_epoch();
let new_epoch = region.get_region_epoch();
// PrepareMerge + CommitMerge, so it should be 2.
assert_eq!(new_epoch.get_version(), origin_epoch.get_version() + 2);
assert_eq!(new_epoch.get_conf_ver(), origin_epoch.get_conf_ver());
let get = new_request(
region.get_id(),
new_epoch.to_owned(),
vec![new_get_cmd(b"k1")],
false,
);
debug!("requesting {:?}", get);
let resp = cluster
.call_command_on_leader(get, Duration::from_secs(5))
.unwrap();
assert!(!resp.get_header().has_error(), "{:?}", resp);
assert_eq!(resp.get_responses()[0].get_get().get_value(), b"v1");
let version = left.get_region_epoch().get_version();
let conf_ver = left.get_region_epoch().get_conf_ver();
'outer: for i in 1..4 {
let state_key = keys::region_state_key(left.get_id());
let mut state = RegionLocalState::default();
for _ in 0..3 {
state = cluster
.get_engine(i)
.c()
.get_msg_cf(CF_RAFT, &state_key)
.unwrap()
.unwrap();
if state.get_state() == PeerState::Tombstone {
let epoch = state.get_region().get_region_epoch();
assert_eq!(epoch.get_version(), version + 1);
assert_eq!(epoch.get_conf_ver(), conf_ver + 1);
continue 'outer;
}
thread::sleep(Duration::from_millis(500));
}
panic!("store {} is still not merged: {:?}", i, state);
}
cluster.must_put(b"k4", b"v4");
}
#[test]
fn test_node_merge_with_slow_learner() {
let mut cluster = new_node_cluster(0, 2);
configure_for_merge(&mut cluster);
cluster.cfg.raft_store.raft_log_gc_threshold = 40;
cluster.cfg.raft_store.raft_log_gc_count_limit = 40;
cluster.cfg.raft_store.merge_max_log_gap = 15;
cluster.pd_client.disable_default_operator();
// Create a cluster with peer 1 as leader and peer 2 as learner.
let r1 = cluster.run_conf_change();
let pd_client = Arc::clone(&cluster.pd_client);
pd_client.must_add_peer(r1, new_learner_peer(2, 2));
// Split the region.
let pd_client = Arc::clone(&cluster.pd_client);
let region = pd_client.get_region(b"k1").unwrap();
cluster.must_split(®ion, b"k2");
let left = pd_client.get_region(b"k1").unwrap();
let right = pd_client.get_region(b"k2").unwrap();
assert_eq!(region.get_id(), right.get_id());
assert_eq!(left.get_end_key(), right.get_start_key());
assert_eq!(right.get_start_key(), b"k2");
// Make sure the leader has received the learner's last index.
cluster.must_put(b"k1", b"v1");
cluster.must_put(b"k3", b"v3");
must_get_equal(&cluster.get_engine(2), b"k1", b"v1");
must_get_equal(&cluster.get_engine(2), b"k3", b"v3");
cluster.add_send_filter(IsolationFilterFactory::new(2));
(0..20).for_each(|i| cluster.must_put(b"k1", format!("v{}", i).as_bytes()));
// Merge 2 regions under isolation should fail.
let merge = new_prepare_merge(right.clone());
let req = new_admin_request(left.get_id(), left.get_region_epoch(), merge);
let resp = cluster
.call_command_on_leader(req, Duration::from_secs(3))
.unwrap();
assert!(
resp.get_header()
.get_error()
.get_message()
.contains("log gap")
);
cluster.clear_send_filters();
cluster.must_put(b"k11", b"v100");
must_get_equal(&cluster.get_engine(1), b"k11", b"v100");
must_get_equal(&cluster.get_engine(2), b"k11", b"v100");
pd_client.must_merge(left.get_id(), right.get_id());
// Test slow learner will be cleaned up when merge can't be continued.
let region = pd_client.get_region(b"k1").unwrap();
cluster.must_split(®ion, b"k5");
cluster.must_put(b"k4", b"v4");
cluster.must_put(b"k5", b"v5");
must_get_equal(&cluster.get_engine(2), b"k4", b"v4");
must_get_equal(&cluster.get_engine(2), b"k5", b"v5");
let left = pd_client.get_region(b"k1").unwrap();
let right = pd_client.get_region(b"k5").unwrap();
cluster.add_send_filter(IsolationFilterFactory::new(2));
pd_client.must_merge(left.get_id(), right.get_id());
let state1 = cluster.truncated_state(right.get_id(), 1);
(0..50).for_each(|i| cluster.must_put(b"k2", format!("v{}", i).as_bytes()));
// Wait to trigger compact raft log
cluster.wait_log_truncated(right.get_id(), 1, state1.get_index() + 1);
cluster.clear_send_filters();
cluster.must_put(b"k6", b"v6");
must_get_equal(&cluster.get_engine(2), b"k6", b"v6");
}
/// Test whether merge will be aborted if prerequisites are not met.
// FIXME(nrc) failing on CI only
#[cfg(feature = "protobuf-codec")]
#[test]
fn test_node_merge_prerequisites_check() {
let mut cluster = new_node_cluster(0, 3);
configure_for_merge(&mut cluster);
let pd_client = Arc::clone(&cluster.pd_client);
cluster.run();
cluster.must_put(b"k1", b"v1");
cluster.must_put(b"k3", b"v3");
let region = pd_client.get_region(b"k1").unwrap();
cluster.must_split(®ion, b"k2");
let left = pd_client.get_region(b"k1").unwrap();
let right = pd_client.get_region(b"k2").unwrap();
let left_on_store1 = find_peer(&left, 1).unwrap().to_owned();
cluster.must_transfer_leader(left.get_id(), left_on_store1);
let right_on_store1 = find_peer(&right, 1).unwrap().to_owned();
cluster.must_transfer_leader(right.get_id(), right_on_store1);
// first MsgAppend will append log, second MsgAppend will set commit index,
    // so only the first MsgAppend is allowed through, leaving the source peer with uncommitted entries.
cluster.add_send_filter(CloneFilterFactory(
RegionPacketFilter::new(left.get_id(), 3)
.direction(Direction::Recv)
.msg_type(MessageType::MsgAppend)
.allow(1),
));<|fim▁hole|> cluster.add_send_filter(CloneFilterFactory(
RegionPacketFilter::new(left.get_id(), 3)
.msg_type(MessageType::MsgHeartbeat)
.direction(Direction::Recv),
));
cluster.must_split(&left, b"k11");
let res = cluster.try_merge(left.get_id(), right.get_id());
// log gap (min_committed, last_index] contains admin entries.
assert!(res.get_header().has_error(), "{:?}", res);
cluster.clear_send_filters();
cluster.must_put(b"k22", b"v22");
must_get_equal(&cluster.get_engine(3), b"k22", b"v22");
cluster.add_send_filter(CloneFilterFactory(RegionPacketFilter::new(
right.get_id(),
3,
)));
// It doesn't matter if the index and term is correct.
let compact_log = new_compact_log_request(100, 10);
let req = new_admin_request(right.get_id(), right.get_region_epoch(), compact_log);
debug!("requesting {:?}", req);
let res = cluster
.call_command_on_leader(req, Duration::from_secs(3))
.unwrap();
assert!(res.get_header().has_error(), "{:?}", res);
let res = cluster.try_merge(right.get_id(), left.get_id());
// log gap (min_matched, last_index] contains admin entries.
assert!(res.get_header().has_error(), "{:?}", res);
cluster.clear_send_filters();
cluster.must_put(b"k23", b"v23");
must_get_equal(&cluster.get_engine(3), b"k23", b"v23");
cluster.add_send_filter(CloneFilterFactory(RegionPacketFilter::new(
right.get_id(),
3,
)));
let mut large_bytes = vec![b'k', b'3'];
// 3M
large_bytes.extend(repeat(b'0').take(1024 * 1024 * 3));
cluster.must_put(&large_bytes, &large_bytes);
cluster.must_put(&large_bytes, &large_bytes);
// So log gap now contains 12M data, which exceeds the default max entry size.
let res = cluster.try_merge(right.get_id(), left.get_id());
// log gap contains admin entries.
assert!(res.get_header().has_error(), "{:?}", res);
cluster.clear_send_filters();
cluster.must_put(b"k24", b"v24");
must_get_equal(&cluster.get_engine(3), b"k24", b"v24");
}
/// Test if stale peer will be handled properly after merge.
#[test]
fn test_node_check_merged_message() {
let mut cluster = new_node_cluster(0, 4);
configure_for_merge(&mut cluster);
ignore_merge_target_integrity(&mut cluster);
let pd_client = Arc::clone(&cluster.pd_client);
pd_client.disable_default_operator();
cluster.run_conf_change();
cluster.must_put(b"k1", b"v1");
cluster.must_put(b"k3", b"v3");
// test if stale peer before conf removal is destroyed automatically
let mut region = pd_client.get_region(b"k1").unwrap();
pd_client.must_add_peer(region.get_id(), new_peer(2, 2));
pd_client.must_add_peer(region.get_id(), new_peer(3, 3));
cluster.must_split(®ion, b"k2");
let mut left = pd_client.get_region(b"k1").unwrap();
let mut right = pd_client.get_region(b"k2").unwrap();
pd_client.must_add_peer(left.get_id(), new_peer(4, 4));
must_get_equal(&cluster.get_engine(4), b"k1", b"v1");
cluster.add_send_filter(IsolationFilterFactory::new(4));
pd_client.must_remove_peer(left.get_id(), new_peer(4, 4));
pd_client.must_merge(left.get_id(), right.get_id());
cluster.clear_send_filters();
must_get_none(&cluster.get_engine(4), b"k1");
// test gc work under complicated situation.
cluster.must_put(b"k5", b"v5");
region = pd_client.get_region(b"k2").unwrap();
cluster.must_split(®ion, b"k2");
region = pd_client.get_region(b"k4").unwrap();
cluster.must_split(®ion, b"k4");
left = pd_client.get_region(b"k1").unwrap();
let middle = pd_client.get_region(b"k3").unwrap();
let middle_on_store1 = find_peer(&middle, 1).unwrap().to_owned();
cluster.must_transfer_leader(middle.get_id(), middle_on_store1);
right = pd_client.get_region(b"k5").unwrap();
let left_on_store3 = find_peer(&left, 3).unwrap().to_owned();
pd_client.must_remove_peer(left.get_id(), left_on_store3);
must_get_none(&cluster.get_engine(3), b"k1");
cluster.add_send_filter(IsolationFilterFactory::new(3));
left = pd_client.get_region(b"k1").unwrap();
pd_client.must_add_peer(left.get_id(), new_peer(3, 5));
left = pd_client.get_region(b"k1").unwrap();
pd_client.must_merge(middle.get_id(), left.get_id());
pd_client.must_merge(right.get_id(), left.get_id());
cluster.must_delete(b"k3");
cluster.must_delete(b"k5");
cluster.must_put(b"k4", b"v4");
cluster.clear_send_filters();
let engine3 = cluster.get_engine(3);
must_get_equal(&engine3, b"k1", b"v1");
must_get_equal(&engine3, b"k4", b"v4");
must_get_none(&engine3, b"k3");
must_get_none(&engine3, b"v5");
}
#[test]
fn test_node_merge_slow_split_right() {
test_node_merge_slow_split(true);
}
#[test]
fn test_node_merge_slow_split_left() {
test_node_merge_slow_split(false);
}
// Test if a merge is handled properly when there is an unfinished slow split before the merge.
fn test_node_merge_slow_split(is_right_derive: bool) {
let mut cluster = new_node_cluster(0, 3);
configure_for_merge(&mut cluster);
ignore_merge_target_integrity(&mut cluster);
let pd_client = Arc::clone(&cluster.pd_client);
pd_client.disable_default_operator();
cluster.cfg.raft_store.right_derive_when_split = is_right_derive;
cluster.run();
cluster.must_put(b"k1", b"v1");
cluster.must_put(b"k3", b"v3");
let region = pd_client.get_region(b"k1").unwrap();
cluster.must_split(®ion, b"k2");
let left = pd_client.get_region(b"k1").unwrap();
let right = pd_client.get_region(b"k3").unwrap();
let target_leader = right
.get_peers()
.iter()
.find(|p| p.get_store_id() == 1)
.unwrap()
.clone();
cluster.must_transfer_leader(right.get_id(), target_leader);
let target_leader = left
.get_peers()
.iter()
.find(|p| p.get_store_id() == 2)
.unwrap()
.clone();
cluster.must_transfer_leader(left.get_id(), target_leader);
must_get_equal(&cluster.get_engine(1), b"k3", b"v3");
// So cluster becomes:
// left region: 1 2(leader) I 3
// right region: 1(leader) 2 I 3
// I means isolation.(here just means 3 can not receive append log)
cluster.add_send_filter(CloneFilterFactory(
RegionPacketFilter::new(left.get_id(), 3)
.direction(Direction::Recv)
.msg_type(MessageType::MsgAppend),
));
cluster.add_send_filter(CloneFilterFactory(
RegionPacketFilter::new(right.get_id(), 3)
.direction(Direction::Recv)
.msg_type(MessageType::MsgAppend),
));
cluster.must_split(&right, b"k3");
// left region and right region on store 3 fall behind
// so after split, the new generated region is not on store 3 now
let right1 = pd_client.get_region(b"k2").unwrap();
let right2 = pd_client.get_region(b"k3").unwrap();
assert_ne!(right1.get_id(), right2.get_id());
pd_client.must_merge(left.get_id(), right1.get_id());
// after merge, the left region still exists on store 3
cluster.must_put(b"k0", b"v0");
cluster.clear_send_filters();
must_get_equal(&cluster.get_engine(3), b"k0", b"v0");
}
/// Test various cases that a store is isolated during merge.
#[test]
fn test_node_merge_dist_isolation() {
let mut cluster = new_node_cluster(0, 3);
configure_for_merge(&mut cluster);
ignore_merge_target_integrity(&mut cluster);
let pd_client = Arc::clone(&cluster.pd_client);
pd_client.disable_default_operator();
cluster.run();
cluster.must_put(b"k1", b"v1");
cluster.must_put(b"k3", b"v3");
let region = pd_client.get_region(b"k1").unwrap();
cluster.must_split(®ion, b"k2");
let left = pd_client.get_region(b"k1").unwrap();
let right = pd_client.get_region(b"k3").unwrap();
cluster.must_transfer_leader(right.get_id(), new_peer(1, 1));
let target_leader = left
.get_peers()
.iter()
.find(|p| p.get_store_id() == 3)
.unwrap()
.clone();
cluster.must_transfer_leader(left.get_id(), target_leader);
must_get_equal(&cluster.get_engine(1), b"k3", b"v3");
// So cluster becomes:
// left region: 1 I 2 3(leader)
// right region: 1(leader) I 2 3
// I means isolation.
cluster.add_send_filter(IsolationFilterFactory::new(1));
pd_client.must_merge(left.get_id(), right.get_id());
cluster.must_put(b"k4", b"v4");
cluster.clear_send_filters();
must_get_equal(&cluster.get_engine(1), b"k4", b"v4");
let region = pd_client.get_region(b"k1").unwrap();
cluster.must_split(®ion, b"k2");
let left = pd_client.get_region(b"k1").unwrap();
let right = pd_client.get_region(b"k3").unwrap();
cluster.must_put(b"k11", b"v11");
pd_client.must_remove_peer(right.get_id(), new_peer(3, 3));
cluster.must_put(b"k33", b"v33");
cluster.add_send_filter(CloneFilterFactory(
RegionPacketFilter::new(right.get_id(), 3).direction(Direction::Recv),
));
pd_client.must_add_peer(right.get_id(), new_peer(3, 4));
let right = pd_client.get_region(b"k3").unwrap();
// So cluster becomes:
// left region: 1 2 3(leader)
// right region: 1(leader) 2 [3]
// [x] means a replica exists logically but is not created on the store x yet.
let res = cluster.try_merge(region.get_id(), right.get_id());
// Leader can't find replica 3 of right region, so it fails.
assert!(res.get_header().has_error(), "{:?}", res);
let target_leader = left
.get_peers()
.iter()
.find(|p| p.get_store_id() == 2)
.unwrap()
.clone();
cluster.must_transfer_leader(left.get_id(), target_leader);
pd_client.must_merge(left.get_id(), right.get_id());
cluster.must_put(b"k4", b"v4");
cluster.clear_send_filters();
must_get_equal(&cluster.get_engine(3), b"k4", b"v4");
}
/// Similar to `test_node_merge_dist_isolation`, but makes the isolated store
/// fall far behind the others so they have to send it a snapshot.
#[test]
fn test_node_merge_brain_split() {
let mut cluster = new_node_cluster(0, 3);
configure_for_merge(&mut cluster);
ignore_merge_target_integrity(&mut cluster);
cluster.cfg.raft_store.raft_log_gc_threshold = 12;
cluster.cfg.raft_store.raft_log_gc_count_limit = 12;
cluster.run();
cluster.must_put(b"k1", b"v1");
cluster.must_put(b"k3", b"v3");
let pd_client = Arc::clone(&cluster.pd_client);
let region = pd_client.get_region(b"k1").unwrap();
cluster.must_split(®ion, b"k2");
let left = pd_client.get_region(b"k1").unwrap();
let right = pd_client.get_region(b"k3").unwrap();
// The split regions' leaders could be at store 3, so transfer them to peer 1.
let left_peer_1 = find_peer(&left, 1).cloned().unwrap();
cluster.must_transfer_leader(left.get_id(), left_peer_1);
let right_peer_1 = find_peer(&right, 1).cloned().unwrap();
cluster.must_transfer_leader(right.get_id(), right_peer_1);
cluster.must_put(b"k11", b"v11");
cluster.must_put(b"k21", b"v21");
// Make sure peers on store 3 have replicated latest update, which means
// they have already reported their progresses to leader.
must_get_equal(&cluster.get_engine(3), b"k11", b"v11");
must_get_equal(&cluster.get_engine(3), b"k21", b"v21");
cluster.add_send_filter(IsolationFilterFactory::new(3));
// So cluster becomes:
// left region: 1(leader) 2 I 3
// right region: 1(leader) 2 I 3
// I means isolation.
pd_client.must_merge(left.get_id(), right.get_id());
for i in 0..100 {
cluster.must_put(format!("k4{}", i).as_bytes(), b"v4");
}
must_get_equal(&cluster.get_engine(2), b"k40", b"v4");
must_get_equal(&cluster.get_engine(1), b"k40", b"v4");
cluster.clear_send_filters();
// Wait until store 3 get data after merging
must_get_equal(&cluster.get_engine(3), b"k40", b"v4");
let right_peer_3 = find_peer(&right, 3).cloned().unwrap();
cluster.must_transfer_leader(right.get_id(), right_peer_3);
cluster.must_put(b"k40", b"v5");
// Make sure the two regions are already merged on store 3.
let state_key = keys::region_state_key(left.get_id());
let state: RegionLocalState = cluster
.get_engine(3)
.c()
.get_msg_cf(CF_RAFT, &state_key)
.unwrap()
.unwrap();
assert_eq!(state.get_state(), PeerState::Tombstone);
must_get_equal(&cluster.get_engine(3), b"k40", b"v5");
for i in 1..100 {
must_get_equal(&cluster.get_engine(3), format!("k4{}", i).as_bytes(), b"v4");
}
let region = pd_client.get_region(b"k1").unwrap();
cluster.must_split(®ion, b"k2");
let region = pd_client.get_region(b"k2").unwrap();
cluster.must_split(®ion, b"k3");
let middle = pd_client.get_region(b"k2").unwrap();
let peer_on_store1 = find_peer(&middle, 1).unwrap().to_owned();
cluster.must_transfer_leader(middle.get_id(), peer_on_store1);
cluster.must_put(b"k22", b"v22");
cluster.must_put(b"k33", b"v33");
must_get_equal(&cluster.get_engine(3), b"k33", b"v33");
let left = pd_client.get_region(b"k1").unwrap();
pd_client.disable_default_operator();
let peer_on_left = find_peer(&left, 3).unwrap().to_owned();
pd_client.must_remove_peer(left.get_id(), peer_on_left);
let right = pd_client.get_region(b"k3").unwrap();
let peer_on_right = find_peer(&right, 3).unwrap().to_owned();
pd_client.must_remove_peer(right.get_id(), peer_on_right);
must_get_none(&cluster.get_engine(3), b"k11");
must_get_equal(&cluster.get_engine(3), b"k22", b"v22");
must_get_none(&cluster.get_engine(3), b"k33");
cluster.add_send_filter(IsolationFilterFactory::new(3));
pd_client.must_add_peer(left.get_id(), new_peer(3, 11));
pd_client.must_merge(middle.get_id(), left.get_id());
pd_client.must_remove_peer(left.get_id(), new_peer(3, 11));
pd_client.must_merge(right.get_id(), left.get_id());
pd_client.must_add_peer(left.get_id(), new_peer(3, 12));
let region = pd_client.get_region(b"k1").unwrap();
// So cluster becomes
// store 3: k2 [middle] k3
// store 1/2: [ new_left ] k4 [left]
cluster.must_split(®ion, b"k4");
cluster.must_put(b"k12", b"v12");
cluster.clear_send_filters();
must_get_equal(&cluster.get_engine(3), b"k12", b"v12");
}
/// Test whether approximate size and keys are updated after merge
#[test]
fn test_merge_approximate_size_and_keys() {
let mut cluster = new_node_cluster(0, 3);
cluster.cfg.raft_store.split_region_check_tick_interval = ReadableDuration::millis(20);
cluster.run();
let mut range = 1..;
let middle_key = put_cf_till_size(&mut cluster, CF_WRITE, 100, &mut range);
let max_key = put_cf_till_size(&mut cluster, CF_WRITE, 100, &mut range);
let pd_client = Arc::clone(&cluster.pd_client);
let region = pd_client.get_region(b"").unwrap();
cluster.must_split(®ion, &middle_key);
// make sure split check is invoked so size and keys are updated.
thread::sleep(Duration::from_millis(100));
let left = pd_client.get_region(b"").unwrap();
let right = pd_client.get_region(&max_key).unwrap();
assert_ne!(left, right);
// make sure all peer's approximate size is not None.
cluster.must_transfer_leader(right.get_id(), right.get_peers()[0].clone());
thread::sleep(Duration::from_millis(100));
cluster.must_transfer_leader(right.get_id(), right.get_peers()[1].clone());
thread::sleep(Duration::from_millis(100));
cluster.must_transfer_leader(right.get_id(), right.get_peers()[2].clone());
thread::sleep(Duration::from_millis(100));
let size = pd_client
.get_region_approximate_size(right.get_id())
.unwrap();
assert_ne!(size, 0);
let keys = pd_client
.get_region_approximate_keys(right.get_id())
.unwrap();
assert_ne!(keys, 0);
pd_client.must_merge(left.get_id(), right.get_id());
// make sure split check is invoked so size and keys are updated.
thread::sleep(Duration::from_millis(100));
let region = pd_client.get_region(b"").unwrap();
// size and keys should be updated.
assert_ne!(
pd_client
.get_region_approximate_size(region.get_id())
.unwrap(),
size
);
assert_ne!(
pd_client
.get_region_approximate_keys(region.get_id())
.unwrap(),
keys
);
    // After the merge and a subsequent leader transfer, the new leader's approximate size may be stale if it is not updated.
cluster.must_transfer_leader(region.get_id(), region.get_peers()[0].clone());
// make sure split check is invoked
thread::sleep(Duration::from_millis(100));
assert_ne!(
pd_client
.get_region_approximate_size(region.get_id())
.unwrap(),
size
);
assert_ne!(
pd_client
.get_region_approximate_keys(region.get_id())
.unwrap(),
keys
);
}
#[test]
fn test_node_merge_update_region() {
let mut cluster = new_node_cluster(0, 3);
configure_for_merge(&mut cluster);
// Election timeout and max leader lease is 1s.
configure_for_lease_read(&mut cluster, Some(100), Some(10));
cluster.run();
cluster.must_put(b"k1", b"v1");
cluster.must_put(b"k3", b"v3");
let pd_client = Arc::clone(&cluster.pd_client);
let region = pd_client.get_region(b"k1").unwrap();
cluster.must_split(®ion, b"k2");
let left = pd_client.get_region(b"k1").unwrap();
let right = pd_client.get_region(b"k2").unwrap();
// Make sure the leader is in lease.
cluster.must_put(b"k1", b"v2");
// "k3" is not in the range of left.
let get = new_request(
left.get_id(),
left.get_region_epoch().clone(),
vec![new_get_cmd(b"k3")],
false,
);
debug!("requesting key not in range {:?}", get);
let resp = cluster
.call_command_on_leader(get, Duration::from_secs(5))
.unwrap();
assert!(resp.get_header().has_error(), "{:?}", resp);
assert!(
resp.get_header().get_error().has_key_not_in_region(),
"{:?}",
resp
);
// Merge right to left.
pd_client.must_merge(right.get_id(), left.get_id());
let origin_leader = cluster.leader_of_region(left.get_id()).unwrap();
let new_leader = left
.get_peers()
.iter()
.cloned()
.find(|p| p.get_id() != origin_leader.get_id())
.unwrap();
// Make sure merge is done in the new_leader.
// There is only one region in the cluster, "k0" must belongs to it.
cluster.must_put(b"k0", b"v0");
must_get_equal(&cluster.get_engine(new_leader.get_store_id()), b"k0", b"v0");
// Transfer leadership to the new_leader.
cluster.must_transfer_leader(left.get_id(), new_leader);
// Make sure the leader is in lease.
cluster.must_put(b"k0", b"v1");
let new_region = pd_client.get_region(b"k2").unwrap();
let get = new_request(
new_region.get_id(),
new_region.get_region_epoch().clone(),
vec![new_get_cmd(b"k3")],
false,
);
debug!("requesting {:?}", get);
let resp = cluster
.call_command_on_leader(get, Duration::from_secs(5))
.unwrap();
assert!(!resp.get_header().has_error(), "{:?}", resp);
assert_eq!(resp.get_responses().len(), 1);
assert_eq!(resp.get_responses()[0].get_cmd_type(), CmdType::Get);
assert_eq!(resp.get_responses()[0].get_get().get_value(), b"v3");
}
/// Test if merge is working properly when the merge entries are empty but the commit index is not updated.
#[test]
fn test_node_merge_catch_up_logs_empty_entries() {
let mut cluster = new_node_cluster(0, 3);
configure_for_merge(&mut cluster);
cluster.run();
cluster.must_put(b"k1", b"v1");
cluster.must_put(b"k3", b"v3");
let pd_client = Arc::clone(&cluster.pd_client);
let region = pd_client.get_region(b"k1").unwrap();
let peer_on_store1 = find_peer(®ion, 1).unwrap().to_owned();
cluster.must_transfer_leader(region.get_id(), peer_on_store1);
cluster.must_split(®ion, b"k2");
let left = pd_client.get_region(b"k1").unwrap();
let right = pd_client.get_region(b"k2").unwrap();
// make sure the peer of left region on engine 3 has caught up logs.
cluster.must_put(b"k0", b"v0");
must_get_equal(&cluster.get_engine(3), b"k0", b"v0");
// first MsgAppend will append log, second MsgAppend will set commit index,
    // so only the first MsgAppend is allowed through, leaving the source peer with uncommitted entries.
cluster.add_send_filter(CloneFilterFactory(
RegionPacketFilter::new(left.get_id(), 3)
.direction(Direction::Recv)
.msg_type(MessageType::MsgAppend)
.allow(1),
));
// make the source peer have no way to know the uncommitted entries can be applied from heartbeat.
cluster.add_send_filter(CloneFilterFactory(
RegionPacketFilter::new(left.get_id(), 3)
.msg_type(MessageType::MsgHeartbeat)
.direction(Direction::Recv),
));
// make the source peer have no way to know the uncommitted entries can be applied from target region.
cluster.add_send_filter(CloneFilterFactory(
RegionPacketFilter::new(right.get_id(), 3)
.msg_type(MessageType::MsgAppend)
.direction(Direction::Recv),
));
pd_client.must_merge(left.get_id(), right.get_id());
cluster.must_region_not_exist(left.get_id(), 2);
cluster.shutdown();
cluster.clear_send_filters();
// as expected, merge process will forward the commit index
// and the source peer will be destroyed.
cluster.start().unwrap();
cluster.must_region_not_exist(left.get_id(), 3);
}
#[test]
fn test_merge_with_slow_promote() {
let mut cluster = new_node_cluster(0, 3);
configure_for_merge(&mut cluster);
let pd_client = Arc::clone(&cluster.pd_client);
pd_client.disable_default_operator();
let r1 = cluster.run_conf_change();
pd_client.must_add_peer(r1, new_peer(2, 2));
let region = pd_client.get_region(b"k1").unwrap();
cluster.must_split(®ion, b"k2");
let left = pd_client.get_region(b"k1").unwrap();
let right = pd_client.get_region(b"k2").unwrap();
pd_client.must_add_peer(left.get_id(), new_peer(3, left.get_id() + 3));
pd_client.must_add_peer(right.get_id(), new_learner_peer(3, right.get_id() + 3));
cluster.must_put(b"k1", b"v1");
cluster.must_put(b"k3", b"v3");
must_get_equal(&cluster.get_engine(3), b"k1", b"v1");
must_get_equal(&cluster.get_engine(3), b"k3", b"v3");
let delay_filter =
Box::new(RegionPacketFilter::new(right.get_id(), 3).direction(Direction::Recv));
cluster.sim.wl().add_send_filter(3, delay_filter);
pd_client.must_add_peer(right.get_id(), new_peer(3, right.get_id() + 3));
pd_client.must_merge(right.get_id(), left.get_id());
cluster.sim.wl().clear_send_filters(3);
cluster.must_transfer_leader(left.get_id(), new_peer(3, left.get_id() + 3));
}
/// Test whether an isolated store recovers properly if there is no target peer
/// on this store before the isolation.
/// A (-∞, k2), B [k2, +∞) on store 1,2,4
/// store 4 is isolated
/// B merges into A (the target peer of A is not created on store 4; it just exists logically)
/// A split => C (-∞, k3), A [k3, +∞)
/// Then network recovery
#[test]
fn test_merge_isolated_store_with_no_target_peer() {
let mut cluster = new_node_cluster(0, 4);
configure_for_merge(&mut cluster);
ignore_merge_target_integrity(&mut cluster);
cluster.cfg.raft_store.right_derive_when_split = true;
let pd_client = Arc::clone(&cluster.pd_client);
pd_client.disable_default_operator();
let r1 = cluster.run_conf_change();
pd_client.must_add_peer(r1, new_peer(2, 2));
pd_client.must_add_peer(r1, new_peer(3, 3));
for i in 0..10 {
cluster.must_put(format!("k{}", i).as_bytes(), b"v1");
}
let region = pd_client.get_region(b"k1").unwrap();
// (-∞, k2), [k2, +∞)
cluster.must_split(®ion, b"k2");
let left = pd_client.get_region(b"k1").unwrap();
let right = pd_client.get_region(b"k2").unwrap();
let left_on_store1 = find_peer(&left, 1).unwrap().to_owned();
cluster.must_transfer_leader(left.get_id(), left_on_store1);
let right_on_store1 = find_peer(&right, 1).unwrap().to_owned();
cluster.must_transfer_leader(right.get_id(), right_on_store1);
pd_client.must_add_peer(right.get_id(), new_peer(4, 4));
let right_on_store3 = find_peer(&right, 3).unwrap().to_owned();
pd_client.must_remove_peer(right.get_id(), right_on_store3);
// Ensure snapshot is sent and applied.
must_get_equal(&cluster.get_engine(4), b"k4", b"v1");
cluster.must_put(b"k22", b"v22");
// Ensure leader has updated its progress.
must_get_equal(&cluster.get_engine(4), b"k22", b"v22");
cluster.add_send_filter(IsolationFilterFactory::new(4));
pd_client.must_add_peer(left.get_id(), new_peer(4, 5));
let left_on_store3 = find_peer(&left, 3).unwrap().to_owned();
pd_client.must_remove_peer(left.get_id(), left_on_store3);
pd_client.must_merge(right.get_id(), left.get_id());
let new_left = pd_client.get_region(b"k1").unwrap();
// (-∞, k3), [k3, +∞)
cluster.must_split(&new_left, b"k3");
// Now new_left region range is [k3, +∞)
cluster.must_put(b"k345", b"v345");
cluster.clear_send_filters();
must_get_equal(&cluster.get_engine(4), b"k345", b"v345");
}
/// Test whether an isolated peer can recover when two other regions merge into its region
#[test]
fn test_merge_cascade_merge_isolated() {
let mut cluster = new_node_cluster(0, 3);
configure_for_merge(&mut cluster);
let pd_client = Arc::clone(&cluster.pd_client);
pd_client.disable_default_operator();
cluster.run();
let mut region = pd_client.get_region(b"k1").unwrap();
cluster.must_split(®ion, b"k2");
region = pd_client.get_region(b"k2").unwrap();
cluster.must_split(®ion, b"k3");
cluster.must_put(b"k1", b"v1");
cluster.must_put(b"k2", b"v2");
cluster.must_put(b"k3", b"v3");
must_get_equal(&cluster.get_engine(3), b"k1", b"v1");
must_get_equal(&cluster.get_engine(3), b"k2", b"v2");
must_get_equal(&cluster.get_engine(3), b"k3", b"v3");
let r1 = pd_client.get_region(b"k1").unwrap();
let r2 = pd_client.get_region(b"k2").unwrap();
let r3 = pd_client.get_region(b"k3").unwrap();
let r1_on_store1 = find_peer(&r1, 1).unwrap().to_owned();
cluster.must_transfer_leader(r1.get_id(), r1_on_store1);
let r2_on_store2 = find_peer(&r2, 2).unwrap().to_owned();
cluster.must_transfer_leader(r2.get_id(), r2_on_store2);
let r3_on_store1 = find_peer(&r3, 1).unwrap().to_owned();
cluster.must_transfer_leader(r3.get_id(), r3_on_store1);
cluster.add_send_filter(IsolationFilterFactory::new(3));
// r1, r3 both merge to r2
pd_client.must_merge(r1.get_id(), r2.get_id());
pd_client.must_merge(r3.get_id(), r2.get_id());
cluster.must_put(b"k4", b"v4");
cluster.clear_send_filters();
must_get_equal(&cluster.get_engine(3), b"k4", b"v4");
}
// Test if a learner can be destroyed properly when it's isolated and removed by conf change
// before its region merges into another region
#[test]
fn test_merge_isolated_not_in_merge_learner() {
let mut cluster = new_node_cluster(0, 3);
configure_for_merge(&mut cluster);
let pd_client = Arc::clone(&cluster.pd_client);
pd_client.disable_default_operator();
cluster.run_conf_change();
let region = pd_client.get_region(b"k1").unwrap();
cluster.must_split(®ion, b"k2");
let left = pd_client.get_region(b"k1").unwrap();
let right = pd_client.get_region(b"k2").unwrap();
let left_on_store1 = find_peer(&left, 1).unwrap().to_owned();
let right_on_store1 = find_peer(&right, 1).unwrap().to_owned();
pd_client.must_add_peer(left.get_id(), new_learner_peer(2, 2));
// Ensure this learner exists
cluster.must_put(b"k1", b"v1");
must_get_equal(&cluster.get_engine(2), b"k1", b"v1");
cluster.stop_node(2);
pd_client.must_remove_peer(left.get_id(), new_learner_peer(2, 2));
pd_client.must_add_peer(left.get_id(), new_peer(3, 3));
pd_client.must_remove_peer(left.get_id(), left_on_store1);
pd_client.must_add_peer(right.get_id(), new_peer(3, 4));
pd_client.must_remove_peer(right.get_id(), right_on_store1);
pd_client.must_merge(left.get_id(), right.get_id());
    // Add a new learner on store 2 to trigger peer 2 to send check-stale-peer messages to other peers
pd_client.must_add_peer(right.get_id(), new_learner_peer(2, 5));
cluster.must_put(b"k123", b"v123");
cluster.run_node(2).unwrap();
// We can see if the old peer 2 is destroyed
must_get_equal(&cluster.get_engine(2), b"k123", b"v123");
}
// Test if a learner can be destroyed properly when it's isolated and removed by conf change
// before another region merges into its region
#[test]
fn test_merge_isolated_stale_learner() {
let mut cluster = new_node_cluster(0, 3);
configure_for_merge(&mut cluster);
cluster.cfg.raft_store.right_derive_when_split = true;
// Do not rely on pd to remove stale peer
cluster.cfg.raft_store.max_leader_missing_duration = ReadableDuration::hours(2);
cluster.cfg.raft_store.abnormal_leader_missing_duration = ReadableDuration::minutes(10);
cluster.cfg.raft_store.peer_stale_state_check_interval = ReadableDuration::minutes(5);
let pd_client = Arc::clone(&cluster.pd_client);
pd_client.disable_default_operator();
cluster.run_conf_change();
let mut region = pd_client.get_region(b"k1").unwrap();
cluster.must_split(®ion, b"k2");
let left = pd_client.get_region(b"k1").unwrap();
let right = pd_client.get_region(b"k2").unwrap();
pd_client.must_add_peer(left.get_id(), new_learner_peer(2, 2));
// Ensure this learner exists
cluster.must_put(b"k1", b"v1");
must_get_equal(&cluster.get_engine(2), b"k1", b"v1");
cluster.stop_node(2);
pd_client.must_remove_peer(left.get_id(), new_learner_peer(2, 2));
pd_client.must_merge(right.get_id(), left.get_id());
region = pd_client.get_region(b"k1").unwrap();
cluster.must_split(&region, b"k2");
let new_left = pd_client.get_region(b"k1").unwrap();
assert_ne!(left.get_id(), new_left.get_id());
// Add a new learner on store 2 to trigger peer 2 to send check-stale-peer msgs to other peers
pd_client.must_add_peer(new_left.get_id(), new_learner_peer(2, 5));
cluster.must_put(b"k123", b"v123");
cluster.run_node(2).unwrap();
// Check that the old peer 2 has been destroyed
must_get_equal(&cluster.get_engine(2), b"k123", b"v123");
}
/// Test if a learner can be destroyed properly under the following conditions:
/// 1. A peer is isolated
/// 2. It is the last peer removed from its peer list
/// 3. Its region then merges into another region
/// 4. The isolation disappears
#[test]
fn test_merge_isolated_not_in_merge_learner_2() {
let mut cluster = new_node_cluster(0, 3);
configure_for_merge(&mut cluster);
let pd_client = Arc::clone(&cluster.pd_client);
pd_client.disable_default_operator();
cluster.run_conf_change();
let region = pd_client.get_region(b"k1").unwrap();
cluster.must_split(&region, b"k2");
let left = pd_client.get_region(b"k1").unwrap();
let right = pd_client.get_region(b"k2").unwrap();
let left_on_store1 = find_peer(&left, 1).unwrap().to_owned();
let right_on_store1 = find_peer(&right, 1).unwrap().to_owned();
pd_client.must_add_peer(left.get_id(), new_learner_peer(2, 2));
// Ensure this learner exists
cluster.must_put(b"k1", b"v1");
must_get_equal(&cluster.get_engine(2), b"k1", b"v1");
cluster.stop_node(2);
pd_client.must_add_peer(left.get_id(), new_peer(3, 3));
pd_client.must_remove_peer(left.get_id(), left_on_store1);
pd_client.must_add_peer(right.get_id(), new_peer(3, 4));
pd_client.must_remove_peer(right.get_id(), right_on_store1);
// The peer list of peer 2 is (1001, 1), (2, 2)
pd_client.must_remove_peer(left.get_id(), new_learner_peer(2, 2));
pd_client.must_merge(left.get_id(), right.get_id());
cluster.run_node(2).unwrap();
// When the abnormal leader missing duration has passed, the check-stale-peer msg will be sent to peer 1001.
// After that, a new peer list will be returned (2, 2) (3, 3).
// Then peer 2 sends the check-stale-peer msg to peer 3 and it will get a tombstone response.
// Finally peer 2 will be destroyed.
must_get_none(&cluster.get_engine(2), b"k1");
}
/// Test if a peer can be removed when its target peer has been removed and has not applied the
/// CommitMerge log.
#[test]
fn test_merge_remove_target_peer_isolated() {
let mut cluster = new_node_cluster(0, 4);
configure_for_merge(&mut cluster);
let pd_client = Arc::clone(&cluster.pd_client);
pd_client.disable_default_operator();
cluster.run_conf_change();
let mut region = pd_client.get_region(b"k1").unwrap();
pd_client.must_add_peer(region.get_id(), new_peer(2, 2));
pd_client.must_add_peer(region.get_id(), new_peer(3, 3));
cluster.must_split(&region, b"k2");
region = pd_client.get_region(b"k2").unwrap();
cluster.must_split(&region, b"k3");
let r1 = pd_client.get_region(b"k1").unwrap();
let r2 = pd_client.get_region(b"k2").unwrap();
let r3 = pd_client.get_region(b"k3").unwrap();
let r1_on_store1 = find_peer(&r1, 1).unwrap().to_owned();
cluster.must_transfer_leader(r1.get_id(), r1_on_store1);
let r2_on_store2 = find_peer(&r2, 2).unwrap().to_owned();
cluster.must_transfer_leader(r2.get_id(), r2_on_store2);
for i in 1..4 {
cluster.must_put(format!("k{}", i).as_bytes(), b"v1");
}
for i in 1..4 {
must_get_equal(&cluster.get_engine(3), format!("k{}", i).as_bytes(), b"v1");
}
cluster.add_send_filter(IsolationFilterFactory::new(3));
// Make region r2's epoch greater than the epoch of the r2 peer on store 3.
// That peer will be removed later, because its epoch is staler than the epoch at which r1 merges into r2.
pd_client.must_add_peer(r2.get_id(), new_peer(4, 4));
pd_client.must_remove_peer(r2.get_id(), new_peer(4, 4));
let r2_on_store3 = find_peer(&r2, 3).unwrap().to_owned();
let r3_on_store3 = find_peer(&r3, 3).unwrap().to_owned();
pd_client.must_merge(r1.get_id(), r2.get_id());
pd_client.must_remove_peer(r2.get_id(), r2_on_store3);
pd_client.must_remove_peer(r3.get_id(), r3_on_store3);
pd_client.must_merge(r2.get_id(), r3.get_id());
cluster.clear_send_filters();
for i in 1..4 {
must_get_none(&cluster.get_engine(3), format!("k{}", i).as_bytes());
}
}
#[test]
fn test_sync_max_ts_after_region_merge() {
use tikv::storage::{Engine, Snapshot};
let mut cluster = new_server_cluster(0, 3);
configure_for_merge(&mut cluster);
cluster.run();
// Transfer leader to node 1 first to ensure all operations happen on node 1
cluster.must_transfer_leader(1, new_peer(1, 1));
cluster.must_put(b"k1", b"v1");
cluster.must_put(b"k3", b"v3");
let region = cluster.get_region(b"k1");
cluster.must_split(&region, b"k2");
let left = cluster.get_region(b"k1");
let right = cluster.get_region(b"k3");
let cm = cluster.sim.read().unwrap().get_concurrency_manager(1);
let storage = cluster
.sim
.read()
.unwrap()
.storages
.get(&1)
.unwrap()
.clone();
let wait_for_synced = |cluster: &mut Cluster<ServerCluster>| {
let region_id = right.get_id();
let leader = cluster.leader_of_region(region_id).unwrap();
let epoch = cluster.get_region_epoch(region_id);
let mut ctx = Context::default();
ctx.set_region_id(region_id);
ctx.set_peer(leader);
ctx.set_region_epoch(epoch);
let snap_ctx = SnapContext {
pb_ctx: &ctx,
..Default::default()
};
let snapshot = storage.snapshot(snap_ctx).unwrap();
let max_ts_sync_status = snapshot.max_ts_sync_status.clone().unwrap();
for retry in 0..10 {
if max_ts_sync_status.load(Ordering::SeqCst) & 1 == 1 {
break;
}
thread::sleep(Duration::from_millis(1 << retry));
}
assert!(snapshot.is_max_ts_synced());
};
wait_for_synced(&mut cluster);
let max_ts = cm.max_ts();
cluster.pd_client.trigger_tso_failure();
// Merge left to right
cluster.pd_client.must_merge(left.get_id(), right.get_id());
wait_for_synced(&mut cluster);
let new_max_ts = cm.max_ts();
assert!(new_max_ts > max_ts);
}<|fim▁end|> | // make the source peer's commit index can't be updated by MsgHeartbeat. |
<|file_name|>crc32.rs<|end_file_name|><|fim▁begin|>//! Helper module to compute a CRC32 checksum
use std::io;
use std::io::prelude::*;
static CRC32_TABLE : [u32; 256] = [
0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419, 0x706af48f,
0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4, 0xe0d5e91e, 0x97d2d988,
0x09b64c2b, 0x7eb17cbd, 0xe7b82d07, 0x90bf1d91, 0x1db71064, 0x6ab020f2,
0xf3b97148, 0x84be41de, 0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7,
0x136c9856, 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9,
0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4, 0xa2677172,
0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b, 0x35b5a8fa, 0x42b2986c,
0xdbbbc9d6, 0xacbcf940, 0x32d86ce3, 0x45df5c75, 0xdcd60dcf, 0xabd13d59,
0x26d930ac, 0x51de003a, 0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423,
0xcfba9599, 0xb8bda50f, 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924,
0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190, 0x01db7106,
0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f, 0x9fbfe4a5, 0xe8b8d433,
0x7807c9a2, 0x0f00f934, 0x9609a88e, 0xe10e9818, 0x7f6a0dbb, 0x086d3d2d,
0x91646c97, 0xe6635c01, 0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e,
0x6c0695ed, 0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950,
0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3, 0xfbd44c65,
0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2, 0x4adfa541, 0x3dd895d7,
0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a, 0x346ed9fc, 0xad678846, 0xda60b8d0,
0x44042d73, 0x33031de5, 0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa,
0xbe0b1010, 0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f,
0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17, 0x2eb40d81,
0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6, 0x03b6e20c, 0x74b1d29a,
0xead54739, 0x9dd277af, 0x04db2615, 0x73dc1683, 0xe3630b12, 0x94643b84,
0x0d6d6a3e, 0x7a6a5aa8, 0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1,
0xf00f9344, 0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb,
0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a, 0x67dd4acc,
0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5, 0xd6d6a3e8, 0xa1d1937e,
0x38d8c2c4, 0x4fdff252, 0xd1bb67f1, 0xa6bc5767, 0x3fb506dd, 0x48b2364b,
0xd80d2bda, 0xaf0a1b4c, 0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55,
0x316e8eef, 0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236,
0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe, 0xb2bd0b28,
0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31, 0x2cd99e8b, 0x5bdeae1d,
0x9b64c2b0, 0xec63f226, 0x756aa39c, 0x026d930a, 0x9c0906a9, 0xeb0e363f,
0x72076785, 0x05005713, 0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38,
0x92d28e9b, 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242,
0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1, 0x18b74777,
0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c, 0x8f659eff, 0xf862ae69,
0x616bffd3, 0x166ccf45, 0xa00ae278, 0xd70dd2ee, 0x4e048354, 0x3903b3c2,
0xa7672661, 0xd06016f7, 0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc,
0x40df0b66, 0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9,
0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605, 0xcdd70693,
0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8, 0x5d681b02, 0x2a6f2b94,
0xb40bbe37, 0xc30c8ea1, 0x5a05df1b, 0x2d02ef8d
];
/// Update the checksum prev based upon the contents of buf.
pub fn update(prev: u32, buf: &[u8]) -> u32
{
let mut crc = !prev;
for &byte in buf.iter()
{
crc = CRC32_TABLE[((crc as u8) ^ byte) as usize] ^ (crc >> 8);
}
!crc
}
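// Illustrative usage (a sketch, not part of the original module): because
// `update` re-inverts the running CRC on entry and exit, it can be chained
// over successive chunks and still match a single-shot computation:
//
//     let mut crc = 0u32;
//     crc = update(crc, b"hello");
//     crc = update(crc, b" world");
//     assert_eq!(crc, update(0, b"hello world"));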
/// Reader that validates the CRC32 when it reaches the EOF.
pub struct Crc32Reader<R>
{
inner: R,
crc: u32,
check: u32,
}
impl<R> Crc32Reader<R>
{
/// Get a new Crc32Reader which checks the inner reader against the checksum.
pub fn new(inner: R, checksum: u32) -> Crc32Reader<R>
{
Crc32Reader
{
inner: inner,
crc: 0,
check: checksum,
}
}
fn check_matches(&self) -> bool
{
self.check == self.crc
}
}<|fim▁hole|>{
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize>
{
let count = match self.inner.read(buf)
{
Ok(0) if !self.check_matches() => { return Err(io::Error::new(io::ErrorKind::Other, "Invalid checksum")) },
Ok(n) => n,
Err(e) => return Err(e),
};
self.crc = update(self.crc, &buf[0..count]);
Ok(count)
}
}
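// Illustrative usage (assumed, not in the original file): wrap any `Read`
// source and the checksum is verified as the stream is drained. The expected
// value 0x0d4a1185 is derived from the "hello world" test vector below.
//
//     use std::io::Read;
//     let data: &[u8] = b"hello world";
//     let mut reader = Crc32Reader::new(data, 0x0d4a1185);
//     let mut out = Vec::new();
//     reader.read_to_end(&mut out).unwrap();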
#[cfg(test)]
mod test {
#[test]
fn samples() {
assert_eq!(super::update(0, b""), 0);
// test vectors from the iPXE project (input and output are bitwise negated)
assert_eq!(super::update(!0x12345678, b""), !0x12345678);
assert_eq!(super::update(!0xffffffff, b"hello world"), !0xf2b5ee7a);
assert_eq!(super::update(!0xffffffff, b"hello"), !0xc9ef5979);
assert_eq!(super::update(!0xc9ef5979, b" world"), !0xf2b5ee7a);
// Some vectors found on Rosetta code
assert_eq!(super::update(0, b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), 0x190A55AD);
assert_eq!(super::update(0, b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"), 0xFF6CAB0B);
assert_eq!(super::update(0, b"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0A\x0B\x0C\x0D\x0E\x0F\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1A\x1B\x1C\x1D\x1E\x1F"), 0x91267E8A);
}
}<|fim▁end|> |
impl<R: Read> Read for Crc32Reader<R> |
<|file_name|>MessengerProtocol.java<|end_file_name|><|fim▁begin|>/*
* MessengerProtocol.java
*
* Herald, An Instant Messenging Application
*
* Copyright © 2000 Chad Gibbons
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package net.sourceforge.herald;
/**
* This class contains field definitions and utilities as defined by
* the MSN Messenger V1.0 Protocol document.
*/
public final class MessengerProtocol {
public final static int PORT = 1863;
public final static String DIALECT_NAME = "MSNP2";
public final static String END_OF_COMMAND = "\r\n";
public final static String CMD_ACK = "ACK";
public final static String CMD_ADD = "ADD";
public final static String CMD_ANS = "ANS";
public final static String CMD_BLP = "BLP";
public final static String CMD_BYE = "BYE";
public final static String CMD_CAL = "CAL";
public final static String CMD_CHG = "CHG";
public final static String CMD_FLN = "FLN";
public final static String CMD_GTC = "GTC";
public final static String CMD_INF = "INF";
public final static String CMD_ILN = "ILN";
public final static String CMD_IRO = "IRO";
public final static String CMD_JOI = "JOI";
public final static String CMD_LST = "LST";
public final static String CMD_MSG = "MSG";
public final static String CMD_NAK = "NAK";
public final static String CMD_NLN = "NLN";
public final static String CMD_OUT = "OUT";
public final static String CMD_REM = "REM";
public final static String CMD_RNG = "RNG";
public final static String CMD_SYN = "SYN";
public final static String CMD_USR = "USR";
public final static String CMD_VER = "VER";
public final static String CMD_XFR = "XFR";
public final static String ERR_SYNTAX_ERROR = "200";
public final static String ERR_INVALID_PARAMETER = "201";
public final static String ERR_INVALID_USER = "205";
public final static String ERR_FQDN_MISSING = "206";
public final static String ERR_ALREADY_LOGIN = "207";
public final static String ERR_INVALID_USERNAME = "208";
public final static String ERR_INVALID_FRIENDLY_NAME = "209";
public final static String ERR_LIST_FULL = "210";
public final static String ERR_NOT_ON_LIST = "216";
public final static String ERR_ALREADY_IN_THE_MODE = "218";
public final static String ERR_ALREADY_IN_OPPOSITE_LIST = "219";
public final static String ERR_SWITCHBOARD_FAILED = "280";
public final static String ERR_NOTIFY_XFER_FAILED = "281";
public final static String ERR_REQUIRED_FIELDS_MISSING = "300";
public final static String ERR_NOT_LOGGED_IN = "302";
public final static String ERR_INTERNAL_SERVER = "500";
public final static String ERR_DB_SERVER = "501";
public final static String ERR_FILE_OPERATION = "510";
public final static String ERR_MEMORY_ALLOC = "520";<|fim▁hole|> public final static String ERR_SERVER_GOING_DOWN = "604";
public final static String ERR_CREATE_CONNECTION = "707";
public final static String ERR_BLOCKING_WRITE = "711";
public final static String ERR_SESSION_OVERLOAD = "712";
public final static String ERR_USER_TOO_ACTIVE = "713";
public final static String ERR_TOO_MANY_SESSIONS = "714";
public final static String ERR_NOT_EXPECTED = "715";
public final static String ERR_BAD_FRIEND_FILE = "717";
public final static String ERR_AUTHENTICATION_FAILED = "911";
public final static String ERR_NOT_ALLOWED_WHEN_OFFLINE = "913";
public final static String ERR_NOT_ACCEPTING_NEW_USERS = "920";
public final static String STATE_ONLINE = "NLN";
public final static String STATE_OFFLINE = "FLN";
public final static String STATE_HIDDEN = "HDN";
public final static String STATE_BUSY = "BSY";
public final static String STATE_IDLE = "IDL";
public final static String STATE_BRB = "BRB";
public final static String STATE_AWAY = "AWY";
public final static String STATE_PHONE = "PHN";
public final static String STATE_LUNCH = "LUN";
/**
* Retrieves the next available transaction ID that is unique
* within this virtual machine context.
*
* @return a signed 32-bit transaction ID
*/
public static synchronized int getTransactionID() {
return transactionID++;
}
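// Illustrative usage (a sketch; the surrounding client code is assumed, not
// part of this class): each outgoing protocol command is tagged with a
// fresh transaction ID, e.g.
//   String ver = MessengerProtocol.CMD_VER + " "
//       + MessengerProtocol.getTransactionID() + " "
//       + MessengerProtocol.DIALECT_NAME + MessengerProtocol.END_OF_COMMAND;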
public static synchronized void reset() {
transactionID = 0;
}
private static int transactionID = 0;
}<|fim▁end|> | public final static String ERR_SERVER_BUSY = "600";
public final static String ERR_SERVER_UNAVAILABLE = "601";
public final static String ERR_PERR_NS_DOWN = "601";
public final static String ERR_DB_CONNECT = "603"; |
<|file_name|>aliveim_test.go<|end_file_name|><|fim▁begin|>package main
import (
"fmt"
"io"
"net/http"
"net/http/httptest"
"net/url"
"strings"
"testing"
"time"
"github.com/stretchr/testify/assert"
)
type nopCloser struct {
io.Reader
}
func (nopCloser) Close() error { return nil }
var (
server *httptest.Server
reader io.Reader
devicePostURL string
)
func testTools(code int, body string) (*httptest.Server, *Client) {
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(code)
w.Header().Set("Content-Type", "application/json")
fmt.Fprintln(w, body)
}))
transport := &http.Transport{
Proxy: func(req *http.Request) (*url.URL, error) {
return url.Parse(server.URL)
},
}
httpClient := &http.Client{Transport: transport}
mockedClient := &Client{httpClient}
return server, mockedClient
}
func init() {
server = httptest.NewServer(Handlers())
devicePostURL = fmt.Sprintf("%s/", server.URL)
_, client = testTools(200, `{"status": "ok"}`)
}
func TestParseAlivePost(t *testing.T) {
var body io.ReadCloser = nopCloser{strings.NewReader(`{"device_id": "abc123", "timeout": 300}`)}
ar, _ := parseAlivePost(body)
if ar.DeviceID != "abc123" || ar.Timeout != 300 {
t.Fatalf("Expected: DeviceID: %s, Timeout: %d, got DeviceID: %s, Timeout: %d",
"abc123", 300, ar.DeviceID, ar.Timeout)
}
}
<|fim▁hole|>func TestCreateTimerInsertMapRetrive(t *testing.T) {
var timersMap = make(map[string]DeviceTimer)
timer := time.NewTimer(time.Second * 2)
deviceTimer := DeviceTimer{"abc123", timer}
timersMap["abc123"] = deviceTimer
myTimer := timersMap["abc123"]
if myTimer.DeviceID != "abc123" {
t.Fatalf("Expected: DeviceID: %s, got DeviceID: %s", "abc123", myTimer.DeviceID)
}
}
func TestDeviceTimerStartTimerTimeout(t *testing.T) {
timer := time.NewTimer(time.Millisecond * 300)
deviceTimer := DeviceTimer{"abc123", timer}
fmt.Println("Start timer...")
go deviceTimer.startTimer()
fmt.Println("Sleep 100 ms...")
time.Sleep(time.Millisecond * 100)
fmt.Println("Sleep 300 ms...")
time.Sleep(time.Millisecond * 300)
fmt.Println("Printed after device expiration")
}
func TestPostDevicePayloadEmptyTimersMap(t *testing.T) {
deviceJSON := `{"device_id": "abc123", "timeout": 300}`
reader = strings.NewReader(deviceJSON) //Convert string to reader
request, err := http.NewRequest("POST", devicePostURL, reader) //Create request with JSON body
res, err := http.DefaultClient.Do(request)
if err != nil {
t.Error(err) //Something is wrong while sending request
}
if res.StatusCode != 201 {
t.Errorf("Success expected: %d", res.StatusCode) //Uh-oh this means our test failed
}
timer, timerFound := timersMap["abc123"]
assert.True(t, timerFound)
assert.NotNil(t, timer)
}
func TestPostDevicePayloadExistingTimersMap(t *testing.T) {
timer := time.NewTimer(time.Millisecond * time.Duration(300))
deviceTimer := DeviceTimer{"abc123", timer}
timersMap["abc123"] = deviceTimer
deviceJSON := `{"device_id": "abc123", "timeout": 300}`
reader = strings.NewReader(deviceJSON) //Convert string to reader
request, err := http.NewRequest("POST", devicePostURL, reader) //Create request with JSON body
res, err := http.DefaultClient.Do(request)
if err != nil {
t.Error(err) //Something is wrong while sending request
}
if res.StatusCode != 200 {
t.Errorf("Success expected: %d", res.StatusCode) //Uh-oh this means our test failed
}
}
func TestMalformedJSONPayLoad(t *testing.T) {
reader := strings.NewReader("") // empty request
request, err := http.NewRequest("POST", devicePostURL, reader)
res, err := http.DefaultClient.Do(request)
if err != nil {
t.Error(err) //Something is wrong while sending request
}
if res.StatusCode != 400 {
t.Errorf("Failure expected: %d", res.StatusCode) //Uh-oh this means our test failed
}
}
func TestNotifyAPIDeviceTimerExpiredSuccess(t *testing.T) {
server, client = testTools(200, `{"status": "ok"}`)
defer server.Close()
err := client.notifyAPIDeviceTimerExpired("abcd1234")
assert.Nil(t, err)
}<|fim▁end|> | |
<|file_name|>signer.go<|end_file_name|><|fim▁begin|>package goverify
import (
"crypto/rsa"
"fmt"
)
// A Signer can create signatures that verify against a public key.
type Signer interface {
// Sign returns raw signature for the given data. This method
// will apply the hash specified for the keytype to the data.
Sign(data []byte) ([]byte, error)
}
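// Illustrative usage (assumed, not part of the original file; requires
// crypto/rand in addition to the imports above):
//
//	key, _ := rsa.GenerateKey(rand.Reader, 2048)
//	signer, _ := newSignerFromKey(key)
//	sig, err := signer.Sign([]byte("payload"))
//	_, _ = sig, err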
func newSignerFromKey(k interface{}) (Signer, error) {<|fim▁hole|> switch t := k.(type) {
case *rsa.PrivateKey:
sshKey = &RSAPrivateKey{t}
default:
return nil, fmt.Errorf("ssh: unsupported key type %T", k)
}
return sshKey, nil
}<|fim▁end|> | var sshKey Signer |
<|file_name|>mayaxes.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Created on Tue May 28 12:20:59 2013
=== MAYAXES (v1.1) ===
Generates a set of MayaVI axes using the mayavi.mlab.axes() object with a
white background, small black text and a centred title. Designed to better
mimic MATLAB style plots. <|fim▁hole|>Unspecified arguments will be set to default values when mayaxes is called
(note that default settings are configured for a figure measuring 1024 x 768
pixels and may not display correctly on plots that are significantly larger
or smaller).
=== Inputs ===
'title_string' Figure title text (default = 'VOID')
'xlabel' X axis label text (default = 'VOID')
'ylabel' Y axis label text (default = 'VOID')
'zlabel' Z axis label text (default = 'VOID')
'handle' Graphics handle of object (if bounding box is to be plotted)
'title_size' Font size of the title text (default = 25)
'ticks' Number of divisions on each axis (default = 7)
'font_scaling' Font scaling factor for axis text (default = 0.7)
'background' Background colour (can be 'b' (black) or 'w' (white))
=== Notes ===
Disbaling figure title: specify title_string='void' OR title_string='Void' OR
title_string='VOID' to disable figure title.
Disabling bounding box: specify handle='void' OR handle='Void' OR handle='VOID'
to disable figure bounding box.
=== Usage ===
from mayaxes import mayaxes
mayaxes('Figure title','X axis label','Y axis label','Z axis label')
OR
mayaxes(title_string='TITLE',xlabel='X',ylabel='Y',zlabel='Z',title_size=25,ticks=7,font_scaling=0.7)
=== Example ===
from mayaxes import test_mayaxes
test_mayaxes()
@author: Nathan Donaldson
"""
def mayaxes(title_string='VOID', xlabel='VOID', ylabel='VOID', zlabel='VOID', handle='VOID', \
title_size=25, ticks=7, font_scaling=0.7, background='w'):
if type(title_string) != str or type(xlabel) != str or type(ylabel) != str or type(zlabel) != str:
print('ERROR: label inputs must all be strings')
return
elif type(ticks) != int:
print('ERROR: number of ticks must be an integer')
return
elif type(font_scaling) != float and type(font_scaling) != int:
print('ERROR: font scaling factor must be an integer or a float')
return
from mayavi.mlab import axes,title,gcf,outline
# Create axes object
ax = axes()
# Font factor globally adjusts figure text size
ax.axes.font_factor = font_scaling
# Number of ticks along each axis
ax.axes.number_of_labels = ticks
# Set axis labels to input strings
# (spaces are included for padding so that labels do not intersect with axes)
if xlabel=='void' or xlabel=='Void' or xlabel=='VOID':
print('X axis label disabled')
else:
ax.axes.x_label = ' ' + xlabel
if ylabel=='void' or ylabel=='Void' or ylabel=='VOID':
print('Y axis label disabled')
else:
ax.axes.y_label = ylabel + ' '
if zlabel=='void' or zlabel=='Void' or zlabel=='VOID':
print('Z axis label disabled')
else:
ax.axes.z_label = zlabel + ' '
# Create figure title
if title_string=='void' or title_string=='Void' or title_string=='VOID':
print('Figure title disabled')
else:
text_title = title(title_string)
text_title.x_position = 0.5
text_title.y_position = 0.9
text_title.property.color = (0.0, 0.0, 0.0)
text_title.actor.text_scale_mode = 'none'
text_title.property.font_size = title_size
text_title.property.justification = 'centered'
# Create bounding box
if handle=='void' or handle=='Void' or handle=='VOID':
print('Bounding box disabled')
else:
if background == 'w':
bounding_box = outline(handle, color=(0.0, 0.0, 0.0), opacity=0.2)
elif background == 'b':
bounding_box = outline(handle, color=(1.0, 1.0, 1.0), opacity=0.2)
# Set axis, labels and titles to neat black text
#ax.property.color = (0.0, 0.0, 0.0)
#ax.title_text_property.color = (0.0, 0.0, 0.0)
#ax.label_text_property.color = (0.0, 0.0, 0.0)
ax.label_text_property.bold = False
ax.label_text_property.italic = False
ax.title_text_property.italic = False
ax.title_text_property.bold = False
# Reset axis range
ax.axes.use_ranges = True
# Set scene background, axis and text colours
fig = gcf()
if background == 'w':
fig.scene.background = (1.0, 1.0, 1.0)
ax.label_text_property.color = (0.0, 0.0, 0.0)
ax.property.color = (0.0, 0.0, 0.0)
ax.title_text_property.color = (0.0, 0.0, 0.0)
elif background == 'b':
fig.scene.background = (0.0, 0.0, 0.0)
ax.label_text_property.color = (1.0, 1.0, 1.0)
ax.property.color = (1.0, 1.0, 1.0)
ax.title_text_property.color = (1.0, 1.0, 1.0)
fig.scene.parallel_projection = True
def test_mayaxes():
from mayaxes import mayaxes
from scipy import sqrt,sin,meshgrid,linspace,pi
import mayavi.mlab as mlab
resolution = 200
lambda_var = 3
theta = linspace(-lambda_var*2*pi,lambda_var*2*pi,resolution)
x, y = meshgrid(theta, theta)
r = sqrt(x**2 + y**2)
z = sin(r)/r
fig = mlab.figure(size=(1024,768))
surf = mlab.surf(theta,theta,z,colormap='jet',opacity=1.0,warp_scale='auto')
mayaxes(title_string='Figure 1: Diminishing polar cosine series', \
xlabel='X data',ylabel='Y data',zlabel='Z data',handle=surf)
fig.scene.camera.position = [435.4093863309094, 434.1268937227623, 315.90311468125287]
fig.scene.camera.focal_point = [94.434632665253829, 93.152140057106593, -25.071638984402856]
fig.scene.camera.view_angle = 30.0
fig.scene.camera.view_up = [0.0, 0.0, 1.0]
fig.scene.camera.clipping_range = [287.45231734040635, 973.59247058049255]
fig.scene.camera.compute_view_plane_normal()
fig.scene.render()
mlab.show()<|fim▁end|> | |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup, Extension
#from distutils.core import setup, Extension
module1 = Extension('giscup15',
sources = ['giscup15.cpp'],
extra_compile_args=['-std=c++11'],<|fim▁hole|>
setup (name = 'giscup15',
version = '1.0',
description = 'This is a wrapper around the shortest path engine from GIS Cup 2015 (Dijkstra, A*, ALT supported)',
ext_modules = [module1])<|fim▁end|> | libraries=['shp']) |
<|file_name|>test_data_source.py<|end_file_name|><|fim▁begin|>#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import mock
from heat.common import exception
from heat.common import template_format
from heat.engine.resources.openstack.sahara import data_source
from heat.engine import scheduler
from heat.tests import common
from heat.tests import utils
data_source_template = """
heat_template_version: 2015-10-15
resources:
data-source:
type: OS::Sahara::DataSource
properties:
name: my-ds<|fim▁hole|> type: swift
url: swift://container.sahara/text
credentials:
user: admin
password: swordfish
"""
class SaharaDataSourceTest(common.HeatTestCase):
def setUp(self):
super(SaharaDataSourceTest, self).setUp()
t = template_format.parse(data_source_template)
self.stack = utils.parse_stack(t)
resource_defns = self.stack.t.resource_definitions(self.stack)
self.rsrc_defn = resource_defns['data-source']
self.client = mock.Mock()
self.patchobject(data_source.DataSource, 'client',
return_value=self.client)
def _create_resource(self, name, snippet, stack):
ds = data_source.DataSource(name, snippet, stack)
value = mock.MagicMock(id='12345')
self.client.data_sources.create.return_value = value
scheduler.TaskRunner(ds.create)()
return ds
def test_create(self):
ds = self._create_resource('data-source', self.rsrc_defn, self.stack)
args = self.client.data_sources.create.call_args[1]
expected_args = {
'name': 'my-ds',
'description': '',
'data_source_type': 'swift',
'url': 'swift://container.sahara/text',
'credential_user': 'admin',
'credential_pass': 'swordfish'
}
self.assertEqual(expected_args, args)
self.assertEqual('12345', ds.resource_id)
expected_state = (ds.CREATE, ds.COMPLETE)
self.assertEqual(expected_state, ds.state)
def test_update(self):
ds = self._create_resource('data-source', self.rsrc_defn,
self.stack)
props = self.stack.t.t['resources']['data-source']['properties'].copy()
props['type'] = 'hdfs'
props['url'] = 'my/path'
self.rsrc_defn = self.rsrc_defn.freeze(properties=props)
scheduler.TaskRunner(ds.update, self.rsrc_defn)()
data = {
'name': 'my-ds',
'description': '',
'type': 'hdfs',
'url': 'my/path',
'credentials': {
'user': 'admin',
'password': 'swordfish'
}
}
self.client.data_sources.update.assert_called_once_with(
'12345', data)
self.assertEqual((ds.UPDATE, ds.COMPLETE), ds.state)
def test_show_attribute(self):
ds = self._create_resource('data-source', self.rsrc_defn, self.stack)
value = mock.MagicMock()
value.to_dict.return_value = {'ds': 'info'}
self.client.data_sources.get.return_value = value
self.assertEqual({'ds': 'info'}, ds.FnGetAtt('show'))
def test_validate_password_without_user(self):
props = self.stack.t.t['resources']['data-source']['properties'].copy()
del props['credentials']['user']
self.rsrc_defn = self.rsrc_defn.freeze(properties=props)
ds = data_source.DataSource('data-source', self.rsrc_defn, self.stack)
ex = self.assertRaises(exception.StackValidationFailed, ds.validate)
error_msg = ('Property error: resources.data-source.properties.'
'credentials: Property user not assigned')
self.assertEqual(error_msg, str(ex))<|fim▁end|> | |
<|file_name|>sigopt_example.py<|end_file_name|><|fim▁begin|>"""This example demonstrates the usage of SigOpt with Ray Tune.
It also checks that it is usable with a separate scheduler.
"""
import sys
import time
from ray import tune
from ray.tune.schedulers import AsyncHyperBandScheduler
from ray.tune.suggest.sigopt import SigOptSearch
<|fim▁hole|>def easy_objective(config):
# Hyperparameters
width, height = config["width"], config["height"]
for step in range(config["steps"]):
# Iterative training function - can be any arbitrary training procedure
intermediate_score = evaluate(step, width, height)
# Feed the score back back to Tune.
tune.report(iterations=step, mean_loss=intermediate_score)
time.sleep(0.1)
if __name__ == "__main__":
import argparse
import os
parser = argparse.ArgumentParser()
parser.add_argument(
"--smoke-test", action="store_true", help="Finish quickly for testing"
)
args, _ = parser.parse_known_args()
if "SIGOPT_KEY" not in os.environ:
if args.smoke_test:
print("SigOpt API Key not found. Skipping smoke test.")
sys.exit(0)
else:
raise ValueError(
"SigOpt API Key not found. Please set the SIGOPT_KEY "
"environment variable."
)
space = [
{
"name": "width",
"type": "int",
"bounds": {"min": 0, "max": 20},
},
{
"name": "height",
"type": "int",
"bounds": {"min": -100, "max": 100},
},
]
algo = SigOptSearch(
space,
name="SigOpt Example Experiment",
max_concurrent=1,
metric="mean_loss",
mode="min",
)
scheduler = AsyncHyperBandScheduler(metric="mean_loss", mode="min")
analysis = tune.run(
easy_objective,
name="my_exp",
search_alg=algo,
scheduler=scheduler,
num_samples=4 if args.smoke_test else 100,
config={"steps": 10},
)
print(
"Best hyperparameters found were: ",
analysis.get_best_config("mean_loss", "min"),
)<|fim▁end|> | def evaluate(step, width, height):
return (0.1 + width * step / 100) ** (-1) + height * 0.01
|
<|file_name|>api.py<|end_file_name|><|fim▁begin|># The MIT License
#
# Copyright (c) 2008 William T. Katz
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
"""A simple RESTful status framework on Google App Engine
This app's API should be reasonably clean and easily targeted by other
clients, like a Flex app or a desktop program.
"""
__author__ = 'Kyle Conroy'
import string
import re
import os
import cgi
import logging
from datetime import timedelta
from datetime import date
from datetime import datetime
from datetime import time
from dateutil.parser import parse
from google.appengine.api import memcache
from google.appengine.api import datastore_errors
from google.appengine.api import taskqueue
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext import db
from handlers import restful
from time import mktime
from utils import authorized
from utils import slugify
from models import List, Status, Event, Service, Image
from wsgiref.handlers import format_date_time
def invalidate_cache():
all_pages = memcache.get("__all_pages__")
if all_pages is not None:
for page,d in all_pages.items():
if not memcache.delete(page):
logging.error("Memcache delete failed on %s", page)
if not memcache.delete("__all_pages__"):
logging.error("Memcache delete failed on __all_pages__")
taskqueue.add(url='/', method="GET")
def aware_to_naive(d):
"""Convert an aware date to an naive date, in UTC"""
offset = d.utcoffset()
if offset:
d = d.replace(tzinfo=None)
d = d - offset
return d
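# Illustrative example (values assumed): an aware datetime at UTC+2 is shifted
# back to UTC and stripped of its tzinfo, e.g.
#   aware_to_naive(parse("2011-06-01T12:00:00+02:00"))
#   -> datetime.datetime(2011, 6, 1, 10, 0)   # naive, in UTC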
class NotFoundHandler(restful.Controller):
def get(self):
self.error(404, "Can't find resource")
class ListsListHandler(restful.Controller):
def get(self, version):
if not self.valid_version(version):
self.error(404, "API Version %s not supported" % version)
return
query = List.all().order('name')
data = [s.rest(self.base_url(version)) for s in query]
data = {"lists": data}
self.json(data)
@authorized.api("admin")
def post(self, version):
if not self.valid_version(version):
self.error(404, "API Version %s not supported" % version)
return
name = self.request.get('name', default_value=None)
description = self.request.get('description', default_value=None)
if not name or not description:
self.error(400, "Bad Data: Name: %s, Description: %s" \
% (name, description))
return
slug = slugify.slugify(name)
existing_s = List.get_by_slug(slug)
if existing_s:
self.error(404, "A list with this name already exists")
return
l = List(name=name, slug=slug, description=description)
l.put()
invalidate_cache()
self.response.set_status(201)
self.json(l.rest(self.base_url(version)))
class ListInstanceHandler(restful.Controller):
def get(self, version, list_slug):
if not self.valid_version(version):
self.error(404, "API Version %s not supported" % version)
return
list = List.get_by_slug(list_slug)
if not list:
self.error(404, "List %s does not exist" % list_slug)
return
self.json(list.rest(self.base_url(version)))
@authorized.api("admin")
def post(self, version, list_slug):
if not self.valid_version(version):
self.error(404, "API Version %s not supported" % version)
return
list = List.get_by_slug(list_slug)
if not list:
self.error(404, "Service %s does not exist" % list_slug)
return
name = self.request.get('name', default_value=None)
description = self.request.get('description', default_value=None)
if description:
list.description = description
if name:
list.name = name
if name or description:
invalidate_cache()
list.put()
self.json(list.rest(self.base_url(version)))
@authorized.api("admin")
def delete(self, version, list_slug):
if not self.valid_version(version):
self.error(404, "API Version %s not supported" % version)
return
list = List.get_by_slug(list_slug)
if not list:
self.error(404, "List %s not found" % list_slug)
return
query = Service.all()
query.filter('list =', list)
if query:
for s in query:
s.list = None
s.put()
invalidate_cache()
list.delete()
self.json(list.rest(self.base_url(version)))
class ServicesListHandler(restful.Controller):
def get(self, version):
if not self.valid_version(version):
self.error(404, "API Version %s not supported" % version)
return
query = Service.all().order('name')
data = [s.rest(self.base_url(version)) for s in query]
data = {"services": data}
self.json(data)
@authorized.api("admin")
def post(self, version):
if not self.valid_version(version):
self.error(404, "API Version %s not supported" % version)
return
name = self.request.get('name', default_value=None)
description = self.request.get('description', default_value=None)
slist = self.request.get('list', default_value=None)
l = None
if slist:
l = List.all().filter("slug =", slist).get()
if not name:
self.error(400, "Bad name: %s" % name)
return
if not description:
self.error(400, "Bad description: %s" % description)
return
if slist and not l:
self.error(400, "Bad list slug: %s" % slist)
return
slug = slugify.slugify(name)
existing_s = Service.get_by_slug(slug)
if existing_s:
self.error(404, "A sevice with this name already exists")
return
s = Service(name=name, slug=slug, description=description, list=l)
s.put()
invalidate_cache()
self.response.set_status(201)
self.json(s.rest(self.base_url(version)))
class ServiceInstanceHandler(restful.Controller):
def get(self, version, service_slug):
if not self.valid_version(version):
self.error(404, "API Version %s not supported" % version)
return
service = Service.get_by_slug(service_slug)
if not service:
self.error(404, "Service %s does not exist" % service_slug)
return
self.json(service.rest(self.base_url(version)))
@authorized.api("admin")
def post(self, version, service_slug):
if not self.valid_version(version):
self.error(404, "API Version %s not supported" % version)
return
service = Service.get_by_slug(service_slug)
if not service:
self.error(404, "Service %s does not exist" % service_slug)
return
name = self.request.get('name', default_value=None)
description = self.request.get('description', default_value=None)
list = self.request.get('list', default_value=None)
if description:
service.description = description
if name:
service.name = name
if list:
l = List.all().filter("slug = ", list).get()
if l is None:
self.error(400, "Can't find list with slug %s" % list)
return
service.list = l
if "" == list:
service.list = None
list = "removed"
if name or description or list:
invalidate_cache()
service.put()
self.json(service.rest(self.base_url(version)))
@authorized.api("admin")
def delete(self, version, service_slug):
if not self.valid_version(version):
self.error(404, "API Version %s not supported" % version)
return
service = Service.get_by_slug(service_slug)
if not service:
self.error(404, "Service %s not found" % service_slug)
return
query = Event.all()
query.filter('service =', service)
if query:
for e in query:
e.delete()
invalidate_cache()
service.delete()
self.json(service.rest(self.base_url(version)))
class EventsListHandler(restful.Controller):
def get(self, version, service_slug):
if not self.valid_version(version):
self.error(404, "API Version %s not supported" % version)
return
service = Service.get_by_slug(service_slug)
if not service:
self.error(404, "Service %s not found" % service_slug)
return
start = self.request.get('start', default_value=None)
end = self.request.get('end', default_value=None)
query = Event.all()
query.filter('service =', service)
if start:
try:
_start = aware_to_naive(parse(start))
query.filter("start >= ", _start)
except:
self.error(400, "Invalid Date: %s" % start)
return
if end:
try:
_end = aware_to_naive(parse(end))
query.filter("start <=", _end)
except:
self.error(400, "Invalid Date: %s" % end)
return
query.order('-start')
data = [s.rest(self.base_url(version)) for s in query]
self.json({"events": data})
@authorized.api("admin")
def post(self, version, service_slug):
if not self.valid_version(version):
self.error(404, "API Version %s not supported" % version)
return
status_slug = self.request.get("status", default_value=None)
message = self.request.get("message", default_value=None)
informational = self.request.get("informational", default_value=None)
if not message:
self.error(400, "Event message is required")
return
service = Service.get_by_slug(service_slug)
if not service:
self.error(404, "Service %s not found" % service_slug)
return
if not status_slug:
event = service.current_event()
if event:
status = event.status
else:
status = Status.get_default()
else:
status = Status.get_by_slug(status_slug)
if not status:
self.error(404, "Status %s not found" % status_slug)
return
e = Event(status=status, service=service, message=message)
e.informational = informational and informational == "true"
e.put()
# Queue up a task that calls the Twitter API to make a tweet.
if self.request.get('tweet'):
logging.info('Attempting to post a tweet for the latest event via async GAE task queue.')
taskqueue.add(url='/admin/tweet', params={'service_name': service.name, 'status_name': status.name, 'message': message})
invalidate_cache()
self.json(e.rest(self.base_url(version)))
class CurrentEventHandler(restful.Controller):
def get(self, version, service_slug):
if not self.valid_version(version):
self.error(404, "API Version %s not supported" % version)
return
service = Service.get_by_slug(service_slug)
if not service:
self.error(404, "Service %s not found" % service_slug)
return
event = service.current_event()
if not event:
self.error(404, "No current event for Service %s" % service_slug)
return
self.json(event.rest(self.base_url(version)))
class EventInstanceHandler(restful.Controller):
def get(self, version, service_slug, sid):
if not self.valid_version(version):
self.error(404, "API Version %s not supported" % version)
return
service = Service.get_by_slug(service_slug)
if not service:
self.error(404, "Service %s not found" % service_slug)
return
try:
event = Event.get(db.Key(sid))
except datastore_errors.BadKeyError:
self.error(404, "Event %s not found" % sid)
return
if not event or service.key() != event.service.key():
self.error(404, "No event for Service %s with sid = %s" \
% (service_slug, sid))
return
self.json(event.rest(self.base_url(version)))
@authorized.api("admin")
def delete(self, version, service_slug, sid):
if not self.valid_version(version):
self.error(404, "API Version %s not supported" % version)
return
service = Service.get_by_slug(service_slug)
if not service:
self.error(404, "Service %s not found" % service_slug)
return
try:
event = Event.get(db.Key(sid))
except datastore_errors.BadKeyError:
self.error(404, "Event %s not found" % sid)
return
if not event or service.key() != event.service.key():
self.error(404, "No event for Service %s with sid = %s" \
% (service_slug, sid))
return
event.delete()
invalidate_cache()<|fim▁hole|>
class StatusesListHandler(restful.Controller):
def get(self, version):
if not self.valid_version(version):
self.error(404, "API Version %s not supported" % version)
return
query = Status.all().order('name')
data = [s.rest(self.base_url(version)) for s in query]
self.json({"statuses": data})
@authorized.api("admin")
def post(self, version):
if not self.valid_version(version):
self.error(404, "API Version %s not supported" % version)
return
name = self.request.get('name', default_value=None)
description = self.request.get('description', default_value=None)
image_slug = self.request.get('image', default_value=None)
default = self.request.get('default', default_value="false")
if default not in ["true", "false"]:
self.error(400, "Default must be true or false")
return
if not name or not description or not image_slug:
self.error(400, "Bad Data")
return
slug = slugify.slugify(name)
status = Status.get_by_slug(slug)
image = Image.get_by_slug(image_slug)
if status is not None:
self.error(400, "A Status with the slug %s already exists" % slug)
return
if image is None:
msg = "An Image with the slug %s doesn't exist" % image_slug
self.error(400, msg)
return
# Reset default status
if default == "true":
for stat in Status.all().filter("default", True):
stat.default = False
stat.put()
default = default == "true"
status = Status(name=name, slug=slug, description=description,
image=image.path, default=default)
status.put()
invalidate_cache()
self.response.set_status(201)
self.json(status.rest(self.base_url(version)))
class StatusInstanceHandler(restful.Controller):
def get(self, version, status_slug):
if not self.valid_version(version):
self.error(404, "API Version %s not supported" % version)
return
status = Status.get_by_slug(status_slug)
if not status:
self.error(404, "No status with the slug %s found" % status_slug)
return
self.json(status.rest(self.base_url(version)))
@authorized.api("admin")
def post(self, version, status_slug):
if not self.valid_version(version):
self.error(404, "API Version %s not supported" % version)
return
status = Status.get_by_slug(status_slug)
if not status:
self.error(404, "No status with the slug %s found" % status_slug)
return
name = self.request.get('name', default_value=None)
image_slug = self.request.get('image', default_value=None)
image = None
default = self.request.get('default', default_value=None)
description = self.request.get('description', default_value=None)
if image_slug is not None:
image = Image.get_by_slug(image_slug)
if image is None:
self.error(400, "An Image with the "
"slug %s doesn't exist" % image_slug)
return
status.image = image.path
if description is not None:
status.description = description
if default is not None and default in ["false", "true"]:
# Reset default status
if default == "true":
for stat in Status.all().filter("default", True):
stat.default = False
stat.put()
status.default = default == "true"
if name is not None:
status.name = name
if description or name or image or default:
status.put()
invalidate_cache()
self.json(status.rest(self.base_url(version)))
@authorized.api("admin")
def delete(self, version, status_slug):
if not self.valid_version(version):
self.error(404, "API Version %s not supported" % version)
return
status = Status.get_by_slug(status_slug)
if not status:
self.error(404, "Status %s not found" % status_slug)
return
# We may want to think more about this
events = Event.all().filter('status =', status).fetch(1000)
for event in events:
event.delete()
status.delete()
self.json(status.rest(self.base_url(version)))
class LevelListHandler(restful.Controller):
def get(self, version):
if not self.valid_version(version):
self.error(404, "API Version %s not supported" % version)
return
self.json({"levels": ["NORMAL", "WARNING", "ERROR", "CRITICAL"]})
class ImagesListHandler(restful.Controller):
def get(self, version):
if not self.valid_version(version):
self.error(404, "API Version %s not supported" % version)
return
host = self.request.headers.get('host', 'nohost')
images = []
for img in Image.all().fetch(1000):
image = {
"url": "http://" + host + "/images/" + img.path,
"icon_set": img.icon_set,
"name": img.slug,
}
images.append(image)
self.json({"images": images})<|fim▁end|> |
# Why not JSON?
self.success(event.rest(self.base_url(version))) |
<|file_name|>psllq.rs<|end_file_name|><|fim▁begin|>use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
fn psllq_1() {
run_test(&Instruction { mnemonic: Mnemonic::PSLLQ, operand1: Some(Direct(MM6)), operand2: Some(Literal8(73)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 115, 246, 73], OperandSize::Dword)
}
fn psllq_2() {
run_test(&Instruction { mnemonic: Mnemonic::PSLLQ, operand1: Some(Direct(MM7)), operand2: Some(Literal8(11)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 115, 247, 11], OperandSize::Qword)
}
fn psllq_3() {
run_test(&Instruction { mnemonic: Mnemonic::PSLLQ, operand1: Some(Direct(XMM1)), operand2: Some(Literal8(103)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 115, 241, 103], OperandSize::Dword)
}
fn psllq_4() {
run_test(&Instruction { mnemonic: Mnemonic::PSLLQ, operand1: Some(Direct(XMM5)), operand2: Some(Literal8(20)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 115, 245, 20], OperandSize::Qword)
}<|fim▁hole|>}
fn psllq_6() {
run_test(&Instruction { mnemonic: Mnemonic::PSLLQ, operand1: Some(Direct(MM3)), operand2: Some(IndirectScaledIndexed(EBX, ESI, Eight, Some(OperandSize::Qword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 243, 28, 243], OperandSize::Dword)
}
fn psllq_7() {
run_test(&Instruction { mnemonic: Mnemonic::PSLLQ, operand1: Some(Direct(MM4)), operand2: Some(Direct(MM1)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 243, 225], OperandSize::Qword)
}
fn psllq_8() {
run_test(&Instruction { mnemonic: Mnemonic::PSLLQ, operand1: Some(Direct(MM1)), operand2: Some(Indirect(RDI, Some(OperandSize::Qword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 243, 15], OperandSize::Qword)
}
fn psllq_9() {
run_test(&Instruction { mnemonic: Mnemonic::PSLLQ, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM3)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 243, 211], OperandSize::Dword)
}
fn psllq_10() {
run_test(&Instruction { mnemonic: Mnemonic::PSLLQ, operand1: Some(Direct(XMM5)), operand2: Some(IndirectDisplaced(EBX, 1700753197, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 243, 171, 45, 111, 95, 101], OperandSize::Dword)
}
fn psllq_11() {
run_test(&Instruction { mnemonic: Mnemonic::PSLLQ, operand1: Some(Direct(XMM0)), operand2: Some(Direct(XMM1)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 243, 193], OperandSize::Qword)
}
fn psllq_12() {
run_test(&Instruction { mnemonic: Mnemonic::PSLLQ, operand1: Some(Direct(XMM4)), operand2: Some(IndirectScaledIndexedDisplaced(RAX, RAX, Two, 103442484, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 243, 164, 64, 52, 104, 42, 6], OperandSize::Qword)
}<|fim▁end|> |
fn psllq_5() {
run_test(&Instruction { mnemonic: Mnemonic::PSLLQ, operand1: Some(Direct(MM3)), operand2: Some(Direct(MM7)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 243, 223], OperandSize::Dword) |
<|file_name|>partnerclient-plan-test.js<|end_file_name|><|fim▁begin|>import { module, test } from 'qunit';
import { setupTest } from 'ember-qunit';
module('Unit | Model | partner partnerclient plan', function(hooks) {<|fim▁hole|> test('it exists', function(assert) {
let store = this.owner.lookup('service:store');
let model = store.createRecord('partner/partnerclient-plan', {});
assert.ok(model);
});
});<|fim▁end|> | setupTest(hooks);
// Replace this with your real tests. |
<|file_name|>create_mapping_on_deploy.py<|end_file_name|><|fim▁begin|>import argparse
import structlog
import logging
from pyramid.paster import get_app
from snovault.elasticsearch.create_mapping import run as run_create_mapping
from dcicutils.log_utils import set_logging
from dcicutils.deployment_utils import CreateMappingOnDeployManager
log = structlog.getLogger(__name__)
EPILOG = __doc__
# This order determines order that items will be mapped + added to the queue
# Can use item type (e.g. file_fastq) or class name (e.g. FileFastq)
ITEM_INDEX_ORDER = [
'Award',
'Lab',
'AccessKey',
'User',
'Ontology',
'OntologyTerm',
'StaticSection',
'Document',
'Protocol',
'FileFormat',
'ExperimentType',
'Vendor',
'Organism',
'Gene',
'GenomicRegion',
'BioFeature',
'Target',
'Construct',
'Enzyme',
'Antibody',
'FileReference',
'IndividualChicken',
'IndividualFly',
'IndividualHuman',
'IndividualMouse',
'IndividualPrimate',
'IndividualZebrafish',
'Image',
'Modification',
'Biosource',
'BiosampleCellCulture',
'Biosample',
'Workflow',<|fim▁hole|>
'PublicationTracking',
'Software',
'AnalysisStep',
'Badge',
'SopMap',
'SummaryStatistic',
'SummaryStatisticHiC',
'TrackingItem',
'TreatmentAgent',
'TreatmentRnai',
'ImagingPath',
'MicroscopeSettingA1',
'MicroscopeSettingA2',
'MicroscopeSettingD1',
'MicroscopeSettingD2',
'MicroscopeConfiguration',
'HiglassViewConfig',
'QualityMetricAtacseq',
'QualityMetricBamqc',
'QualityMetricBamcheck',
'QualityMetricChipseq',
'QualityMetricDedupqcRepliseq',
'QualityMetricFastqc',
'QualityMetricFlag',
'QualityMetricPairsqc',
'QualityMetricMargi',
'QualityMetricRnaseq',
'QualityMetricRnaseqMadqc',
'QualityMetricWorkflowrun',
'QualityMetricQclist',
'QualityMetricMcool',
'ExperimentAtacseq',
'ExperimentCaptureC',
'ExperimentChiapet',
'ExperimentDamid',
'ExperimentHiC',
'ExperimentMic',
'ExperimentRepliseq',
'ExperimentSeq',
'ExperimentTsaseq',
'ExperimentSet',
'ExperimentSetReplicate',
'Publication',
'FileCalibration',
'FileFastq',
'FileMicroscopy',
'FileProcessed',
'FileSet',
'FileSetCalibration',
'FileSetMicroscopeQc',
'FileVistrack',
'DataReleaseUpdate',
'WorkflowRun',
'WorkflowRunAwsem',
'WorkflowRunSbg',
'Page',
]
def get_my_env(app):
"""
Gets the env name of the currently running environment
:param app: handle to Pyramid app
:return: current env
"""
# Return value is presumably one of the above-declared environments
return app.registry.settings.get('env.name')
def _run_create_mapping(app, args):
"""
Runs create_mapping with deploy options and reports errors. Allows args passed from argparse in main to override
the default deployment configuration.
:param app: pyramid application handle
:param args: args from argparse
:return: None
"""
try:
deploy_cfg = CreateMappingOnDeployManager.get_deploy_config(env=get_my_env(app), args=args, log=log,
client='create_mapping_on_deploy')
if not deploy_cfg['SKIP']:
log.info('Calling run_create_mapping for env %s.' % deploy_cfg['ENV_NAME'])
run_create_mapping(app=app,
check_first=(not deploy_cfg['WIPE_ES']),
purge_queue=args.clear_queue, # this option does not vary, so no need to override
item_order=ITEM_INDEX_ORDER,
strict=deploy_cfg['STRICT'])
else:
log.info('NOT calling run_create_mapping for env %s.' % deploy_cfg['ENV_NAME'])
exit(0)
except Exception as e:
log.error("Exception encountered while gathering deployment information or running create_mapping")
log.error("%s: %s" % (e.__class__.__name__, e))
exit(1)
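# For reference (shape inferred from the code above, not from dcicutils docs):
# get_deploy_config is expected to return a dict along the lines of
#   {'ENV_NAME': 'fourfront-webdev', 'SKIP': False, 'WIPE_ES': True, 'STRICT': True}
# where WIPE_ES=True maps to check_first=False in run_create_mapping.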
def main():
parser = argparse.ArgumentParser( # noqa - PyCharm wrongly thinks the formatter_class is specified wrong here.
description="Create Elasticsearch mapping on deployment", epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument('config_uri', help="path to configfile")
parser.add_argument('--app-name', help="Pyramid app name in configfile")
parser.add_argument('--clear-queue', help="Specify to clear the SQS queue", action='store_true', default=False)
CreateMappingOnDeployManager.add_argparse_arguments(parser)
args = parser.parse_args()
app = get_app(args.config_uri, args.app_name)
# Loading app will have configured from config file. Reconfigure here:
set_logging(in_prod=app.registry.settings.get('production'), log_name=__name__, level=logging.DEBUG)
# set_logging(app.registry.settings.get('elasticsearch.server'),
# app.registry.settings.get('production'),
# level=logging.DEBUG)
_run_create_mapping(app, args)
exit(0)
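# Example invocation (illustrative - the config path and app name vary by
# deployment):
#   python create_mapping_on_deploy.py production.ini --app-name app --clear-queue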
if __name__ == '__main__':
main()<|fim▁end|> | 'WorkflowMapping', |
<|file_name|>package.py<|end_file_name|><|fim▁begin|># Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PerlSubIdentify(PerlPackage):
"""Retrieve names of code references"""
homepage = "https://metacpan.org/pod/Sub::Identify"<|fim▁hole|>
version('0.14', sha256='068d272086514dd1e842b6a40b1bedbafee63900e5b08890ef6700039defad6f')<|fim▁end|> | url = "http://search.cpan.org/CPAN/authors/id/R/RG/RGARCIA/Sub-Identify-0.14.tar.gz" |
<|file_name|>NetWkstaInfo1059.py<|end_file_name|><|fim▁begin|># encoding: utf-8
# module samba.dcerpc.wkssvc
# from /usr/lib/python2.7/dist-packages/samba/dcerpc/wkssvc.so
# by generator 1.135
""" wkssvc DCE/RPC """
# imports
import dcerpc as __dcerpc
import talloc as __talloc
class NetWkstaInfo1059(__talloc.Object):
# no doc
def __init__(self, *args, **kwargs): # real signature unknown
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__<|fim▁hole|> """ T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
buf_read_only_files = property(lambda self: object(), lambda self, v: None, lambda self: None) # default<|fim▁end|> | |
<|file_name|>gpu_declarations.hpp<|end_file_name|><|fim▁begin|>#pragma once
#include <Poco/Types.h>
namespace Burst
{
struct CalculatedDeadline
{
Poco::UInt64 deadline;
Poco::UInt64 nonce;
};
enum class MemoryType
{
Buffer,
Gensig,
Deadlines,<|fim▁hole|>
enum class MemoryCopyDirection
{
ToHost,
ToDevice
};
}<|fim▁end|> | Bytes
}; |
<|file_name|>juce_win32_Midi.cpp<|end_file_name|><|fim▁begin|>/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2020 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
to use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
#ifndef DRV_QUERYDEVICEINTERFACE
#define DRV_RESERVED 0x0800
#define DRV_QUERYDEVICEINTERFACE (DRV_RESERVED + 12)
#define DRV_QUERYDEVICEINTERFACESIZE (DRV_RESERVED + 13)
#endif
namespace juce
{
class MidiInput::Pimpl
{
public:
virtual ~Pimpl() noexcept = default;
virtual String getDeviceIdentifier() = 0;
virtual String getDeviceName() = 0;
virtual void start() = 0;
virtual void stop() = 0;
};
class MidiOutput::Pimpl
{
public:
virtual ~Pimpl() noexcept = default;
virtual String getDeviceIdentifier() = 0;
virtual String getDeviceName() = 0;
virtual void sendMessageNow (const MidiMessage&) = 0;
};
struct MidiServiceType
{
MidiServiceType() = default;
virtual ~MidiServiceType() noexcept = default;
virtual Array<MidiDeviceInfo> getAvailableDevices (bool) = 0;
virtual MidiDeviceInfo getDefaultDevice (bool) = 0;
virtual MidiInput::Pimpl* createInputWrapper (MidiInput&, const String&, MidiInputCallback&) = 0;
virtual MidiOutput::Pimpl* createOutputWrapper (const String&) = 0;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (MidiServiceType)
};
//==============================================================================
struct Win32MidiService : public MidiServiceType,
private Timer
{
Win32MidiService() {}
Array<MidiDeviceInfo> getAvailableDevices (bool isInput) override
{
return isInput ? Win32InputWrapper::getAvailableDevices()
: Win32OutputWrapper::getAvailableDevices();
}
MidiDeviceInfo getDefaultDevice (bool isInput) override
{
return isInput ? Win32InputWrapper::getDefaultDevice()
: Win32OutputWrapper::getDefaultDevice();
}
MidiInput::Pimpl* createInputWrapper (MidiInput& input, const String& deviceIdentifier, MidiInputCallback& callback) override
{
return new Win32InputWrapper (*this, input, deviceIdentifier, callback);
}
MidiOutput::Pimpl* createOutputWrapper (const String& deviceIdentifier) override
{
return new Win32OutputWrapper (*this, deviceIdentifier);
}
private:
struct Win32InputWrapper;
//==============================================================================
struct MidiInCollector : public ReferenceCountedObject
{
MidiInCollector (Win32MidiService& s, MidiDeviceInfo d)
: deviceInfo (d), midiService (s)
{
}
~MidiInCollector()
{
stop();
if (deviceHandle != 0)
{
for (int count = 5; --count >= 0;)
{
if (midiInClose (deviceHandle) == MMSYSERR_NOERROR)
break;
Sleep (20);
}
}
}
using Ptr = ReferenceCountedObjectPtr<MidiInCollector>;
void addClient (Win32InputWrapper* c)
{
const ScopedLock sl (clientLock);
jassert (! clients.contains (c));
clients.add (c);
}
void removeClient (Win32InputWrapper* c)
{
const ScopedLock sl (clientLock);
clients.removeFirstMatchingValue (c);
startOrStop();
midiService.asyncCheckForUnusedCollectors();
}
void handleMessage (const uint8* bytes, uint32 timeStamp)
{
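// A valid short message always begins with a status byte (0x80 or above),
// so anything else is discarded, as is any data arriving before start().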
if (bytes[0] >= 0x80 && isStarted.load())
{
{
auto len = MidiMessage::getMessageLengthFromFirstByte (bytes[0]);
auto time = convertTimeStamp (timeStamp);
const ScopedLock sl (clientLock);
for (auto* c : clients)
c->pushMidiData (bytes, len, time);
}
writeFinishedBlocks();
}
}
void handleSysEx (MIDIHDR* hdr, uint32 timeStamp)
{
if (isStarted.load() && hdr->dwBytesRecorded > 0)
{
{
auto time = convertTimeStamp (timeStamp);
const ScopedLock sl (clientLock);
for (auto* c : clients)
c->pushMidiData (hdr->lpData, (int) hdr->dwBytesRecorded, time);
}
writeFinishedBlocks();
}
}
void startOrStop()
{
const ScopedLock sl (clientLock);
if (countRunningClients() == 0)
stop();
else
start();
}
void start()
{
if (deviceHandle != 0 && ! isStarted.load())
{
activeMidiCollectors.addIfNotAlreadyThere (this);
for (int i = 0; i < (int) numHeaders; ++i)
{
headers[i].prepare (deviceHandle);
headers[i].write (deviceHandle);
}
startTime = Time::getMillisecondCounterHiRes();
auto res = midiInStart (deviceHandle);
if (res == MMSYSERR_NOERROR)
isStarted = true;
else
unprepareAllHeaders();
}
}
void stop()
{
if (isStarted.load())
{
isStarted = false;
midiInReset (deviceHandle);
midiInStop (deviceHandle);
activeMidiCollectors.removeFirstMatchingValue (this);
unprepareAllHeaders();
}
}
static void CALLBACK midiInCallback (HMIDIIN, UINT uMsg, DWORD_PTR dwInstance,
DWORD_PTR midiMessage, DWORD_PTR timeStamp)
{
auto* collector = reinterpret_cast<MidiInCollector*> (dwInstance);
// This is primarily a check for the collector being a dangling
// pointer, as the callback can sometimes be delayed
if (activeMidiCollectors.contains (collector))
{
if (uMsg == MIM_DATA)
collector->handleMessage ((const uint8*) &midiMessage, (uint32) timeStamp);
else if (uMsg == MIM_LONGDATA)
collector->handleSysEx ((MIDIHDR*) midiMessage, (uint32) timeStamp);
}
}
MidiDeviceInfo deviceInfo;
HMIDIIN deviceHandle = 0;
private:
Win32MidiService& midiService;
CriticalSection clientLock;
Array<Win32InputWrapper*> clients;
std::atomic<bool> isStarted { false };
double startTime = 0;
// This static array is used to prevent occasional callbacks to objects that are
// in the process of being deleted
static Array<MidiInCollector*, CriticalSection> activeMidiCollectors;
int countRunningClients() const
{
int num = 0;
for (auto* c : clients)
if (c->started)
++num;
return num;
}
struct MidiHeader
{
MidiHeader() {}
void prepare (HMIDIIN device)
{
zerostruct (hdr);
hdr.lpData = data;
hdr.dwBufferLength = (DWORD) numElementsInArray (data);
midiInPrepareHeader (device, &hdr, sizeof (hdr));
}
void unprepare (HMIDIIN device)
{
if ((hdr.dwFlags & MHDR_DONE) != 0)
{
int c = 10;
while (--c >= 0 && midiInUnprepareHeader (device, &hdr, sizeof (hdr)) == MIDIERR_STILLPLAYING)
Thread::sleep (20);
jassert (c >= 0);
}
}
void write (HMIDIIN device)
{
hdr.dwBytesRecorded = 0;
midiInAddBuffer (device, &hdr, sizeof (hdr));
}
void writeIfFinished (HMIDIIN device)
{
if ((hdr.dwFlags & MHDR_DONE) != 0)
write (device);
}
MIDIHDR hdr;
char data[256];
JUCE_DECLARE_NON_COPYABLE (MidiHeader)
};
enum { numHeaders = 32 };
MidiHeader headers[numHeaders];
void writeFinishedBlocks()
{
for (int i = 0; i < (int) numHeaders; ++i)
headers[i].writeIfFinished (deviceHandle);
}
void unprepareAllHeaders()
{
for (int i = 0; i < (int) numHeaders; ++i)
headers[i].unprepare (deviceHandle);
}
double convertTimeStamp (uint32 timeStamp)
{
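// The driver reports timestamps as milliseconds since midiInStart() was
// called, so offset them by our recorded start time. The result is clamped
// so that we never report a time in the future, and startTime is nudged
// backwards whenever the device clock drifts more than 2ms ahead of ours.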
auto t = startTime + timeStamp;
auto now = Time::getMillisecondCounterHiRes();
if (t > now)
{
if (t > now + 2.0)
startTime -= 1.0;
t = now;
}
return t * 0.001;
}
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (MidiInCollector)
};
//==============================================================================
template<class WrapperType>
struct Win32MidiDeviceQuery
{
static Array<MidiDeviceInfo> getAvailableDevices()
{
StringArray deviceNames, deviceIDs;
auto deviceCaps = WrapperType::getDeviceCaps();
for (int i = 0; i < deviceCaps.size(); ++i)
{
deviceNames.add (deviceCaps[i].szPname);
auto identifier = getInterfaceIDForDevice ((UINT) i);
if (identifier.isNotEmpty())
deviceIDs.add (identifier);
else
deviceIDs.add (deviceNames[i]);
}
deviceNames.appendNumbersToDuplicates (false, false, CharPointer_UTF8 ("-"), CharPointer_UTF8 (""));
deviceIDs .appendNumbersToDuplicates (false, false, CharPointer_UTF8 ("-"), CharPointer_UTF8 (""));
Array<MidiDeviceInfo> devices;
for (int i = 0; i < deviceNames.size(); ++i)
devices.add ({ deviceNames[i], deviceIDs[i] });
return devices;
}
private:
static String getInterfaceIDForDevice (UINT id)
{
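// Ask the driver for its device interface path via the
// DRV_QUERYDEVICEINTERFACESIZE / DRV_QUERYDEVICEINTERFACE messages - this
// is a more stable identifier than the user-facing name, which can collide
// when two identical devices are attached.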
ULONG size = 0;
if (WrapperType::sendMidiMessage ((UINT_PTR) id, DRV_QUERYDEVICEINTERFACESIZE, (DWORD_PTR) &size, 0) == MMSYSERR_NOERROR)
{
WCHAR interfaceName[512] = {};
if (isPositiveAndBelow (size, sizeof (interfaceName))
&& WrapperType::sendMidiMessage ((UINT_PTR) id, DRV_QUERYDEVICEINTERFACE,
(DWORD_PTR) interfaceName, sizeof (interfaceName)) == MMSYSERR_NOERROR)
{
return interfaceName;
}
}
return {};
}
};
struct Win32InputWrapper : public MidiInput::Pimpl,
public Win32MidiDeviceQuery<Win32InputWrapper>
{
Win32InputWrapper (Win32MidiService& parentService, MidiInput& midiInput, const String& deviceIdentifier, MidiInputCallback& c)
: input (midiInput), callback (c)
{
collector = getOrCreateCollector (parentService, deviceIdentifier);
collector->addClient (this);
}
~Win32InputWrapper()
{
collector->removeClient (this);
}
static MidiInCollector::Ptr getOrCreateCollector (Win32MidiService& parentService, const String& deviceIdentifier)
{
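// Inputs opened on the same endpoint share a single reference-counted
// collector, so each physical device is only opened once and its incoming
// data is fanned out to every registered client.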
UINT deviceID = MIDI_MAPPER;
String deviceName;
auto devices = getAvailableDevices();
for (int i = 0; i < devices.size(); ++i)
{
auto d = devices.getUnchecked (i);
if (d.identifier == deviceIdentifier)
{
deviceID = i;
deviceName = d.name;
break;
}
}
const ScopedLock sl (parentService.activeCollectorLock);
for (auto& c : parentService.activeCollectors)
if (c->deviceInfo.identifier == deviceIdentifier)
return c;
MidiInCollector::Ptr c (new MidiInCollector (parentService, { deviceName, deviceIdentifier }));
HMIDIIN h;
auto err = midiInOpen (&h, deviceID,
(DWORD_PTR) &MidiInCollector::midiInCallback,
(DWORD_PTR) (MidiInCollector*) c.get(),
CALLBACK_FUNCTION);
if (err != MMSYSERR_NOERROR)
throw std::runtime_error ("Failed to create Windows input device wrapper");
c->deviceHandle = h;
parentService.activeCollectors.add (c);
return c;
}
static DWORD sendMidiMessage (UINT_PTR deviceID, UINT msg, DWORD_PTR arg1, DWORD_PTR arg2)
{
return midiInMessage ((HMIDIIN) deviceID, msg, arg1, arg2);
}
static Array<MIDIINCAPS> getDeviceCaps()
{
Array<MIDIINCAPS> devices;
for (UINT i = 0; i < midiInGetNumDevs(); ++i)
{
MIDIINCAPS mc = {};
if (midiInGetDevCaps (i, &mc, sizeof (mc)) == MMSYSERR_NOERROR)
devices.add (mc);
}
return devices;
}
static MidiDeviceInfo getDefaultDevice() { return getAvailableDevices().getFirst(); }
void start() override { started = true; concatenator.reset(); collector->startOrStop(); }
void stop() override { started = false; collector->startOrStop(); concatenator.reset(); }
String getDeviceIdentifier() override { return collector->deviceInfo.identifier; }
String getDeviceName() override { return collector->deviceInfo.name; }
void pushMidiData (const void* inputData, int numBytes, double time)
{
concatenator.pushMidiData (inputData, numBytes, time, &input, callback);
}
MidiInput& input;
MidiInputCallback& callback;
MidiDataConcatenator concatenator { 4096 };
MidiInCollector::Ptr collector;
bool started = false;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (Win32InputWrapper)
};
//==============================================================================
struct MidiOutHandle : public ReferenceCountedObject
{
using Ptr = ReferenceCountedObjectPtr<MidiOutHandle>;
MidiOutHandle (Win32MidiService& parent, MidiDeviceInfo d, HMIDIOUT h)
: owner (parent), deviceInfo (d), handle (h)
{
owner.activeOutputHandles.add (this);
}
~MidiOutHandle()
{
if (handle != nullptr)
midiOutClose (handle);
owner.activeOutputHandles.removeFirstMatchingValue (this);
}
Win32MidiService& owner;
MidiDeviceInfo deviceInfo;
HMIDIOUT handle;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (MidiOutHandle)
};
//==============================================================================
struct Win32OutputWrapper : public MidiOutput::Pimpl,
public Win32MidiDeviceQuery<Win32OutputWrapper>
{
Win32OutputWrapper (Win32MidiService& p, const String& deviceIdentifier)
: parent (p)
{
auto devices = getAvailableDevices();
UINT deviceID = MIDI_MAPPER;
String deviceName;
for (int i = 0; i < devices.size(); ++i)
{
auto d = devices.getUnchecked (i);
if (d.identifier == deviceIdentifier)
{
deviceID = i;
deviceName = d.name;
break;
}
}
if (deviceID == MIDI_MAPPER)
{
// use the microsoft sw synth as a default - best not to allow deviceID
// to be MIDI_MAPPER, or else device sharing breaks
for (int i = 0; i < devices.size(); ++i)
if (devices[i].name.containsIgnoreCase ("microsoft"))
deviceID = (UINT) i;
}
for (int i = parent.activeOutputHandles.size(); --i >= 0;)
{
auto* activeHandle = parent.activeOutputHandles.getUnchecked (i);
if (activeHandle->deviceInfo.identifier == deviceIdentifier)
{
han = activeHandle;
return;
}
}
for (int i = 4; --i >= 0;)
{
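// midiOutOpen can transiently fail with MMSYSERR_ALLOCATED while another
// client still holds the device, so retry a few times before giving up.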
HMIDIOUT h = 0;
auto res = midiOutOpen (&h, deviceID, 0, 0, CALLBACK_NULL);
if (res == MMSYSERR_NOERROR)
{
han = new MidiOutHandle (parent, { deviceName, deviceIdentifier }, h);
return;
}
if (res == MMSYSERR_ALLOCATED)
Sleep (100);
else
break;
}
throw std::runtime_error ("Failed to create Windows output device wrapper");
}
void sendMessageNow (const MidiMessage& message) override
{
if (message.getRawDataSize() > 3 || message.isSysEx())
{
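// Anything bigger than a packed short message (i.e. sysex) must go through
// the buffered midiOutLongMsg path: prepare a header, send it, then wait
// for the driver to flag it as done before unpreparing it.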
MIDIHDR h = {};
h.lpData = (char*) message.getRawData();
h.dwBytesRecorded = h.dwBufferLength = (DWORD) message.getRawDataSize();
if (midiOutPrepareHeader (han->handle, &h, sizeof (MIDIHDR)) == MMSYSERR_NOERROR)
{
auto res = midiOutLongMsg (han->handle, &h, sizeof (MIDIHDR));
if (res == MMSYSERR_NOERROR)
{
while ((h.dwFlags & MHDR_DONE) == 0)
Sleep (1);
int count = 500; // 1 sec timeout
while (--count >= 0)
{
res = midiOutUnprepareHeader (han->handle, &h, sizeof (MIDIHDR));
if (res == MIDIERR_STILLPLAYING)
Sleep (2);
else
break;
}
}
}
}
else
{
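// Short messages are packed into a single DWORD; retry briefly if the
// driver reports that it isn't ready yet.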
for (int i = 0; i < 50; ++i)
{
if (midiOutShortMsg (han->handle, *(unsigned int*) message.getRawData()) != MIDIERR_NOTREADY)
break;
Sleep (1);
}
}
}
static DWORD sendMidiMessage (UINT_PTR deviceID, UINT msg, DWORD_PTR arg1, DWORD_PTR arg2)
{
return midiOutMessage ((HMIDIOUT) deviceID, msg, arg1, arg2);
}
static Array<MIDIOUTCAPS> getDeviceCaps()
{
Array<MIDIOUTCAPS> devices;
for (UINT i = 0; i < midiOutGetNumDevs(); ++i)
{
MIDIOUTCAPS mc = {};
if (midiOutGetDevCaps (i, &mc, sizeof (mc)) == MMSYSERR_NOERROR)
devices.add (mc);
}
return devices;
}
static MidiDeviceInfo getDefaultDevice()
{
auto defaultIndex = []()
{
auto deviceCaps = getDeviceCaps();
for (int i = 0; i < deviceCaps.size(); ++i)
if ((deviceCaps[i].wTechnology & MOD_MAPPER) != 0)
return i;
return 0;
}();
return getAvailableDevices()[defaultIndex];
}
String getDeviceIdentifier() override { return han->deviceInfo.identifier; }
String getDeviceName() override { return han->deviceInfo.name; }
Win32MidiService& parent;
MidiOutHandle::Ptr han;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (Win32OutputWrapper)
};
//==============================================================================
void asyncCheckForUnusedCollectors()
{
startTimer (10);
}
void timerCallback() override
{
stopTimer();
const ScopedLock sl (activeCollectorLock);
for (int i = activeCollectors.size(); --i >= 0;)
if (activeCollectors.getObjectPointer(i)->getReferenceCount() == 1)
activeCollectors.remove (i);
}
CriticalSection activeCollectorLock;
ReferenceCountedArray<MidiInCollector> activeCollectors;
Array<MidiOutHandle*> activeOutputHandles;
};
Array<Win32MidiService::MidiInCollector*, CriticalSection> Win32MidiService::MidiInCollector::activeMidiCollectors;
//==============================================================================
//==============================================================================
#if JUCE_USE_WINRT_MIDI
#ifndef JUCE_FORCE_WINRT_MIDI
#define JUCE_FORCE_WINRT_MIDI 0
#endif
#ifndef JUCE_WINRT_MIDI_LOGGING
#define JUCE_WINRT_MIDI_LOGGING 0
#endif
#if JUCE_WINRT_MIDI_LOGGING
#define JUCE_WINRT_MIDI_LOG(x) DBG(x)
#else
#define JUCE_WINRT_MIDI_LOG(x)
#endif
using namespace Microsoft::WRL;
using namespace ABI::Windows::Foundation;
using namespace ABI::Windows::Foundation::Collections;
using namespace ABI::Windows::Devices::Midi;
using namespace ABI::Windows::Devices::Enumeration;
using namespace ABI::Windows::Storage::Streams;
//==============================================================================
struct WinRTMidiService : public MidiServiceType
{
public:
//==============================================================================
WinRTMidiService()
{
auto* wrtWrapper = WinRTWrapper::getInstance();
if (! wrtWrapper->isInitialised())
throw std::runtime_error ("Failed to initialise the WinRT wrapper");
midiInFactory = wrtWrapper->getWRLFactory<IMidiInPortStatics> (&RuntimeClass_Windows_Devices_Midi_MidiInPort[0]);
if (midiInFactory == nullptr)
throw std::runtime_error ("Failed to create midi in factory");
midiOutFactory = wrtWrapper->getWRLFactory<IMidiOutPortStatics> (&RuntimeClass_Windows_Devices_Midi_MidiOutPort[0]);
if (midiOutFactory == nullptr)
throw std::runtime_error ("Failed to create midi out factory");
// The WinRT BLE MIDI API doesn't provide callbacks when devices become disconnected,
// but it does require a disconnection via the API before a device will reconnect again.
// We can monitor the BLE connection state of paired devices to get callbacks when
// connections are broken.
bleDeviceWatcher.reset (new BLEDeviceWatcher());
if (! bleDeviceWatcher->start())
throw std::runtime_error ("Failed to start the BLE device watcher");
inputDeviceWatcher.reset (new MidiIODeviceWatcher<IMidiInPortStatics> (midiInFactory));
if (! inputDeviceWatcher->start())
throw std::runtime_error ("Failed to start the midi input device watcher");
outputDeviceWatcher.reset (new MidiIODeviceWatcher<IMidiOutPortStatics> (midiOutFactory));
if (! outputDeviceWatcher->start())
throw std::runtime_error ("Failed to start the midi output device watcher");
}
Array<MidiDeviceInfo> getAvailableDevices (bool isInput) override
{
return isInput ? inputDeviceWatcher ->getAvailableDevices()
: outputDeviceWatcher->getAvailableDevices();
}
MidiDeviceInfo getDefaultDevice (bool isInput) override
{
return isInput ? inputDeviceWatcher ->getDefaultDevice()
: outputDeviceWatcher->getDefaultDevice();
}
MidiInput::Pimpl* createInputWrapper (MidiInput& input, const String& deviceIdentifier, MidiInputCallback& callback) override
{
return new WinRTInputWrapper (*this, input, deviceIdentifier, callback);
}
MidiOutput::Pimpl* createOutputWrapper (const String& deviceIdentifier) override
{
return new WinRTOutputWrapper (*this, deviceIdentifier);
}
private:
//==============================================================================
class DeviceCallbackHandler
{
public:
virtual ~DeviceCallbackHandler() {};
virtual HRESULT addDevice (IDeviceInformation*) = 0;
virtual HRESULT removeDevice (IDeviceInformationUpdate*) = 0;
virtual HRESULT updateDevice (IDeviceInformationUpdate*) = 0;
bool attach (HSTRING deviceSelector, DeviceInformationKind infoKind)
{
auto* wrtWrapper = WinRTWrapper::getInstanceWithoutCreating();
if (wrtWrapper == nullptr)
{
JUCE_WINRT_MIDI_LOG ("Failed to get the WinRTWrapper singleton!");
return false;
}
auto deviceInfoFactory = wrtWrapper->getWRLFactory<IDeviceInformationStatics2> (&RuntimeClass_Windows_Devices_Enumeration_DeviceInformation[0]);
if (deviceInfoFactory == nullptr)
return false;
// A quick way of getting an IVector<HSTRING>...
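// (WinRT doesn't offer an obvious way to construct an IVector<HSTRING>
// directly from C++, so we borrow the vector owned by a throwaway
// DevicePicker and clear it before use)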
auto requestedProperties = [wrtWrapper]
{
auto devicePicker = wrtWrapper->activateInstance<IDevicePicker> (&RuntimeClass_Windows_Devices_Enumeration_DevicePicker[0],
__uuidof (IDevicePicker));
jassert (devicePicker != nullptr);
IVector<HSTRING>* result;
auto hr = devicePicker->get_RequestedProperties (&result);
jassert (SUCCEEDED (hr));
hr = result->Clear();
jassert (SUCCEEDED (hr));
return result;
}();
StringArray propertyKeys ("System.Devices.ContainerId",
"System.Devices.Aep.ContainerId",
"System.Devices.Aep.IsConnected");
for (auto& key : propertyKeys)
{
WinRTWrapper::ScopedHString hstr (key);
auto hr = requestedProperties->Append (hstr.get());
if (FAILED (hr))
{
jassertfalse;
return false;
}
}
WinRTWrapper::ComPtr<IIterable<HSTRING>> iter;
auto hr = requestedProperties->QueryInterface (__uuidof (IIterable<HSTRING>), (void**) iter.resetAndGetPointerAddress());
if (FAILED (hr))
{
jassertfalse;
return false;
}
hr = deviceInfoFactory->CreateWatcherWithKindAqsFilterAndAdditionalProperties (deviceSelector, iter, infoKind,
watcher.resetAndGetPointerAddress());
if (FAILED (hr))
{
jassertfalse;
return false;
}
enumerationThread.startThread();
return true;
};
void detach()
{
enumerationThread.stopThread (2000);
if (watcher == nullptr)
return;
auto hr = watcher->Stop();
jassert (SUCCEEDED (hr));
if (deviceAddedToken.value != 0)
{
hr = watcher->remove_Added (deviceAddedToken);
jassert (SUCCEEDED (hr));
deviceAddedToken.value = 0;
}
if (deviceUpdatedToken.value != 0)
{
hr = watcher->remove_Updated (deviceUpdatedToken);
jassert (SUCCEEDED (hr));
deviceUpdatedToken.value = 0;
}
if (deviceRemovedToken.value != 0)
{
hr = watcher->remove_Removed (deviceRemovedToken);
jassert (SUCCEEDED (hr));
deviceRemovedToken.value = 0;
}
watcher = nullptr;
}
template<typename InfoType>
IInspectable* getValueFromDeviceInfo (String key, InfoType* info)
{
__FIMapView_2_HSTRING_IInspectable* properties;
info->get_Properties (&properties);
boolean found = false;
WinRTWrapper::ScopedHString keyHstr (key);
auto hr = properties->HasKey (keyHstr.get(), &found);
if (FAILED (hr))
{
jassertfalse;
return nullptr;
}
if (! found)
return nullptr;
IInspectable* inspectable;
hr = properties->Lookup (keyHstr.get(), &inspectable);
if (FAILED (hr))
{
jassertfalse;
return nullptr;
}
return inspectable;
}
String getGUIDFromInspectable (IInspectable& inspectable)
{
WinRTWrapper::ComPtr<IReference<GUID>> guidRef;
auto hr = inspectable.QueryInterface (__uuidof (IReference<GUID>),
(void**) guidRef.resetAndGetPointerAddress());
if (FAILED (hr))
{
jassertfalse;
return {};
}
GUID result;
hr = guidRef->get_Value (&result);
if (FAILED (hr))
{
jassertfalse;
return {};
}
OLECHAR* resultString = nullptr;
StringFromCLSID (result, &resultString);
String guidString (resultString);
CoTaskMemFree (resultString); // StringFromCLSID allocates the string with CoTaskMemAlloc
return guidString;
}
bool getBoolFromInspectable (IInspectable& inspectable)
{
WinRTWrapper::ComPtr<IReference<bool>> boolRef;
auto hr = inspectable.QueryInterface (__uuidof (IReference<bool>),
(void**) boolRef.resetAndGetPointerAddress());
if (FAILED (hr))
{
jassertfalse;
return false;
}
boolean result;
hr = boolRef->get_Value (&result);
if (FAILED (hr))
{
jassertfalse;
return false;
}
return result;
}
private:
//==============================================================================
struct DeviceEnumerationThread : public Thread
{
DeviceEnumerationThread (DeviceCallbackHandler& h,
WinRTWrapper::ComPtr<IDeviceWatcher>& w,
EventRegistrationToken& added,
EventRegistrationToken& removed,
EventRegistrationToken& updated)
: Thread ("WinRT Device Enumeration Thread"), handler (h), watcher (w),
deviceAddedToken (added), deviceRemovedToken (removed), deviceUpdatedToken (updated)
{}
void run() override
{
auto handlerPtr = std::addressof (handler);
watcher->add_Added (
Callback<ITypedEventHandler<DeviceWatcher*, DeviceInformation*>> (
[handlerPtr] (IDeviceWatcher*, IDeviceInformation* info) { return handlerPtr->addDevice (info); }
).Get(),
&deviceAddedToken);
watcher->add_Removed (
Callback<ITypedEventHandler<DeviceWatcher*, DeviceInformationUpdate*>> (
[handlerPtr] (IDeviceWatcher*, IDeviceInformationUpdate* infoUpdate) { return handlerPtr->removeDevice (infoUpdate); }
).Get(),
&deviceRemovedToken);
watcher->add_Updated (
Callback<ITypedEventHandler<DeviceWatcher*, DeviceInformationUpdate*>> (
[handlerPtr] (IDeviceWatcher*, IDeviceInformationUpdate* infoUpdate) { return handlerPtr->updateDevice (infoUpdate); }
).Get(),
&deviceUpdatedToken);
watcher->Start();
}
DeviceCallbackHandler& handler;
WinRTWrapper::ComPtr<IDeviceWatcher>& watcher;
EventRegistrationToken& deviceAddedToken, deviceRemovedToken, deviceUpdatedToken;
};
//==============================================================================
WinRTWrapper::ComPtr<IDeviceWatcher> watcher;
EventRegistrationToken deviceAddedToken { 0 },
deviceRemovedToken { 0 },
deviceUpdatedToken { 0 };
DeviceEnumerationThread enumerationThread { *this, watcher,
deviceAddedToken,
deviceRemovedToken,
deviceUpdatedToken };
};
//==============================================================================
struct BLEDeviceWatcher final : private DeviceCallbackHandler
{
struct DeviceInfo
{
String containerID;
bool isConnected = false;
};
BLEDeviceWatcher() = default;
~BLEDeviceWatcher()
{
detach();
}
//==============================================================================
HRESULT addDevice (IDeviceInformation* addedDeviceInfo) override
{
HSTRING deviceIDHst;
auto hr = addedDeviceInfo->get_Id (&deviceIDHst);
if (FAILED (hr))
{
JUCE_WINRT_MIDI_LOG ("Failed to query added BLE device ID!");
return S_OK;
}
auto* wrtWrapper = WinRTWrapper::getInstanceWithoutCreating();
if (wrtWrapper == nullptr)
{
JUCE_WINRT_MIDI_LOG ("Failed to get the WinRTWrapper singleton!");
return S_OK;
}
auto deviceID = wrtWrapper->hStringToString (deviceIDHst);
JUCE_WINRT_MIDI_LOG ("Detected paired BLE device: " << deviceID);
if (auto* containerIDValue = getValueFromDeviceInfo ("System.Devices.Aep.ContainerId", addedDeviceInfo))
{
auto containerID = getGUIDFromInspectable (*containerIDValue);
if (containerID.isNotEmpty())
{
DeviceInfo info = { containerID };
if (auto* connectedValue = getValueFromDeviceInfo ("System.Devices.Aep.IsConnected", addedDeviceInfo))
info.isConnected = getBoolFromInspectable (*connectedValue);
JUCE_WINRT_MIDI_LOG ("Adding BLE device: " << deviceID << " " << info.containerID
<< " " << (info.isConnected ? "connected" : "disconnected"));
devices.set (deviceID, info);
return S_OK;
}
}
JUCE_WINRT_MIDI_LOG ("Failed to get a container ID for BLE device: " << deviceID);
return S_OK;
}
HRESULT removeDevice (IDeviceInformationUpdate* removedDeviceInfo) override
{
HSTRING removedDeviceIdHstr;
auto hr = removedDeviceInfo->get_Id (&removedDeviceIdHstr);
if (FAILED (hr))
{
JUCE_WINRT_MIDI_LOG ("Failed to query removed BLE device ID!");
return S_OK;
}
auto* wrtWrapper = WinRTWrapper::getInstanceWithoutCreating();
if (wrtWrapper == nullptr)
{
JUCE_WINRT_MIDI_LOG ("Failed to get the WinRTWrapper singleton!");
return S_OK;
}
auto removedDeviceId = wrtWrapper->hStringToString (removedDeviceIdHstr);
JUCE_WINRT_MIDI_LOG ("Removing BLE device: " << removedDeviceId);
{
const ScopedLock lock (deviceChanges);
if (devices.contains (removedDeviceId))
{
auto& info = devices.getReference (removedDeviceId);
listeners.call ([&info] (Listener& l) { l.bleDeviceDisconnected (info.containerID); });
devices.remove (removedDeviceId);
JUCE_WINRT_MIDI_LOG ("Removed BLE device: " << removedDeviceId);
}
}
return S_OK;
}
HRESULT updateDevice (IDeviceInformationUpdate* updatedDeviceInfo) override
{
HSTRING updatedDeviceIdHstr;
auto hr = updatedDeviceInfo->get_Id (&updatedDeviceIdHstr);
if (FAILED (hr))
{
JUCE_WINRT_MIDI_LOG ("Failed to query updated BLE device ID!");
return S_OK;
}
auto* wrtWrapper = WinRTWrapper::getInstanceWithoutCreating();
if (wrtWrapper == nullptr)
{
JUCE_WINRT_MIDI_LOG ("Failed to get the WinRTWrapper singleton!");
return S_OK;
}
auto updatedDeviceId = wrtWrapper->hStringToString (updatedDeviceIdHstr);
JUCE_WINRT_MIDI_LOG ("Updating BLE device: " << updatedDeviceId);
if (auto* connectedValue = getValueFromDeviceInfo ("System.Devices.Aep.IsConnected", updatedDeviceInfo))
{
auto isConnected = getBoolFromInspectable (*connectedValue);
{
const ScopedLock lock (deviceChanges);
if (! devices.contains (updatedDeviceId))
return S_OK;
auto& info = devices.getReference (updatedDeviceId);
if (info.isConnected && ! isConnected)
{
JUCE_WINRT_MIDI_LOG ("BLE device connection status change: " << updatedDeviceId << " " << info.containerID << " " << (isConnected ? "connected" : "disconnected"));
listeners.call ([&info] (Listener& l) { l.bleDeviceDisconnected (info.containerID); });
}
info.isConnected = isConnected;
}
}
return S_OK;
}
//==============================================================================
bool start()
{
WinRTWrapper::ScopedHString deviceSelector ("System.Devices.Aep.ProtocolId:=\"{bb7bb05e-5972-42b5-94fc-76eaa7084d49}\""
" AND System.Devices.Aep.IsPaired:=System.StructuredQueryType.Boolean#True");
return attach (deviceSelector.get(), DeviceInformationKind::DeviceInformationKind_AssociationEndpoint);
}
//==============================================================================
struct Listener
{
virtual ~Listener() {};
virtual void bleDeviceAdded (const String& containerID) = 0;
virtual void bleDeviceDisconnected (const String& containerID) = 0;
};
void addListener (Listener* l)
{
listeners.add (l);
}
void removeListener (Listener* l)
{
listeners.remove (l);
}
//==============================================================================
ListenerList<Listener> listeners;
HashMap<String, DeviceInfo> devices;
CriticalSection deviceChanges;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (BLEDeviceWatcher);
};
//==============================================================================
struct WinRTMIDIDeviceInfo
{
String deviceID, containerID, name;
bool isDefault = false;
};
//==============================================================================
template <typename COMFactoryType>
struct MidiIODeviceWatcher final : private DeviceCallbackHandler
{
MidiIODeviceWatcher (WinRTWrapper::ComPtr<COMFactoryType>& comFactory)
: factory (comFactory)
{
}
~MidiIODeviceWatcher()
{
detach();
}
HRESULT addDevice (IDeviceInformation* addedDeviceInfo) override
{
WinRTMIDIDeviceInfo info;
HSTRING deviceID;
auto hr = addedDeviceInfo->get_Id (&deviceID);
if (FAILED (hr))
{
JUCE_WINRT_MIDI_LOG ("Failed to query added MIDI device ID!");
return S_OK;
}
auto* wrtWrapper = WinRTWrapper::getInstanceWithoutCreating();
if (wrtWrapper == nullptr)
{
JUCE_WINRT_MIDI_LOG ("Failed to get the WinRTWrapper singleton!");
return S_OK;
}
info.deviceID = wrtWrapper->hStringToString (deviceID);
JUCE_WINRT_MIDI_LOG ("Detected MIDI device: " << info.deviceID);
boolean isEnabled = false;
hr = addedDeviceInfo->get_IsEnabled (&isEnabled);
if (FAILED (hr) || ! isEnabled)
{
JUCE_WINRT_MIDI_LOG ("MIDI device not enabled: " << info.deviceID);
return S_OK;
}
// We use the container ID to match a MIDI device with a generic BLE device, if possible
if (auto* containerIDValue = getValueFromDeviceInfo ("System.Devices.ContainerId", addedDeviceInfo))
info.containerID = getGUIDFromInspectable (*containerIDValue);
HSTRING name;
hr = addedDeviceInfo->get_Name (&name);
if (FAILED (hr))
{
JUCE_WINRT_MIDI_LOG ("Failed to query detected MIDI device name for " << info.deviceID);
return S_OK;
}
info.name = wrtWrapper->hStringToString (name);
boolean isDefault = false;
hr = addedDeviceInfo->get_IsDefault (&isDefault);
if (FAILED (hr))
{
JUCE_WINRT_MIDI_LOG ("Failed to query detected MIDI device defaultness for " << info.deviceID << " " << info.name);
return S_OK;
}
info.isDefault = isDefault;
JUCE_WINRT_MIDI_LOG ("Adding MIDI device: " << info.deviceID << " " << info.containerID << " " << info.name);
{
const ScopedLock lock (deviceChanges);
connectedDevices.add (info);
}
return S_OK;
}
HRESULT removeDevice (IDeviceInformationUpdate* removedDeviceInfo) override
{
HSTRING removedDeviceIdHstr;
auto hr = removedDeviceInfo->get_Id (&removedDeviceIdHstr);
if (FAILED (hr))
{
JUCE_WINRT_MIDI_LOG ("Failed to query removed MIDI device ID!");
return S_OK;
}
auto* wrtWrapper = WinRTWrapper::getInstanceWithoutCreating();
if (wrtWrapper == nullptr)
{
JUCE_WINRT_MIDI_LOG ("Failed to get the WinRTWrapper singleton!");
return S_OK;
}
auto removedDeviceId = wrtWrapper->hStringToString (removedDeviceIdHstr);
JUCE_WINRT_MIDI_LOG ("Removing MIDI device: " << removedDeviceId);
{
const ScopedLock lock (deviceChanges);
for (int i = 0; i < connectedDevices.size(); ++i)
{
if (connectedDevices[i].deviceID == removedDeviceId)
{
connectedDevices.remove (i);
JUCE_WINRT_MIDI_LOG ("Removed MIDI device: " << removedDeviceId);
break;
}
}
}
return S_OK;
}
// This is never called
HRESULT updateDevice (IDeviceInformationUpdate*) override { return S_OK; }
bool start()
{
HSTRING deviceSelector;
auto hr = factory->GetDeviceSelector (&deviceSelector);
if (FAILED (hr))
{
JUCE_WINRT_MIDI_LOG ("Failed to get MIDI device selector!");
return false;
}<|fim▁hole|> Array<MidiDeviceInfo> getAvailableDevices()
{
{
const ScopedLock lock (deviceChanges);
lastQueriedConnectedDevices = connectedDevices;
}
StringArray deviceNames, deviceIDs;
for (auto info : lastQueriedConnectedDevices.get())
{
deviceNames.add (info.name);
deviceIDs .add (info.containerID);
}
deviceNames.appendNumbersToDuplicates (false, false, CharPointer_UTF8 ("-"), CharPointer_UTF8 (""));
deviceIDs .appendNumbersToDuplicates (false, false, CharPointer_UTF8 ("-"), CharPointer_UTF8 (""));
Array<MidiDeviceInfo> devices;
for (int i = 0; i < deviceNames.size(); ++i)
devices.add ({ deviceNames[i], deviceIDs[i] });
return devices;
}
MidiDeviceInfo getDefaultDevice()
{
auto& lastDevices = lastQueriedConnectedDevices.get();
for (auto& d : lastDevices)
if (d.isDefault)
return { d.name, d.containerID };
return {};
}
WinRTMIDIDeviceInfo getWinRTDeviceInfoForDevice (const String& deviceIdentifier)
{
auto devices = getAvailableDevices();
for (int i = 0; i < devices.size(); ++i)
if (devices.getUnchecked (i).identifier == deviceIdentifier)
return lastQueriedConnectedDevices.get()[i];
return {};
}
WinRTWrapper::ComPtr<COMFactoryType>& factory;
Array<WinRTMIDIDeviceInfo> connectedDevices;
CriticalSection deviceChanges;
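// Per-thread snapshot taken by getAvailableDevices(), so that the
// index-based lookup in getWinRTDeviceInfoForDevice() stays consistent with
// the list the caller just queried, even if devices come or go in between.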
ThreadLocalValue<Array<WinRTMIDIDeviceInfo>> lastQueriedConnectedDevices;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (MidiIODeviceWatcher);
};
//==============================================================================
template <typename COMFactoryType, typename COMInterfaceType, typename COMType>
struct OpenMidiPortThread : public Thread
{
OpenMidiPortThread (String threadName, String midiDeviceID,
WinRTWrapper::ComPtr<COMFactoryType>& comFactory,
WinRTWrapper::ComPtr<COMInterfaceType>& comPort)
: Thread (threadName),
deviceID (midiDeviceID),
factory (comFactory),
port (comPort)
{
}
~OpenMidiPortThread()
{
stopThread (2000);
}
void run() override
{
WinRTWrapper::ScopedHString hDeviceId (deviceID);
WinRTWrapper::ComPtr<IAsyncOperation<COMType*>> asyncOp;
auto hr = factory->FromIdAsync (hDeviceId.get(), asyncOp.resetAndGetPointerAddress());
if (FAILED (hr))
return;
hr = asyncOp->put_Completed (Callback<IAsyncOperationCompletedHandler<COMType*>> (
[this] (IAsyncOperation<COMType*>* asyncOpPtr, AsyncStatus)
{
if (asyncOpPtr == nullptr)
return E_ABORT;
auto hr = asyncOpPtr->GetResults (port.resetAndGetPointerAddress());
if (FAILED (hr))
return hr;
portOpened.signal();
return S_OK;
}
).Get());
// We need to use a timeout here, rather than waiting indefinitely, as the
// WinRT API can occasionally hang!
portOpened.wait (2000);
}
const String deviceID;
WinRTWrapper::ComPtr<COMFactoryType>& factory;
WinRTWrapper::ComPtr<COMInterfaceType>& port;
WaitableEvent portOpened { true };
};
//==============================================================================
template <typename MIDIIOStaticsType, typename MIDIPort>
class WinRTIOWrapper : private BLEDeviceWatcher::Listener
{
public:
WinRTIOWrapper (BLEDeviceWatcher& bleWatcher,
MidiIODeviceWatcher<MIDIIOStaticsType>& midiDeviceWatcher,
const String& deviceIdentifier)
: bleDeviceWatcher (bleWatcher)
{
{
const ScopedLock lock (midiDeviceWatcher.deviceChanges);
deviceInfo = midiDeviceWatcher.getWinRTDeviceInfoForDevice (deviceIdentifier);
}
if (deviceInfo.deviceID.isEmpty())
throw std::runtime_error ("Invalid device index");
JUCE_WINRT_MIDI_LOG ("Creating JUCE MIDI IO: " << deviceInfo.deviceID);
if (deviceInfo.containerID.isNotEmpty())
{
bleDeviceWatcher.addListener (this);
const ScopedLock lock (bleDeviceWatcher.deviceChanges);
HashMap<String, BLEDeviceWatcher::DeviceInfo>::Iterator iter (bleDeviceWatcher.devices);
while (iter.next())
{
if (iter.getValue().containerID == deviceInfo.containerID)
{
isBLEDevice = true;
break;
}
}
}
}
virtual ~WinRTIOWrapper()
{
bleDeviceWatcher.removeListener (this);
disconnect();
}
//==============================================================================
virtual void disconnect()
{
if (midiPort != nullptr)
{
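// BLE ports keep the underlying connection alive; explicitly releasing the
// port here forces the disconnection that the WinRT API requires before
// the device will successfully reconnect (see the bleDeviceWatcher comment
// in WinRTMidiService's constructor).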
if (isBLEDevice)
midiPort->Release();
}
midiPort = nullptr;
}
private:
//==============================================================================
void bleDeviceAdded (const String& containerID) override
{
if (containerID == deviceInfo.containerID)
isBLEDevice = true;
}
void bleDeviceDisconnected (const String& containerID) override
{
if (containerID == deviceInfo.containerID)
{
JUCE_WINRT_MIDI_LOG ("Disconnecting MIDI port from BLE disconnection: " << deviceInfo.deviceID
<< " " << deviceInfo.containerID << " " << deviceInfo.name);
disconnect();
}
}
protected:
//==============================================================================
BLEDeviceWatcher& bleDeviceWatcher;
WinRTMIDIDeviceInfo deviceInfo;
bool isBLEDevice = false;
WinRTWrapper::ComPtr<MIDIPort> midiPort;
};
//==============================================================================
struct WinRTInputWrapper final : public MidiInput::Pimpl,
private WinRTIOWrapper<IMidiInPortStatics, IMidiInPort>
{
WinRTInputWrapper (WinRTMidiService& service, MidiInput& input, const String& deviceIdentifier, MidiInputCallback& cb)
: WinRTIOWrapper <IMidiInPortStatics, IMidiInPort> (*service.bleDeviceWatcher, *service.inputDeviceWatcher, deviceIdentifier),
inputDevice (input),
callback (cb)
{
OpenMidiPortThread<IMidiInPortStatics, IMidiInPort, MidiInPort> portThread ("Open WinRT MIDI input port",
deviceInfo.deviceID,
service.midiInFactory,
midiPort);
portThread.startThread();
portThread.waitForThreadToExit (-1);
if (midiPort == nullptr)
{
JUCE_WINRT_MIDI_LOG ("Timed out waiting for midi input port creation");
return;
}
startTime = Time::getMillisecondCounterHiRes();
auto hr = midiPort->add_MessageReceived (
Callback<ITypedEventHandler<MidiInPort*, MidiMessageReceivedEventArgs*>> (
[this] (IMidiInPort*, IMidiMessageReceivedEventArgs* args) { return midiInMessageReceived (args); }
).Get(),
&midiInMessageToken);
if (FAILED (hr))
{
JUCE_WINRT_MIDI_LOG ("Failed to set MIDI input callback");
jassertfalse;
}
}
~WinRTInputWrapper()
{
disconnect();
}
//==============================================================================
void start() override
{
if (! isStarted)
{
concatenator.reset();
isStarted = true;
}
}
void stop() override
{
if (isStarted)
{
isStarted = false;
concatenator.reset();
}
}
String getDeviceIdentifier() override { return deviceInfo.containerID; }
String getDeviceName() override { return deviceInfo.name; }
//==============================================================================
void disconnect() override
{
stop();
if (midiPort != nullptr && midiInMessageToken.value != 0)
midiPort->remove_MessageReceived (midiInMessageToken);
WinRTIOWrapper<IMidiInPortStatics, IMidiInPort>::disconnect();
}
//==============================================================================
HRESULT midiInMessageReceived (IMidiMessageReceivedEventArgs* args)
{
if (! isStarted)
return S_OK;
WinRTWrapper::ComPtr<IMidiMessage> message;
auto hr = args->get_Message (message.resetAndGetPointerAddress());
if (FAILED (hr))
return hr;
WinRTWrapper::ComPtr<IBuffer> buffer;
hr = message->get_RawData (buffer.resetAndGetPointerAddress());
if (FAILED (hr))
return hr;
WinRTWrapper::ComPtr<Windows::Storage::Streams::IBufferByteAccess> bufferByteAccess;
hr = buffer->QueryInterface (bufferByteAccess.resetAndGetPointerAddress());
if (FAILED (hr))
return hr;
uint8_t* bufferData = nullptr;
hr = bufferByteAccess->Buffer (&bufferData);
if (FAILED (hr))
return hr;
uint32_t numBytes = 0;
hr = buffer->get_Length (&numBytes);
if (FAILED (hr))
return hr;
ABI::Windows::Foundation::TimeSpan timespan;
hr = message->get_Timestamp (&timespan);
if (FAILED (hr))
return hr;
concatenator.pushMidiData (bufferData, numBytes,
convertTimeStamp (timespan.Duration),
&inputDevice, callback);
return S_OK;
}
double convertTimeStamp (int64 timestamp)
{
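// WinRT timestamps are TimeSpan durations expressed in 100-nanosecond
// ticks, hence the division by 10000 to get milliseconds. The clamping
// below mirrors MidiInCollector::convertTimeStamp() in the Win32 service.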
auto millisecondsSinceStart = static_cast<double> (timestamp) / 10000.0;
auto t = startTime + millisecondsSinceStart;
auto now = Time::getMillisecondCounterHiRes();
if (t > now)
{
if (t > now + 2.0)
startTime -= 1.0;
t = now;
}
return t * 0.001;
}
//==============================================================================
MidiInput& inputDevice;
MidiInputCallback& callback;
MidiDataConcatenator concatenator { 4096 };
EventRegistrationToken midiInMessageToken { 0 };
double startTime = 0;
bool isStarted = false;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (WinRTInputWrapper);
};
//==============================================================================
struct WinRTOutputWrapper final : public MidiOutput::Pimpl,
private WinRTIOWrapper <IMidiOutPortStatics, IMidiOutPort>
{
WinRTOutputWrapper (WinRTMidiService& service, const String& deviceIdentifier)
: WinRTIOWrapper <IMidiOutPortStatics, IMidiOutPort> (*service.bleDeviceWatcher, *service.outputDeviceWatcher, deviceIdentifier)
{
OpenMidiPortThread<IMidiOutPortStatics, IMidiOutPort, IMidiOutPort> portThread ("Open WinRT MIDI output port",
deviceInfo.deviceID,
service.midiOutFactory,
midiPort);
portThread.startThread();
portThread.waitForThreadToExit (-1);
if (midiPort == nullptr)
throw std::runtime_error ("Timed out waiting for midi output port creation");
auto* wrtWrapper = WinRTWrapper::getInstanceWithoutCreating();
if (wrtWrapper == nullptr)
throw std::runtime_error ("Failed to get the WinRTWrapper singleton!");
auto bufferFactory = wrtWrapper->getWRLFactory<IBufferFactory> (&RuntimeClass_Windows_Storage_Streams_Buffer[0]);
if (bufferFactory == nullptr)
throw std::runtime_error ("Failed to create output buffer factory");
auto hr = bufferFactory->Create (static_cast<UINT32> (65536), buffer.resetAndGetPointerAddress());
if (FAILED (hr))
throw std::runtime_error ("Failed to create output buffer");
hr = buffer->QueryInterface (bufferByteAccess.resetAndGetPointerAddress());
if (FAILED (hr))
throw std::runtime_error ("Failed to get buffer byte access");
hr = bufferByteAccess->Buffer (&bufferData);
if (FAILED (hr))
throw std::runtime_error ("Failed to get buffer data pointer");
}
//==============================================================================
void sendMessageNow (const MidiMessage& message) override
{
if (midiPort == nullptr)
return;
auto numBytes = message.getRawDataSize();
auto hr = buffer->put_Length (numBytes);
if (FAILED (hr))
{
jassertfalse;
return;
}
memcpy_s (bufferData, numBytes, message.getRawData(), numBytes);
midiPort->SendBuffer (buffer);
}
String getDeviceIdentifier() override { return deviceInfo.containerID; }
String getDeviceName() override { return deviceInfo.name; }
//==============================================================================
WinRTWrapper::ComPtr<IBuffer> buffer;
WinRTWrapper::ComPtr<Windows::Storage::Streams::IBufferByteAccess> bufferByteAccess;
uint8_t* bufferData = nullptr;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (WinRTOutputWrapper);
};
WinRTWrapper::ComPtr<IMidiInPortStatics> midiInFactory;
WinRTWrapper::ComPtr<IMidiOutPortStatics> midiOutFactory;
std::unique_ptr<MidiIODeviceWatcher<IMidiInPortStatics>> inputDeviceWatcher;
std::unique_ptr<MidiIODeviceWatcher<IMidiOutPortStatics>> outputDeviceWatcher;
std::unique_ptr<BLEDeviceWatcher> bleDeviceWatcher;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (WinRTMidiService)
};
#endif // JUCE_USE_WINRT_MIDI
//==============================================================================
//==============================================================================
#if ! JUCE_MINGW
extern RTL_OSVERSIONINFOW getWindowsVersionInfo();
#endif
struct MidiService : public DeletedAtShutdown
{
MidiService()
{
#if JUCE_USE_WINRT_MIDI && ! JUCE_MINGW
#if ! JUCE_FORCE_WINRT_MIDI
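// WinRT MIDI is only used by default from Windows 10 1809 (build 17763)
// onwards; older systems fall through to the classic Win32 service below.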
auto windowsVersionInfo = getWindowsVersionInfo();
if (windowsVersionInfo.dwMajorVersion >= 10 && windowsVersionInfo.dwBuildNumber >= 17763)
#endif
{
try
{
internal.reset (new WinRTMidiService());
return;
}
catch (std::runtime_error&) {}
}
#endif
internal.reset (new Win32MidiService());
}
~MidiService()
{
clearSingletonInstance();
}
static MidiServiceType& getService()
{
jassert (getInstance()->internal != nullptr);
return *getInstance()->internal.get();
}
JUCE_DECLARE_SINGLETON (MidiService, false)
private:
std::unique_ptr<MidiServiceType> internal;
};
JUCE_IMPLEMENT_SINGLETON (MidiService)
//==============================================================================
static int findDefaultDeviceIndex (const Array<MidiDeviceInfo>& available, const MidiDeviceInfo& defaultDevice)
{
for (int i = 0; i < available.size(); ++i)
if (available.getUnchecked (i) == defaultDevice)
return i;
return 0;
}
Array<MidiDeviceInfo> MidiInput::getAvailableDevices()
{
return MidiService::getService().getAvailableDevices (true);
}
MidiDeviceInfo MidiInput::getDefaultDevice()
{
return MidiService::getService().getDefaultDevice (true);
}
std::unique_ptr<MidiInput> MidiInput::openDevice (const String& deviceIdentifier, MidiInputCallback* callback)
{
if (deviceIdentifier.isEmpty() || callback == nullptr)
return {};
std::unique_ptr<MidiInput> in (new MidiInput ({}, deviceIdentifier));
std::unique_ptr<Pimpl> wrapper;
try
{
wrapper.reset (MidiService::getService().createInputWrapper (*in, deviceIdentifier, *callback));
}
catch (std::runtime_error&)
{
return {};
}
in->setName (wrapper->getDeviceName());
in->internal = std::move (wrapper);
return in;
}
StringArray MidiInput::getDevices()
{
StringArray deviceNames;
for (auto& d : getAvailableDevices())
deviceNames.add (d.name);
return deviceNames;
}
int MidiInput::getDefaultDeviceIndex()
{
return findDefaultDeviceIndex (getAvailableDevices(), getDefaultDevice());
}
std::unique_ptr<MidiInput> MidiInput::openDevice (int index, MidiInputCallback* callback)
{
return openDevice (getAvailableDevices()[index].identifier, callback);
}
MidiInput::MidiInput (const String& deviceName, const String& deviceIdentifier)
: deviceInfo (deviceName, deviceIdentifier)
{
}
MidiInput::~MidiInput() = default;
void MidiInput::start() { internal->start(); }
void MidiInput::stop() { internal->stop(); }
//==============================================================================
Array<MidiDeviceInfo> MidiOutput::getAvailableDevices()
{
return MidiService::getService().getAvailableDevices (false);
}
MidiDeviceInfo MidiOutput::getDefaultDevice()
{
return MidiService::getService().getDefaultDevice (false);
}
std::unique_ptr<MidiOutput> MidiOutput::openDevice (const String& deviceIdentifier)
{
if (deviceIdentifier.isEmpty())
return {};
std::unique_ptr<Pimpl> wrapper;
try
{
wrapper.reset (MidiService::getService().createOutputWrapper (deviceIdentifier));
}
catch (std::runtime_error&)
{
return {};
}
std::unique_ptr<MidiOutput> out;
out.reset (new MidiOutput (wrapper->getDeviceName(), deviceIdentifier));
out->internal = std::move (wrapper);
return out;
}
StringArray MidiOutput::getDevices()
{
StringArray deviceNames;
for (auto& d : getAvailableDevices())
deviceNames.add (d.name);
return deviceNames;
}
int MidiOutput::getDefaultDeviceIndex()
{
return findDefaultDeviceIndex (getAvailableDevices(), getDefaultDevice());
}
std::unique_ptr<MidiOutput> MidiOutput::openDevice (int index)
{
return openDevice (getAvailableDevices()[index].identifier);
}
MidiOutput::~MidiOutput()
{
stopBackgroundThread();
}
void MidiOutput::sendMessageNow (const MidiMessage& message)
{
internal->sendMessageNow (message);
}
} // namespace juce<|fim▁end|> |
return attach (deviceSelector, DeviceInformationKind::DeviceInformationKind_DeviceInterface);
}
|