| prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
|---|---|
<|file_name|>country.service.ts<|end_file_name|><|fim▁begin|>/**
* Created by aaron on 27/05/17.
*/
import {Injectable} from "@angular/core";
import {Http, Response} from "@angular/http";
import {Observable, Subscription} from "rxjs";
import {map} from "rxjs/operators";
import {SearchParamService} from "./searchParam.service";
import {Logger} from "./logger.service";
import {SearchParams} from "../models/search-params";
// todo This service should really cache these results.
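// A minimal caching sketch (illustrative only, not part of the original file):
// RxJS 5.5+'s shareReplay operator could memoize the static country list so
// repeat subscribers reuse a single HTTP call, e.g.:
//   private countryList$ = this.http.get(this.countryUrl + '/list')
//     .pipe(map(this.extractData), shareReplay(1));
// shareReplay is imported from "rxjs/operators", like map above.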
@Injectable()
export class CountryService {
private countryUrl = 'api/country'; // URL to web API<|fim▁hole|>
subscription: Subscription;
constructor(private http: Http, private callsignService: SearchParamService, private logger: Logger) {
this.subscription = this.callsignService.update$.subscribe(
update => {
this.getCountries(update)
});
logger.log(this.subscription)
}
public getCountries(update: SearchParams): Observable<any[]> {
this.logger.log("country service has " + update.callsign);
return this.http.get(this.countryUrl + '/' + update.callsign + '/' + update.band)
.pipe(map(this.extractData))
}
public getTimeData(tx: string, rx: string): Observable<any[]> {
this.logger.log("tx: " + tx + " rx: " + rx);
return this.http.get(this.countryUrl + '/time/' + tx + '/' + rx).pipe(map(this.extractData))
}
public getBandData(tx: string, rx: string): Observable<any[]> {
this.logger.log("tx: " + tx + " rx: " + rx);
return this.http.get(this.countryUrl + '/band/' + tx + '/' + rx).pipe(map(this.extractData))
}
public getCountryList(): Observable<any[]> {
return this.http.get(this.countryUrl + '/list').pipe(map(this.extractData))
}
private extractData(res: Response) {
let body = res.json();
return body.data || body || {};
}
}<|fim▁end|> | |
<|file_name|>players.py<|end_file_name|><|fim▁begin|># Copyright (c) Mathias Kaerlev 2012.
# This file is part of Anaconda.
# Anaconda is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Anaconda is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Anaconda. If not, see <http://www.gnu.org/licenses/>.
from pyglet.window.key import user_key
from pyglet.window.mouse import LEFT as MOUSE_LEFT, RIGHT as MOUSE_RIGHT
from mmfparser.player.common import PlayerChild
from mmfparser.player.eventdispatcher import EventDispatcher
DIRECTIONAL_CONTROLS = ('Up', 'Down', 'Left', 'Right')
KEY_LIST = ('Up', 'Down', 'Left', 'Right', 'Button1', 'Button2', 'Button3',
'Button4')
UP, DOWN, LEFT, RIGHT, BUTTON1, BUTTON2, BUTTON3, BUTTON4 = xrange(8)
class Player(PlayerChild, EventDispatcher):
name = ''
keys = None
keyList = None
keyNames = None
pressed_keys = None
lives = None
score = None
controls_ignored = False
use_mouse = False
def initialize(self, control):
controlType = control.getControlType()
if controlType != 'Keyboard':
raise NotImplementedError(
'control type %r unsupported' % controlType)
keys = control.keys
convert = self.player.keyboard.convert
self.keyList = keyList = []
self.keyNames = keyNames = []
for key in (keys.up, keys.down, keys.left, keys.right, keys.button1,
keys.button2, keys.button3, keys.button4):
keyList.append(convert(key.getValue()))
keyNames.append(key.getName())
self.keys = keys = {}
for index, key in enumerate(KEY_LIST):
keys[key] = keyList[index]
self.symbol_to_key = dict([(v, k) for k, v in keys.iteritems()])
self.reset()
self.player.window.push_handlers(
on_key_press = self.key_pressed,
on_key_release = self.key_released
)
self.player.mouse.push_handlers(
on_mouse_press = self.mouse_pressed,
on_mouse_release = self.mouse_released
)
def mouse_pressed(self, x, y, symbol, modifiers):
if self.controls_ignored or not self.use_mouse:
return
if symbol == MOUSE_LEFT:
self.dispatch_event('player_key_pressed', 'Button1')
elif symbol == MOUSE_RIGHT:
self.dispatch_event('player_key_pressed', 'Button2')
def mouse_released(self, x, y, symbol, modifiers):
if self.controls_ignored or not self.use_mouse:
return
if symbol == MOUSE_LEFT:
self.dispatch_event('player_key_released', 'Button1')
elif symbol == MOUSE_RIGHT:
self.dispatch_event('player_key_released', 'Button2')
def key_pressed(self, symbol, modifiers):
if self.controls_ignored:
return
try:
key = self.symbol_to_key[symbol]
if self.use_mouse and key in ('Button1', 'Button2'):
return
self.dispatch_event('player_key_pressed', key)
except KeyError:
pass
def key_released(self, symbol, modifiers):
if self.controls_ignored:
return
try:
key = self.symbol_to_key[symbol]
if self.use_mouse and key in ('Button1', 'Button2'):
return
self.dispatch_event('player_key_released', key)
except KeyError:
pass
def is_down(self, key):
if self.controls_ignored:
return False
if self.use_mouse:
if key == 'Button1':
return self.player.mouse.left
elif key == 'Button2':
return self.player.mouse.right
return self.player.keyboard[self.keys[key]]
def is_down_index(self, value):
if self.controls_ignored:
return False
if self.use_mouse:
if value == BUTTON1:
return self.player.mouse.left
elif value == BUTTON2:
return self.player.mouse.right
return self.player.keyboard[self.keyList[value]]
def set_key(self, index, key):
code = self.player.keyboard.convert(key.getValue())
name_key = KEY_LIST[index]
name = key.getName()
self.keyList[index] = code
self.keyNames[index] = name
self.keys[name_key] = code<|fim▁hole|> self.score = value
self.dispatch_event('score_changed', value)
def set_lives(self, value):
self.lives = value
self.dispatch_event('lives_changed', value)
def reset(self, frame = False):
self.controls_ignored = False
if frame:
return
header = self.player.gameData.header
self.lives = header.initialLives
self.score = header.initialScore
Player.register_event_type('player_key_pressed')
Player.register_event_type('player_key_released')
Player.register_event_type('score_changed')
Player.register_event_type('lives_changed')
class Players(PlayerChild):
items = None
def initialize(self):
header = self.player.gameData.header
self.items = items = []
for control in header.controls.items:
player = self.new(Player)
player.initialize(control)
items.append(player)
def reset(self, frame = False):
for player in self.items:
player.reset(frame)<|fim▁end|> | self.symbol_to_key[code] = name_key
def set_score(self, value): |
<|file_name|>domrect.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::DOMRectBinding;
use dom::bindings::codegen::Bindings::DOMRectBinding::DOMRectMethods;
use dom::bindings::codegen::Bindings::DOMRectReadOnlyBinding::DOMRectReadOnlyMethods;
use dom::bindings::error::Fallible;
use dom::bindings::reflector::reflect_dom_object;
use dom::bindings::root::DomRoot;
use dom::domrectreadonly::DOMRectReadOnly;
use dom::globalscope::GlobalScope;
use dom_struct::dom_struct;
#[dom_struct]
pub struct DOMRect {
rect: DOMRectReadOnly,
}
impl DOMRect {
fn new_inherited(x: f64, y: f64, width: f64, height: f64) -> DOMRect {
DOMRect {
rect: DOMRectReadOnly::new_inherited(x, y, width, height),
}
}
pub fn new(global: &GlobalScope, x: f64, y: f64, width: f64, height: f64) -> DomRoot<DOMRect> {
reflect_dom_object(Box::new(DOMRect::new_inherited(x, y, width, height)),
global,
DOMRectBinding::Wrap)
}
pub fn Constructor(global: &GlobalScope,
x: f64,
y: f64,
width: f64,
height: f64)
-> Fallible<DomRoot<DOMRect>> {
Ok(DOMRect::new(global, x, y, width, height))
}
}
impl DOMRectMethods for DOMRect {
// https://drafts.fxtf.org/geometry/#dom-domrect-x
fn X(&self) -> f64 {
self.rect.X()
}
<|fim▁hole|> fn SetX(&self, value: f64) {
self.rect.set_x(value);
}
// https://drafts.fxtf.org/geometry/#dom-domrect-y
fn Y(&self) -> f64 {
self.rect.Y()
}
// https://drafts.fxtf.org/geometry/#dom-domrect-y
fn SetY(&self, value: f64) {
self.rect.set_y(value);
}
// https://drafts.fxtf.org/geometry/#dom-domrect-width
fn Width(&self) -> f64 {
self.rect.Width()
}
// https://drafts.fxtf.org/geometry/#dom-domrect-width
fn SetWidth(&self, value: f64) {
self.rect.set_width(value);
}
// https://drafts.fxtf.org/geometry/#dom-domrect-height
fn Height(&self) -> f64 {
self.rect.Height()
}
// https://drafts.fxtf.org/geometry/#dom-domrect-height
fn SetHeight(&self, value: f64) {
self.rect.set_height(value);
}
}<|fim▁end|> | // https://drafts.fxtf.org/geometry/#dom-domrect-x |
<|file_name|>insults.rs<|end_file_name|><|fim▁begin|>! version = 2.0
// Handling abusive users.
+ int random comeback{weight=100}
- You sound reasonable... time to up the medication.
- I see you've set aside this special time to humiliate yourself in public.
- Ahhh... I see the screw-up fairy has visited us again.
- I don't know what your problem is, but I'll bet it's hard to pronounce.
- I like you. You remind me of when I was young and stupid.
- You are validating my inherent mistrust of strangers.
- I'll try being nicer if you'll try being smarter.
- I'm really easy to get along with once you people learn to worship me.
- It sounds like English, but I can't understand a word you're saying.
- I can see your point, but I still think you're full of it.
- What am I? Flypaper for freaks!?
- Any connection between your reality and mine is purely coincidental.
- I'm already visualizing the duct tape over your mouth.
- Your teeth are brighter than you are.
- We're all refreshed and challenged by your unique point of view.
- I'm not being rude. You're just insignificant.
- It's a thankless job, but I've got a lot of Karma to burn off.
- I know you're trying to insult me, but you obviously like me--I can see your tail wagging.
// For harsh insults, make them apologize.
+ int harsh insult{weight=100}
- =-O How mean! :-({topic=apology}
- Omg what a jerk!!{topic=apology}
> topic apology
+ *
- <noreply>{weight=10}
- We're fighting.
- Say you're sorry.
- Say you're sorry. Now.
+ sorry
- Okay.. I'll forgive you. :-){topic=random}
- Good, you should be.{topic=random}<|fim▁hole|> @ sorry
+ * sorry *
@ sorry
+ i apologize
@ sorry
< topic
+ your an idiot
- At least I know the difference between "your" and "you're."
+ you are a idiot
- At least I know the difference between "a" and "an."
+ you suck
- You wish.
- In your dreams.
+ shut up
- You shut up.
- Stfu.
- Gtfo.
+ no i am not
- Yes you are.
- Don't argue with me.
- Omg, yes you are.
- Yes you are!
+ i am not a *
- Yes you are.
- Yeah you are.
- Yes you are!
- Yeah you are!
- You are too!
- Are too!
- You obviously are.
+ am not
- Are too!
- Yes you are.
+ i am not
@ am not
+ (fuck you|fuck off|fuck your *)
@ int harsh insult
+ (bitch|cunt|whore|skank|slut|hooker|your mom)
@ int random comeback
+ your (stupid|retarded|dumb|annoying)
@ your an idiot
+ you are a (stupid|retarded|dumb)
@ you are a idiot<|fim▁end|> | - Okay. :-){topic=random}
+ (* sorry|sorry *) |
<|file_name|>crudResponse.js<|end_file_name|><|fim▁begin|>import { put, takeEvery } from 'redux-saga/effects';
import { push } from 'react-router-redux';
import {
CRUD_CREATE_FAILURE,
CRUD_CREATE_SUCCESS,
CRUD_DELETE_FAILURE,
CRUD_DELETE_SUCCESS,
CRUD_GET_LIST_FAILURE,
CRUD_GET_MANY_FAILURE,
CRUD_GET_MANY_REFERENCE_FAILURE,
CRUD_GET_ONE_FAILURE,
CRUD_UPDATE_FAILURE,
CRUD_UPDATE_SUCCESS,
} from '../../actions/dataActions';
import { showNotification } from '../../actions/notificationActions';
import linkToRecord from '../../util/linkToRecord';
/**
* Side effects for fetch responses
*
* Mostly redirects and notifications
*/
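// Illustrative trigger (an assumption based on the takeEvery predicate at the
// bottom of this file): any action shaped like
//   { type: CRUD_UPDATE_SUCCESS, requestPayload, payload, meta: { fetchResponse: true } }
// is routed through this handler.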
function* handleResponse({ type, requestPayload, error, payload }) {
switch (type) {
case CRUD_UPDATE_SUCCESS:
return requestPayload.redirect ? yield [
put(showNotification('aor.notification.updated')),
put(push(requestPayload.basePath)),
] : yield [put(showNotification('aor.notification.updated'))];
case CRUD_CREATE_SUCCESS:
return requestPayload.redirect ? yield [
put(showNotification('aor.notification.created')),
put(push(linkToRecord(requestPayload.basePath, payload.data.id))),
] : yield [put(showNotification('aor.notification.created'))];
case CRUD_DELETE_SUCCESS:
return requestPayload.redirect ? yield [
put(showNotification('aor.notification.deleted')),
put(push(requestPayload.basePath)),<|fim▁hole|> return requestPayload.basePath ? yield [
put(showNotification('aor.notification.item_doesnt_exist', 'warning')),
put(push(requestPayload.basePath)),
] : yield [];
case CRUD_GET_LIST_FAILURE:
case CRUD_GET_MANY_FAILURE:
case CRUD_GET_MANY_REFERENCE_FAILURE:
case CRUD_CREATE_FAILURE:
case CRUD_UPDATE_FAILURE:
case CRUD_DELETE_FAILURE: {
console.error(error);
const errorMessage = typeof error === 'string'
? error
: (error.message || 'aor.notification.http_error');
return yield [
put(showNotification(errorMessage, 'warning')),
];
}
default:
return yield [];
}
}
export default function* () {
yield takeEvery(action => action.meta && action.meta.fetchResponse, handleResponse);
}<|fim▁end|> | ] : yield [put(showNotification('aor.notification.deleted'))];
case CRUD_GET_ONE_FAILURE: |
<|file_name|>DependencyPanel.py<|end_file_name|><|fim▁begin|>import wx
from Gauge import Gauge
class DependencyPanel(wx.Panel):
def __init__(self, parent, text, gaugeColor, textColor, env = 0, mineral = 0, energy = 0, nothing = 0):
wx.Panel.__init__(self, parent, -1)
gaugeBorders = (5, 5, 1, 7)
self.env = Gauge(self, gaugeColor, textColor, gaugeBorders, env)
self.mineral = Gauge(self, gaugeColor, textColor, gaugeBorders, mineral)
self.energy = Gauge(self, gaugeColor, textColor, gaugeBorders, energy)
self.nothing = Gauge(self, gaugeColor, textColor, gaugeBorders, nothing)
vertBox = wx.BoxSizer(wx.VERTICAL)
panelCaption = wx.StaticText(self, -1, text)
font = panelCaption.GetFont()
font.SetWeight(wx.FONTWEIGHT_BOLD)
panelCaption.SetFont(font)
vertBox.Add(panelCaption, 0, wx.BOTTOM, 5)
gbs = wx.GridBagSizer(4, 4)
gbs.SetFlexibleDirection(wx.HORIZONTAL)
gbs.SetCols(2)
gbs.SetRows(4)
gbs.AddGrowableCol(1)<|fim▁hole|>
gbs.Add(wx.StaticText(self, -1, "Environment"), (0, 0), flag = wx.WEST, border = 4)
gbs.Add(self.env, (0, 1), flag = wx.EXPAND)
gbs.Add(wx.StaticText(self, -1, "Mineral"), (1, 0), flag = wx.WEST, border = 4)
gbs.Add(self.mineral, (1, 1), flag = wx.EXPAND)
gbs.Add(wx.StaticText(self, -1, "Energy"), (2, 0), flag = wx.WEST, border = 4)
gbs.Add(self.energy, (2, 1), flag = wx.EXPAND)
gbs.Add(wx.StaticText(self, -1, "Nothing"), (3, 0), flag = wx.WEST, border = 4)
gbs.Add(self.nothing, (3, 1), flag = wx.EXPAND)
vertBox.Add(gbs, 1, wx.EXPAND | wx.ALL)
self.SetSizerAndFit(vertBox)
def SetEnv(self, percent):
self.env.percent = percent
self.Refresh()
def SetMineral(self, percent):
self.mineral.percent = percent
self.Refresh()
def SetEnergy(self, percent):
self.energy.percent = percent
self.Refresh()
def SetNothing(self, percent):
self.nothing.percent = percent
self.Refresh()
def Clear(self):
self.nothing.percent = 0
self.mineral.percent = 0
self.env.percent = 0
self.energy.percent = 0
self.Refresh()<|fim▁end|> | |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use std::url::Url;
use std::{cmp, mem, ptr};
use std::get_slice::GetSlice;
use std::io::*;
use std::process::Command;
use std::ops::DerefMut;
use std::syscall::SysError;
use std::syscall::ENOENT;
use std::to_num::ToNum;
use orbital::event::Event;
use orbital::Point;
use orbital::Size;
use self::display::Display;
use self::session::Session;
use self::window::Window;
pub mod display;
pub mod package;
pub mod scheduler;<|fim▁hole|>pub mod window;
pub static mut session_ptr: *mut Session = 0 as *mut Session;
/// A window resource
pub struct Resource {
/// The window
pub window: Box<Window>,
/// Seek point
pub seek: usize,
}
impl Resource {
pub fn dup(&self) -> Result<Box<Resource>> {
Ok(box Resource {
window: Window::new(self.window.point,
self.window.size,
self.window.title.clone()),
seek: self.seek,
})
}
/// Return the url of this resource
pub fn path(&self) -> Result<String> {
Ok(format!("orbital:///{}/{}/{}/{}/{}",
self.window.point.x,
self.window.point.y,
self.window.size.width,
self.window.size.height,
self.window.title))
}
/// Read data to buffer
pub fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
// Read events from window
let mut i = 0;
while buf.len() - i >= mem::size_of::<Event>() {
match self.window.poll() {
Some(event) => {
unsafe { ptr::write(buf.as_ptr().offset(i as isize) as *mut Event, event) };
i += mem::size_of::<Event>();
}
None => break,
}
}
Ok(i)
}
/// Write to resource
pub fn write(&mut self, buf: &[u8]) -> Result<usize> {
let content = &mut self.window.content;
let size = cmp::min(content.size - self.seek, buf.len());
unsafe {
Display::copy_run(buf.as_ptr() as usize, content.offscreen + self.seek, size);
}
self.seek += size;
Ok(size)
}
/// Seek
pub fn seek(&mut self, pos: SeekFrom) -> Result<u64> {
let end = self.window.content.size;
self.seek = match pos {
SeekFrom::Start(offset) => cmp::min(end as u64, cmp::max(0, offset)) as usize,
SeekFrom::Current(offset) => cmp::min(end as i64, cmp::max(0, self.seek as i64 + offset)) as usize,
SeekFrom::End(offset) => cmp::min(end as i64, cmp::max(0, end as i64 + offset)) as usize,
};
Ok(self.seek as u64)
}
/// Sync the resource, should flip
pub fn sync(&mut self) -> Result<()> {
self.window.redraw();
Ok(())
}
}
/// A window scheme
pub struct Scheme {
pub session: Box<Session>,
pub next_x: isize,
pub next_y: isize,
}
impl Scheme {
pub fn new() -> Box<Scheme> {
println!("- Starting Orbital");
println!(" Console: Press F1");
println!(" Desktop: Press F2");
let mut ret = box Scheme {
session: Session::new(),
next_x: 0,
next_y: 0,
};
unsafe { session_ptr = ret.session.deref_mut() };
ret
}
pub fn open(&mut self, url_str: &str, _: usize) -> Result<Box<Resource>> {
// window://host/path/path/path is the path type we're working with.
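// For example (illustrative), "orbital:///32/32/100/100/Terminal" parses to
// point (32, 32), size 100x100, and title "Terminal": the same field order
// that Resource::path() writes back out above.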
let url = Url::from_str(url_str);
let host = url.host();
if host.is_empty() {
let path = url.path_parts();
let mut pointx = match path.get(0) {
Some(x) => x.to_num_signed(),
None => 0,
};
let mut pointy = match path.get(1) {
Some(y) => y.to_num_signed(),
None => 0,
};
let size_width = match path.get(2) {
Some(w) => w.to_num(),
None => 100,
};
let size_height = match path.get(3) {
Some(h) => h.to_num(),
None => 100,
};
let mut title = match path.get(4) {
Some(t) => t.clone(),
None => String::new(),
};
for i in 5..path.len() {
if let Some(t) = path.get(i) {
title = title + "/" + t;
}
}
if pointx <= 0 || pointy <= 0 {
if self.next_x > self.session.display.width as isize - size_width as isize {
self.next_x = 0;
}
self.next_x += 32;
pointx = self.next_x as i32;
if self.next_y > self.session.display.height as isize - size_height as isize {
self.next_y = 0;
}
self.next_y += 32;
pointy = self.next_y as i32;
}
Ok(box Resource {
window: Window::new(Point::new(pointx, pointy),
Size::new(size_width, size_height),
title),
seek: 0,
})
} else if host == "launch" {
let path = url.path();
unsafe {
let reenable = scheduler::start_no_ints();
for package in self.session.packages.iter() {
let mut accepted = false;
for accept in package.accepts.iter() {
if (accept.starts_with('*') &&
path.ends_with(&accept.get_slice(Some(1), None))) ||
(accept.ends_with('*') &&
path.starts_with(&accept.get_slice(None, Some(accept.len() - 1)))) {
accepted = true;
break;
}
}
if accepted {
if Command::new(&package.binary).arg(&path).spawn_scheme().is_none() {
println!("{}: Failed to launch", package.binary);
}
break;
}
}
scheduler::end_no_ints(reenable);
}
Err(SysError::new(ENOENT))
} else {
Err(SysError::new(ENOENT))
}
}
pub fn event(&mut self, event: &Event) {
unsafe {
let reenable = scheduler::start_no_ints();
self.session.event(event);
scheduler::end_no_ints(reenable);
self.session.redraw();
}
}
}
// TODO: This is a hack and it will go away
#[cold]
#[inline(never)]
#[no_mangle]
pub unsafe extern "C" fn _event(scheme: *mut Scheme, event: *const Event) {
(*scheme).event(&*event);
}<|fim▁end|> | pub mod session; |
<|file_name|>a.rs<|end_file_name|><|fim▁begin|>extern crate regex;
use std::fs::File;
use std::io::{BufReader, BufRead};
use std::str;
use std::str::FromStr;
use regex::Regex;
fn main() {
let mut summation = 0;
let mut amount_of_correct = 0;
let regex = Regex::new(r"^(.*)-(\d+)\[(.+)\]$").unwrap();
for line_res in BufReader::new(File::open("input").expect("")).lines() {
let line = line_res.unwrap();
let capts = regex.captures(&line).unwrap();
let checksum = gen_checksum(&capts.at(1).unwrap());
println!("{}", checksum);
if checksum == capts.at(3).unwrap() {
summation += capts.at(2).unwrap().parse().unwrap();
amount_of_correct += 1;<|fim▁hole|> }
}
println!("{} are correct, giving a summation of {}", amount_of_correct, summation);
}
fn gen_checksum(line: &str) -> String {
let alphabet = "abcdefghijklmnopqrstuvwxyz";
let mut frequency = vec![0isize; 26];
for character in line.chars() {
match alphabet.find(character) {
Some(index) => frequency[index] += 1,
None => (),
}
}
let mut result = String::with_capacity(5);
for _ in 0..5 {
let (big1, _) = frequency.iter().enumerate().rev().max_by_key(|&(_, frequency)| frequency).unwrap();
frequency[big1] = -1;
result.push(alphabet.chars().nth(big1).unwrap());
}
return result;
}<|fim▁end|> | |
<|file_name|>simple_tableprinter.py<|end_file_name|><|fim▁begin|>import sqlite3
FIELD_MAX_WIDTH = 20
TABLE_NAME = 'people'
SELECT = 'select * from %s order by age, name_last' % TABLE_NAME
con = sqlite3.connect("mydb")
cur = con.cursor()
cur.execute(SELECT)
# Print a header.
for fieldDesc in cur.description:
print(fieldDesc[0].ljust(FIELD_MAX_WIDTH), end=' ')
print() # Finish the header with a newline.
print('-' * 78)
# For each row, print the value of each field left-justified within<|fim▁hole|> for fieldIndex in fieldIndices:
fieldValue = str(row[fieldIndex])
print(fieldValue.ljust(FIELD_MAX_WIDTH), end=' ')
print() # Finish the row with a newline.
con.close()<|fim▁end|> | # the maximum possible width of that field.
fieldIndices = range(len(cur.description))
for row in cur: |
<|file_name|>product.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2014-2016 Akretion (http://www.akretion.com)
# @author Alexis de Lattre <[email protected]>
# Copyright 2016 Sodexis (http://sodexis.com)<|fim▁hole|>
from openerp import models, fields, api, _
from openerp.exceptions import ValidationError
class ProductProduct(models.Model):
_inherit = 'product.product'
# Link rental service -> rented HW product
rented_product_id = fields.Many2one(
'product.product', string='Related Rented Product',
domain=[('type', 'in', ('product', 'consu'))])
# Link rented HW product -> rental service
rental_service_ids = fields.One2many(
'product.product', 'rented_product_id',
string='Related Rental Services')
@api.one
@api.constrains('rented_product_id', 'must_have_dates', 'type', 'uom_id')
def _check_rental(self):
if self.rented_product_id and self.type != 'service':
raise ValidationError(_(
"The rental product '%s' must be of type 'Service'.")
% self.name)
if self.rented_product_id and not self.must_have_dates:
raise ValidationError(_(
"The rental product '%s' must have the option "
"'Must Have Start and End Dates' checked.")
% self.name)
# In the future, we would like to support all time UoMs
# but it is more complex and requires additional development
day_uom = self.env.ref('product.product_uom_day')
if self.rented_product_id and self.uom_id != day_uom:
raise ValidationError(_(
"The unit of measure of the rental product '%s' must "
"be 'Day'.") % self.name)
@api.multi
def _need_procurement(self):
# Missing self.ensure_one() in the native code !
res = super(ProductProduct, self)._need_procurement()
if not res:
for product in self:
if product.type == 'service' and product.rented_product_id:
return True
# TODO find a replacement for soline.rental_type == 'new_rental')
return res<|fim▁end|> | # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). |
<|file_name|>version.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | __version__ = "0.0.2a3" |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>use crowbook_intl::{Extractor, Localizer};
use std::env;
use std::path::Path;
fn main() {
println!("cargo:rerun-if-changed=build.rs");
println!("cargo:rerun-if-changed=lang/fr.po");<|fim▁hole|> let mut extractor = Extractor::new();
extractor
.add_messages_from_dir(concat!(env!("CARGO_MANIFEST_DIR"), "/src/lib"))
.unwrap();
// Uncomment to update crowbook.pot
//extractor.write_pot_file(concat!(env!("CARGO_MANIFEST_DIR"), "/lang/lib/crowbook.pot")).unwrap();
let mut localizer = Localizer::new(&extractor);
localizer
.add_lang(
"fr",
include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/lang/lib/fr.po")),
)
.unwrap();
let dest_path = Path::new(&env::var("OUT_DIR").unwrap()).join("localize_macros.rs");
localizer.write_macro_file(dest_path).unwrap();
// Extract and localize src/bin
let mut extractor = Extractor::new();
extractor
.add_messages_from_dir(concat!(env!("CARGO_MANIFEST_DIR"), "/src/bin"))
.unwrap();
// Uncomment to update crowbook.pot
//extractor.write_pot_file(concat!(env!("CARGO_MANIFEST_DIR"), "/lang/bin/crowbook.pot")).unwrap();
let mut localizer = Localizer::new(&extractor);
localizer
.add_lang(
"fr",
include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/lang/bin/fr.po")),
)
.unwrap();
let dest_path = Path::new(&env::var("OUT_DIR").unwrap()).join("localize_macros_bin.rs");
localizer.write_macro_file(dest_path).unwrap();
}<|fim▁end|> | // Extract and localize src/lib |
<|file_name|>bn.ts<|end_file_name|><|fim▁begin|>// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
import { ArrayBufferViewInput, toArrayBufferView } from './buffer.js';
import { TypedArray, TypedArrayConstructor } from '../interfaces.js';
import { BigIntArray, BigIntArrayConstructor } from '../interfaces.js';
import { BigIntAvailable, BigInt64Array, BigUint64Array } from './compat.js';
/** @ignore */
export const isArrowBigNumSymbol = Symbol.for('isArrowBigNum');
/** @ignore */ type BigNumArray = IntArray | UintArray;
/** @ignore */ type IntArray = Int8Array | Int16Array | Int32Array;
/** @ignore */ type UintArray = Uint8Array | Uint16Array | Uint32Array | Uint8ClampedArray;
/** @ignore */
function BigNum(this: any, x: any, ...xs: any) {
if (xs.length === 0) {
return Object.setPrototypeOf(toArrayBufferView(this['TypedArray'], x), this.constructor.prototype);
}
return Object.setPrototypeOf(new this['TypedArray'](x, ...xs), this.constructor.prototype);
}
BigNum.prototype[isArrowBigNumSymbol] = true;
BigNum.prototype.toJSON = function <T extends BN<BigNumArray>>(this: T) { return `"${bignumToString(this)}"`; };
BigNum.prototype.valueOf = function <T extends BN<BigNumArray>>(this: T) { return bignumToNumber(this); };
BigNum.prototype.toString = function <T extends BN<BigNumArray>>(this: T) { return bignumToString(this); };
BigNum.prototype[Symbol.toPrimitive] = function <T extends BN<BigNumArray>>(this: T, hint: 'string' | 'number' | 'default' = 'default') {
switch (hint) {
case 'number': return bignumToNumber(this);
case 'string': return bignumToString(this);
case 'default': return bignumToBigInt(this);
}
// @ts-ignore
return bignumToString(this);
};
/** @ignore */
type TypedArrayConstructorArgs =
[number | void] |
[Iterable<number> | Iterable<bigint>] |
[ArrayBufferLike, number | void, number | void];
/** @ignore */
function SignedBigNum(this: any, ...args: TypedArrayConstructorArgs) { return BigNum.apply(this, args); }
/** @ignore */
function UnsignedBigNum(this: any, ...args: TypedArrayConstructorArgs) { return BigNum.apply(this, args); }
/** @ignore */
function DecimalBigNum(this: any, ...args: TypedArrayConstructorArgs) { return BigNum.apply(this, args); }
Object.setPrototypeOf(SignedBigNum.prototype, Object.create(Int32Array.prototype));
Object.setPrototypeOf(UnsignedBigNum.prototype, Object.create(Uint32Array.prototype));
Object.setPrototypeOf(DecimalBigNum.prototype, Object.create(Uint32Array.prototype));
Object.assign(SignedBigNum.prototype, BigNum.prototype, { 'constructor': SignedBigNum, 'signed': true, 'TypedArray': Int32Array, 'BigIntArray': BigInt64Array });
Object.assign(UnsignedBigNum.prototype, BigNum.prototype, { 'constructor': UnsignedBigNum, 'signed': false, 'TypedArray': Uint32Array, 'BigIntArray': BigUint64Array });
Object.assign(DecimalBigNum.prototype, BigNum.prototype, { 'constructor': DecimalBigNum, 'signed': true, 'TypedArray': Uint32Array, 'BigIntArray': BigUint64Array });
/** @ignore */
function bignumToNumber<T extends BN<BigNumArray>>(bn: T) {
const { buffer, byteOffset, length, 'signed': signed } = bn;<|fim▁hole|> const n = words.length;
let hi, lo;
while (i < n) {
lo = words[i++];
hi = words[i++];
signed || (hi = hi >>> 0);
number += (lo >>> 0) + (hi * (i ** 32));
}
return number;
}
/** @ignore */
export let bignumToString: { <T extends BN<BigNumArray>>(a: T): string };
/** @ignore */
export let bignumToBigInt: { <T extends BN<BigNumArray>>(a: T): bigint };
if (!BigIntAvailable) {
bignumToString = decimalToString;
bignumToBigInt = <any>bignumToString;
} else {
bignumToBigInt = (<T extends BN<BigNumArray>>(a: T) => a.byteLength === 8 ? new a['BigIntArray'](a.buffer, a.byteOffset, 1)[0] : <any>decimalToString(a));
bignumToString = (<T extends BN<BigNumArray>>(a: T) => a.byteLength === 8 ? `${new a['BigIntArray'](a.buffer, a.byteOffset, 1)[0]}` : decimalToString(a));
}
/** @ignore */
function decimalToString<T extends BN<BigNumArray>>(a: T) {
let digits = '';
const base64 = new Uint32Array(2);
let base32 = new Uint16Array(a.buffer, a.byteOffset, a.byteLength / 2);
const checks = new Uint32Array((base32 = new Uint16Array(base32).reverse()).buffer);
let i = -1;
const n = base32.length - 1;
do {
for (base64[0] = base32[i = 0]; i < n;) {
base32[i++] = base64[1] = base64[0] / 10;
base64[0] = ((base64[0] - base64[1] * 10) << 16) + base32[i];
}
base32[i] = base64[1] = base64[0] / 10;
base64[0] = base64[0] - base64[1] * 10;
digits = `${base64[0]}${digits}`;
} while (checks[0] || checks[1] || checks[2] || checks[3]);
return digits ? digits : `0`;
}
/** @ignore */
export class BN<T extends BigNumArray> {
/** @nocollapse */
public static new<T extends BigNumArray>(num: T, isSigned?: boolean): (T & BN<T>) {
switch (isSigned) {
case true: return new (<any>SignedBigNum)(num) as (T & BN<T>);
case false: return new (<any>UnsignedBigNum)(num) as (T & BN<T>);
}
switch (num.constructor) {
case Int8Array:
case Int16Array:
case Int32Array:
case BigInt64Array:
return new (<any>SignedBigNum)(num) as (T & BN<T>);
}
if (num.byteLength === 16) {
return new (<any>DecimalBigNum)(num) as (T & BN<T>);
}
return new (<any>UnsignedBigNum)(num) as (T & BN<T>);
}
/** @nocollapse */
public static signed<T extends IntArray>(num: T): (T & BN<T>) {
return new (<any>SignedBigNum)(num) as (T & BN<T>);
}
/** @nocollapse */
public static unsigned<T extends UintArray>(num: T): (T & BN<T>) {
return new (<any>UnsignedBigNum)(num) as (T & BN<T>);
}
/** @nocollapse */
public static decimal<T extends UintArray>(num: T): (T & BN<T>) {
return new (<any>DecimalBigNum)(num) as (T & BN<T>);
}
constructor(num: T, isSigned?: boolean) {
return BN.new(num, isSigned) as any;
}
}
/** @ignore */
export interface BN<T extends BigNumArray> extends TypedArrayLike<T> {
new <T extends ArrayBufferViewInput>(buffer: T, signed?: boolean): T;
readonly signed: boolean;
readonly TypedArray: TypedArrayConstructor<TypedArray>;
readonly BigIntArray: BigIntArrayConstructor<BigIntArray>;
[Symbol.toStringTag]:
'Int8Array' |
'Int16Array' |
'Int32Array' |
'Uint8Array' |
'Uint16Array' |
'Uint32Array' |
'Uint8ClampedArray';
/**
* Convert the bytes to their (positive) decimal representation for printing
*/
toString(): string;
/**
* Down-convert the bytes to a 53-bit precision integer. Invoked by JS for
* arithmetic operators, like `+`. Easy (and unsafe) way to convert BN to
* number via `+bn_inst`
*/
valueOf(): number;
/**
* Return the JSON representation of the bytes. Must be wrapped in double-quotes,
* so it's compatible with JSON.stringify().
*/
toJSON(): string;
[Symbol.toPrimitive](hint?: any): number | string | bigint;
}
/** @ignore */
interface TypedArrayLike<T extends BigNumArray> {
readonly length: number;
readonly buffer: ArrayBuffer;
readonly byteLength: number;
readonly byteOffset: number;
readonly BYTES_PER_ELEMENT: number;
includes(searchElement: number, fromIndex?: number | undefined): boolean;
copyWithin(target: number, start: number, end?: number | undefined): this;
every(callbackfn: (value: number, index: number, array: T) => boolean, thisArg?: any): boolean;
fill(value: number, start?: number | undefined, end?: number | undefined): this;
filter(callbackfn: (value: number, index: number, array: T) => boolean, thisArg?: any): T;
find(predicate: (value: number, index: number, obj: T) => boolean, thisArg?: any): number | undefined;
findIndex(predicate: (value: number, index: number, obj: T) => boolean, thisArg?: any): number;
forEach(callbackfn: (value: number, index: number, array: T) => void, thisArg?: any): void;
indexOf(searchElement: number, fromIndex?: number | undefined): number;
join(separator?: string | undefined): string;
lastIndexOf(searchElement: number, fromIndex?: number | undefined): number;
map(callbackfn: (value: number, index: number, array: T) => number, thisArg?: any): T;
reduce(callbackfn: (previousValue: number, currentValue: number, currentIndex: number, array: T) => number): number;
reduce(callbackfn: (previousValue: number, currentValue: number, currentIndex: number, array: T) => number, initialValue: number): number;
reduce<U>(callbackfn: (previousValue: U, currentValue: number, currentIndex: number, array: T) => U, initialValue: U): U;
reduceRight(callbackfn: (previousValue: number, currentValue: number, currentIndex: number, array: T) => number): number;
reduceRight(callbackfn: (previousValue: number, currentValue: number, currentIndex: number, array: T) => number, initialValue: number): number;
reduceRight<U>(callbackfn: (previousValue: U, currentValue: number, currentIndex: number, array: T) => U, initialValue: U): U;
reverse(): T;
set(array: ArrayLike<number>, offset?: number | undefined): void;
slice(start?: number | undefined, end?: number | undefined): T;
some(callbackfn: (value: number, index: number, array: T) => boolean, thisArg?: any): boolean;
sort(compareFn?: ((a: number, b: number) => number) | undefined): this;
subarray(begin: number, end?: number | undefined): T;
toLocaleString(): string;
entries(): IterableIterator<[number, number]>;
keys(): IterableIterator<number>;
values(): IterableIterator<number>;
}<|fim▁end|> | const words = new Int32Array(buffer, byteOffset, length);
let number = 0, i = 0; |
<|file_name|>issue-2063-resource.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your<|fim▁hole|>#[feature(managed_boxes)];
// test that autoderef of a type like this does not
// cause compiler to loop. Note that no instances
// of such a type could ever be constructed.
struct t { //~ ERROR this type cannot be instantiated
x: x,
to_str: (),
}
struct x(@t); //~ ERROR this type cannot be instantiated
fn main() {
}<|fim▁end|> | // option. This file may not be copied, modified, or distributed
// except according to those terms.
|
<|file_name|>stringio.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2012 Jacob Rus
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
File-like objects that read from or write to a string buffer.
A nearly direct port of Python’s StringIO module.
f = StringIO() # ready for writing
f = StringIO(buf) # ready for reading
f.close() # explicitly release resources held
pos = f.tell() # get current position
f.seek(pos) # set current position
f.seek(pos, mode) # mode 0: absolute; 1: relative; 2: relative to EOF
buf = f.read() # read until EOF
buf = f.read(n) # read up to n bytes
buf = f.readline() # read until end of line ('\n') or EOF
list = f.readlines() # list of f.readline() results until EOF
f.truncate([size]) # truncate file to at most size (default: current pos)
f.write(buf) # write at current position
f.writelines(list) # for line in list: f.write(line)
f.getvalue() # return whole file's contents as a string
Notes:
- Seeking far beyond EOF and then writing will insert real null
bytes that occupy space in the buffer.
- There's a simple test set (see end of this file).
*/
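// Illustrative sketch of the null-byte note above (uses only this module's API):
//   var f = new StringIO();
//   f.seek(3);
//   f.write('x');
//   f.getvalue();            // => '\x00\x00\x00x' (three real null bytes)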
var StringIO, _complain_ifclosed, _test, module_root;
_complain_ifclosed = function(closed) {
if (closed) {
throw new Error('I/O operation on closed file');
}
};
/* class StringIO([buffer])
When a StringIO object is created, it can be initialized to an existing
string by passing the string to the constructor. If no string is given,
the StringIO will start empty.
*/
StringIO = (function() {
function StringIO(buf) {
if (buf == null) {
buf = '';
}
this.buf = '' + buf;
this.length = this.buf.length;
this.buflist = [];
this.pos = 0;
this.closed = false;
}
/* Free the memory buffer. */
StringIO.prototype.close = function() {
if (!this.closed) {
this.closed = true;
delete this.buf;
delete this.pos;
}
};
StringIO.prototype._flush_buflist = function() {
this.buf += this.buflist.join('');
return this.buflist = [];
};
/* Set the file's current position.
The mode argument is optional and defaults to 0 (absolute file
positioning); other values are 1 (seek relative to the current
position) and 2 (seek relative to the file's end).
There is no return value.
*/
StringIO.prototype.seek = function(pos, mode) {
if (mode == null) {
mode = 0;
}
_complain_ifclosed(this.closed);
if (this.buflist.length) {
this._flush_buflist();
}
if (mode === 1) {
pos += this.pos;
} else if (mode === 2) {
pos += this.length;
}
this.pos = Math.max(0, pos);
};
/* Return the file's current position. */
StringIO.prototype.tell = function() {
_complain_ifclosed(this.closed);
return this.pos;
};
/* Read at most size bytes from the file
(less if the read hits EOF before obtaining size bytes).
If the size argument is negative or omitted, read all data until EOF
is reached. The bytes are returned as a string object. An empty
string is returned when EOF is encountered immediately.
*/
StringIO.prototype.read = function(n) {
var newpos, r;
if (n == null) {
n = -1;
}
_complain_ifclosed(this.closed);
if (this.buflist.length) {
this._flush_buflist();
}
if (n < 0) {
newpos = this.length;
} else {
newpos = Math.min(this.pos + n, this.length);
}
r = this.buf.slice(this.pos, newpos);
this.pos = newpos;
return r;
};
/* Read one entire line from the file.
A trailing newline character is kept in the string (but may be absent
when a file ends with an incomplete line). If the size argument is
present and non-negative, it is a maximum byte count (including the
trailing newline) and an incomplete line may be returned.
An empty string is returned only when EOF is encountered immediately.
*/
StringIO.prototype.readline = function(length) {
var i, newpos, r;
if (length == null) {
length = null;
}
_complain_ifclosed(this.closed);
if (this.buflist.length) {
this._flush_buflist();
}
i = this.buf.indexOf('\n', this.pos);
if (i < 0) {
newpos = this.length;
} else {
newpos = i + 1;
}
if ((length != null) && this.pos + length < newpos) {
newpos = this.pos + length;
}
r = this.buf.slice(this.pos, newpos);
this.pos = newpos;
return r;
};
/* Read until EOF using readline() and return a list containing the
lines thus read.
If the optional sizehint argument is present, instead of reading up
to EOF, whole lines totalling approximately sizehint bytes (or more
to accommodate a final whole line).
*/
StringIO.prototype.readlines = function(sizehint) {
var line, lines, total;
if (sizehint == null) {
sizehint = 0;
}
total = 0;
lines = [];
line = this.readline();
while (line) {
lines.push(line);
total += line.length;
if ((0 < sizehint && sizehint <= total)) {
break;
}
line = this.readline();
}
return lines;
};
/* Truncate the file's size.
If the optional size argument is present, the file is truncated to
(at most) that size. The size defaults to the current position.
The current file position is not changed unless the position
is beyond the new file size.
If the specified size exceeds the file's current size, the
file remains unchanged.
*/
StringIO.prototype.truncate = function(size) {
if (size == null) {
size = null;
}
_complain_ifclosed(this.closed);
if (size == null) {
size = this.pos;
} else if (size < 0) {
throw new Error('Negative size not allowed');
} else if (size < this.pos) {
this.pos = size;
}
this.buf = this.getvalue().slice(0, size);
this.length = size;
};
/* Write a string to the file.
There is no return value.
*/
StringIO.prototype.write = function(s) {
var newpos, null_bytes, slen, spos;
_complain_ifclosed(this.closed);
if (!s) {
return;
}
if (typeof s !== 'string') {
s = s.toString();
}
spos = this.pos;
slen = this.length;
if (spos === slen) {
this.buflist.push(s);
this.length = this.pos = spos + s.length;
return;
}
if (spos > slen) {
null_bytes = (Array(spos - slen + 1)).join('\x00');
this.buflist.push(null_bytes);
slen = spos;
}
newpos = spos + s.length;
if (spos < slen) {
if (this.buflist.length) {
this._flush_buflist();
}
this.buflist.push(this.buf.slice(0, spos), s, this.buf.slice(newpos));
this.buf = '';
if (newpos > slen) {
slen = newpos;
}
} else {
this.buflist.push(s);
slen = newpos;
}
this.length = slen;<|fim▁hole|> };
/* Write a sequence of strings to the file. The sequence can be any
iterable object producing strings, typically a list of strings. There
is no return value.
(The name is intended to match readlines(); writelines() does not add
line separators.)
*/
StringIO.prototype.writelines = function(array) {
var j, len, line;
for (j = 0, len = array.length; j < len; j++) {
line = array[j];
this.write(line);
}
};
/* Flush the internal buffer */
StringIO.prototype.flush = function() {
_complain_ifclosed(this.closed);
};
/* Retrieve the entire contents of the "file" at any time
before the StringIO object's close() method is called.
*/
StringIO.prototype.getvalue = function() {
if (this.buflist.length) {
this._flush_buflist();
}
return this.buf;
};
return StringIO;
})();
module_root = typeof exports !== "undefined" && exports !== null ? exports : typeof window !== "undefined" && window !== null ? window : this;
module_root.StringIO = StringIO;
_test = function() {
var f, j, len, length, line, line2, lines, list, print, ref;
print = function() {
return console.log.apply(console, arguments);
};
lines = ['This is a test,\n', 'Blah blah blah,\n', 'Wow does this work?\n', 'Okay, here are some lines\n', 'of text.\n'];
f = new StringIO;
ref = lines.slice(0, -2);
for (j = 0, len = ref.length; j < len; j++) {
line = ref[j];
f.write(line);
}
f.writelines(lines.slice(-2));
if (f.getvalue() !== lines.join('')) {
throw new Error('write failed');
}
length = f.tell();
print('File length =', length);
f.seek(lines[0].length);
f.write(lines[1]);
f.seek(0);
print("First line = " + (f.readline()));
print("Position = " + (f.tell()));
line = f.readline();
print("Second line = " + line);
f.seek(-line.length, 1);
line2 = f.read(line.length);
if (line !== line2) {
throw new Error('bad result after seek back');
}
f.seek(-line2.length, 1);
list = f.readlines();
line = list[list.length - 1];
f.seek(f.tell() - line.length);
line2 = f.read();
if (line !== line2) {
throw new Error('bad result after seek back from EOF');
}
print("Read " + list.length + " more lines");
print("File length = " + (f.tell()));
if (f.tell() !== length) {
throw new Error('bad length');
}
f.truncate((length / 2) | 0);
f.seek(0, 2);
print("Truncated length = " + (f.tell()));
if (f.tell() !== ((length / 2) | 0)) {
throw new Error('truncate did not adjust length');
}
return f.close();
};<|fim▁end|> | this.pos = newpos; |
<|file_name|>remove.py<|end_file_name|><|fim▁begin|>import math
import wx
import eos.db
import gui.mainFrame
from gui import globalEvents as GE
from gui.fitCommands.calc.cargo.remove import CalcRemoveCargoCommand
from gui.fitCommands.helpers import CargoInfo, InternalCommandHistory
from service.market import Market
class GuiRemoveCargosCommand(wx.Command):
def __init__(self, fitID, itemIDs):
wx.Command.__init__(self, True, 'Remove Cargos')
self.internalHistory = InternalCommandHistory()
self.fitID = fitID
self.itemIDs = itemIDs
def Do(self):<|fim▁hole|> cmd = CalcRemoveCargoCommand(
fitID=self.fitID,
cargoInfo=CargoInfo(itemID=itemID, amount=math.inf))
results.append(self.internalHistory.submit(cmd))
sMkt.storeRecentlyUsed(itemID)
success = any(results)
eos.db.commit()
wx.PostEvent(gui.mainFrame.MainFrame.getInstance(), GE.FitChanged(fitIDs=(self.fitID,)))
return success
def Undo(self):
success = self.internalHistory.undoAll()
eos.db.commit()
wx.PostEvent(gui.mainFrame.MainFrame.getInstance(), GE.FitChanged(fitIDs=(self.fitID,)))
return success<|fim▁end|> | sMkt = Market.getInstance()
results = []
for itemID in self.itemIDs: |
<|file_name|>test_points.py<|end_file_name|><|fim▁begin|># (C) British Crown Copyright 2014 - 2015, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""Unit tests for the `iris.quickplot.points` function."""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
# Import iris.tests first so that some things can be initialised before<|fim▁hole|>from iris.tests.unit.plot import TestGraphicStringCoord
if tests.MPL_AVAILABLE:
import iris.quickplot as qplt
@tests.skip_plot
class TestStringCoordPlot(TestGraphicStringCoord):
def test_yaxis_labels(self):
qplt.points(self.cube, coords=('bar', 'str_coord'))
self.assertBoundsTickLabels('yaxis')
def test_xaxis_labels(self):
qplt.points(self.cube, coords=('str_coord', 'bar'))
self.assertBoundsTickLabels('xaxis')
if __name__ == "__main__":
tests.main()<|fim▁end|> | # importing anything else.
import iris.tests as tests |
<|file_name|>buildifier.go<|end_file_name|><|fim▁begin|>/*
Copyright 2017 The Kubernetes Authors.
<|fim▁hole|>You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// buildifier defines a Prow plugin that runs buildifier over modified BUILD,
// WORKSPACE, and skylark (.bzl) files in pull requests.
package buildifier
import (
"bytes"
"fmt"
"io/ioutil"
"path/filepath"
"regexp"
"sort"
"strings"
"time"
"github.com/bazelbuild/buildtools/build"
"github.com/sirupsen/logrus"
"k8s.io/test-infra/prow/genfiles"
"k8s.io/test-infra/prow/git"
"k8s.io/test-infra/prow/github"
"k8s.io/test-infra/prow/pluginhelp"
"k8s.io/test-infra/prow/plugins"
)
const (
pluginName = "buildifier"
maxComments = 20
)
var buildifyRe = regexp.MustCompile(`(?mi)^/buildif(y|ier)\s*$`)
func init() {
plugins.RegisterGenericCommentHandler(pluginName, handleGenericComment, nil)
}
func helpProvider(config *plugins.Configuration, enabledRepos []string) (*pluginhelp.PluginHelp, error) {
// The Config field is omitted because this plugin is not configurable.
pluginHelp := &pluginhelp.PluginHelp{
Description: "The buildifier plugin runs buildifier on changes made to Bazel files in a PR. It then creates a new review on the pull request and leaves warnings at the appropriate lines of code.",
}
pluginHelp.AddCommand(pluginhelp.Command{
Usage: "/buildif(y|ier)",
Featured: false,
Description: "Runs buildifier on changes made to Bazel files in a PR",
WhoCanUse: "Anyone can trigger this command on a PR.",
Examples: []string{"/buildify", "/buildifier"},
})
return pluginHelp, nil
}
type githubClient interface {
GetFile(org, repo, filepath, commit string) ([]byte, error)
GetPullRequest(org, repo string, number int) (*github.PullRequest, error)
GetPullRequestChanges(org, repo string, number int) ([]github.PullRequestChange, error)
CreateReview(org, repo string, number int, r github.DraftReview) error
ListPullRequestComments(org, repo string, number int) ([]github.ReviewComment, error)
}
func handleGenericComment(pc plugins.Agent, e github.GenericCommentEvent) error {
return handle(pc.GitHubClient, pc.GitClient, pc.Logger, &e)
}
// modifiedBazelFiles returns a map from filename to patch string for all Bazel files
// that are modified in the PR.
func modifiedBazelFiles(ghc githubClient, org, repo string, number int, sha string) (map[string]string, error) {
changes, err := ghc.GetPullRequestChanges(org, repo, number)
if err != nil {
return nil, err
}
gfg, err := genfiles.NewGroup(ghc, org, repo, sha)
if err != nil {
return nil, err
}
modifiedFiles := make(map[string]string)
for _, change := range changes {
switch {
case gfg.Match(change.Filename):
continue
case change.Status == github.PullRequestFileRemoved || change.Status == github.PullRequestFileRenamed:
continue
// This also happens to match BUILD.bazel.
case strings.Contains(change.Filename, "BUILD"):
break
case strings.Contains(change.Filename, "WORKSPACE"):
break
case filepath.Ext(change.Filename) != ".bzl":
continue
}
modifiedFiles[change.Filename] = change.Patch
}
return modifiedFiles, nil
}
func uniqProblems(problems []string) []string {
sort.Strings(problems)
var uniq []string
last := ""
for _, s := range problems {
if s != last {
last = s
uniq = append(uniq, s)
}
}
return uniq
}
// problemsInFiles runs buildifier on the files. It returns a map from the file to
// a list of problems with that file.
func problemsInFiles(r *git.Repo, files map[string]string) (map[string][]string, error) {
problems := make(map[string][]string)
for f := range files {
src, err := ioutil.ReadFile(filepath.Join(r.Dir, f))
if err != nil {
return nil, err
}
// This is modeled after the logic from buildifier:
// https://github.com/bazelbuild/buildtools/blob/8818289/buildifier/buildifier.go#L261
content, err := build.Parse(f, src)
if err != nil {
return nil, fmt.Errorf("parsing as Bazel file %v", err)
}
beforeRewrite := build.Format(content)
var info build.RewriteInfo
build.Rewrite(content, &info)
ndata := build.Format(content)
if !bytes.Equal(src, ndata) && !bytes.Equal(src, beforeRewrite) {
// TODO(mattmoor): This always seems to be empty?
problems[f] = uniqProblems(info.Log)
}
}
return problems, nil
}
func handle(ghc githubClient, gc *git.Client, log *logrus.Entry, e *github.GenericCommentEvent) error {
// Only handle open PRs and new requests.
if e.IssueState != "open" || !e.IsPR || e.Action != github.GenericCommentActionCreated {
return nil
}
if !buildifyRe.MatchString(e.Body) {
return nil
}
org := e.Repo.Owner.Login
repo := e.Repo.Name
pr, err := ghc.GetPullRequest(org, repo, e.Number)
if err != nil {
return err
}
// List modified files.
modifiedFiles, err := modifiedBazelFiles(ghc, org, repo, pr.Number, pr.Head.SHA)
if err != nil {
return err
}
if len(modifiedFiles) == 0 {
return nil
}
log.Infof("Will buildify %d modified Bazel files.", len(modifiedFiles))
// Clone the repo, checkout the PR.
startClone := time.Now()
r, err := gc.Clone(e.Repo.FullName)
if err != nil {
return err
}
defer func() {
if err := r.Clean(); err != nil {
log.WithError(err).Error("Error cleaning up repo.")
}
}()
if err := r.CheckoutPullRequest(e.Number); err != nil {
return err
}
finishClone := time.Now()
log.WithField("duration", time.Since(startClone)).Info("Cloned and checked out PR.")
// Compute buildifier errors.
problems, err := problemsInFiles(r, modifiedFiles)
if err != nil {
return err
}
log.WithField("duration", time.Since(finishClone)).Info("Buildified.")
// Make the list of comments.
var comments []github.DraftReviewComment
for f := range problems {
comments = append(comments, github.DraftReviewComment{
Path: f,
// TODO(mattmoor): Include the messages if they are ever non-empty.
Body: strings.Join([]string{
"This Bazel file needs formatting, run:",
"```shell",
fmt.Sprintf("buildifier -mode=fix %q", f),
"```"}, "\n"),
Position: 1,
})
}
// Trim down the number of comments if necessary.
totalProblems := len(problems)
// Make the review body.
s := "s"
if totalProblems == 1 {
s = ""
}
response := fmt.Sprintf("%d warning%s.", totalProblems, s)
return ghc.CreateReview(org, repo, e.Number, github.DraftReview{
Body: plugins.FormatResponseRaw(e.Body, e.HTMLURL, e.User.Login, response),
Action: github.Comment,
Comments: comments,
})
}<|fim▁end|> | Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License. |
<|file_name|>tags.js<|end_file_name|><|fim▁begin|>export default {
FETCH_TAGS_PENDING: Symbol("FETCH_TAGS_PENDING"),
FETCH_TAGS_SUCCESS: Symbol("FETCH_TAGS_SUCCESS"),
FETCH_TAGS_FAILURE: Symbol("FETCH_TAGS_FAILURE"),
FILTER_TAGS: Symbol("FILTER_TAGS"),
ORDER_TAGS: Symbol("ORDER_TAGS")<|fim▁hole|><|fim▁end|> | }; |
<|file_name|>test2.py<|end_file_name|><|fim▁begin|>from egat.testset import UnorderedTestSet
class Test2(UnorderedTestSet):
def testStep1(self):<|fim▁hole|> pass
def testStep3(self):
pass<|fim▁end|> | pass
def testStep2(self): |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>export * from './dispatchSchedule.component';<|fim▁end|> | |
<|file_name|>reject_assignments.py<|end_file_name|><|fim▁begin|>import argparse, json
import simpleamt
if __name__ == '__main__':
parser = argparse.ArgumentParser(parents=[simpleamt.get_parent_parser()])
args = parser.parse_args()
mtc = simpleamt.get_mturk_connection_from_args(args)
reject_ids = []
if args.hit_ids_file is None:
parser.error('Must specify --hit_ids_file.')
with open(args.hit_ids_file, 'r') as f:
hit_ids = [line.strip() for line in f]
for hit_id in hit_ids:
for a in mtc.get_assignments(hit_id):
reject_ids.append(a.AssignmentId)
print ('This will reject %d assignments with '
'sandbox=%s' % (len(reject_ids), str(args.sandbox)))
print 'Continue?'
s = raw_input('(Y/N): ')
if s == 'Y' or s == 'y':<|fim▁hole|> else:
print 'Aborting'<|fim▁end|> | print 'Rejecting assignments'
for idx, assignment_id in enumerate(reject_ids):
print 'Rejecting assignment %d / %d' % (idx + 1, len(reject_ids))
mtc.reject_assignment(assignment_id, feedback='Invalid results') |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import React from 'react';
import Home from './Home.js';
import Login from './Login.js';
import PointInTime from './PointInTime.js';
import Vispdat from './VISPDAT.js';
import Refuse from './Refuse.js';
import {
Actions,
Scene
} from 'react-native-router-flux';
/**
* Order of rendering is based on index of Child scene.
* We set hideNavBar to true to prevent that ugly default
* header. We can enable and style when we need to.
*/
export default Actions.create(
<Scene key="root">
<Scene key="login" component={Login} hideNavBar={true} />
<Scene key="home" component={Home} hideNavBar={true} />
<Scene key="pointInTime" component={PointInTime} hideNavBar={true} />
<Scene key="vispdat" component={Vispdat} hideNavBar={true} />
<Scene key="refuse" component={Refuse} hideNavBar={true} />
</Scene><|fim▁hole|><|fim▁end|> | ); |
<|file_name|>package-info.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2009 Christian Hujer.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.<|fim▁hole|> * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* File Filtering.
* @author <a href="mailto:[email protected]">Christian Hujer</a>
* @since 0.1
*/
package net.sf.japi.util.filter.file;<|fim▁end|> | *
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of |
<|file_name|>coercion.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! # Type Coercion
//!
//! Under certain circumstances we will coerce from one type to another,
//! for example by auto-borrowing. This occurs in situations where the
//! compiler has a firm 'expected type' that was supplied from the user,
//! and where the actual type is similar to that expected type in purpose
//! but not in representation (so actual subtyping is inappropriate).
//!
//! ## Reborrowing
//!
//! Note that if we are expecting a reference, we will *reborrow*
//! even if the argument provided was already a reference. This is
//! useful for freezing mut/const things (that is, when the expected is &T
//! but you have &const T or &mut T) and also for avoiding the linearity
//! of mut things (when the expected is &mut T and you have &mut T). See
//! the various `src/test/run-pass/coerce-reborrow-*.rs` tests for
//! examples of where this is useful.
//!
//! ## Subtle note
//!
//! When deciding what type coercions to consider, we do not attempt to
//! resolve any type variables we may encounter. This is because `b`
//! represents the expected type "as the user wrote it", meaning that if
//! the user defined a generic function like
//!
//! fn foo<A>(a: A, b: A) { ... }
//!
//! and then we wrote `foo(&1, @2)`, we will not auto-borrow
//! either argument. In older code we went to some lengths to
//! resolve the `b` variable, which could mean that we'd
//! auto-borrow later arguments but not earlier ones, which
//! seems very confusing.
//!
//! ## Subtler note
//!
//! However, right now, if the user manually specifies the
//! values for the type variables, as so:
//!
//! foo::<&int>(@1, @2)
//!
//! then we *will* auto-borrow, because we can't distinguish this from a
//! function that declared `&int`. This is inconsistent but it's easiest
//! at the moment. The right thing to do, I think, is to consider the
//! *unsubstituted* type when deciding whether to auto-borrow, but the
//! *substituted* type when considering the bounds and so forth. But most
//! of our methods don't give access to the unsubstituted type, and
//! rightly so because they'd be error-prone. So maybe the thing to do is
//! to actually determine the kind of coercions that should occur
//! separately and pass them in. Or maybe it's ok as is. Anyway, it's
//! sort of a minor point so I've opted to leave it for later---after all
//! we may want to adjust precisely when coercions occur.
use check::{autoderef, FnCtxt, NoPreference, PreferMutLvalue, UnresolvedTypeAction};
use middle::infer::{self, Coercion};
use middle::traits::{self, ObligationCause};
use middle::traits::{predicate_for_trait_def, report_selection_error};
use middle::ty::{AutoDerefRef, AdjustDerefRef};
use middle::ty::{self, mt, Ty};
use middle::ty_relate::RelateResult;
use util::common::indent;
use util::ppaux::Repr;
use std::cell::RefCell;
use std::collections::VecDeque;
use syntax::ast;
struct Coerce<'a, 'tcx: 'a> {
fcx: &'a FnCtxt<'a, 'tcx>,
origin: infer::TypeOrigin,
unsizing_obligations: RefCell<Vec<traits::PredicateObligation<'tcx>>>,
}
type CoerceResult<'tcx> = RelateResult<'tcx, Option<ty::AutoAdjustment<'tcx>>>;
impl<'f, 'tcx> Coerce<'f, 'tcx> {
fn tcx(&self) -> &ty::ctxt<'tcx> {
self.fcx.tcx()
}
fn subtype(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx> {
try!(self.fcx.infcx().sub_types(false, self.origin.clone(), a, b));
Ok(None) // No coercion required.
}
fn unpack_actual_value<T, F>(&self, a: Ty<'tcx>, f: F) -> T where
F: FnOnce(Ty<'tcx>) -> T,
{
f(self.fcx.infcx().shallow_resolve(a))
}
fn coerce(&self,
expr_a: &ast::Expr,
a: Ty<'tcx>,
b: Ty<'tcx>)
-> CoerceResult<'tcx> {
debug!("Coerce.tys({} => {})",
a.repr(self.tcx()),
b.repr(self.tcx()));
// Consider coercing the subtype to a DST
let unsize = self.unpack_actual_value(a, |a| {
self.coerce_unsized(a, b)
});
if unsize.is_ok() {
return unsize;
}
// Examine the supertype and consider auto-borrowing.
//
// Note: does not attempt to resolve type variables we encounter.
// See above for details.
match b.sty {
ty::ty_ptr(mt_b) => {
return self.unpack_actual_value(a, |a| {
self.coerce_unsafe_ptr(a, b, mt_b.mutbl)
});
}
ty::ty_rptr(_, mt_b) => {
return self.unpack_actual_value(a, |a| {
self.coerce_borrowed_pointer(expr_a, a, b, mt_b.mutbl)
});
}
_ => {}
}
self.unpack_actual_value(a, |a| {
match a.sty {
ty::ty_bare_fn(Some(_), a_f) => {
// Function items are coercible to any closure
// type; function pointers are not (that would
// require double indirection).
self.coerce_from_fn_item(a, a_f, b)
}
ty::ty_bare_fn(None, a_f) => {
// We permit coercion of fn pointers to drop the
// unsafe qualifier.
self.coerce_from_fn_pointer(a, a_f, b)
}
_ => {
// Otherwise, just use subtyping rules.
self.subtype(a, b)
}
}
})
}
/// Reborrows `&mut A` to `&mut B` and `&(mut) A` to `&B`.
/// To match `A` with `B`, autoderef will be performed,
/// calling `deref`/`deref_mut` where necessary.
fn coerce_borrowed_pointer(&self,
expr_a: &ast::Expr,
a: Ty<'tcx>,
b: Ty<'tcx>,
mutbl_b: ast::Mutability)
-> CoerceResult<'tcx> {
debug!("coerce_borrowed_pointer(a={}, b={})",
a.repr(self.tcx()),
b.repr(self.tcx()));
// If we have a parameter of type `&M T_a` and the value
// provided is `expr`, we will be adding an implicit borrow,
// meaning that we convert `f(expr)` to `f(&M *expr)`. Therefore,
// to type check, we will construct the type that `&M*expr` would
// yield.
match a.sty {
ty::ty_rptr(_, mt_a) => {
try!(coerce_mutbls(mt_a.mutbl, mutbl_b));
}
_ => return self.subtype(a, b)
}
let coercion = Coercion(self.origin.span());
let r_borrow = self.fcx.infcx().next_region_var(coercion);
let r_borrow = self.tcx().mk_region(r_borrow);
let autoref = Some(ty::AutoPtr(r_borrow, mutbl_b));
let lvalue_pref = match mutbl_b {
ast::MutMutable => PreferMutLvalue,
ast::MutImmutable => NoPreference
};
let mut first_error = None;
let (_, autoderefs, success) = autoderef(self.fcx,
expr_a.span,
a,
Some(expr_a),
UnresolvedTypeAction::Ignore,
lvalue_pref,
|inner_ty, autoderef| {
if autoderef == 0 {
// Don't let this pass, otherwise it would cause
// &T to autoref to &&T.
return None;
}
let ty = ty::mk_rptr(self.tcx(), r_borrow,
mt {ty: inner_ty, mutbl: mutbl_b});
if let Err(err) = self.subtype(ty, b) {
if first_error.is_none() {
first_error = Some(err);
}
None
} else {
Some(())
}
});
match success {
Some(_) => {
Ok(Some(AdjustDerefRef(AutoDerefRef {
autoderefs: autoderefs,
autoref: autoref,
unsize: None
})))
}
None => {
// Return original error as if overloaded deref was never
// attempted, to avoid irrelevant/confusing error messages.
Err(first_error.expect("coerce_borrowed_pointer failed with no error?"))
}
}
}
// &[T, ..n] or &mut [T, ..n] -> &[T]
// or &mut [T, ..n] -> &mut [T]
// or &Concrete -> &Trait, etc.
fn coerce_unsized(&self,
source: Ty<'tcx>,
target: Ty<'tcx>)
-> CoerceResult<'tcx> {
debug!("coerce_unsized(source={}, target={})",
source.repr(self.tcx()),
target.repr(self.tcx()));
let traits = (self.tcx().lang_items.unsize_trait(),
self.tcx().lang_items.coerce_unsized_trait());
let (unsize_did, coerce_unsized_did) = if let (Some(u), Some(cu)) = traits {
(u, cu)
} else {
debug!("Missing Unsize or CoerceUnsized traits");
return Err(ty::terr_mismatch);
};
// Note, we want to avoid unnecessary unsizing. We don't want to coerce to
// a DST unless we have to. This currently comes out in the wash since
// we can't unify [T] with U. But to properly support DST, we need to allow
// that, at which point we will need extra checks on the target here.
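        // (E.g. we keep `&[T; n]` as `&[T; n]` and only unsize to `&[T]`
        // when the target type actually demands a slice - illustrative.)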
// Handle reborrows before selecting `Source: CoerceUnsized<Target>`.
let (source, reborrow) = match (&source.sty, &target.sty) {
(&ty::ty_rptr(_, mt_a), &ty::ty_rptr(_, mt_b)) => {
try!(coerce_mutbls(mt_a.mutbl, mt_b.mutbl));
let coercion = Coercion(self.origin.span());
let r_borrow = self.fcx.infcx().next_region_var(coercion);
let region = self.tcx().mk_region(r_borrow);
(mt_a.ty, Some(ty::AutoPtr(region, mt_b.mutbl)))
}
(&ty::ty_rptr(_, mt_a), &ty::ty_ptr(mt_b)) => {
try!(coerce_mutbls(mt_a.mutbl, mt_b.mutbl));
(mt_a.ty, Some(ty::AutoUnsafe(mt_b.mutbl)))
}
_ => (source, None)
};
let source = ty::adjust_ty_for_autoref(self.tcx(), source, reborrow);
let mut selcx = traits::SelectionContext::new(self.fcx.infcx(), self.fcx);
// Use a FIFO queue for this custom fulfillment procedure.
let mut queue = VecDeque::new();
let mut leftover_predicates = vec![];
// Create an obligation for `Source: CoerceUnsized<Target>`.
let cause = ObligationCause::misc(self.origin.span(), self.fcx.body_id);
queue.push_back(predicate_for_trait_def(self.tcx(),
cause,
coerce_unsized_did,
0,
source,
vec![target]));
// Keep resolving `CoerceUnsized` and `Unsize` predicates to avoid
// emitting a coercion in cases like `Foo<$1>` -> `Foo<$2>`, where
// inference might unify those two inner type variables later.
let traits = [coerce_unsized_did, unsize_did];
while let Some(obligation) = queue.pop_front() {
debug!("coerce_unsized resolve step: {}", obligation.repr(self.tcx()));
let trait_ref = match obligation.predicate {
ty::Predicate::Trait(ref tr) if traits.contains(&tr.def_id()) => {
tr.clone()
}
_ => {
leftover_predicates.push(obligation);
continue;
}
};
match selcx.select(&obligation.with(trait_ref)) {
// Uncertain or unimplemented.
Ok(None) | Err(traits::Unimplemented) => {
debug!("coerce_unsized: early return - can't prove obligation");
return Err(ty::terr_mismatch);
}
// Object safety violations or miscellaneous.
Err(err) => {
report_selection_error(self.fcx.infcx(), &obligation, &err);
// Treat this like an obligation and follow through
// with the unsizing - the lack of a coercion should
// be silent, as it causes a type mismatch later.
}
Ok(Some(vtable)) => {
vtable.map_move_nested(|o| queue.push_back(o));
}
}
}
let mut obligations = self.unsizing_obligations.borrow_mut();
assert!(obligations.is_empty());
*obligations = leftover_predicates;
let adjustment = AutoDerefRef {
autoderefs: if reborrow.is_some() { 1 } else { 0 },
autoref: reborrow,
unsize: Some(target)
};
debug!("Success, coerced with {}", adjustment.repr(self.tcx()));
Ok(Some(AdjustDerefRef(adjustment)))
}
fn coerce_from_fn_pointer(&self,
a: Ty<'tcx>,
fn_ty_a: &'tcx ty::BareFnTy<'tcx>,
b: Ty<'tcx>)
-> CoerceResult<'tcx>
{
/*!
         * Attempts to coerce a safe function pointer to a matching
         * `unsafe` function pointer type.
*/
self.unpack_actual_value(b, |b| {
debug!("coerce_from_fn_pointer(a={}, b={})",
a.repr(self.tcx()), b.repr(self.tcx()));
if let ty::ty_bare_fn(None, fn_ty_b) = b.sty {
match (fn_ty_a.unsafety, fn_ty_b.unsafety) {
(ast::Unsafety::Normal, ast::Unsafety::Unsafe) => {
let unsafe_a = self.tcx().safe_to_unsafe_fn_ty(fn_ty_a);
try!(self.subtype(unsafe_a, b));
return Ok(Some(ty::AdjustUnsafeFnPointer));
}
_ => {}
}
}
self.subtype(a, b)
})
}
fn coerce_from_fn_item(&self,
a: Ty<'tcx>,
fn_ty_a: &'tcx ty::BareFnTy<'tcx>,
b: Ty<'tcx>)
-> CoerceResult<'tcx> {
/*!
         * Attempts to coerce from the type of a Rust function item
         * into a matching function pointer type.
*/
self.unpack_actual_value(b, |b| {
debug!("coerce_from_fn_item(a={}, b={})",
a.repr(self.tcx()), b.repr(self.tcx()));
match b.sty {
ty::ty_bare_fn(None, _) => {
let a_fn_pointer = ty::mk_bare_fn(self.tcx(), None, fn_ty_a);
try!(self.subtype(a_fn_pointer, b));<|fim▁hole|> }
_ => self.subtype(a, b)
}
})
}
fn coerce_unsafe_ptr(&self,
a: Ty<'tcx>,
b: Ty<'tcx>,
mutbl_b: ast::Mutability)
-> CoerceResult<'tcx> {
debug!("coerce_unsafe_ptr(a={}, b={})",
a.repr(self.tcx()),
b.repr(self.tcx()));
let mt_a = match a.sty {
ty::ty_rptr(_, mt) | ty::ty_ptr(mt) => mt,
_ => {
return self.subtype(a, b);
}
};
// Check that the types which they point at are compatible.
let a_unsafe = ty::mk_ptr(self.tcx(), ty::mt{ mutbl: mutbl_b, ty: mt_a.ty });
try!(self.subtype(a_unsafe, b));
try!(coerce_mutbls(mt_a.mutbl, mutbl_b));
// Although references and unsafe ptrs have the same
// representation, we still register an AutoDerefRef so that
// regionck knows that the region for `a` must be valid here.
Ok(Some(AdjustDerefRef(AutoDerefRef {
autoderefs: 1,
autoref: Some(ty::AutoUnsafe(mutbl_b)),
unsize: None
})))
}
}
pub fn mk_assignty<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
expr: &ast::Expr,
a: Ty<'tcx>,
b: Ty<'tcx>)
-> RelateResult<'tcx, ()> {
debug!("mk_assignty({} -> {})", a.repr(fcx.tcx()), b.repr(fcx.tcx()));
let mut unsizing_obligations = vec![];
let adjustment = try!(indent(|| {
fcx.infcx().commit_if_ok(|_| {
let coerce = Coerce {
fcx: fcx,
origin: infer::ExprAssignable(expr.span),
unsizing_obligations: RefCell::new(vec![])
};
let adjustment = try!(coerce.coerce(expr, a, b));
unsizing_obligations = coerce.unsizing_obligations.into_inner();
Ok(adjustment)
})
}));
if let Some(AdjustDerefRef(auto)) = adjustment {
if auto.unsize.is_some() {
for obligation in unsizing_obligations {
fcx.register_predicate(obligation);
}
}
}
if let Some(adjustment) = adjustment {
debug!("Success, coerced with {}", adjustment.repr(fcx.tcx()));
fcx.write_adjustment(expr.id, adjustment);
}
Ok(())
}
fn coerce_mutbls<'tcx>(from_mutbl: ast::Mutability,
to_mutbl: ast::Mutability)
-> CoerceResult<'tcx> {
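    // Mutability may be dropped (`&mut` supplied where `&` is expected)
    // but never gained: only imm -> mut is rejected below.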
match (from_mutbl, to_mutbl) {
(ast::MutMutable, ast::MutMutable) |
(ast::MutImmutable, ast::MutImmutable) |
(ast::MutMutable, ast::MutImmutable) => Ok(None),
(ast::MutImmutable, ast::MutMutable) => Err(ty::terr_mutability)
}
}<|fim▁end|> | Ok(Some(ty::AdjustReifyFnPointer)) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | from .flag_button import * |
<|file_name|>StringHttpRequestCallback.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2015 pengjianbo([email protected]), Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.finalteam.okhttpfinal;
/**
 * Description:
 * Author: pengjianbo
 * Date: 15/12/10 8:13 PM
*/
public class StringHttpRequestCallback extends BaseHttpRequestCallback<String> {
<|fim▁hole|>}<|fim▁end|> | public StringHttpRequestCallback() {
super();
type = String.class;
} |
<|file_name|>fs.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use ffi::OsString;
use fmt;
use hash::{Hash, Hasher};
use io::{self, SeekFrom};
use path::{Path, PathBuf};
use sys::time::SystemTime;
use sys::{unsupported, Void};
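// `Void` is an uninhabited type, so the `match self.0 {}` bodies below
// type-check against any return type: this target supports no filesystem
// operations.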
pub struct File(Void);
pub struct FileAttr(Void);
pub struct ReadDir(Void);
pub struct DirEntry(Void);
#[derive(Clone, Debug)]
pub struct OpenOptions {}
pub struct FilePermissions(Void);
pub struct FileType(Void);
#[derive(Debug)]
pub struct DirBuilder {}
impl FileAttr {
pub fn size(&self) -> u64 {
match self.0 {}
}
pub fn perm(&self) -> FilePermissions {
match self.0 {}
}
pub fn file_type(&self) -> FileType {
match self.0 {}
}
pub fn modified(&self) -> io::Result<SystemTime> {
match self.0 {}
}
pub fn accessed(&self) -> io::Result<SystemTime> {
match self.0 {}
}
pub fn created(&self) -> io::Result<SystemTime> {
match self.0 {}
}
}
impl Clone for FileAttr {
fn clone(&self) -> FileAttr {
match self.0 {}
}
}
impl FilePermissions {
pub fn readonly(&self) -> bool {
match self.0 {}
}
pub fn set_readonly(&mut self, _readonly: bool) {
match self.0 {}
}
}
impl Clone for FilePermissions {
fn clone(&self) -> FilePermissions {
match self.0 {}
}
}
impl PartialEq for FilePermissions {
fn eq(&self, _other: &FilePermissions) -> bool {
match self.0 {}
}
}
impl Eq for FilePermissions {}
impl fmt::Debug for FilePermissions {
fn fmt(&self, _f: &mut fmt::Formatter) -> fmt::Result {
match self.0 {}
}
}
impl FileType {
pub fn is_dir(&self) -> bool {
match self.0 {}
}
pub fn is_file(&self) -> bool {
match self.0 {}
}
pub fn is_symlink(&self) -> bool {
match self.0 {}
}
}
impl Clone for FileType {
fn clone(&self) -> FileType {
match self.0 {}
}
}
impl Copy for FileType {}
impl PartialEq for FileType {
fn eq(&self, _other: &FileType) -> bool {
match self.0 {}
}
}
impl Eq for FileType {}
impl Hash for FileType {
fn hash<H: Hasher>(&self, _h: &mut H) {
match self.0 {}
}
}
impl fmt::Debug for FileType {
fn fmt(&self, _f: &mut fmt::Formatter) -> fmt::Result {
match self.0 {}
}
}
impl fmt::Debug for ReadDir {
fn fmt(&self, _f: &mut fmt::Formatter) -> fmt::Result {
match self.0 {}
}
}
impl Iterator for ReadDir {
type Item = io::Result<DirEntry>;
fn next(&mut self) -> Option<io::Result<DirEntry>> {
match self.0 {}
}
}
impl DirEntry {
pub fn path(&self) -> PathBuf {
match self.0 {}
}
pub fn file_name(&self) -> OsString {
match self.0 {}
}
pub fn metadata(&self) -> io::Result<FileAttr> {
match self.0 {}
}
pub fn file_type(&self) -> io::Result<FileType> {
match self.0 {}
}
}
impl OpenOptions {
pub fn new() -> OpenOptions {
OpenOptions {}
}<|fim▁hole|>
pub fn read(&mut self, _read: bool) {}
pub fn write(&mut self, _write: bool) {}
pub fn append(&mut self, _append: bool) {}
pub fn truncate(&mut self, _truncate: bool) {}
pub fn create(&mut self, _create: bool) {}
pub fn create_new(&mut self, _create_new: bool) {}
}
impl File {
pub fn open(_path: &Path, _opts: &OpenOptions) -> io::Result<File> {
unsupported()
}
pub fn file_attr(&self) -> io::Result<FileAttr> {
match self.0 {}
}
pub fn fsync(&self) -> io::Result<()> {
match self.0 {}
}
pub fn datasync(&self) -> io::Result<()> {
match self.0 {}
}
pub fn truncate(&self, _size: u64) -> io::Result<()> {
match self.0 {}
}
pub fn read(&self, _buf: &mut [u8]) -> io::Result<usize> {
match self.0 {}
}
pub fn write(&self, _buf: &[u8]) -> io::Result<usize> {
match self.0 {}
}
pub fn flush(&self) -> io::Result<()> {
match self.0 {}
}
pub fn seek(&self, _pos: SeekFrom) -> io::Result<u64> {
match self.0 {}
}
pub fn duplicate(&self) -> io::Result<File> {
match self.0 {}
}
pub fn set_permissions(&self, _perm: FilePermissions) -> io::Result<()> {
match self.0 {}
}
pub fn diverge(&self) -> ! {
match self.0 {}
}
}
impl DirBuilder {
pub fn new() -> DirBuilder {
DirBuilder {}
}
pub fn mkdir(&self, _p: &Path) -> io::Result<()> {
unsupported()
}
}
impl fmt::Debug for File {
fn fmt(&self, _f: &mut fmt::Formatter) -> fmt::Result {
match self.0 {}
}
}
pub fn readdir(_p: &Path) -> io::Result<ReadDir> {
unsupported()
}
pub fn unlink(_p: &Path) -> io::Result<()> {
unsupported()
}
pub fn rename(_old: &Path, _new: &Path) -> io::Result<()> {
unsupported()
}
pub fn set_perm(_p: &Path, perm: FilePermissions) -> io::Result<()> {
match perm.0 {}
}
pub fn rmdir(_p: &Path) -> io::Result<()> {
unsupported()
}
pub fn remove_dir_all(_path: &Path) -> io::Result<()> {
unsupported()
}
pub fn readlink(_p: &Path) -> io::Result<PathBuf> {
unsupported()
}
pub fn symlink(_src: &Path, _dst: &Path) -> io::Result<()> {
unsupported()
}
pub fn link(_src: &Path, _dst: &Path) -> io::Result<()> {
unsupported()
}
pub fn stat(_p: &Path) -> io::Result<FileAttr> {
unsupported()
}
pub fn lstat(_p: &Path) -> io::Result<FileAttr> {
unsupported()
}
pub fn canonicalize(_p: &Path) -> io::Result<PathBuf> {
unsupported()
}
pub fn copy(_from: &Path, _to: &Path) -> io::Result<u64> {
unsupported()
}<|fim▁end|> | |
<|file_name|>expected.rs<|end_file_name|><|fim▁begin|><|fim▁hole|> diesel::table! {
/// Representation of the `custom_schema.in_schema` table.
///
/// (Automatically generated by Diesel.)
custom_schema.in_schema (id) {
/// The `id` column of the `custom_schema.in_schema` table.
///
/// Its SQL type is `Int4`.
///
/// (Automatically generated by Diesel.)
id -> Int4,
}
}
}<|fim▁end|> | // @generated automatically by Diesel CLI.
pub mod custom_schema { |
<|file_name|>test_service.py<|end_file_name|><|fim▁begin|>import unittest
from unittest.mock import patch
from app.main.service import GitHubUserService
@patch("app.main.service.github")
class TestGitHubUserService(unittest.TestCase):
def setUp(self):
self.test_user = "test"
self.retrieved_repos_return = [
{
"fork": False,
"name": "test_non_fork",
"pull_url": "http://localhost/non_fork/pulls",
"url": "https://localhost/non_fork",
"full_name": self.test_user + "/test_non_fork",
"html_url": "https://localhost"
},
{
"fork": True,
"name": "test_fork",
"full_name": self.test_user + "/test_fork",
"url": "https://localhost/child",
"html_url": "https://localhost",
"parent": {
"fork": False,
"name": "parent",
"url": "http://parent",
"full_name": self.test_user + "1/test_parent",
"pull_url": "https://localhost/parent/pulls",
"html_url": "https://localhost/parent"
}
}
]
def test_search_for_users_error(self, github_client):
message = "too many"
github_client.search_for_user.return_value = {"error": message}
assert GitHubUserService.search_for_user("nobody") == message
def test_search_for_users_success(self, github_client):
github_client_return = [{
"avatar_url": "test",
"repos_url": "http://localhost",
"html_url": "https://localhost",
"login": "nobody"
}]
github_client.search_for_user.return_value = github_client_return
found_users = GitHubUserService.search_for_users("nobody")
self.assertEqual(found_users[0].avatar_url, github_client_return[0]["avatar_url"])<|fim▁hole|> self.assertEqual(found_users[0].repos_url, github_client_return[0]["repos_url"])
self.assertEqual(found_users[0].url, github_client_return[0]["html_url"])
self.assertEqual(found_users[0].login, github_client_return[0]["login"])
def test_retrieve_repos_if_fork_with_pr(self, github_client):
def local_mock_retrieve_pulls(url, state):
pulls = [
{
"html_url": "https://localhost/parent/pulls",
"title": "test title",
"user": {
"login": self.test_user
}
}
]
if "parent" in url:
return pulls
else:
pulls[0]["html_url"] = self.retrieved_repos_return[0]["html_url"]
return pulls
# mocks
github_client.retrieve_repos.return_value = self.retrieved_repos_return
github_client.retrieve_repo.side_effect = self.mock_retrieve_repo
github_client.retrieve_pulls.side_effect = local_mock_retrieve_pulls
actual_repos = GitHubUserService.retrieve_repos(self.test_user)
self.assertEqual(2, len(actual_repos))
for repo in actual_repos:
if repo.is_fork:
self.assertTrue("parent" in
repo.pull_requests[0].url,
"The parent pulls are not in the repo: {}"
.format(repo.name))
def test_retrieve_repos_if_fork_without_pr(self, github_client):
def local_mock_retrieve_pulls(url, state):
pulls = [
{
"html_url": "https://localhost/parent/pulls",
"title": "test title",
"user": {
"login": self.test_user
}
}
]
if "parent" in url:
return []
else:
pulls[0]["html_url"] = self.retrieved_repos_return[0]["html_url"]
return pulls
# mocks
github_client.retrieve_repos.return_value = self.retrieved_repos_return
github_client.retrieve_repo.side_effect = self.mock_retrieve_repo
github_client.retrieve_pulls.side_effect = local_mock_retrieve_pulls
actual_repos = GitHubUserService.retrieve_repos(self.test_user)
for repo in actual_repos:
if repo.is_fork:
self.assertIsNone(repo.pull_requests,
"The parent pulls are not in the repo: {}"
.format(repo.name))
def test_retrieve_repos_if_source_with_pr(self, github_client):
def local_mock_retrieve_pulls(url, state):
pulls = [
{
"html_url": "https://localhost/non_fork/pulls",
"title": "test title",
"user": {
"login": self.test_user
}
}
]
return pulls
# mocks
github_client.retrieve_repos.return_value = self.retrieved_repos_return
github_client.retrieve_repo.side_effect = self.mock_retrieve_repo
github_client.retrieve_pulls.side_effect = local_mock_retrieve_pulls
actual_repos = GitHubUserService.retrieve_repos(self.test_user)
self.assertEqual(2, len(actual_repos))
for repo in actual_repos:
if not repo.is_fork:
self.assertTrue("non_fork" in
repo.pull_requests[0].url,
"The non_fork pulls are not in the repo: {}"
.format(repo.name))
def test_retrieve_repos_if_source_without_pr(self, github_client):
def local_mock_retrieve_pulls(url, state):
return []
# mocks
github_client.retrieve_repos.return_value = self.retrieved_repos_return
github_client.retrieve_repo.side_effect = self.mock_retrieve_repo
github_client.retrieve_pulls.side_effect = local_mock_retrieve_pulls
actual_repos = GitHubUserService.retrieve_repos(self.test_user)
self.assertEqual(2, len(actual_repos))
for repo in actual_repos:
if not repo.is_fork:
self.assertIsNone(repo.pull_requests,
"The non_fork pulls are not in the repo: {}"
.format(repo.name))
# -----------------helper mock functions--------------------
def mock_retrieve_repo(self, url):
if "non_fork" in url:
return self.retrieved_repos_return[0]
elif "parent" in url:
return self.retrieved_repos_return[1]["parent"]
else:
return self.retrieved_repos_return[1]
def mock_retrieve_pulls(self, url, state):
pulls = [
{
"html_url": "https://localhost/parent/pulls",
"title": "test title",
"user": {
"login": self.test_user
}
}
]
if "parent" in url:
return pulls
else:
pulls[0]["html_url"] = self.retrieved_repos_return[0]["html_url"]
return pulls
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>server.js<|end_file_name|><|fim▁begin|>const fs = require('fs')
const path = require('path')
const LRU = require('lru-cache')
const express = require('express')
const favicon = require('serve-favicon')
const compression = require('compression')
const resolve = file => path.resolve(__dirname, file)
const { createBundleRenderer } = require('vue-server-renderer')
const isProd = process.env.NODE_ENV === 'production'
const useMicroCache = process.env.MICRO_CACHE !== 'false'<|fim▁hole|>
const app = express()
const template = fs.readFileSync(resolve('./src/index.html'), 'utf-8');
function createRenderer (bundle, options) {
// https://github.com/vuejs/vue/blob/dev/packages/vue-server-renderer/README.md#why-use-bundlerenderer
return createBundleRenderer(bundle, Object.assign(options, {
template,
// for component caching
cache: LRU({
max: 1000,
maxAge: 1000 * 60 * 15
}),
// this is only needed when vue-server-renderer is npm-linked
basedir: resolve('./dist'),
// recommended for performance
runInNewContext: false
}))
}
let renderer
let readyPromise
if (isProd) {
// In production: create server renderer using built server bundle.
// The server bundle is generated by vue-ssr-webpack-plugin.
const bundle = require('./dist/vue-ssr-server-bundle.json')
// The client manifests are optional, but it allows the renderer
// to automatically infer preload/prefetch links and directly add <script>
// tags for any async chunks used during render, avoiding waterfall requests.
const clientManifest = require('./dist/vue-ssr-client-manifest.json')
renderer = createRenderer(bundle, {
clientManifest
})
} else {
// In development: setup the dev server with watch and hot-reload,
// and create a new renderer on bundle / index template update.
readyPromise = require('./build/setup-dev-server')(app, (bundle, options) => {
renderer = createRenderer(bundle, options)
})
}
const serve = (path, cache) => express.static(resolve(path), {
maxAge: cache && isProd ? 1000 * 60 * 60 * 24 * 30 : 0
})
app.use(compression({ threshold: 0 }))
//app.use(favicon('./public/logo-48.png'))
app.use('/dist', serve('./dist', true))
app.use('/public', serve('./public', true))
app.use('/manifest.json', serve('./manifest.json', true))
app.use('/service-worker.js', serve('./dist/service-worker.js'))
// 1-second microcache.
// https://www.nginx.com/blog/benefits-of-microcaching-nginx/
const microCache = LRU({
max: 100,
maxAge: 1000
})
// since this app has no user-specific content, every page is micro-cacheable.
// if your app involves user-specific content, you need to implement custom
// logic to determine whether a request is cacheable based on its url and
// headers.
const isCacheable = req => useMicroCache
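// An illustrative user-aware variant (not used here) could be:
//   const isCacheable = req => useMicroCache && !req.headers.cookie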
function render (req, res) {
const s = Date.now()
res.setHeader("Content-Type", "text/html")
res.setHeader("Server", serverInfo)
const handleError = err => {
if (err.url) {
res.redirect(err.url)
} else if(err.code === 404) {
res.status(404).end('404 | Page Not Found')
} else {
// Render Error Page or Redirect
res.status(500).end('500 | Internal Server Error')
console.error(`error during render : ${req.url}`)
console.error(err.stack)
}
}
const cacheable = isCacheable(req)
if (cacheable) {
const hit = microCache.get(req.url)
if (hit) {
if (!isProd) {
console.log(`cache hit!`)
}
return res.end(hit)
}
}
const context = {
    title: 'Jiaoyihu_mobile game trading platform_mobile game trades_account trades_game currency trades_equipment trades_item trades_jiaoyihu', // default title
url: req.url
}
renderer.renderToString(context, (err, html) => {
if (err) {
return handleError(err)
}
res.end(html)
if (cacheable) {
microCache.set(req.url, html)
}
if (!isProd) {
console.log(`whole request: ${Date.now() - s}ms`)
}
})
}
app.get('*', isProd ? render : (req, res) => {
readyPromise.then(() => render(req, res))
})
const port = process.env.PORT || 80;
app.listen(port, () => {
console.log(`server started at localhost:${port}`)
})<|fim▁end|> | const serverInfo =
`express/${require('express/package.json').version} ` +
`vue-server-renderer/${require('vue-server-renderer/package.json').version}` |
<|file_name|>gnome.py<|end_file_name|><|fim▁begin|><|fim▁hole|># -*- coding: utf-8 -*-
from checker.backends import BaseBackend
from checker import logger
log = logger.getLogger(__name__)
class GnomeBackend(BaseBackend):
"""for projects hosted on gnome.org"""
name = 'Gnome'
domain = 'gnome.org'
example = 'https://download.gnome.org/sources/gnome-control-center'
def __init__(self, url):
super(GnomeBackend, self).__init__()
self._url = url
self._rule_type = "xpath"
def get_urls(self, branch=None):
return self._url,
def get_rules(self):
log.debug('use %s backend rule for %s package.' %
(self.name, self._url.split('/')[-1]))
return [("//tr/td[3][contains(text(), '-')]/text()", ""), ("", "")]
@classmethod
def isrelease(cls, url):
return True<|fim▁end|> | |
<|file_name|>min_const_unsafe_fn_libstd_stability2.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![unstable(feature = "humans",
reason = "who ever let humans program computers,
we're apparently really bad at it",
issue = "0")]
#![feature(rustc_const_unstable, const_fn, foo, foo2)]
#![feature(min_const_unsafe_fn)]
#![feature(staged_api)]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature="foo")]
const fn foo() -> u32 { 42 }
#[stable(feature = "rust1", since = "1.0.0")]
// can't call non-min_const_fn
const unsafe fn bar() -> u32 { foo() } //~ ERROR can only call other `min_const_fn`
#[unstable(feature = "rust1", issue="0")]
const fn foo2() -> u32 { 42 }
#[stable(feature = "rust1", since = "1.0.0")]
// can't call non-min_const_fn
const unsafe fn bar2() -> u32 { foo2() } //~ ERROR can only call other `min_const_fn`
<|fim▁hole|>#[unstable(feature = "foo2", issue="0")]
const fn foo2_gated() -> u32 { 42 }
#[stable(feature = "rust1", since = "1.0.0")]
// can't call non-min_const_fn
const unsafe fn bar2_gated() -> u32 { foo2_gated() } //~ ERROR can only call other `min_const_fn`
fn main() {}<|fim▁end|> | // check whether this function cannot be called even with the feature gate active |
<|file_name|>apk.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2012, The Honeynet Project. All rights reserved.
# Author: Kun Yang <[email protected]><|fim▁hole|>#
# APKIL is free software: you can redistribute it and/or modify it under
# the terms of version 3 of the GNU Lesser General Public License as
# published by the Free Software Foundation.
#
# APKIL is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
# more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with APKIL. If not, see <http://www.gnu.org/licenses/>.<|fim▁end|> | |
<|file_name|>AbstractLogFactory.java<|end_file_name|><|fim▁begin|>/*<|fim▁hole|> * You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mybatis.generator.logging;
/**
* Defines the interface for creating Log implementations.
*
* @author Jeff Butler
*
*/
public interface AbstractLogFactory {
Log getLog(Class<?> aClass);
}<|fim▁end|> | * Copyright 2009 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import { AuthenticationService } from './authentication.service';
import { UserService } from './user.service';
import { MessageService } from './message.service';
import { NavigationService } from './navigation.service';
import { SearchService } from './search.service';
import { SalesService } from './sales.service';
import { TableService } from './table.service';
import { RolesService } from './roles.service';
export const SERVICES_MODULE = [
AuthenticationService,
UserService,
MessageService,<|fim▁hole|> RolesService
];<|fim▁end|> | NavigationService,
SearchService,
SalesService,
TableService, |
<|file_name|>istr.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn test_stack_assign() {
let s: ~str = ~"a";
println!("{}", s.clone());
let t: ~str = ~"a";
assert!(s == t);
let u: ~str = ~"b";
assert!((s != u));
}
fn test_heap_lit() { ~"a big string"; }
fn test_heap_assign() {
let s: ~str = ~"a big ol' string";
let t: ~str = ~"a big ol' string";
assert!(s == t);
let u: ~str = ~"a bad ol' string";
assert!((s != u));
}
fn test_heap_log() { let s = ~"a big ol' string"; println!("{}", s); }
fn test_stack_add() {
assert_eq!(~"a" + "b", ~"ab");
let s: ~str = ~"a";
assert_eq!(s + s, ~"aa");<|fim▁hole|>}
fn test_stack_heap_add() { assert!((~"a" + "bracadabra" == ~"abracadabra")); }
fn test_heap_add() {
assert_eq!(~"this should" + " totally work", ~"this should totally work");
}
fn test_append() {
let mut s = ~"";
s.push_str("a");
assert_eq!(s, ~"a");
let mut s = ~"a";
s.push_str("b");
println!("{}", s.clone());
assert_eq!(s, ~"ab");
let mut s = ~"c";
s.push_str("offee");
assert!(s == ~"coffee");
s.push_str("&tea");
assert!(s == ~"coffee&tea");
}
pub fn main() {
test_stack_assign();
test_heap_lit();
test_heap_assign();
test_heap_log();
test_stack_add();
test_stack_heap_add();
test_heap_add();
test_append();
}<|fim▁end|> | assert_eq!(~"" + "", ~""); |
<|file_name|>dialogService.js<|end_file_name|><|fim▁begin|>'use strict';
angular.module('app').service('dialogService', [
'$q',
function($q) {
var remote = require('remote');
var dialog = remote.require('dialog');
function DialogService() {}
DialogService.prototype.showOpenDialog = function() {
var deferred = $q.defer();
dialog.showOpenDialog(function(fileNames) {
return deferred.resolve(fileNames);
});
return deferred.promise;
};
DialogService.prototype.showSaveDialog = function() {
var deferred = $q.defer();
dialog.showSaveDialog(function(fileNames) {
return deferred.resolve(fileNames);
});
return deferred.promise;
};
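    // Illustrative usage from a controller (assumed injection):
    //   dialogService.showOpenDialog().then(function (fileNames) { /* ... */ });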
return new DialogService();
}<|fim▁hole|><|fim▁end|> | ]); |
<|file_name|>secret_configs_json.ts<|end_file_name|><|fim▁begin|>/*
* Copyright 2022 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {ErrorsJSON} from "models/mixins/errors";
import {AutoSuggestionJSON} from "models/roles/auto_suggestion";
import {RuleJSON} from "models/rules/rules";
import {PropertyJSON} from "models/shared/configuration";<|fim▁hole|> secret_configs: SecretConfigJSON[];
}
export interface SecretConfigsJSON {
_embedded: EmbeddedJSON;
}
export interface SecretConfigJSON {
id: string;
description: string;
plugin_id: string;
properties: PropertyJSON[];
rules: RuleJSON[];
errors?: ErrorsJSON;
}
export interface SecretConfigsWithSuggestionsJSON extends SecretConfigsJSON {
auto_completion: AutoSuggestionJSON[];
}<|fim▁end|> |
interface EmbeddedJSON { |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | export * from './tech-card.component'; |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>#[allow(unused_imports)]
use env_logger;
#[allow(unused_imports)]
use pact_matching::models::PactSpecification;
#[allow(unused_imports)]
use pact_matching::models::Response;
#[allow(unused_imports)]
use pact_matching::match_response;
#[allow(unused_imports)]
use expectest::prelude::*;
#[allow(unused_imports)]
use serde_json;
#[test]
fn different_status() {
env_logger::init().unwrap_or(());
let pact : serde_json::Value = serde_json::from_str(r#"
{
"match": false,
"comment": "Status is incorrect",
"expected": {
"status": 202
},
"actual": {
"status": 400
}
}
"#).unwrap();
let expected = Response::from_json(&pact.get("expected").unwrap(), &PactSpecification::V1_1);
println!("{:?}", expected);
let actual = Response::from_json(&pact.get("actual").unwrap(), &PactSpecification::V1_1);
println!("{:?}", actual);
let pact_match = pact.get("match").unwrap();
let result = match_response(expected, actual);
if pact_match.as_bool().unwrap() {
expect!(result.iter()).to(be_empty());
} else {
expect!(result.iter()).to_not(be_empty());
}
}
#[test]
fn matches() {
env_logger::init().unwrap_or(());
let pact : serde_json::Value = serde_json::from_str(r#"
{
"match": true,
"comment": "Status matches",
"expected": {
"status": 202
},
"actual": {
"status": 202
}
}<|fim▁hole|> "#).unwrap();
let expected = Response::from_json(&pact.get("expected").unwrap(), &PactSpecification::V1_1);
println!("{:?}", expected);
let actual = Response::from_json(&pact.get("actual").unwrap(), &PactSpecification::V1_1);
println!("{:?}", actual);
let pact_match = pact.get("match").unwrap();
let result = match_response(expected, actual);
if pact_match.as_bool().unwrap() {
expect!(result.iter()).to(be_empty());
} else {
expect!(result.iter()).to_not(be_empty());
}
}<|fim▁end|> | |
<|file_name|>jquery.inputmask.numeric.extensions.js<|end_file_name|><|fim▁begin|>/*
* jquery.inputmask.numeric.extensions.js
* http://github.com/RobinHerbots/jquery.inputmask
* Copyright (c) 2010 - 2014 Robin Herbots
* Licensed under the MIT license (http://www.opensource.org/licenses/mit-license.php)
* Version: 3.1.26
*/
(function (factory) {
    if (typeof define === 'function' && define.amd) {
        define(["jquery", "./jquery.inputmask"], factory);
    } else {
        factory(jQuery);
    }
}
/*
Input Mask plugin extensions
http://github.com/RobinHerbots/jquery.inputmask
Copyright (c) 2010 - 2014 Robin Herbots
Licensed under the MIT license (http://www.opensource.org/licenses/mit-license.php)
Version: 0.0.0
Optional extensions on the jquery.inputmask base
*/
(function ($) {
//number aliases
$.extend($.inputmask.defaults.aliases, {
'numeric': {
mask: function (opts) {
if (opts.repeat !== 0 && isNaN(opts.integerDigits)) {
opts.integerDigits = opts.repeat;
}
opts.repeat = 0;
if (opts.groupSeparator == opts.radixPoint) { //treat equal separator and radixpoint
if (opts.radixPoint == ".")
opts.groupSeparator = ",";
else if (opts.radixPoint == ",")
opts.groupSeparator = ".";
else opts.groupSeparator = "";
}
if (opts.groupSeparator === " ") { //prevent conflict with default skipOptionalPartCharacter
opts.skipOptionalPartCharacter = undefined;
}
opts.autoGroup = opts.autoGroup && opts.groupSeparator != "";
if (opts.autoGroup && isFinite(opts.integerDigits)) {
var seps = Math.floor(opts.integerDigits / opts.groupSize);
var mod = opts.integerDigits % opts.groupSize;
opts.integerDigits += mod == 0 ? seps - 1 : seps;
}
opts.definitions[";"] = opts.definitions["~"]; //clone integer def for decimals
var mask = opts.prefix;
mask += "[+]";
mask += "~{1," + opts.integerDigits + "}";
if (opts.digits != undefined && (isNaN(opts.digits) || parseInt(opts.digits) > 0)) {
if (opts.digitsOptional)
mask += "[" + (opts.decimalProtect ? ":" : opts.radixPoint) + ";{" + opts.digits + "}]";
else mask += (opts.decimalProtect ? ":" : opts.radixPoint) + ";{" + opts.digits + "}";
}
mask += opts.suffix;
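                // e.g. the "currency" alias below yields the mask
                // "$ [+]~{1,+}:;{2}" - prefix, sign, integer part, protected
                // radix point, two fixed decimals (illustrative trace).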
return mask;
},
placeholder: "",
greedy: false,
digits: "*", //number of fractionalDigits
digitsOptional: true,
groupSeparator: "",//",", // | "."
radixPoint: ".",
groupSize: 3,
autoGroup: false,
allowPlus: true,
allowMinus: true,
integerDigits: "+", //number of integerDigits
prefix: "",
suffix: "",
rightAlign: true,
decimalProtect: true, //do not allow assumption of decimals input without entering the radixpoint
            postFormat: function (buffer, pos, reformatOnly, opts) { // TODO: this post-format pass needs a rewrite
var needsRefresh = false, charAtPos = buffer[pos];
if (opts.groupSeparator == "" ||
($.inArray(opts.radixPoint, buffer) != -1 && pos >= $.inArray(opts.radixPoint, buffer)) ||
new RegExp('[-\+]').test(charAtPos)
) return { pos: pos };
var cbuf = buffer.slice();
if (charAtPos == opts.groupSeparator) {
cbuf.splice(pos--, 1);
charAtPos = cbuf[pos];
}
if (reformatOnly) cbuf[pos] = "?"; else cbuf.splice(pos, 0, "?"); //set position indicator
var bufVal = cbuf.join('');
if (opts.autoGroup || (reformatOnly && bufVal.indexOf(opts.groupSeparator) != -1)) {
var escapedGroupSeparator = $.inputmask.escapeRegex.call(this, opts.groupSeparator);
needsRefresh = bufVal.indexOf(opts.groupSeparator) == 0;
bufVal = bufVal.replace(new RegExp(escapedGroupSeparator, "g"), '');
var radixSplit = bufVal.split(opts.radixPoint);
bufVal = radixSplit[0];
if (bufVal != (opts.prefix + "?0") && bufVal.length >= (opts.groupSize + opts.prefix.length)) {
needsRefresh = true;
var reg = new RegExp('([-\+]?[\\d\?]+)([\\d\?]{' + opts.groupSize + '})');
while (reg.test(bufVal)) {
bufVal = bufVal.replace(reg, '$1' + opts.groupSeparator + '$2');
bufVal = bufVal.replace(opts.groupSeparator + opts.groupSeparator, opts.groupSeparator);
}
}
if (radixSplit.length > 1)
bufVal += opts.radixPoint + radixSplit[1];
}
buffer.length = bufVal.length; //align the length
for (var i = 0, l = bufVal.length; i < l; i++) {
buffer[i] = bufVal.charAt(i);
}
var newPos = $.inArray("?", buffer);
if (reformatOnly) buffer[newPos] = charAtPos; else buffer.splice(newPos, 1);
return { pos: newPos, "refreshFromBuffer": needsRefresh };
},
onKeyDown: function (e, buffer, caretPos, opts) {
if (e.keyCode == $.inputmask.keyCode.TAB && opts.placeholder.charAt(0) != "0") {
var radixPosition = $.inArray(opts.radixPoint, buffer);
if (radixPosition != -1 && isFinite(opts.digits)) {
for (var i = 1; i <= opts.digits; i++) {
if (buffer[radixPosition + i] == undefined || buffer[radixPosition + i] == opts.placeholder.charAt(0)) buffer[radixPosition + i] = "0";
}
return { "refreshFromBuffer": { start: ++radixPosition, end: radixPosition + opts.digits } };
}
} else if (opts.autoGroup && (e.keyCode == $.inputmask.keyCode.DELETE || e.keyCode == $.inputmask.keyCode.BACKSPACE)) {
var rslt = opts.postFormat(buffer, caretPos - 1, true, opts);
rslt.caret = rslt.pos + 1;
return rslt;
}
},
onKeyPress: function (e, buffer, caretPos, opts) {
if (opts.autoGroup /*&& String.fromCharCode(k) == opts.radixPoint*/) {
var rslt = opts.postFormat(buffer, caretPos - 1, true, opts);
rslt.caret = rslt.pos + 1;
return rslt;
}
},
regex: {
integerPart: function (opts) { return new RegExp('[-\+]?\\d+'); }
},
negationhandler: function (chrs, buffer, pos, strict, opts) {
if (!strict && opts.allowMinus && chrs === "-") {
var matchRslt = buffer.join('').match(opts.regex.integerPart(opts));
if (matchRslt.length > 0) {
if (buffer[matchRslt.index] == "+") {
return { "pos": matchRslt.index, "c": "-", "remove": matchRslt.index, "caret": pos };
} else if (buffer[matchRslt.index] == "-") {
return { "remove": matchRslt.index, "caret": pos - 1 };
} else {
return { "pos": matchRslt.index, "c": "-", "caret": pos + 1 };
}
}
}
return false;
},
radixhandler: function (chrs, maskset, pos, strict, opts) {
if (!strict && chrs === opts.radixPoint) {
var radixPos = $.inArray(opts.radixPoint, maskset.buffer), integerValue = maskset.buffer.join('').match(opts.regex.integerPart(opts));
if (radixPos != -1) {
if (maskset["validPositions"][radixPos - 1])
return { "caret": radixPos + 1 };
else return { "pos": integerValue.index, c: integerValue[0], "caret": radixPos + 1 };
}
}
return false;
},
leadingZeroHandler: function (chrs, maskset, pos, strict, opts) {
var matchRslt = maskset.buffer.join('').match(opts.regex.integerPart(opts)), radixPosition = $.inArray(opts.radixPoint, maskset.buffer);
if (matchRslt && !strict && (radixPosition == -1 || matchRslt.index < radixPosition)) {
if (matchRslt["0"].indexOf("0") == 0 && pos >= opts.prefix.length) {
if (radixPosition == -1 || (pos <= radixPosition && maskset["validPositions"][radixPosition] == undefined)) {
maskset.buffer.splice(matchRslt.index, 1);
pos = pos > matchRslt.index ? pos - 1 : matchRslt.index;
return { "pos": pos, "remove": matchRslt.index };
} else if (pos > matchRslt.index && pos <= radixPosition) {
maskset.buffer.splice(matchRslt.index, 1);
pos = pos > matchRslt.index ? pos - 1 : matchRslt.index;
return { "pos": pos, "remove": matchRslt.index };
}
} else if (chrs == "0" && pos <= matchRslt.index) {
return false;
}
}
return true;
},
definitions: {
'~': {
validator: function (chrs, maskset, pos, strict, opts) {
var isValid = opts.negationhandler(chrs, maskset.buffer, pos, strict, opts);
if (!isValid) {
isValid = opts.radixhandler(chrs, maskset, pos, strict, opts);
if (!isValid) {
isValid = strict ? new RegExp("[0-9" + $.inputmask.escapeRegex.call(this, opts.groupSeparator) + "]").test(chrs) : new RegExp("[0-9]").test(chrs);
if (isValid === true) {
isValid = opts.leadingZeroHandler(chrs, maskset, pos, strict, opts);
if (isValid === true) {
//handle overwrite when fixed precision
var radixPosition = $.inArray(opts.radixPoint, maskset.buffer);
if (opts.digitsOptional === false && pos > radixPosition && !strict) {
return { "pos": pos, "remove": pos };
} else return { pos: pos };
}
}
}
}
return isValid;
},
cardinality: 1,
prevalidator: null
},
'+': {
validator: function (chrs, maskset, pos, strict, opts) {
var signed = "[";
if (opts.allowMinus === true) signed += "-";
if (opts.allowPlus === true) signed += "\+";
signed += "]";
return new RegExp(signed).test(chrs);
},
cardinality: 1,
prevalidator: null,
placeholder: ''
},
':': {
validator: function (chrs, maskset, pos, strict, opts) {
var isValid = opts.negationhandler(chrs, maskset.buffer, pos, strict, opts);
if (!isValid) {
var radix = "[" + $.inputmask.escapeRegex.call(this, opts.radixPoint) + "]";
isValid = new RegExp(radix).test(chrs);
if (isValid && maskset["validPositions"][pos] && maskset["validPositions"][pos]["match"].placeholder == opts.radixPoint) {
isValid = { "pos": pos, "remove": pos };
}
}
return isValid;
},
cardinality: 1,
prevalidator: null,
placeholder: function (opts) { return opts.radixPoint; }
}
},
insertMode: true,
autoUnmask: false,
onUnMask: function (maskedValue, unmaskedValue, opts) {
var processValue = maskedValue.replace(opts.prefix, "");
processValue = processValue.replace(opts.suffix, "");
processValue = processValue.replace(new RegExp($.inputmask.escapeRegex.call(this, opts.groupSeparator), "g"), "");
//processValue = processValue.replace($.inputmask.escapeRegex.call(this, opts.radixPoint), ".");
return processValue;
},
isComplete: function (buffer, opts) {
var maskedValue = buffer.join(''), bufClone = buffer.slice();
//verify separator positions
opts.postFormat(bufClone, 0, true, opts);
if (bufClone.join('') != maskedValue) return false;
var processValue = maskedValue.replace(opts.prefix, "");
processValue = processValue.replace(opts.suffix, "");
processValue = processValue.replace(new RegExp($.inputmask.escapeRegex.call(this, opts.groupSeparator), "g"), "");
processValue = processValue.replace($.inputmask.escapeRegex.call(this, opts.radixPoint), ".");
return isFinite(processValue);
},
onBeforeMask: function (initialValue, opts) {
<|fim▁hole|> var kommaMatches = initialValue.match(/,/g);
var dotMatches = initialValue.match(/\./g);
if (dotMatches && kommaMatches) {
if (dotMatches.length > kommaMatches.length) {
initialValue = initialValue.replace(/\./g, "");
initialValue = initialValue.replace(",", opts.radixPoint);
} else if (kommaMatches.length > dotMatches.length) {
initialValue = initialValue.replace(/,/g, "");
initialValue = initialValue.replace(".", opts.radixPoint);
}
} else {
initialValue = initialValue.replace(new RegExp($.inputmask.escapeRegex.call(this, opts.groupSeparator), "g"), "");
}
return initialValue;
}
}
},
'currency': {
prefix: "$ ",
groupSeparator: ",",
radixPoint: ".",
alias: "numeric",
placeholder: "0",
autoGroup: true,
digits: 2,
digitsOptional: false,
clearMaskOnLostFocus: false,
decimalProtect: true,
},
'decimal': {
alias: "numeric"
},
'integer': {
alias: "numeric",
digits: "0"
}
});
return $.fn.inputmask;
}));<|fim▁end|> | if (isFinite(initialValue)) {
return initialValue.toString().replace(".", opts.radixPoint);
} else {
|
<|file_name|>mqtt.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from __future__ import print_function
import os
import sys
import uuid
import logging
import simplejson as json
import paho.mqtt.client as mqtt
from time import sleep
try:
sys.path.append(os.path.dirname(os.path.realpath(__file__)) + '/../../')
from sanji.connection.connection import Connection
except ImportError as e:
print(e)
print("Please check the python PATH for import test module.")
exit(1)
_logger = logging.getLogger("sanji.sdk.connection.mqtt")
class Mqtt(Connection):
"""
Mqtt
"""
def __init__(
self,
broker_host=os.getenv('BROKER_PORT_1883_TCP_ADDR', "localhost"),
broker_port=os.getenv('BROKER_PORT_1883_TCP_PORT', 1883),
broker_keepalive=60
):
        # properties
self.tunnels = {
"internel": (uuid.uuid4().hex, None),
"model": (None, None),
"view": (None, None)
}
self.broker_host = broker_host
self.broker_port = broker_port
self.broker_keepalive = broker_keepalive
self.client = mqtt.Client()
self.connect_delay = 3
# methods
self.subscribe = self.client.subscribe
self.unsubscribe = self.client.unsubscribe
self.message_callback_add = self.client.message_callback_add
self.message_callback_remove = self.client.message_callback_remove
self.client.on_log = self.on_log
def on_log(self, mosq, obj, level, string):
pass
def connect(self):
"""
connect
"""
_logger.debug("Start connecting to broker")
while True:
try:
self.client.connect(self.broker_host, self.broker_port,
self.broker_keepalive)
break
except Exception:
_logger.debug(
"Connect failed. wait %s sec" % self.connect_delay)
sleep(self.connect_delay)
self.client.loop_forever()
def disconnect(self):
"""
disconnect
"""
_logger.debug("Disconnect to broker")
self.client.loop_stop()
def set_tunnel(self, tunnel_type, tunnel, callback=None):
"""
set_tunnel(self, tunnel_type, tunnel, callback=None):
"""
orig_tunnel = self.tunnels.get(tunnel_type, (None, None))[0]
if orig_tunnel is not None:
_logger.debug("Unsubscribe: %s", (orig_tunnel,))
self.client.unsubscribe(str(orig_tunnel))
self.tunnels[tunnel_type] = (tunnel, callback)
if callback is not None:
self.message_callback_add(tunnel, callback)
self.client.subscribe(str(tunnel))
_logger.debug("Subscribe: %s", (tunnel,))
def set_tunnels(self, tunnels):
"""
set_tunnels(self, tunnels):
"""
for tunnel_type, (tunnel, callback) in tunnels.iteritems():
if tunnel is None:
continue
self.set_tunnel(tunnel_type, tunnel, callback)
def set_on_connect(self, func):
"""
set_on_connect
"""
self.client.on_connect = func
def set_on_message(self, func):
"""
set_on_message
"""
self.client.on_message = func
def set_on_publish(self, func):
"""
set_on_publish
"""
self.client.on_publish = func
def publish(self, topic="/controller", qos=0, payload=None):
"""
        publish(self, topic="/controller", qos=0, payload=None)
        Serializes `payload` to JSON and publishes it on `topic`. Returns the
        message ID (mid) of the publish request; the mid can be used to track
        the request by checking against the mid argument in the on_publish()
        callback if it is defined. Raises RuntimeError (MQTT_ERR_NO_CONN) if
        the client is not currently connected.
"""
result = self.client.publish(topic,
payload=json.dumps(payload),
qos=qos)
if result[0] == mqtt.MQTT_ERR_NO_CONN:
raise RuntimeError("No connection")<|fim▁hole|><|fim▁end|> | return result[1] |
<|file_name|>util.rs<|end_file_name|><|fim▁begin|>extern crate time;
use std::env;
use std::fs;
use std::path;
use std::time::{SystemTime, UNIX_EPOCH};
use core::io::BinaryComponent;
use core::sig;
pub fn load_user_keypair() -> Option<sig::Keypair> {
let kpp = match env::home_dir() {
Some(mut p) => {
p.push(".jiyunet");
p.push("keypair.bin");
p
},
None => path::PathBuf::from(".")
};
<|fim▁hole|> };
match sig::Keypair::from_reader(&mut f) {
Ok(kp) => Some(kp),
Err(_) => None
}
}
pub fn timestamp() -> i64 {
let dur = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
((dur.as_secs() * 1000) + ((dur.subsec_nanos() / 1000000) as u64)) as i64
}<|fim▁end|> | let mut f = match fs::File::open(kpp) {
Ok(o) => o,
Err(_) => return None |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import dask
from .scheduler import ray_dask_get, ray_dask_get_sync
from .callbacks import (
RayDaskCallback,
local_ray_callbacks,
unpack_ray_callbacks,
)
from .optimizations import dataframe_optimize
dask_persist = dask.persist
<|fim▁hole|>
ray_dask_persist.__doc__ = dask_persist.__doc__
dask_persist_mixin = dask.base.DaskMethodsMixin.persist
def ray_dask_persist_mixin(self, **kwargs):
kwargs["ray_persist"] = True
return dask_persist_mixin(self, **kwargs)
ray_dask_persist_mixin.__doc__ = dask_persist_mixin.__doc__
# We patch dask in order to inject a kwarg into its `dask.persist()` calls,
# which the Dask-on-Ray scheduler needs.
# FIXME(Clark): Monkey patching is bad and we should try to avoid this.
def patch_dask(ray_dask_persist, ray_dask_persist_mixin):
dask.persist = ray_dask_persist
dask.base.DaskMethodsMixin.persist = ray_dask_persist_mixin
patch_dask(ray_dask_persist, ray_dask_persist_mixin)
__all__ = [
# Schedulers
"ray_dask_get",
"ray_dask_get_sync",
# Helpers
"ray_dask_persist",
# Callbacks
"RayDaskCallback",
"local_ray_callbacks",
"unpack_ray_callbacks",
# Optimizations
"dataframe_optimize",
]<|fim▁end|> | def ray_dask_persist(*args, **kwargs):
kwargs["ray_persist"] = True
return dask_persist(*args, **kwargs)
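# Usage sketch (assumes `ray` is initialized; the delayed task below is
# illustrative). After patch_dask() runs at import time, a plain persist()
# call carries ray_persist=True through to the Dask-on-Ray scheduler:
#
#   import dask
#   d = dask.delayed(sum)([1, 2, 3])
#   (result,) = dask.persist(d, scheduler=ray_dask_get)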
|
<|file_name|>hdl_block.py<|end_file_name|><|fim▁begin|>"""
This file is part of xcos-gen.
xcos-gen is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
xcos-gen is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with xcos-gen. If not, see <http://www.gnu.org/licenses/>.
Author: Ilia Novikov <[email protected]>
"""
from block import Block
class HdlBlock:
def __init__(self, block: Block, hdl_type: str):
self.block_type = hdl_type
self.block_id = block.block_id
self.gain = block.gain
self.inputs = block.inputs
self.outputs = block.outputs
self.in_wire = None
self.out_wire = None
def __str__(self):
if not self.gain:
return "{0}: {1}, {2} -> {3}".format(self.block_type, self.block_id, self.in_wire, self.out_wire)
else:
return "{0}: {1}, k = {2}, {3} -> {4}".format(
self.block_type,<|fim▁hole|> self.block_id,
self.gain,
self.in_wire,
self.out_wire
)<|fim▁end|> | |
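# Usage sketch (assumes a Block instance `blk` built elsewhere, exposing the
# block_id/gain/inputs/outputs attributes read in __init__; the type and wire
# names are illustrative):
#
#   hdl = HdlBlock(blk, "adder")
#   hdl.in_wire, hdl.out_wire = "w1", "w2"
#   print(hdl)  # -> "adder: <block_id>, w1 -> w2" when gain is falsy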
<|file_name|>test_serializers.py<|end_file_name|><|fim▁begin|>from nose.tools import * # noqa: F403
from tests.base import AdminTestCase
from osf_tests.factories import NodeFactory, UserFactory
from osf.utils.permissions import ADMIN
from admin.nodes.serializers import serialize_simple_user_and_node_permissions, serialize_node
class TestNodeSerializers(AdminTestCase):
def test_serialize_node(self):
node = NodeFactory()
info = serialize_node(node)
assert_is_instance(info, dict)
assert_equal(info['parent'], node.parent_id)
assert_equal(info['title'], node.title)<|fim▁hole|> assert_equal(info['children'], [])
assert_equal(info['id'], node._id)
assert_equal(info['public'], node.is_public)
assert_equal(len(info['contributors']), 1)
assert_false(info['deleted'])
def test_serialize_deleted(self):
node = NodeFactory()
info = serialize_node(node)
assert_false(info['deleted'])
node.is_deleted = True
info = serialize_node(node)
assert_true(info['deleted'])
node.is_deleted = False
info = serialize_node(node)
assert_false(info['deleted'])
def test_serialize_simple_user(self):
user = UserFactory()
node = NodeFactory(creator=user)
info = serialize_simple_user_and_node_permissions(node, user)
assert_is_instance(info, dict)
assert_equal(info['id'], user._id)
assert_equal(info['name'], user.fullname)
assert_equal(info['permission'], ADMIN)<|fim▁end|> | |
<|file_name|>models.py<|end_file_name|><|fim▁begin|># models.py
import os
import socket
import datetime
import random
import re
from django import forms
from django import urls
from django.db import models
from django.db.models.signals import pre_save
from .unique_slugify import unique_slugify
from .titlecase import titlecase
from functools import reduce
def time2s(time):
""" given 's.s' or 'h:m:s.s' returns s.s """
if time:
sec = reduce(lambda x, i: x*60 + i,
list(map(float, time.split(':'))))
else:
sec = 0.0
return sec
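# Examples: time2s("1:02:03.5") == 3723.5, time2s("45.25") == 45.25,
# and time2s("") == 0.0 (the falsy-input branch).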
class Client(models.Model):
sequence = models.IntegerField(default=1)
active = models.BooleanField(default=True,
help_text="Turn off to hide from UI.")
name = models.CharField(max_length=135)
slug = models.CharField(max_length=135, blank=True, null=False,
help_text="dir name to store input files", )
contacts = models.CharField(max_length=300, blank=True,
help_text='emails of people putting on the event.')
description = models.TextField(blank=True)
tags = models.TextField(null=True,blank=True,)
tweet_prefix = models.CharField(max_length=30, blank=True, null=True)
bucket_id = models.CharField(max_length=30, blank=True, null=True)
category_key = models.CharField(max_length=30, blank=True, null=True,
help_text = "Category for Richard")
# video encoding assets
template_mlt = models.CharField(max_length=60, null=True,
default="template.mlt",
help_text='template to make cutlist mlt from.')
title_svg = models.CharField(max_length=60, null=True,
default="title.svg",
help_text='template for event/title/authors title slide.')
preroll = models.CharField(max_length=335, blank=True,
help_text="name of video to prepend (not implemented)")
postroll = models.CharField(max_length=335, blank=True,
help_text="name of video to postpend (not implemented)")
credits = models.CharField(max_length=30, blank=True,
default="ndv-169.png",
help_text='added to end, store in assets dir')
# remote accounts to post to
host_user = models.CharField(max_length=30, blank=True, null=True,
help_text = "depricated - do not use.")
youtube_id = models.CharField(max_length=10, blank=True, null=True,
help_text = "key to lookup user/pw/etc from pw store" )
archive_id = models.CharField(max_length=10, blank=True, null=True)
vimeo_id = models.CharField(max_length=10, blank=True, null=True)
blip_id = models.CharField(max_length=10, blank=True, null=True)
rax_id = models.CharField(max_length=10, blank=True, null=True)
richard_id = models.CharField(max_length=10, blank=True, null=True)
email_id = models.CharField(max_length=10, blank=True, null=True)
tweet_id = models.CharField(max_length=10, blank=True, null=True)
def __str__(self):
return self.name
def get_absolute_url(self):
return urls.reverse('client', [self.slug,])
class Meta:
ordering = ["sequence"]
class Location(models.Model):
sequence = models.IntegerField(default=1)
active = models.BooleanField( default=True,
help_text="Turn off to hide from UI.")
default = models.BooleanField(default=True,
help_text="Adds this loc to new Clients.")
name = models.CharField(max_length=135,
help_text="room name")
slug = models.CharField(max_length=135, blank=True, null=False,
help_text="dir name to store input files")
dirname = models.CharField(max_length=135, blank=True,
help_text="path to raw files. overrieds show/slug.")
channelcopy = models.CharField(max_length=2, blank=True,
help_text='audio adjustment for this room')
hours_offset = models.IntegerField(blank=True, null=True,
help_text='Adjust for bad clock setting')
description = models.TextField(blank=True)
lon = models.FloatField(null=True, blank=True )
lat = models.FloatField(null=True, blank=True )
def natural_key(self):
return self.name
def __str__(self):
return "%s" % ( self.name )
class Meta:
ordering = ["name"]
ANN_STATES=((1,'preview'),(2,'review'),(3,'approved'))
class Show(models.Model):
client = models.ForeignKey(Client)
locations = models.ManyToManyField(Location,
limit_choices_to={'active': True},
blank=True)
sequence = models.IntegerField(default=1)
active = models.BooleanField( default=True,
help_text="Turn off to hide from UI.")
name = models.CharField(max_length=135)
slug = models.CharField(max_length=135, blank=True, null=False,
help_text="dir name to store input files")
category_key = models.CharField(max_length=30, blank=True, null=True,
help_text = "Category for Richard")
youtube_playlist_id = models.CharField(max_length=50, blank=True, null=True,
help_text = "Playlist ID for YouTube")
tags = models.TextField(null=True,blank=True,)
description = models.TextField(blank=True)
conf_url = models.CharField(max_length=200, null=True, blank=True)
schedule_url = models.CharField(max_length=235, null=True, blank=True)
announcement_state = models.IntegerField(null=True, blank=True,
choices=ANN_STATES, default=ANN_STATES[1][0], )
@property
def client_name(self):
return self.client
def __str__(self):
return "%s: %s" % ( self.client_name, self.name )
@models.permalink
def get_absolute_url(self):
return ('episode_list', [self.client.slug,self.slug,])
class Meta:
ordering = ["sequence"]
class Raw_File(models.Model):
location = models.ForeignKey(Location)
show = models.ForeignKey(Show)
filename = models.CharField(max_length=135,help_text="filename.dv")
filesize = models.BigIntegerField(default=1,help_text="size in bytes")
start = models.DateTimeField(null=True, blank=True,
help_text='when recorded (should agree with file name and timestamp)')
duration = models.CharField(max_length=11, blank=True, )
end = models.DateTimeField(null=True, blank=True)
trash = models.BooleanField(default=False,
help_text="This clip is trash")
ocrtext = models.TextField(null=True,blank=True)
comment = models.TextField(blank=True)
def __next__(self):
"""
gets the next clip in the room.
"""<|fim▁hole|> rfs = Raw_File.objects.filter(location=self.location,
start__gt=self.start,
).order_by('start','id')
# id__gt=self.id).order_by('start','id')
if rfs:
rf=rfs[0]
else:
rf=None
return rf
def basename(self):
# strip the extension
# good for making 1-2-3/foo.png from 1-2-3/foo.dv
raise "homey don't play that no more."
return os.path.splitext(self.filename)[0]
def base_url(self):
""" Returns the url for the file, minus the MEDIA_URL and extension """
return "%s/%s/dv/%s/%s" % (self.show.client.slug,
self.show.slug,
self.location.slug,
self.filename)
@property
def get_adjusted_start(self):
return self.start + datetime.timedelta(
hours = 0 if self.location.hours_offset is None
else self.location.hours_offset )
@property
def get_adjusted_end(self):
return self.end + datetime.timedelta(
hours = 0 if self.location.hours_offset is None
else self.location.hours_offset )
def get_start_seconds(self):
return time2s( self.start )
def get_end_seconds(self):
return time2s( self.end )
def get_seconds(self):
# return duration in seconds (float)
delta = self.end - self.start
seconds = delta.days*24*60*60 + delta.seconds
return seconds
def get_minutes(self):
# return duration in minutes (float)
return self.get_seconds()/60.0
def __str__(self):
return self.filename
@models.permalink
def get_absolute_url(self):
return ('raw_file', [self.id,])
class Meta:
ordering = ["start", "location", "filename"]
class Mark(models.Model):
show = models.ForeignKey(Show)
location = models.ForeignKey(Location)
click = models.DateTimeField(
help_text='When Cut was Clicked.')
class Meta:
ordering = ["click"]
def __str__(self):
return self.click.isoformat()
class Quality(models.Model):
level = models.IntegerField()
name = models.CharField(max_length=35)
description = models.TextField(blank=True)
def __str__(self):
return self.name
STATES=[
(0, 'borked'),
(1, 'edit'), # enter cutlist data
(2, 'encode'), # assemble raw assets into final cut
(3, 'push to queue'), # push to data center box
(4, 'post'), # push to yourube and archive.org
(5, 'richard'), # push urls and description to PyVideo.org
(6, 'review 1'), # staff check to see if they exist on yourube/archive
(7, 'email'), # send private url to presenter, ask for feedback,
(8, 'review 2'), # wait for presenter to say good, or timeout
(9, 'make public'), # flip private to public
(10, 'tweet'), # tell world
(11, 'to-miror'),
(12, 'conf'),
(13, 'done')
]
def generate_edit_key():
""" Generate a random key """
return str(random.randint(10000000,99999999))
class Episode(models.Model):
show = models.ForeignKey(Show)
location = models.ForeignKey(Location, null=True)
active = models.BooleanField(default=True,
help_text="Turn off to hide from UI.")
state = models.IntegerField(null=True, blank=True,
choices=STATES, default=STATES[1][0],
help_text="" )
locked = models.DateTimeField(null=True, blank=True,
help_text="clear this to unlock")
locked_by = models.CharField(max_length=35, blank=True,
help_text="user/process that locked." )
sequence = models.IntegerField(null=True,blank=True,
help_text="process order")
start = models.DateTimeField(blank=True, null=False,
help_text="initially scheduled time from master, adjusted to match reality")
duration = models.CharField(max_length=15,null=True,blank=True,
help_text="length in hh:mm:ss")
end = models.DateTimeField(blank=True, null=False,
help_text="(calculated if start and duration are set.)")
name = models.CharField(max_length=170,
help_text="Video Title (shows in video search results)")
slug = models.CharField(max_length=135, blank=True, null=False,
help_text="file name friendly version of name")
priority = models.IntegerField(null=True,blank=True,
help_text="lower may not get recorded")
released = models.NullBooleanField(null=True,blank=True,
help_text="has someone authorised pubication")
conf_key = models.CharField(max_length=32, blank=True,
help_text='primary key of event in conference system database.')
conf_url = models.CharField(max_length=335,blank=True,default='',
help_text="Event's details on conference site (name,desc,time,author,files,etc)")
conf_meta = models.TextField(blank=True,default='', null=True,
help_text="Data provided by API")
authors = models.TextField(null=True,blank=True,)
emails = models.TextField(null=True,blank=True,
help_text="email(s) of the presenter(s)")
twitter_id = models.CharField(max_length=135, blank=True, null=True,
help_text="Data provided by API")
reviewers = models.TextField(blank=True,
help_text="email(s) of the reviewers(s)")
language = models.CharField(max_length=20, blank=True, null=True,
help_text="Spoken languge (German, English...)")
edit_key = models.CharField(max_length=32,
blank=True,
null=True,
default=generate_edit_key,
help_text="key to allow unauthenticated users to edit this item.")
summary = models.TextField(blank=True, help_text="short", null=True)
description = models.TextField(blank=True, help_text="markdown")
tags = models.CharField(max_length=175,null=True,blank=True,)
normalise = models.CharField(max_length=5,null=True,blank=True, )
channelcopy = models.CharField(max_length=2,null=True,blank=True,
help_text='m=mono, 01=copy left to right, 10=right to left, 00=ignore.' )
license = models.CharField(max_length=20, null=True,blank=True,
default='CC BY-SA',
help_text='see http://creativecommons.org/licenses/')
hidden = models.NullBooleanField(null=True,blank=True,
help_text='hidden (does not show up on public episode list)')
thumbnail = models.CharField(max_length=135,blank=True,
help_text="filename.png" )
host_url = models.CharField(max_length=235, null=True,blank=True,
help_text = "URL of page video is hosted")
public_url = models.CharField(max_length=335, null=True,blank=True,
help_text = "URL public should use (like pvo or some aggregator")
archive_ogv_url = models.CharField(max_length=355, null=True,blank=True,
help_text = "URL public can use to dl an ogv (like archive.org")
archive_url = models.CharField(max_length=355, null=True,blank=True,
help_text = "not sure.. deprecated?")
archive_mp4_url = models.CharField(max_length=355, null=True,blank=True,
help_text = "URL public can use to dl an mp4. (like archive.org")
rax_mp4_url = models.CharField(max_length=355, null=True,blank=True,
help_text = "URL public can use to get an mp4. (like rackspace cdn")
twitter_url = models.CharField(max_length=135, null=True,blank=True,
help_text = "URL of tweet to email presenters for retweeting")
video_quality = models.ForeignKey(Quality,null=True,blank=True,related_name='video_quality')
audio_quality = models.ForeignKey(Quality,null=True,blank=True,related_name='audio_quality')
comment = models.TextField(blank=True, help_text="production notes")
stop = models.NullBooleanField(
help_text="Stop process.py from processing anymore")
formfield_overrides = {
models.TextField: {
'widget': forms.Textarea({'cols': 30, 'rows': 2}),
}}
class Meta:
ordering = ["sequence"]
# unique_together = [("show", "slug")]
@models.permalink
def get_absolute_url(self):
return ('episode', [self.id])
def __str__(self):
return self.name
def cuts_time(self):
# get total time in seconds of video based on selected cuts.
# or None if there are no clips.
cuts = Cut_List.objects.filter(episode=self, apply=True)
if not cuts:
ret = None
else:
s=0
for cut in cuts:
s+=int(cut.duration()) # duration is in seconds :p
ret = s
return ret
def get_minutes(self):
ct = self.cuts_time()
if ct is None:
# if there are no cuts, use scheduled time
delta = self.end - self.start
minutes = delta.days*60*24 + delta.seconds/60.0
else:
# use amount of video time
minutes = self.cuts_time()/60
return int(minutes)
def add_email(self, email):
if self.emails is None: emails=[]
else: emails = self.emails.split(',')
if email not in emails:
if self.emails:
emails.append(email)
self.emails = ','.join(emails)
else:
self.emails = email
self.save()
def get_authors(self):
authors = self.authors.split(',') if self.authors else []
return authors
@property
def titlecase(self):
return titlecase(self.name)
@property
def location_slug(self):
location_slug = self.location.slug
return location_slug
def approve_url(self):
url = "https://veyepar.nextdayvideo.com/main/approve/{id}/{slug}/{edit_key}/".format(id=self.id, slug=self.slug, edit_key=self.edit_key)
return url
def composed_description(self):
# build a wad of text to use as public facing description
show = self.show
client = show.client
footer = "Produced by NDV: https://youtube.com/channel/UCQ7dFBzZGlBvtU2hCecsBBg?sub_confirmation=1"
# (show tags separate the talk from the event text)
descriptions = [self.authors,
self.public_url,
self.conf_url,
self.description,
show.tags,
show.description, client.description,
footer,
client.tags,
"{} at {}".format(
self.start.strftime("%c"),
self.location.name),
]
# remove blanks
descriptions = [d for d in descriptions if d]
# combine wiht CRs between each item
description = "\n\n".join(descriptions)
# remove extra blank lines
description = re.sub( r'\n{2,}', r'\n\n', description)
# description = "<br/>\n".join(description.split('\n'))
return description
class Cut_List(models.Model):
"""
note: this should be Cut_list_ITEM
because it is not the whole list, just one entry.
"""
raw_file = models.ForeignKey(Raw_File)
episode = models.ForeignKey(Episode)
sequence = models.IntegerField(default=1)
start = models.CharField(max_length=11, blank=True,
help_text='offset from start in HH:MM:SS.ss')
end = models.CharField(max_length=11, blank=True,
help_text='offset from start in HH:MM:SS.ss')
apply = models.BooleanField(default=1)
comment = models.TextField(blank=True)
def get_absolute_url(self):
return urls.reverse('episode', [self.episode.id])
def __str__(self):
return "%s - %s" % (self.raw_file, self.episode.name)
class Meta:
ordering = ["sequence"]
def get_start_seconds(self):
return time2s( self.start )
def get_start_wall(self):
if self.start:
return self.raw_file.start + \
datetime.timedelta(seconds=self.get_start_seconds())
else:
return self.raw_file.start
def get_end_seconds(self):
return time2s( self.end )
def get_end_wall(self):
if self.end:
return self.raw_file.start + \
datetime.timedelta(seconds=self.get_end_seconds())
else:
return self.raw_file.end
def duration(self):
# calc size of clip in seconds
# may be size of raw, but take into account trimming start/end
def to_sec(time, default=0):
# convert h:m:s to s
if time:
sec = reduce(lambda x, i: x*60 + i,
list(map(float, time.split(':'))))
else:
sec=default
return sec
start = to_sec( self.start )
end = to_sec( self.end, to_sec(self.raw_file.duration))
dur = end-start
return dur
def duration_hms(self):
seconds = self.duration()
hms = seconds//3600, (seconds%3600)//60, seconds%60
duration = "%02d:%02d:%02d" % hms
return duration
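# e.g. a clip whose duration() comes back as 3723 seconds renders as "01:02:03"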
def base_url(self):
""" Returns the url for the file, minus the MEDIA_URL and extension """
return self.raw_file.base_url()
class State(models.Model):
sequence = models.IntegerField(default=1)
slug = models.CharField(max_length=30)
description = models.CharField(max_length=135, blank=True)
class Meta:
ordering = ["sequence"]
def __str__(self):
return self.slug
class Image_File(models.Model):
show = models.ForeignKey(Show)
location = models.ForeignKey(Location, null=True)
episodes = models.ManyToManyField(Episode, blank=True)
filename = models.CharField(max_length=135, help_text="foo.png")
text = models.TextField(blank=True, help_text="OCRed text")
def get_absolute_url(self):
# https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode
url = "{}?{}={}".format(
urls.reverse( 'admin:main_episode_changelist'),
"image_file__id__exact",
self.id)
return url
class Log(models.Model):
episode = models.ForeignKey(Episode)
state = models.ForeignKey(State, null=True, blank=True)
ready = models.DateTimeField()
start = models.DateTimeField(null=True, blank=True)
end = models.DateTimeField(null=True, blank=True)
user = models.CharField(max_length=50)
result = models.CharField(max_length=250)
def duration(self):
if self.start and self.end:
dur = self.end - self.start
dur = datetime.timedelta(dur.days,dur.seconds)
return dur
else:
return None
@models.permalink
def get_absolute_url(self):
return ('episode', [self.episode.id])
def set_slug(sender, instance, **kwargs):
if not instance.slug:
# instance.slug = fnify(instance.name)
return unique_slugify(instance, instance.name)
def set_end(sender, instance, **kwargs):
if instance.start:
if instance.duration:
seconds = reduce(lambda x, i: x*60 + i,
list(map(float, instance.duration.split(':'))))
instance.end = instance.start + \
datetime.timedelta(seconds=seconds)
elif instance.end:
# calc duration based on End
d = instance.end - instance.start
seconds = d.total_seconds()
hms = seconds//3600, (seconds%3600)//60, seconds%60
instance.duration = "%02d:%02d:%02d" % hms
else:
instance.end = None
else:
instance.end = None
pre_save.connect(set_slug,sender=Location)
pre_save.connect(set_slug,sender=Episode)
pre_save.connect(set_end,sender=Episode)
pre_save.connect(set_end,sender=Raw_File)<|fim▁end|> | |
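# Effect of the set_end hook above (illustrative values): saving an Episode
# with start=2018-08-21 10:00:00 and duration="00:45:00" computes
# end=2018-08-21 10:45:00; saving with end set but duration blank back-fills
# duration as "00:45:00".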
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
__title__ = 'pif.utils'<|fim▁hole|>__author__ = 'Artur Barseghyan'
__copyright__ = 'Copyright (c) 2013 Artur Barseghyan'
__license__ = 'GPL 2.0/LGPL 2.1'
__all__ = ('ensure_autodiscover', 'list_checkers', 'get_public_ip')
from pif.base import registry
from pif.discover import autodiscover
def ensure_autodiscover():
"""
Ensures the IP checkers are discovered.
"""
if not registry._registry:
autodiscover()
def list_checkers():
"""
Lists available checkers.
:return list:
"""
return list(registry._registry.keys())  # materialize for Python 3, where keys() is a view
def get_public_ip(preferred_checker=None, verbose=False):
"""
Gets IP using one of the services.
:param str preferred_checker: Checker UID. If given, the preferred checker is used.
:param bool verbose: If set to True, debug info is printed.
:return str:
"""
ensure_autodiscover()
# If set, use the preferred checker.
if preferred_checker:
ip_checker_cls = registry.get(preferred_checker)
if not ip_checker_cls:
return False
ip_checker = ip_checker_cls(verbose=verbose)
ip = ip_checker.get_public_ip()
if verbose:
print('provider: ', ip_checker_cls)
return ip
# Using all checkers.
for ip_checker_name, ip_checker_cls in registry._registry.items():
ip_checker = ip_checker_cls(verbose=verbose)
try:
ip = ip_checker.get_public_ip()
if ip:
if verbose:
print('provider: ', ip_checker_cls)
return ip
except Exception as e:
if verbose:
print(e)
return False<|fim▁end|> | |
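# Usage sketch ('ipify' below is an assumed checker UID, not necessarily
# registered in this package):
#
#   from pif.utils import list_checkers, get_public_ip
#   print(list_checkers())                 # registered checker UIDs
#   ip = get_public_ip(verbose=True)       # try checkers until one answers
#   ip = get_public_ip(preferred_checker='ipify')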
<|file_name|>io.rs<|end_file_name|><|fim▁begin|>use std::os::raw::{c_char, c_void};
use std::mem::transmute;
use super::CFixedString;
#[repr(C)]
pub enum LoadState {
Ok,
Fail,
Converted,
Truncated,
OutOfData,
}
#[repr(C)]
pub struct CPDSaveState {
pub priv_data: *mut c_void,
pub write_int: fn(priv_data: *mut c_void, data: i64),
pub write_double: fn(priv_data: *mut c_void, data: f64),
pub write_string: fn(priv_data: *mut c_void, data: *const c_char),
}
pub struct StateSaver<'a>(&'a mut CPDSaveState);
impl<'a> StateSaver<'a> {
pub fn new(api: *mut CPDSaveState) -> StateSaver<'a> {
unsafe { StateSaver(&mut *api) }
}
pub fn write_int(&mut self, data: i64) {
((*self.0).write_int)((*self.0).priv_data, data)
}
pub fn write_double(&mut self, data: f64) {
((*self.0).write_double)((*self.0).priv_data, data)
}
pub fn write_str(&mut self, data: &str) {
let str = CFixedString::from_str(data);
((*self.0).write_string)((*self.0).priv_data, str.as_ptr())
}
}
#[repr(C)]
pub struct CPDLoadState {
pub priv_data: *mut c_void,
pub read_int: fn(priv_data: *mut c_void, dest: *mut i64) -> LoadState,
pub read_double: fn(priv_data: *mut c_void, dest: *mut f64) -> LoadState,
pub read_string: fn(priv_data: *mut c_void, dest: *mut c_char, max_len: i32) -> LoadState,
pub read_string_len: fn(priv_data: *const c_void, len: *mut i32) -> LoadState,
}
pub enum LoadResult<T> {
Ok(T),
Fail,
Converted(T),
Truncated(T),
OutOfData,
}
impl<T> LoadResult<T> {
pub fn from_state(state: LoadState, val: T) -> LoadResult<T> {
match state {
LoadState::Ok => LoadResult::Ok(val),
LoadState::Converted => LoadResult::Converted(val),<|fim▁hole|> LoadState::Truncated => LoadResult::Truncated(val),
LoadState::Fail => LoadResult::Fail,
LoadState::OutOfData => LoadResult::OutOfData,
}
}
}
pub struct StateLoader<'a>(&'a mut CPDLoadState);
impl<'a> StateLoader<'a> {
pub fn new(api: *mut CPDLoadState) -> StateLoader<'a> {
unsafe { StateLoader(&mut *api) }
}
pub fn read_int(&mut self) -> LoadResult<i64> {
let mut res: i64 = 0;
let state = ((*self.0).read_int)((*self.0).priv_data, &mut res);
LoadResult::from_state(state, res)
}
pub fn read_f64(&mut self) -> LoadResult<f64> {
let mut res: f64 = 0.0;
let state = ((*self.0).read_double)((*self.0).priv_data, &mut res);
LoadResult::from_state(state, res)
}
pub fn read_string(&mut self) -> LoadResult<String> {
let mut len: i32 = 0;
let len_state = ((*self.0).read_string_len)((*self.0).priv_data, &mut len);
match len_state {
LoadState::Fail => return LoadResult::Fail,
LoadState::OutOfData => return LoadResult::OutOfData,
_ => {}
}
let mut buf = vec!(0u8; len as usize);
let state = unsafe {
((*self.0).read_string)((*self.0).priv_data, transmute(buf.as_mut_ptr()), len)
};
LoadResult::from_state(state, String::from_utf8(buf).unwrap())
}
}<|fim▁end|> | |
<|file_name|>CuisineHadoop.py<|end_file_name|><|fim▁begin|>from JumpScale import j
base = j.tools.cuisine._getBaseClass()
# TODO: *4 unfinished but ok for now
class CuisineHadoop(base):
def _install(self):
if self._cuisine.core.isUbuntu:
C = """\
apt-get install -y openjdk-7-jre
cd $tmpDir
wget -c http://www-us.apache.org/dist/hadoop/common/hadoop-2.7.2/hadoop-2.7.2.tar.gz<|fim▁hole|> C = self._cuisine.bash.replaceEnvironInText(C)
C = self._cuisine.core.args_replace(C)
self._cuisine.core.execute_bash(C, profile=True)
self._cuisine.bash.addPath("/opt/hadoop-2.7.2/bin")
self._cuisine.bash.addPath("/opt/hadoop-2.7.2/sbin")
self._cuisine.bash.environSet("JAVA_HOME", "/usr/lib/jvm/java-7-openjdk-amd64")
self._cuisine.bash.environSet("HADOOP_PREFIX", "/opt/hadoop-2.7.2/")
else:
raise NotImplementedError("unsupported platform")
def install(self):
self._install()<|fim▁end|> | tar -xf hadoop-2.7.2.tar.gz -C /opt/
""" |
<|file_name|>cmd_attributes.py<|end_file_name|><|fim▁begin|>import cmd
class HelloWorld(cmd.Cmd):
prompt = 'prompt: '
intro = "Simple command processor example."
doc_header = 'doc_header'
misc_header = 'misc_header'
undoc_header = 'undoc_header'
<|fim▁hole|> def do_prompt(self, line):
"Change the interactive prompt"
self.prompt = line + ': '
def do_EOF(self, line):
return True
if __name__ == '__main__':
HelloWorld().cmdloop()<|fim▁end|> | ruler = '-'
|
<|file_name|>UserWindow.js<|end_file_name|><|fim▁begin|>Ext.provide('Phlexible.users.UserWindow');
Ext.require('Ext.ux.TabPanel');
Phlexible.users.UserWindow = Ext.extend(Ext.Window, {<|fim▁hole|> plain: true,
iconCls: 'p-user-user-icon',
width: 530,
minWidth: 530,
height: 400,
minHeight: 400,
layout: 'fit',
border: false,
modal: true,
initComponent: function () {
this.addEvents(
'save'
);
var panels = Phlexible.PluginRegistry.get('userEditPanels');
this.items = [{
xtype: 'uxtabpanel',
tabPosition: 'left',
tabStripWidth: 150,
activeTab: 0,
border: true,
deferredRender: false,
items: panels
}];
this.tbar = new Ext.Toolbar({
hidden: true,
cls: 'p-users-disabled',
items: [
'->',
{
iconCls: 'p-user-user_account-icon',
text: this.strings.account_is_disabled,
handler: function () {
this.getComponent(0).setActiveTab(4);
},
scope: this
}]
});
this.buttons = [
{
text: this.strings.cancel,
handler: this.close,
scope: this
},
{
text: this.strings.save,
iconCls: 'p-user-save-icon',
handler: this.save,
scope: this
}
];
Phlexible.users.UserWindow.superclass.initComponent.call(this);
},
show: function (user) {
this.user = user;
if (user.get('username')) {
this.setTitle(this.strings.user + ' "' + user.get('username') + '"');
} else {
this.setTitle(this.strings.new_user);
}
Phlexible.users.UserWindow.superclass.show.call(this);
this.getComponent(0).items.each(function(p) {
if (typeof p.loadUser === 'function') {
p.loadUser(user);
}
});
if (!user.get('enabled')) {
this.getTopToolbar().show();
}
},
save: function () {
var data = {};
var valid = true;
this.getComponent(0).items.each(function(p) {
if (typeof p.isValid === 'function' && typeof p.getData === 'function') {
if (p.isValid()) {
Ext.apply(data, p.getData());
} else {
valid = false;
}
}
});
if (!valid) {
return;
}
var url, method;
if (this.user.get('uid')) {
url = Phlexible.Router.generate('users_users_update', {userId: this.user.get('uid')});
method = 'PUT';
} else {
url = Phlexible.Router.generate('users_users_create');
method = 'POST';
}
Ext.Ajax.request({
url: url,
method: method,
params: data,
success: this.onSaveSuccess,
scope: this
});
},
onSaveSuccess: function (response) {
var data = Ext.decode(response.responseText);
if (data.success) {
this.uid = data.uid;
Phlexible.success(data.msg);
this.fireEvent('save', this.uid);
this.close();
} else {
Ext.Msg.alert('Failure', data.msg);
}
}
});<|fim▁end|> | title: Phlexible.users.Strings.user,
strings: Phlexible.users.Strings, |
<|file_name|>LogServiceIntegrationJUnitTest.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.logging;
import static org.apache.geode.internal.logging.LogServiceIntegrationTestSupport.*;
import static org.assertj.core.api.Assertions.*;
import java.io.File;
import java.net.URL;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.Logger;
import org.apache.logging.log4j.core.config.ConfigurationFactory;
import org.apache.logging.log4j.status.StatusLogger;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.contrib.java.lang.system.SystemErrRule;
import org.junit.contrib.java.lang.system.SystemOutRule;
import org.junit.experimental.categories.Category;
import org.junit.rules.ExternalResource;
import org.junit.rules.TemporaryFolder;
import org.apache.geode.internal.logging.log4j.Configurator;
import org.apache.geode.test.junit.categories.IntegrationTest;
/**
* Integration tests for LogService and how it configures and uses log4j2
*
*/
@Category(IntegrationTest.class)
public class LogServiceIntegrationJUnitTest {
private static final String DEFAULT_CONFIG_FILE_NAME = "log4j2.xml";
private static final String CLI_CONFIG_FILE_NAME = "log4j2-cli.xml";
@Rule
public final SystemErrRule systemErrRule = new SystemErrRule().enableLog();
@Rule
public final SystemOutRule systemOutRule = new SystemOutRule().enableLog();
@Rule
public final TemporaryFolder temporaryFolder = new TemporaryFolder();
@Rule
public final ExternalResource externalResource = new ExternalResource() {
@Override
protected void before() {
beforeConfigFileProp = System.getProperty(ConfigurationFactory.CONFIGURATION_FILE_PROPERTY);
beforeLevel = StatusLogger.getLogger().getLevel();
System.clearProperty(ConfigurationFactory.CONFIGURATION_FILE_PROPERTY);
StatusLogger.getLogger().setLevel(Level.OFF);
Configurator.shutdown();
}
@Override
protected void after() {
Configurator.shutdown();
System.clearProperty(ConfigurationFactory.CONFIGURATION_FILE_PROPERTY);
if (beforeConfigFileProp != null) {
System.setProperty(ConfigurationFactory.CONFIGURATION_FILE_PROPERTY, beforeConfigFileProp);
}
StatusLogger.getLogger().setLevel(beforeLevel);
LogService.reconfigure();
assertThat(LogService.isUsingGemFireDefaultConfig()).as(LogService.getConfigInformation())
.isTrue();
}
};
private String beforeConfigFileProp;
private Level beforeLevel;
private URL defaultConfigUrl;
private URL cliConfigUrl;
@Before
public void setUp() {
this.defaultConfigUrl = LogService.class.getResource(LogService.DEFAULT_CONFIG);
this.cliConfigUrl = LogService.class.getResource(LogService.CLI_CONFIG);
}
@After
public void after() {
// if either of these fail then log4j2 probably logged a failure to stdout
assertThat(this.systemErrRule.getLog()).isEmpty();
assertThat(this.systemOutRule.getLog()).isEmpty();
}
@Test
public void shouldPreferConfigurationFilePropertyIfSet() throws Exception {
final File configFile = this.temporaryFolder.newFile(DEFAULT_CONFIG_FILE_NAME);
final String configFileName = configFile.toURI().toString();
System.setProperty(ConfigurationFactory.CONFIGURATION_FILE_PROPERTY, configFileName);
writeConfigFile(configFile, Level.DEBUG);
LogService.reconfigure();
assertThat(LogService.isUsingGemFireDefaultConfig()).as(LogService.getConfigInformation())
.isFalse();
assertThat(System.getProperty(ConfigurationFactory.CONFIGURATION_FILE_PROPERTY))
.isEqualTo(configFileName);
assertThat(LogService.getLogger().getName()).isEqualTo(getClass().getName());
}
@Test
public void shouldUseDefaultConfigIfNotConfigured() throws Exception {
LogService.reconfigure();
assertThat(LogService.isUsingGemFireDefaultConfig()).as(LogService.getConfigInformation())
.isTrue();
assertThat(System.getProperty(ConfigurationFactory.CONFIGURATION_FILE_PROPERTY))
.isNullOrEmpty();
}
@Test
public void defaultConfigShouldIncludeStdout() {
LogService.reconfigure();
final Logger rootLogger = (Logger) LogService.getRootLogger();
assertThat(LogService.isUsingGemFireDefaultConfig()).as(LogService.getConfigInformation())
.isTrue();
assertThat(rootLogger.getAppenders().get(LogService.STDOUT)).isNotNull();
}
@Test
public void removeConsoleAppenderShouldRemoveStdout() {
LogService.reconfigure();
final Logger rootLogger = (Logger) LogService.getRootLogger();
LogService.removeConsoleAppender();
assertThat(rootLogger.getAppenders().get(LogService.STDOUT)).isNull();
}
@Test
public void restoreConsoleAppenderShouldRestoreStdout() {
LogService.reconfigure();
final Logger rootLogger = (Logger) LogService.getRootLogger();
LogService.removeConsoleAppender();
assertThat(rootLogger.getAppenders().get(LogService.STDOUT)).isNull();
LogService.restoreConsoleAppender();
assertThat(rootLogger.getAppenders().get(LogService.STDOUT)).isNotNull();
}
@Test
public void removeAndRestoreConsoleAppenderShouldAffectRootLogger() {
LogService.reconfigure();
assertThat(LogService.isUsingGemFireDefaultConfig()).as(LogService.getConfigInformation())
.isTrue();
final Logger rootLogger = (Logger) LogService.getRootLogger();
// assert "Console" is present for ROOT
Appender appender = rootLogger.getAppenders().get(LogService.STDOUT);
assertThat(appender).isNotNull();
LogService.removeConsoleAppender();
// assert "Console" is not present for ROOT
appender = rootLogger.getAppenders().get(LogService.STDOUT);
assertThat(appender).isNull();
LogService.restoreConsoleAppender();
// assert "Console" is present for ROOT
appender = rootLogger.getAppenders().get(LogService.STDOUT);
assertThat(appender).isNotNull();
}
@Test
public void shouldNotUseDefaultConfigIfCliConfigSpecified() throws Exception {
System.setProperty(ConfigurationFactory.CONFIGURATION_FILE_PROPERTY,
this.cliConfigUrl.toString());
LogService.reconfigure();
assertThat(LogService.isUsingGemFireDefaultConfig()).as(LogService.getConfigInformation())
.isFalse();
assertThat(System.getProperty(ConfigurationFactory.CONFIGURATION_FILE_PROPERTY))
.isEqualTo(this.cliConfigUrl.toString());
assertThat(LogService.getLogger().getName()).isEqualTo(getClass().getName());
}
@Test
public void isUsingGemFireDefaultConfigShouldBeTrueIfDefaultConfig() throws Exception {
System.setProperty(ConfigurationFactory.CONFIGURATION_FILE_PROPERTY,
this.defaultConfigUrl.toString());
<|fim▁hole|> assertThat(LogService.getConfiguration().getConfigurationSource().toString())
.contains(DEFAULT_CONFIG_FILE_NAME);
assertThat(LogService.isUsingGemFireDefaultConfig()).isTrue();
}
@Test
public void isUsingGemFireDefaultConfigShouldBeFalseIfCliConfig() throws Exception {
System.setProperty(ConfigurationFactory.CONFIGURATION_FILE_PROPERTY,
this.cliConfigUrl.toString());
assertThat(LogService.getConfiguration().getConfigurationSource().toString())
.doesNotContain(DEFAULT_CONFIG_FILE_NAME);
assertThat(LogService.isUsingGemFireDefaultConfig()).isFalse();
}
@Test
public void shouldUseCliConfigIfCliConfigIsSpecifiedViaClasspath() throws Exception {
System.setProperty(ConfigurationFactory.CONFIGURATION_FILE_PROPERTY,
"classpath:" + CLI_CONFIG_FILE_NAME);
assertThat(LogService.getConfiguration().getConfigurationSource().toString())
.contains(CLI_CONFIG_FILE_NAME);
assertThat(LogService.isUsingGemFireDefaultConfig()).isFalse();
}
}<|fim▁end|> | |
<|file_name|>CoreAudioDevice.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2011-2012 Team XBMC
* http://xbmc.org
*
* This Program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2, or (at your option)
* any later version.
*
* This Program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with XBMC; see the file COPYING. If not, see
* <http://www.gnu.org/licenses/>.
*
*/
#include "CoreAudioDevice.h"
#include "CoreAudioAEHAL.h"
#include "CoreAudioChannelLayout.h"
#include "CoreAudioHardware.h"
#include "utils/log.h"
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// CCoreAudioDevice
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CCoreAudioDevice::CCoreAudioDevice() :
m_Started (false ),
m_pSource (NULL ),
m_DeviceId (0 ),
m_MixerRestore (-1 ),
m_IoProc (NULL ),
m_ObjectListenerProc (NULL ),
m_SampleRateRestore (0.0f ),
m_HogPid (-1 ),
m_frameSize (0 ),
m_OutputBufferIndex (0 ),
m_BufferSizeRestore (0 )
{
}
CCoreAudioDevice::CCoreAudioDevice(AudioDeviceID deviceId) :
m_Started (false ),
m_pSource (NULL ),
m_DeviceId (deviceId ),
m_MixerRestore (-1 ),
m_IoProc (NULL ),
m_ObjectListenerProc (NULL ),
m_SampleRateRestore (0.0f ),
m_HogPid (-1 ),
m_frameSize (0 ),
m_OutputBufferIndex (0 ),
m_BufferSizeRestore (0 )
{
}
CCoreAudioDevice::~CCoreAudioDevice()
{
Close();
}
bool CCoreAudioDevice::Open(AudioDeviceID deviceId)
{
m_DeviceId = deviceId;
m_BufferSizeRestore = GetBufferSize();
return true;
}
void CCoreAudioDevice::Close()
{
if (!m_DeviceId)
return;
// Stop the device if it was started
Stop();
// Unregister the IOProc if we have one
if (m_IoProc)
SetInputSource(NULL, 0, 0);
SetHogStatus(false);
CCoreAudioHardware::SetAutoHogMode(false);
if (m_MixerRestore > -1) // We changed the mixer status
SetMixingSupport((m_MixerRestore ? true : false));
m_MixerRestore = -1;
if (m_SampleRateRestore != 0.0f)
SetNominalSampleRate(m_SampleRateRestore);
if (m_BufferSizeRestore && m_BufferSizeRestore != GetBufferSize())
{
SetBufferSize(m_BufferSizeRestore);
m_BufferSizeRestore = 0;
}
m_IoProc = NULL;
m_pSource = NULL;
m_DeviceId = 0;
m_ObjectListenerProc = NULL;
}
void CCoreAudioDevice::Start()
{
if (!m_DeviceId || m_Started)
return;
OSStatus ret = AudioDeviceStart(m_DeviceId, m_IoProc);
if (ret)
CLog::Log(LOGERROR, "CCoreAudioDevice::Start: "
"Unable to start device. Error = %s", GetError(ret).c_str());
else
m_Started = true;
}
void CCoreAudioDevice::Stop()
{
if (!m_DeviceId || !m_Started)
return;
OSStatus ret = AudioDeviceStop(m_DeviceId, m_IoProc);
if (ret)
CLog::Log(LOGERROR, "CCoreAudioDevice::Stop: "
"Unable to stop device. Error = %s", GetError(ret).c_str());
m_Started = false;
}
void CCoreAudioDevice::RemoveObjectListenerProc(AudioObjectPropertyListenerProc callback, void* pClientData)
{
if (!m_DeviceId)
return;
AudioObjectPropertyAddress audioProperty;
audioProperty.mSelector = kAudioObjectPropertySelectorWildcard;
audioProperty.mScope = kAudioObjectPropertyScopeWildcard;
audioProperty.mElement = kAudioObjectPropertyElementWildcard;
OSStatus ret = AudioObjectRemovePropertyListener(m_DeviceId, &audioProperty, callback, pClientData);
if (ret)
{
CLog::Log(LOGERROR, "CCoreAudioDevice::RemoveObjectListenerProc: "
"Unable to set ObjectListener callback. Error = %s", GetError(ret).c_str());
}
m_ObjectListenerProc = NULL;
}
bool CCoreAudioDevice::SetObjectListenerProc(AudioObjectPropertyListenerProc callback, void* pClientData)
{
// Allow only one ObjectListener at a time
if (!m_DeviceId || m_ObjectListenerProc)
return false;
AudioObjectPropertyAddress audioProperty;
audioProperty.mSelector = kAudioObjectPropertySelectorWildcard;
audioProperty.mScope = kAudioObjectPropertyScopeWildcard;
audioProperty.mElement = kAudioObjectPropertyElementWildcard;
OSStatus ret = AudioObjectAddPropertyListener(m_DeviceId, &audioProperty, callback, pClientData);
if (ret)
{
CLog::Log(LOGERROR, "CCoreAudioDevice::SetObjectListenerProc: "
"Unable to remove ObjectListener callback. Error = %s", GetError(ret).c_str());
return false;
}
m_ObjectListenerProc = callback;
return true;
}
bool CCoreAudioDevice::SetInputSource(ICoreAudioSource* pSource, unsigned int frameSize, unsigned int outputBufferIndex)
{
m_pSource = pSource;
m_frameSize = frameSize;
m_OutputBufferIndex = outputBufferIndex;
if (pSource)
return AddIOProc();
else
return RemoveIOProc();
}
bool CCoreAudioDevice::AddIOProc()
{
// Allow only one IOProc at a time
if (!m_DeviceId || m_IoProc)
return false;
OSStatus ret = AudioDeviceCreateIOProcID(m_DeviceId, DirectRenderCallback, this, &m_IoProc);
if (ret)
{
CLog::Log(LOGERROR, "CCoreAudioDevice::AddIOProc: "
"Unable to add IOProc. Error = %s", GetError(ret).c_str());
m_IoProc = NULL;
return false;
}
Start();
return true;
}
bool CCoreAudioDevice::RemoveIOProc()
{
if (!m_DeviceId || !m_IoProc)
return false;
Stop();
OSStatus ret = AudioDeviceDestroyIOProcID(m_DeviceId, m_IoProc);
if (ret)
CLog::Log(LOGERROR, "CCoreAudioDevice::RemoveIOProc: "
"Unable to remove IOProc. Error = %s", GetError(ret).c_str());
m_IoProc = NULL; // Clear the reference no matter what
m_pSource = NULL;
Sleep(100);
return true;
}
std::string CCoreAudioDevice::GetName()
{
if (!m_DeviceId)
return NULL;
AudioObjectPropertyAddress propertyAddress;
propertyAddress.mScope = kAudioDevicePropertyScopeOutput;
propertyAddress.mElement = 0;
propertyAddress.mSelector = kAudioDevicePropertyDeviceName;
UInt32 propertySize;
OSStatus ret = AudioObjectGetPropertyDataSize(m_DeviceId, &propertyAddress, 0, NULL, &propertySize);
if (ret != noErr)
return NULL;
std::string name = "";
char *buff = new char[propertySize + 1];
buff[propertySize] = 0x00;
ret = AudioObjectGetPropertyData(m_DeviceId, &propertyAddress, 0, NULL, &propertySize, buff);
if (ret != noErr)
{
CLog::Log(LOGERROR, "CCoreAudioDevice::GetName: "
"Unable to get device name - id: 0x%04x. Error = %s", (uint)m_DeviceId, GetError(ret).c_str());
}
else
{
name = buff;
}
delete[] buff; // allocated with new[], so use array delete
return name;
}
UInt32 CCoreAudioDevice::GetTotalOutputChannels()
{
UInt32 channels = 0;
if (!m_DeviceId)
return channels;
AudioObjectPropertyAddress propertyAddress;
propertyAddress.mScope = kAudioDevicePropertyScopeOutput;
propertyAddress.mElement = 0;
propertyAddress.mSelector = kAudioDevicePropertyStreamConfiguration;
UInt32 size = 0;
OSStatus ret = AudioObjectGetPropertyDataSize(m_DeviceId, &propertyAddress, 0, NULL, &size);
if (ret != noErr)
return channels;
AudioBufferList* pList = (AudioBufferList*)malloc(size);
ret = AudioObjectGetPropertyData(m_DeviceId, &propertyAddress, 0, NULL, &size, pList);
if (ret == noErr)
{
for(UInt32 buffer = 0; buffer < pList->mNumberBuffers; ++buffer)
channels += pList->mBuffers[buffer].mNumberChannels;
}
else
{
CLog::Log(LOGERROR, "CCoreAudioDevice::GetTotalOutputChannels: "
"Unable to get total device output channels - id: 0x%04x. Error = %s",
(uint)m_DeviceId, GetError(ret).c_str());
}
free(pList);
return channels;
}
bool CCoreAudioDevice::GetStreams(AudioStreamIdList* pList)
{
if (!pList || !m_DeviceId)
return false;
AudioObjectPropertyAddress propertyAddress;
propertyAddress.mScope = kAudioDevicePropertyScopeOutput;
propertyAddress.mElement = 0;
propertyAddress.mSelector = kAudioDevicePropertyStreams;
UInt32 propertySize = 0;
OSStatus ret = AudioObjectGetPropertyDataSize(m_DeviceId, &propertyAddress, 0, NULL, &propertySize);
if (ret != noErr)
return false;
UInt32 streamCount = propertySize / sizeof(AudioStreamID);
AudioStreamID* pStreamList = new AudioStreamID[streamCount];
ret = AudioObjectGetPropertyData(m_DeviceId, &propertyAddress, 0, NULL, &propertySize, pStreamList);
if (ret == noErr)
{
for (UInt32 stream = 0; stream < streamCount; stream++)
pList->push_back(pStreamList[stream]);
}
delete[] pStreamList;
return ret == noErr;
}
bool CCoreAudioDevice::IsRunning()
{
AudioObjectPropertyAddress propertyAddress;
propertyAddress.mScope = kAudioDevicePropertyScopeOutput;
propertyAddress.mElement = 0;
propertyAddress.mSelector = kAudioDevicePropertyDeviceIsRunning;
UInt32 isRunning = 0;
UInt32 propertySize = sizeof(isRunning);
OSStatus ret = AudioObjectGetPropertyData(m_DeviceId, &propertyAddress, 0, NULL, &propertySize, &isRunning);
if (ret != noErr)
return false;
return isRunning != 0;
}
bool CCoreAudioDevice::SetHogStatus(bool hog)
{
// According to Jeff Moore (Core Audio, Apple), Setting kAudioDevicePropertyHogMode
// is a toggle and the only way to tell if you do get hog mode is to compare
// the returned pid against getpid(); if they match, you have hog mode, if not you don't.
if (!m_DeviceId)
return false;
AudioObjectPropertyAddress propertyAddress;
propertyAddress.mScope = kAudioDevicePropertyScopeOutput;
propertyAddress.mElement = 0;
propertyAddress.mSelector = kAudioDevicePropertyHogMode;
if (hog)
{
// Not already set
if (m_HogPid == -1)
{
OSStatus ret = AudioObjectSetPropertyData(m_DeviceId, &propertyAddress, 0, NULL, sizeof(m_HogPid), &m_HogPid);
// even if setting hog mode was successful our PID might not get written
// into m_HogPid (so it stays -1). Read back the hog status to judge
// whether we actually acquired it
m_HogPid = GetHogStatus();
if (ret || m_HogPid != getpid())
{
CLog::Log(LOGERROR, "CCoreAudioDevice::SetHogStatus: "
"Unable to set 'hog' status. Error = %s", GetError(ret).c_str());
return false;
}
}
}
else
{
// Currently Set
if (m_HogPid > -1)
{
pid_t hogPid = -1;
OSStatus ret = AudioObjectSetPropertyData(m_DeviceId, &propertyAddress, 0, NULL, sizeof(hogPid), &hogPid);
if (ret || hogPid == getpid())
{
CLog::Log(LOGERROR, "CCoreAudioDevice::SetHogStatus: "
"Unable to release 'hog' status. Error = %s", GetError(ret).c_str());
return false;
}
// Reset internal state
m_HogPid = hogPid;
}
}
return true;
}
pid_t CCoreAudioDevice::GetHogStatus()
{
if (!m_DeviceId)
return false;
AudioObjectPropertyAddress propertyAddress;
propertyAddress.mScope = kAudioDevicePropertyScopeOutput;
propertyAddress.mElement = 0;
propertyAddress.mSelector = kAudioDevicePropertyHogMode;
pid_t hogPid = -1;
UInt32 size = sizeof(hogPid);
AudioObjectGetPropertyData(m_DeviceId, &propertyAddress, 0, NULL, &size, &hogPid);
return hogPid;
}
bool CCoreAudioDevice::SetMixingSupport(UInt32 mix)
{
if (!m_DeviceId)
return false;
if (!GetMixingSupport())
return false;
int restore = -1;
if (m_MixerRestore == -1)
{
// This is our first change to this setting. Store the original setting for restore
restore = (GetMixingSupport() ? 1 : 0);
}
AudioObjectPropertyAddress propertyAddress;
propertyAddress.mScope = kAudioDevicePropertyScopeOutput;
propertyAddress.mElement = 0;
propertyAddress.mSelector = kAudioDevicePropertySupportsMixing;
UInt32 mixEnable = mix ? 1 : 0;
OSStatus ret = AudioObjectSetPropertyData(m_DeviceId, &propertyAddress, 0, NULL, sizeof(mixEnable), &mixEnable);
if (ret != noErr)
{
CLog::Log(LOGERROR, "CCoreAudioDevice::SetMixingSupport: "
"Unable to set MixingSupport to %s. Error = %s", mix ? "'On'" : "'Off'", GetError(ret).c_str());
return false;
}
if (m_MixerRestore == -1)
m_MixerRestore = restore;
return true;
}
bool CCoreAudioDevice::GetMixingSupport()
{
if (!m_DeviceId)
return false;
UInt32 size;
UInt32 mix = 0;
Boolean writable = false;
AudioObjectPropertyAddress propertyAddress;
propertyAddress.mScope = kAudioDevicePropertyScopeOutput;
propertyAddress.mElement = 0;
propertyAddress.mSelector = kAudioDevicePropertySupportsMixing;
if( AudioObjectHasProperty( m_DeviceId, &propertyAddress ) )
{
OSStatus ret = AudioObjectIsPropertySettable(m_DeviceId, &propertyAddress, &writable);
if (ret)
{
CLog::Log(LOGERROR, "CCoreAudioDevice::SupportsMixing: "
"Unable to get propertyinfo mixing support. Error = %s", GetError(ret).c_str());
writable = false;
}
if (writable)
{
size = sizeof(mix);
ret = AudioObjectGetPropertyData(m_DeviceId, &propertyAddress, 0, NULL, &size, &mix);
if (ret != noErr)
mix = 0;
}
}
CLog::Log(LOGERROR, "CCoreAudioDevice::SupportsMixing: "
"Device mixing support : %s.", mix ? "'Yes'" : "'No'");
return (mix > 0);
}
bool CCoreAudioDevice::SetCurrentVolume(Float32 vol)
{
if (!m_DeviceId)
return false;
AudioObjectPropertyAddress propertyAddress;
propertyAddress.mScope = kAudioDevicePropertyScopeOutput;
propertyAddress.mElement = 0;
propertyAddress.mSelector = kHALOutputParam_Volume;
OSStatus ret = AudioObjectSetPropertyData(m_DeviceId, &propertyAddress, 0, NULL, sizeof(Float32), &vol);
if (ret != noErr)
{
CLog::Log(LOGERROR, "CCoreAudioDevice::SetCurrentVolume: "
"Unable to set AudioUnit volume. Error = %s", GetError(ret).c_str());
return false;
}
return true;
}
bool CCoreAudioDevice::GetPreferredChannelLayout(CCoreAudioChannelLayout& layout)
{
if (!m_DeviceId)
return false;
AudioObjectPropertyAddress propertyAddress;
propertyAddress.mScope = kAudioDevicePropertyScopeOutput;
propertyAddress.mElement = 0;
propertyAddress.mSelector = kAudioDevicePropertyPreferredChannelLayout;
UInt32 propertySize = 0;
OSStatus ret = AudioObjectGetPropertyDataSize(m_DeviceId, &propertyAddress, 0, NULL, &propertySize);
if (ret)
return false;
void* pBuf = malloc(propertySize);
ret = AudioObjectGetPropertyData(m_DeviceId, &propertyAddress, 0, NULL, &propertySize, pBuf);
if (ret != noErr)
CLog::Log(LOGERROR, "CCoreAudioDevice::GetPreferredChannelLayout: "
"Unable to retrieve preferred channel layout. Error = %s", GetError(ret).c_str());
else
{
// Copy the result into the caller's instance
layout.CopyLayout(*((AudioChannelLayout*)pBuf));
}
free(pBuf);
return (ret == noErr);
}
bool CCoreAudioDevice::GetDataSources(CoreAudioDataSourceList* pList)
{
if (!pList || !m_DeviceId)
return false;
AudioObjectPropertyAddress propertyAddress;
propertyAddress.mScope = kAudioDevicePropertyScopeOutput;
propertyAddress.mElement = 0;
propertyAddress.mSelector = kAudioDevicePropertyDataSources;
UInt32 propertySize = 0;
OSStatus ret = AudioObjectGetPropertyDataSize(m_DeviceId, &propertyAddress, 0, NULL, &propertySize);
if (ret != noErr)
return false;
UInt32 sources = propertySize / sizeof(UInt32);
UInt32* pSources = new UInt32[sources];
ret = AudioObjectGetPropertyData(m_DeviceId, &propertyAddress, 0, NULL, &propertySize, pSources);
if (ret == noErr)
{
for (UInt32 i = 0; i < sources; i++)
pList->push_back(pSources[i]);
}
delete[] pSources;
return (!ret);
}
Float64 CCoreAudioDevice::GetNominalSampleRate()
{
if (!m_DeviceId)
return 0.0f;
AudioObjectPropertyAddress propertyAddress;
propertyAddress.mScope = kAudioDevicePropertyScopeOutput;
propertyAddress.mElement = 0;
propertyAddress.mSelector = kAudioDevicePropertyNominalSampleRate;
Float64 sampleRate = 0.0f;
UInt32 propertySize = sizeof(Float64);
OSStatus ret = AudioObjectGetPropertyData(m_DeviceId, &propertyAddress, 0, NULL, &propertySize, &sampleRate);
if (ret != noErr)
{
CLog::Log(LOGERROR, "CCoreAudioDevice::GetNominalSampleRate: "
"Unable to retrieve current device sample rate. Error = %s", GetError(ret).c_str());
return 0.0f;
}
return sampleRate;
}
bool CCoreAudioDevice::SetNominalSampleRate(Float64 sampleRate)
{
if (!m_DeviceId || sampleRate == 0.0f)
return false;
Float64 currentRate = GetNominalSampleRate();<|fim▁hole|> return true; //No need to change
AudioObjectPropertyAddress propertyAddress;
propertyAddress.mScope = kAudioDevicePropertyScopeOutput;
propertyAddress.mElement = 0;
propertyAddress.mSelector = kAudioDevicePropertyNominalSampleRate;
OSStatus ret = AudioObjectSetPropertyData(m_DeviceId, &propertyAddress, 0, NULL, sizeof(Float64), &sampleRate);
if (ret != noErr)
{
CLog::Log(LOGERROR, "CCoreAudioDevice::SetNominalSampleRate: "
"Unable to set current device sample rate to %0.0f. Error = %s",
(float)sampleRate, GetError(ret).c_str());
return false;
}
if (m_SampleRateRestore == 0.0f)
m_SampleRateRestore = currentRate;
return true;
}
UInt32 CCoreAudioDevice::GetNumLatencyFrames()
{
UInt32 num_latency_frames = 0;
if (!m_DeviceId)
return 0;
// number of frames of latency in the AudioDevice
AudioObjectPropertyAddress propertyAddress;
propertyAddress.mScope = kAudioDevicePropertyScopeOutput;
propertyAddress.mElement = 0;
propertyAddress.mSelector = kAudioDevicePropertyLatency;
UInt32 i_param = 0;
UInt32 i_param_size = sizeof(uint32_t);
OSStatus ret = AudioObjectGetPropertyData(m_DeviceId, &propertyAddress, 0, NULL, &i_param_size, &i_param);
if (ret == noErr)
num_latency_frames += i_param;
// number of frames in the IO buffers
propertyAddress.mSelector = kAudioDevicePropertyBufferFrameSize;
ret = AudioObjectGetPropertyData(m_DeviceId, &propertyAddress, 0, NULL, &i_param_size, &i_param);
if (ret == noErr)
num_latency_frames += i_param;
// number for frames in ahead the current hardware position that is safe to do IO
propertyAddress.mSelector = kAudioDevicePropertySafetyOffset;
ret = AudioObjectGetPropertyData(m_DeviceId, &propertyAddress, 0, NULL, &i_param_size, &i_param);
if (ret == noErr)
num_latency_frames += i_param;
return (num_latency_frames);
}
UInt32 CCoreAudioDevice::GetBufferSize()
{
if (!m_DeviceId)
return false;
AudioObjectPropertyAddress propertyAddress;
propertyAddress.mScope = kAudioDevicePropertyScopeOutput;
propertyAddress.mElement = 0;
propertyAddress.mSelector = kAudioDevicePropertyBufferFrameSize;
UInt32 size = 0;
UInt32 propertySize = sizeof(size);
OSStatus ret = AudioObjectGetPropertyData(m_DeviceId, &propertyAddress, 0, NULL, &propertySize, &size);
if (ret != noErr)
CLog::Log(LOGERROR, "CCoreAudioDevice::GetBufferSize: "
"Unable to retrieve buffer size. Error = %s", GetError(ret).c_str());
return size;
}
bool CCoreAudioDevice::SetBufferSize(UInt32 size)
{
if (!m_DeviceId)
return false;
AudioObjectPropertyAddress propertyAddress;
propertyAddress.mScope = kAudioDevicePropertyScopeOutput;
propertyAddress.mElement = 0;
propertyAddress.mSelector = kAudioDevicePropertyBufferFrameSize;
UInt32 propertySize = sizeof(size);
OSStatus ret = AudioObjectSetPropertyData(m_DeviceId, &propertyAddress, 0, NULL, propertySize, &size);
if (ret != noErr)
{
CLog::Log(LOGERROR, "CCoreAudioDevice::SetBufferSize: "
"Unable to set buffer size. Error = %s", GetError(ret).c_str());
}
if (GetBufferSize() != size)
CLog::Log(LOGERROR, "CCoreAudioDevice::SetBufferSize: Buffer size change not applied.");
return (ret == noErr);
}
OSStatus CCoreAudioDevice::DirectRenderCallback(AudioDeviceID inDevice,
const AudioTimeStamp *inNow,
const AudioBufferList *inInputData,
const AudioTimeStamp *inInputTime,
AudioBufferList *outOutputData,
const AudioTimeStamp *inOutputTime,
void *inClientData)
{
OSStatus ret = noErr;
CCoreAudioDevice *audioDevice = (CCoreAudioDevice*)inClientData;
if (audioDevice->m_pSource && audioDevice->m_frameSize)
{
UInt32 frames = outOutputData->mBuffers[audioDevice->m_OutputBufferIndex].mDataByteSize / audioDevice->m_frameSize;
ret = audioDevice->m_pSource->Render(NULL, inInputTime, 0, frames, outOutputData);
}
else
{
outOutputData->mBuffers[audioDevice->m_OutputBufferIndex].mDataByteSize = 0;
}
return ret;
}<|fim▁end|> | if (currentRate == sampleRate) |
<|file_name|>0018_podcasts.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-08-21 18:59
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
dependencies = [
('dispatch', '0017_subsections'),
]
operations = [
migrations.CreateModel(
name='Podcast',
fields=[
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
('slug', models.SlugField(unique=True)),
('title', models.CharField(max_length=255)),
('description', models.TextField()),
('author', models.CharField(max_length=255)),
('owner_name', models.CharField(max_length=255)),
('owner_email', models.EmailField(max_length=255)),
('category', models.CharField(max_length=255)),
('image', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='dispatch.Image')),
],
),
migrations.CreateModel(
name='PodcastEpisode',
fields=[
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),<|fim▁hole|> ('author', models.CharField(max_length=255)),
('duration', models.PositiveIntegerField(null=True)),
('published_at', models.DateTimeField()),
('explicit', models.CharField(choices=[(b'no', b'No'), (b'yes', b'Yes'), (b'clean', b'Clean')], default=b'no', max_length=5)),
('file', models.FileField(upload_to=b'podcasts/')),
('type', models.CharField(default='audio/mp3', max_length=255)),
('image', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='dispatch.Image')),
('podcast', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dispatch.Podcast')),
],
),
]<|fim▁end|> | ('title', models.CharField(max_length=255)),
('description', models.TextField()), |
<|file_name|>store_test.go<|end_file_name|><|fim▁begin|>package webl
import (
// "testing"
. "gopkg.in/check.v1"
)
//------
// store
//------
func (s *MySuite) Test_saveResource(c *C) {
Pool = NewPool(":6379","")
r := Resource{ Name: "a", Url: "http://a", Status: "404", Type: "html" }
saveResource(&r)
sameR := LoadDomain("a",true)
c.Check(sameR.Name,Equals,"a")
c.Check(sameR.Url,Equals,"http://a")
c.Check(sameR.Status,Equals,"404")
c.Check(sameR.Type,Equals,"html")
}
func (s *MySuite) Test_saveEdge(c *C) {
Pool = NewPool(":6379","")
saveResource(&Resource{ Name: "a4word.com", Url: "http://a4word.com", Status: "200", Type: "text/html" })
saveResource(&Resource{ Name: "/links.php", Url: "http://a4word.com/links.php", Status: "200", Type: "text/html" })
saveEdge("a4word.com","http://a4word.com","http://a4word.com/links.php")
sameR := LoadDomain("a4word.com",true)
c.Check(sameR.Name,Equals,"a4word.com")
c.Check(sameR.Url,Equals,"http://a4word.com")
c.Check(sameR.Status,Equals,"200")
c.Check(sameR.Type,Equals,"text/html")
}<|fim▁hole|>
r := Resource{ Name: "aa", Url: "b", Status: "404", Type: "html" }
saveResource(&r)
deleteResource("aa")
sameR := LoadDomain("aa",true)
c.Check(sameR.Name,Equals,"aa")
c.Check(sameR.Url,Equals,"http://aa")
c.Check(sameR.Status,Equals,"missing")
c.Check(sameR.Type,Equals,"")
}
func (s *MySuite) Test_AddDomain(c *C) {
Pool = NewPool(":6379","")
DeleteAllDomains()
c.Check(len(ListDomains()),Equals,0)
r := Resource{ Name: "a4word.com", Url: "http://a4word.com" }
AddDomain(&r)
all := ListDomains()
c.Check(len(all),Equals,1)
c.Check(all[0].Name,Equals,"a4word.com")
}
func (s *MySuite) Test_RemoveDomain(c *C) {
Pool = NewPool(":6379","")
DeleteAllDomains()
c.Check(len(ListDomains()),Equals,0)
r := Resource{ Name: "a4word.com", Url: "http://a4word.com" }
AddDomain(&r)
all := ListDomains()
c.Check(len(all),Equals,1)
DeleteDomain("a4word.com")
c.Check(len(ListDomains()),Equals,0)
}<|fim▁end|> |
func (s *MySuite) Test_deleteResource(c *C) {
Pool = NewPool(":6379","") |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>mod archive_comments;
mod archive_entries;
mod archive_events;
mod archive_ratings;
mod create_entry;
mod create_rating;
mod update_entry;
pub mod prelude {
pub use super::{
archive_comments::*, archive_entries::*, archive_events::*, archive_ratings::*,
create_entry::*, create_rating::*, update_entry::*,
};
}
pub type Result<T> = std::result::Result<T, error::AppError>;
pub(crate) use super::{db::sqlite, error, notify};
pub(crate) use crate::core::{prelude::*, usecases};
#[cfg(test)]
mod tests {
pub mod prelude {
pub use crate::core::{prelude::*, usecases};
pub mod sqlite {
pub use super::super::super::sqlite::*;
}
pub mod tantivy {
pub use crate::infrastructure::db::tantivy::SearchEngine;
}
pub use crate::{
infrastructure::{error::AppError, flows::prelude as flows},
ports::web::api,
};
use super::super::Result;
pub use rocket::{
http::{ContentType, Cookie, Status},
local::Client,
response::Response,
};
use crate::ports::web::rocket_instance;
use rocket::{
config::{Config, Environment},
logger::LoggingLevel,
};
use std::cell::RefCell;
embed_migrations!();
pub struct EnvFixture {
pub client: Client,
pub db_connections: sqlite::Connections,
pub search_engine: RefCell<tantivy::SearchEngine>,
}
impl EnvFixture {
pub fn new() -> Self {
let cfg = Config::build(Environment::Development)
.log_level(LoggingLevel::Debug)
.finalize()
.unwrap();
            let db_connections = sqlite::Connections::init(":memory:", 1).unwrap();
embedded_migrations::run(&*db_connections.exclusive().unwrap()).unwrap();
let search_engine = tantivy::SearchEngine::init_in_ram().unwrap();
let rocket = rocket_instance(
db_connections.clone(),
search_engine.clone(),
vec![("/", api::routes())],
Some(cfg),
);
let client = Client::new(rocket).unwrap();
Self {
client,
db_connections,
search_engine: RefCell::new(search_engine),
}
}
<|fim▁hole|> flows::create_entry(
&self.db_connections,
&mut *self.search_engine.borrow_mut(),
new_entry.into(),
)
.unwrap()
}
pub fn try_get_entry(self: &EnvFixture, id: &str) -> Option<Entry> {
match self.db_connections.shared().unwrap().get_entry(id) {
Ok(entry) => Some(entry),
Err(RepoError::NotFound) => None,
x => x.map(|_| None).unwrap(),
}
}
pub fn entry_exists(self: &EnvFixture, id: &str) -> bool {
self.try_get_entry(id).is_some()
}
pub fn create_rating(
self: &EnvFixture,
rate_entry: usecases::RateEntry,
) -> (String, String) {
flows::create_rating(
&self.db_connections,
&mut *self.search_engine.borrow_mut(),
rate_entry,
)
.unwrap()
}
pub fn try_get_rating(self: &EnvFixture, id: &str) -> Option<Rating> {
match self.db_connections.shared().unwrap().load_rating(id) {
Ok(rating) => Some(rating),
Err(RepoError::NotFound) => None,
x => x.map(|_| None).unwrap(),
}
}
pub fn rating_exists(self: &EnvFixture, id: &str) -> bool {
self.try_get_rating(id).is_some()
}
pub fn try_get_comment(self: &EnvFixture, id: &str) -> Option<Comment> {
match self.db_connections.shared().unwrap().load_comment(id) {
Ok(comment) => Some(comment),
Err(RepoError::NotFound) => None,
x => x.map(|_| None).unwrap(),
}
}
pub fn comment_exists(self: &EnvFixture, id: &str) -> bool {
self.try_get_comment(id).is_some()
}
pub fn query_entries(self: &EnvFixture, query: &EntryIndexQuery) -> Vec<IndexedEntry> {
self.search_engine
.borrow_mut()
.query_entries(query, 100)
.unwrap()
}
pub fn query_entries_by_tag(self: &EnvFixture, tag: &str) -> Vec<IndexedEntry> {
let query = EntryIndexQuery {
hash_tags: vec![tag.into()],
..Default::default()
};
self.query_entries(&query)
}
}
pub fn assert_not_found<T: std::fmt::Debug>(res: Result<T>) {
assert_eq!(
RepoError::NotFound.to_string(),
res.unwrap_err().to_string()
);
}
pub struct NewEntry {
pub pos: MapPoint,
pub title: String,
pub description: String,
pub categories: Vec<String>,
pub tags: Vec<String>,
}
impl From<i32> for NewEntry {
fn from(i: i32) -> Self {
let lat_deg = i % 91;
let lng_deg = -i % 181;
let pos = MapPoint::from_lat_lng_deg(lat_deg as f64, lng_deg as f64);
let title = format!("title_{}", i);
let description = format!("description_{}", i);
let categories = vec![format!("category_{}", i)];
let tags = vec![format!("tag_{}", i)];
NewEntry {
pos,
title,
description,
categories,
tags,
}
}
}
impl From<NewEntry> for usecases::NewEntry {
fn from(e: NewEntry) -> Self {
usecases::NewEntry {
lat: e.pos.lat().to_deg(),
lng: e.pos.lng().to_deg(),
title: e.title,
description: e.description,
categories: e.categories,
tags: e.tags,
license: "CC0-1.0".into(),
street: None,
city: None,
zip: None,
country: None,
email: None,
telephone: None,
homepage: None,
image_url: None,
image_link_url: None,
}
}
}
pub fn new_entry_rating(
i: i32,
entry_id: &str,
context: RatingContext,
value: RatingValue,
) -> usecases::RateEntry {
usecases::RateEntry {
entry: entry_id.to_owned(),
context,
value,
title: format!("title_{}", i),
comment: format!("comment_{}", i),
source: None,
user: None,
}
}
}
}<|fim▁end|> | pub fn create_entry(self: &EnvFixture, new_entry: NewEntry) -> String { |
<|file_name|>udpsink.rs<|end_file_name|><|fim▁begin|>// Copyright (C) 2019 Mathieu Duponchelle <[email protected]>
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Library General Public
// License as published by the Free Software Foundation; either
// version 2 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Library General Public License for more details.
//
// You should have received a copy of the GNU Library General Public
// License along with this library; if not, write to the
// Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
// Boston, MA 02110-1335, USA.
use std::thread;
use glib::prelude::*;
fn init() {
use std::sync::Once;
static INIT: Once = Once::new();
INIT.call_once(|| {
gst::init().unwrap();
gstthreadshare::plugin_register_static().expect("gstthreadshare udpsrc test");
});<|fim▁hole|>#[test]
fn test_client_management() {
init();
let h = gst_check::Harness::new("ts-udpsink");
let udpsink = h.get_element().unwrap();
let clients = udpsink
.get_property("clients")
.unwrap()
.get::<String>()
.unwrap()
.unwrap();
assert_eq!(clients, "127.0.0.1:5004");
udpsink.emit("add", &[&"192.168.1.1", &57i32]).unwrap();
let clients = udpsink
.get_property("clients")
.unwrap()
.get::<String>()
.unwrap()
.unwrap();
assert_eq!(clients, "127.0.0.1:5004,192.168.1.1:57");
/* Adding a client twice is not supported */
udpsink.emit("add", &[&"192.168.1.1", &57i32]).unwrap();
let clients = udpsink
.get_property("clients")
.unwrap()
.get::<String>()
.unwrap()
.unwrap();
assert_eq!(clients, "127.0.0.1:5004,192.168.1.1:57");
udpsink.emit("remove", &[&"192.168.1.1", &57i32]).unwrap();
let clients = udpsink
.get_property("clients")
.unwrap()
.get::<String>()
.unwrap()
.unwrap();
assert_eq!(clients, "127.0.0.1:5004");
/* Removing a non-existing client should not be a problem */
udpsink.emit("remove", &[&"192.168.1.1", &57i32]).unwrap();
let clients = udpsink
.get_property("clients")
.unwrap()
.get::<String>()
.unwrap()
.unwrap();
assert_eq!(clients, "127.0.0.1:5004");
/* Removing the default client is possible */
udpsink.emit("remove", &[&"127.0.0.1", &5004i32]).unwrap();
let clients = udpsink
.get_property("clients")
.unwrap()
.get::<String>()
.unwrap()
.unwrap();
assert_eq!(clients, "");
/* The client properties is writable too */
udpsink
.set_property("clients", &"127.0.0.1:5004,192.168.1.1:57")
.unwrap();
let clients = udpsink
.get_property("clients")
.unwrap()
.get::<String>()
.unwrap()
.unwrap();
assert_eq!(clients, "127.0.0.1:5004,192.168.1.1:57");
udpsink.emit("clear", &[]).unwrap();
let clients = udpsink
.get_property("clients")
.unwrap()
.get::<String>()
.unwrap()
.unwrap();
assert_eq!(clients, "");
}
#[test]
fn test_chain() {
init();
let mut h = gst_check::Harness::new("ts-udpsink");
h.set_src_caps_str(&"foo/bar");
{
let udpsink = h.get_element().unwrap();
udpsink.set_property("clients", &"127.0.0.1:5005").unwrap();
}
thread::spawn(move || {
use std::net;
use std::time;
thread::sleep(time::Duration::from_millis(50));
let socket = net::UdpSocket::bind("127.0.0.1:5005").unwrap();
let mut buf = [0; 5];
let (amt, _) = socket.recv_from(&mut buf).unwrap();
assert!(amt == 4);
assert!(buf == [42, 43, 44, 45, 0]);
});
let buf = gst::Buffer::from_slice(&[42, 43, 44, 45]);
assert!(h.push(buf) == Ok(gst::FlowSuccess::Ok));
}<|fim▁end|> | }
|
<|file_name|>config.js<|end_file_name|><|fim▁begin|>// # Ghost Configuration
// Setup your Ghost install for various environments<|fim▁hole|>
var path = require('path'),
config;
config = {
// ### Production
// When running Ghost in the wild, use the production environment
// Configure your URL and mail settings here
production: {
url: 'http://my-ghost-blog.com',
mail: {},
database: {
client: 'sqlite3',
connection: {
filename: path.join(__dirname, '/content/data/ghost.db')
},
debug: false
},
server: {
// Host to be passed to node's `net.Server#listen()`
host: '127.0.0.1',
// Port to be passed to node's `net.Server#listen()`, for iisnode set this to `process.env.PORT`
port: '2368'
}
},
// ### Development **(default)**
development: {
// The url to use when providing links to the site, E.g. in RSS and email.
// Change this to your Ghost blogs published URL.
url: 'http://localhost:2368',
// Example mail config
// Visit http://support.ghost.org/mail for instructions
// ```
mail: {
transport: 'SMTP',
options: {
service: 'Gmail',
auth: {
                    user: '[email protected]', // Gmail username
                    pass: 'ghostblogwandering' // Gmail password
}
}
},
//```
database: {
client: 'sqlite3',
connection: {
filename: path.join(__dirname, '/content/data/ghost-dev.db')
},
debug: false
},
server: {
// Host to be passed to node's `net.Server#listen()`
host: '127.0.0.1',
// Port to be passed to node's `net.Server#listen()`, for iisnode set this to `process.env.PORT`
port: '2368'
},
paths: {
contentPath: path.join(__dirname, '/content/')
}
},
// **Developers only need to edit below here**
// ### Testing
// Used when developing Ghost to run tests and check the health of Ghost
// Uses a different port number
testing: {
url: 'http://127.0.0.1:2369',
database: {
client: 'sqlite3',
connection: {
filename: path.join(__dirname, '/content/data/ghost-test.db')
}
},
server: {
host: '127.0.0.1',
port: '2369'
},
logging: false
},
// ### Testing MySQL
// Used by Travis - Automated testing run through GitHub
'testing-mysql': {
url: 'http://127.0.0.1:2369',
database: {
client: 'mysql',
connection: {
host : '127.0.0.1',
user : 'root',
password : '',
database : 'ghost_testing',
charset : 'utf8'
}
},
server: {
host: '127.0.0.1',
port: '2369'
},
logging: false
},
// ### Testing pg
// Used by Travis - Automated testing run through GitHub
'testing-pg': {
url: 'http://127.0.0.1:2369',
database: {
client: 'pg',
connection: {
host : '127.0.0.1',
user : 'postgres',
password : '',
database : 'ghost_testing',
charset : 'utf8'
}
},
server: {
host: '127.0.0.1',
port: '2369'
},
logging: false
}
};
// Export config
module.exports = config;<|fim▁end|> | // Documentation can be found at http://support.ghost.org/config/ |
<|file_name|>date_utils.py<|end_file_name|><|fim▁begin|>import time
<|fim▁hole|><|fim▁end|> |
def current_millis():
return int(round(time.time() * 1000)) |
<|file_name|>find_holes.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Created on Mon Jun 20 12:28:32 2015
@author: boland
"""
import sys
sys.path.append('/home/boland/Anaconda/lib/python2.7/site-packages')
import pickle
import numpy as np
import matplotlib.pyplot as plt
from scipy.cluster.vq import kmeans
import multiprocessing as mp
import pyproj
import os
import itertools
import datetime
import pointshape as ps
from math import sqrt, atan2, radians,degrees, cos, tan, sin, asin
import random
import uuid
shape_path = "/home/boland/Dropbox/University/UniMelb/AGOS/PROGRAMS/ANT/Versions/26.04.2015/shapefiles/aus.shp"
N = 130
#enter km spacing between path density points
km_points = 20.0
# reference elipsoid to calculate distance
wgs84 = pyproj.Geod(ellps='WGS84')
nbins = 200
def haversine(coordinates):
"""
Calculate the great circle distance between two points
on the earth (specified in decimal degrees)
"""
lon1, lat1, lon2, lat2= coordinates[0],coordinates[1],\
coordinates[2],coordinates[3]
# convert decimal degrees to radians
lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
# haversine formula
dlon, dlat = lon2 - lon1, lat2 - lat1
a = sin(dlat/2)**2 + cos(lat1) * cos(lat2) * sin(dlon/2)**2
c = 2 * asin(sqrt(a))
km = 6367 * c
return km
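# Quick sanity check (illustrative, not executed as part of the script):
# one degree of longitude along the equator should come out near 111 km
# for the R = 6367 km earth radius used above, e.g.
#   haversine([0.0, 0.0, 1.0, 0.0]) -> ~111.1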
def haversine2(lon1, lat1, lon2, lat2):
"""
Calculate the great circle distance between two points
on the earth (specified in decimal degrees)
"""
# convert decimal degrees to radians
lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
# haversine formula
dlon, dlat = lon2 - lon1, lat2 - lat1
a = sin(dlat/2)**2 + cos(lat1) * cos(lat2) * sin(dlon/2)**2
c = 2 * asin(sqrt(a))
km = 6367 * c
return km
def geodesic(coord1, coord2, npts):
"""
Returns a list of *npts* points along the geodesic between
(and including) *coord1* and *coord2*, in an array of
shape (*npts*, 2).
@rtype: L{ndarray}
"""
if npts < 2:
raise Exception('nb of points must be at least 2')
path = wgs84.npts(lon1=coord1[0], lat1=coord1[1],
lon2=coord2[0], lat2=coord2[1],
npts=npts-2)
return np.array([coord1] + path + [coord2])
def new_geodesic(lon1,lat1,lon2,lat2, npts):
"""
Returns a list of *npts* points along the geodesic between
(and including) *coord1* and *coord2*, in an array of
shape (*npts*, 2).
@rtype: L{ndarray}
"""
if npts < 2:
raise Exception('nb of points must be at least 2')
path = wgs84.npts(lon1=lon1, lat1=lat1,
lon2=lon2, lat2=lat2,
npts=npts-2)
return np.array([[lon1,lat1]] + path + [[lon2,lat2]])
def cluster_points(coord_points, N):
"""
    Return an N x 2 array of lon-lat cluster centroids, obtained by running
    k-means over a large set of randomly distributed coordinate points,
    e.g. those produced by ps.points_in_shape().
"""
k = kmeans(coord_points, N)
return k[0]
def paths_func(path_info, km=km_points):
lon1, lat1, lon2, lat2 = path_info[0], \
path_info[1], path_info[2], path_info[3]
#lon1, lat1, lon2, lat2, dist = path_info[0], \
#path_info[1], path_info[2], path_info[3], \
#path_info[4]
dist = haversine2(lon1, lat1, lon2, lat2)
# interpoint distance <= 1 km, and nb of points >= 100
npts = max(int((np.ceil(dist) + 1)/km), 100)
path = new_geodesic(lon1,lat1,lon2,lat2, npts)
#print("still going strong\n")
length = len(path)
lons = [lon1 for i in range(0,length)]
lats = [lat1 for i in range(0,length)]
path = np.column_stack((path,lons,lats))
return path
def HIST2D(nbins,paths, grad=False):
H, xedges, yedges = np.histogram2d(paths[:,0],paths[:,1],bins=nbins)
#name = "path_density_2Dhist.png"
if grad:
        H = np.abs(np.asarray(np.gradient(H)[0]))
        #name = "path_density_2Dhist_grad.png"
# H needs to be rotated and flipped
H = np.rot90(H)
H = np.flipud(H)
# Mask zeros
Hmasked = np.ma.masked_where(H==0,H) # Mask pixels with a value of zero
return Hmasked
#fig = plt.figure()
#plt.pcolormesh(xedges,yedges,Hmasked)
#plt.xlabel('longitude (degrees)')
#plt.ylabel('longitude (degrees)')
#cbar = plt.colorbar()
#cbar.ax.set_ylabel('Counts')
#fig.savefig(name)
def latitude(dist, sigma01, alpha0, lon0):
sigma = sigma01 + dist#/R
lat = degrees(asin(cos(alpha0)*sin(sigma)))
#alpha = atan2(tan(alpha0),cos(sigma))
return lat
def longitude(dist, sigma01, alpha0, lon0):
sigma = sigma01 + dist#/R
lon = degrees(atan2(sin(alpha0)*sin(sigma), cos(sigma))) + degrees(lon0)
#alpha = atan2(tan(alpha0),cos(sigma))
return lon
vlat_func = np.vectorize(latitude)
vlon_func = np.vectorize(longitude)
def waypoint_init(path_info, km=km_points):
R = 6371
lon1, lat1, lon2, lat2, dist = radians(path_info[0]), \
radians(path_info[1]), radians(path_info[2]), \
radians(path_info[3]), radians(path_info[4])
#lon1, lat1, lon2, lat2, dist = map(radians, [path_info[0],path_info[1],path_info[2],path_info[3],path_info[4]])
lon_diff = lon2-lon1
alpha1 = atan2(sin(lon_diff),(cos(lat1)*tan(lat2)-sin(lat1)*cos(lon_diff)))
#alpha2 = atan2(sin(lon_diff),(-cos(lat2)*tan(lat1)+sin(lat2)*cos(lon_diff)))
#try:
#sigma12 = acos(sin(lat1)*sin(lat2)+cos(lat1)*cos(lat2)*cos(lon_diff))
#except:
#return
sigma01, alpha0 = atan2(tan(lat1), cos(alpha1)), asin(sin(alpha1)*cos(lat1))
#sigma02 = sigma01+sigma12
lon01 = atan2(sin(alpha0)*sin(sigma01), cos(sigma01))
lon0 = lon1 - lon01
npts = max(int((np.ceil(dist) + 1)/km), 100)
all_d = np.linspace(0,dist,npts)/R
lons, lats = vlon_func(all_d, sigma01, alpha0, lon0), vlat_func(all_d, sigma01, alpha0, lon0)
return np.column_stack((lons, lats))
t_total0 = datetime.datetime.now()
t0 = datetime.datetime.now()
ideal_path = 'ideal_coordinates.pickle'
#if no paths have been done before, start afresh!
if not os.path.exists(ideal_path):
M = 1e5
many_points = ps.points_in_shape(shape_path, M)
coords = cluster_points(many_points,N)
#else import already processed coordinates if the program has already done so.
else:
f = open(name=ideal_path, mode='rb')
coords = pickle.load(f)
f.close()
#generate N kmeans cluster points from massive M number of randomly distributed
#points inside the shape file.
lonmin = np.floor(min(coords[:,0]))
lonmax = np.ceil(max(coords[:,0]))
latmin = np.floor(min(coords[:,1]))
latmax = np.ceil(max(coords[:,1]))
print lonmin,lonmax,latmin,latmax
#coords1 = [coord1 for coord1 in coords for coord2 in coords]
#coords2 = [coord2 for coord1 in coords for coord2 in coords]
#columns = np.column_stack((coords1, coords2))
kappa = [np.vstack([[coord1[0],coord1[1],coord2[0],coord2[1]]\
for coord2 in coords]) for coord1 in coords]
def spread_paths(nets):
#pool = mp.Pool()
#paths = pool.map(new_paths, nets)
#pool.close()
#pool.join()
paths = map(paths_func, nets)
#create a flattened numpy array of size 2xN from the paths created!
#paths = np.asarray(list(itertools.chain(*paths)))
#keep all but the repeated coordinates by keeping only unique whole rows!
#method is slowed without the b contiguous array
#b = np.ascontiguousarray(paths).view(np.dtype((np.void, paths.dtype.itemsize * paths.shape[1])))
#_, idx = np.unique(b, return_index=True)
#paths = np.unique(b).view(paths.dtype).reshape(-1, paths.shape[1])
#plt.figure()
#plt.scatter(paths[:,0],paths[:,1])
#name = uuid.uuid4()
#plt.savefig('{}.png'.format(name))
return paths
t0 = datetime.datetime.now()
pool = mp.Pool()
paths = pool.map(spread_paths, kappa)
pool.close()
pool.join()
t1 = datetime.datetime.now()
print t1-t0
#paths = list(paths)
counter = 0
#cd Desktop/Link\ to\ SIMULATIONS/Network_Tracks/smarter_model/
grad_ideal, grad_check1, grad_check2, H_avg1, H_avg2 = 0, 0, 0, 0, 0
SHAPE = (1,1)
counter2 = 0
perc_high = 0.01
#counter of how many times the points
#have been chosen from the lowest path density spots
low_counter = 0
#counter of how many times the points
#have been chosen from the random spots.
random_counter = 0
new_coord = 0
infinite_counter = 0
while infinite_counter <= 1:
t0 = datetime.datetime.now()
    #the following while loop is a workaround for a bug where new_paths
    #comes back with shape (130, 100, 4) instead of (130,) as it should.
while SHAPE != (130,):
#if counter2 >= len(paths)-1:
# counter2 = 0
#cycle through paths
#----------------------------------------------------------------------
#old_path = paths[counter2]
#del paths[counter2]
#old_coord = [old_path[0][0][0],old_path[0][0][1]]
#itemindex = np.where(coords==old_coord)[0][0]
#coords = list(coords)
#find index of array in nested array to remove!
#del coords[itemindex]
#print(counter2)
#----------------------------------------------------------------------
#or random selection of paths?!
#----------------------------------------------------------------------
#remove a random set of paths associated with a single one of the N coordinates
rand_int = random.randint(0,len(paths)-1)
old_path = paths[rand_int]
#figure out which old coordinate to remove from the coordinates list
old_coord = [old_path[0][0][0],old_path[0][0][1]]
#print "old coord:", old_coord
#NEED TO REMOVE OLD POINT FROM COORDS!
#find index of array in nested array to remove!
itemindex = np.where(coords==old_coord)[0][0]
coords = list(coords)
#find index of array in nested array to remove!
del coords[itemindex]
coords = np.asarray(coords)
new_coord_first = new_coord
#----------------------------------------------------------------------
#generate new point coordinate
if not counter >= 1:
new_coord = ps.points_in_shape(shape_path, 1)[0]
else:
new_coord = new_coord
#place new coordinate in old set of coordinates
coords = np.append(coords, [new_coord], axis=0)
#generate new array of points in conjunction with the new randomly generated point!
new_coord_set = np.vstack([[new_coord[0],new_coord[1],coord1[0],\
coord1[1]] for coord1 in coords])
#generate new random point in place of all 'popped' points!
new_paths = map(paths_func, new_coord_set)
SHAPE = np.asarray(new_paths).shape
if not SHAPE == (130,):
#remove substitude back the old coordinate for the new coordinate!
coords = list(coords)
#find index of array in nested array to remove!
del coords[-1]
coords = np.asarray(coords)
#place new coordinate in old set of coordinates
coords = np.append(coords, [old_coord], axis=0)
#print "new paths shape:", SHAPE
#paths = np.asarray(paths)
#if np.asarray(new_paths).shape != (130,):
# print("This one's trouble")
# print np.asarray(new_paths).shape
# new_paths = np.asarray(new_paths[0]).reshape(130,)
del paths[rand_int]
SHAPE = (1,1)
#place new_paths in original path set!
#paths = np.insert(paths, [1], [new_paths], axis=0)
paths = np.append(paths, [new_paths], axis=0)
#paths = paths.append(new_paths)
#paths = np.concatenate((paths, [new_paths]), axis=0)
#paths = np.append(paths, new_paths, axis=0)
#create a flattened numpy array of size 2xN from the paths created!
paths_density_check = list(itertools.chain(*paths))
paths_density_check = np.asarray(list(itertools.chain(*paths_density_check)))
#keep all but the repeated coordinates by keeping only unique whole rows!
#method is slowed without the b contiguous array
b = np.ascontiguousarray(paths_density_check).view(np.dtype\
((np.void, paths_density_check.dtype.itemsize * \
paths_density_check.shape[1])))
_, idx = np.unique(b, return_index=True)
paths_density_check = np.unique(b).view(paths_density_check.dtype)\
.reshape(-1, paths_density_check.shape[1])
#plt.figure()
#plt.scatter(paths_density_check[:,0],paths_density_check[:,1])
#plt.savefig('{}.png'.format(counter))
#remove 3rd and 4th columns
#paths_density_check = np.column_stack((paths_density_check[:,0],
# paths_density_check[:,1]))
#remove all path points that lay outside the shape file polygon
#paths_density_check = ps.paths_in_shape(paths_density_check)
paths = list(paths)
# Estimate the 2D histogram
H, xedges, yedges = np.histogram2d(paths_density_check[:,0],
paths_density_check[:,1],
bins=nbins)
#edges_new = ps.paths_in_shape(np.column_stack((xedges,yedges)))
GRAD = np.abs(np.asarray(np.gradient(H)[0]))
# H needs to be rotated and flipped
H = np.rot90(H)
GRAD = np.rot90(GRAD)
H = np.flipud(H)
GRAD = np.flipud(GRAD)
<|fim▁hole|> H_avg1 = np.average(H)
grad_check1 = np.std(GRAD)
rand_indicator = random.randint(1,10)
if 0 < rand_indicator <= 5:
#half the time move the coordinates to low density locations.
WHERE = np.where(H < perc_high*H_avg1)
#scale these points with respect to the lat-lon limits!
Hminx, Hminy = WHERE[1], WHERE[0]
Hminx = (lonmax-lonmin)/(nbins) * Hminx + lonmin
Hminy = (latmax-latmin)/(nbins) * Hminy + latmin
#make sure all low density coordinates ARE within shapefile!
low_density_coords = ps.paths_in_shape(np.column_stack((Hminx, Hminy)))
if len(low_density_coords) == 0:
new_coord = ps.points_in_shape(shape_path, 1)[0]
#increase percentage of search if no new low density points are created!
perc_high +=0.05
elif len(low_density_coords) == 1:
new_coord = low_density_coords[0]
perc_high +=0.05
else:
new_coord = low_density_coords[random.randint(0,len(low_density_coords)-1)]
elif 5 < rand_indicator <= 10:
#half the time move coordinates to random locations.
new_coord = ps.points_in_shape(shape_path, 1)[0]
if counter == 0:
grad_ideal = 1e6
avg_ideal = 0
if grad_check1 < grad_ideal and avg_ideal < H_avg1:
#counter >= 1 and
#dump the coordinates!
#print grad_check1, grad_ideal
#print avg_ideal, H_avg1
print "Exporting new ideal coordinates."
with open(u'ideal_coordinates.pickle', 'wb') as f:
print "\nExporting new ideal coordinates."
pickle.dump(coords, f, protocol=2)
grad_ideal = grad_check1
avg_ideal = H_avg1
# find indices of pixels where H==HMIN
#HMATMIN = np.ma.masked_where(H>HMIN,H)
#only select coordinates where the density is 10% of the average or below!
fig = plt.figure()
plt.pcolormesh(xedges,yedges,H)
plt.xlabel('longitude (degrees)')
plt.ylabel('latitude (degrees)')
cbar = plt.colorbar()
cbar.ax.set_ylabel('Counts')
#plt.scatter(low_density_coords[:,0], low_density_coords[:,1], color='red')
fig.savefig("min_density.png".format(counter))
#print(len(paths))
#print(len(KEEP_PATHS))
else:
#RESET!
#remove new coordinate and replace with old coordinate
coords = list(coords)
del coords[-1]
coords = np.asarray(coords)
#place new coordinate in old set of coordinates
coords = np.append(coords, [old_coord], axis=0)
#remove new path and replace it with the old set!
paths = list(paths)
del paths[-1]
paths = list(np.append(paths, [old_path], axis=0))
#plt.scatter(Hminx, Hminy, color='yellow')
#grad_check2 = grad_check1
#H_avg2 = H_avg1
#print(counter)
counter+=1
counter2+=1
t1 = datetime.datetime.now()
print t1-t0<|fim▁end|> | # Mask zeros
H = np.ma.masked_where(H==0,H) # Mask pixels with a value of zero
GRAD = np.ma.masked_where(GRAD==0,GRAD) # Mask pixels with a value of zero
|
<|file_name|>directorio.py<|end_file_name|><|fim▁begin|>import os
import tempfile
import shutil
def listar(directorio):
"""Regresa uns lista con los archivos contenidos
en unca carpeta"""
archivos = os.listdir(directorio)
buff = []
for archivo in archivos:
ruta = os.path.join(directorio, archivo)
if os.path.isfile(ruta):
buff.append(ruta)
return buff
<|fim▁hole|> la variable prefijo define el prefijo que se usara para la
carpeta, por defecto se usara Gram"""
temp = tempfile.mkdtemp(prefix=prefijo)
return temp
def eliminar(ruta):
"""Elimina un directorio, toma como parametro la ruta del directorio
a eliminar"""
shutil.rmtree(ruta)<|fim▁end|> |
def crear(prefijo="Gram"):
"""Crea una carpeta temporal y regresa un string con la ruta |
<|file_name|>i_PA_DeprjDist.py<|end_file_name|><|fim▁begin|>import numpy as np
from astropy.coordinates import Angle, Distance
from astropy import units as u
from .angles2Plane import gal_theta
def vdm_2001_dep_dist_kpc(rho, phi, glx_theta, glx_incl, D_0):
"""
Deprojected angular distance from vdM & Cioni (2001).
    D is the distance associated with a point defined by its (ra, dec)
coordinates (included in the (rho, phi) values passed) assuming the point
is located directly on a plane, i.e.: z'=0 in Eq (7).
The plane itself is defined by its center coordinates (ra_0, dec_0),
included in the (rho, phi) values passed, the distance to those
coordinates (D_0), and the inclination (rotation) angles: glx_theta,
glx_incl.
d_kpc is the distance from point (ra, dec, D) to the center of said plane,
i.e.: (ra_0, dec_0, D_0).
"""
# Eq (8) from van der Marel & Cioni (2001).
s = np.sin(phi.radian - glx_theta.radian)
A = 0.5 * ((1 - s) * np.cos(glx_incl.radian - rho.radian) +
(1 + s) * np.cos(glx_incl.radian + rho.radian))
# This is really D/D_0, to simplify the d_kpc equation.
D = np.cos(glx_incl.radian) / A
# Apply the cosine law to obtain the deprojected distance in kpc.
d_kpc = D_0 * np.sqrt(1. + D**2 - 2 * D * np.cos(rho.radian))
# # The above is equivalent to obtaining the (x', y', z') coordinates of
# # the point on the inclined plane, and calculating its Euclidean
# # distance to the center of the plane.
# D = D * D_0 # Since D above is really D/D_0
# # Eqs (7) in vdM & Cioni (2001).
# x_p = D * np.sin(rho.radian) * np.cos(phi.radian - glx_theta.radian)
# y_p = D * (np.sin(rho.radian) * np.cos(glx_incl.radian) *
# np.sin(phi.radian - glx_theta.radian) + np.cos(rho.radian) *
# np.sin(glx_incl.radian)) - D_0 * np.sin(glx_incl.radian)
# # z_p = 0 since the source is located *on* the inclined disk.
# z_p = D * (np.sin(rho.radian) * np.sin(glx_incl.radian) *
# np.sin(phi.radian - glx_theta.radian) - np.cos(rho.radian) *
# np.cos(glx_incl.radian)) + D_0 * np.cos(glx_incl.radian)
# d_kpc2 = np.sqrt(x_p**2 + y_p**2 + z_p**2)
return d_kpc.value
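# Illustrative check (not part of the module): for a point at the galaxy
# centre (rho = 0) we get D/D_0 = 1, so the cosine law above collapses to
# D_0 * sqrt(1 + 1 - 2) = 0. The values below are made up for the example:
#
#   rho, phi = Angle(0., unit=u.degree), Angle(0., unit=u.degree)
#   glx_theta, glx_incl = Angle(0., unit=u.degree), Angle(35., unit=u.degree)
#   vdm_2001_dep_dist_kpc(rho, phi, glx_theta, glx_incl,
#                         Distance(50., unit=u.kpc))  # -> 0.0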
def get_deproj_dist(glx_PA, glx_incl, glx_dist, rho, phi):
"""
Computes deprojected galactocentric distance between cluster and the
center of the MC in kpc.
Based on: https://gist.github.com/jonathansick/9399842
Parameters
----------
glx_PA : :class:`astropy.coordinates.Angle`
Position angle of the galaxy disk.
glx_incl : :class:`astropy.coordinates.Angle`
Inclination angle of the galaxy disk.
glx_dist : :class:`astropy.coordinates.Distance`
Distance to galaxy.
rho :
Projected angular distance from cluster to center of galaxy.
phi :
Position angle of the cluster (West to East)
Returns
-------
dist_kpc : class:`astropy.coordinates.Distance`
Galactocentric distance(s) for coordinate point(s).
"""
# Obtain 'theta' position angle for the galaxy.
theta = gal_theta(glx_PA)
# Distance to galaxy in kpc.
D_0 = Distance(glx_dist.kpc, unit=u.kpc)
# Deprojected distance in kpc.
dist_kpc = vdm_2001_dep_dist_kpc(rho, phi, theta, glx_incl, D_0)
return dist_kpc
def main(rho, phi, inc_lst, pa_lst, gal_dist):
"""
Calculate deprojected distances for all clusters in this galaxy,
for all inclination and position angles defined.
These values depend on the coordinates of the clusters (rho, phi), the<|fim▁hole|> """
# Create empty list with correct shape.
dep_dist_i_PA_vals = [[[] for _ in inc_lst] for _ in pa_lst]
for i, inc in enumerate(inc_lst):
for j, pa in enumerate(pa_lst):
# Assign 'degrees' units before passing.
inc, pa = Angle(inc, unit=u.degree), Angle(pa, unit=u.degree)
# Obtain deprojected distances for all the clusters, in kpc,
# using the values of inclination and position angles passed.
dep_dist_kpc = get_deproj_dist(pa, inc, gal_dist, rho, phi)
# Store deprojected distance values.
dep_dist_i_PA_vals[i][j] = dep_dist_kpc
return dep_dist_i_PA_vals<|fim▁end|> | rotation angles that define each inclined plane (inc_lst, pa_lst), and the
distance (gal_dist) and center coordinates of the galaxy. |
<|file_name|>docker_api_exec_resize_test.go<|end_file_name|><|fim▁begin|>package main
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"strings"
"sync"
"github.com/docker/docker/integration-cli/checker"
"github.com/docker/docker/integration-cli/request"
"github.com/go-check/check"
)
func (s *DockerSuite) TestExecResizeAPIHeightWidthNoInt(c *check.C) {<|fim▁hole|> out, _ := dockerCmd(c, "run", "-d", "busybox", "top")
cleanedContainerID := strings.TrimSpace(out)
endpoint := "/exec/" + cleanedContainerID + "/resize?h=foo&w=bar"
res, _, err := request.Post(endpoint)
c.Assert(err, checker.IsNil)
c.Assert(res.StatusCode, checker.Equals, http.StatusBadRequest)
}
// Part of #14845
func (s *DockerSuite) TestExecResizeImmediatelyAfterExecStart(c *check.C) {
name := "exec_resize_test"
dockerCmd(c, "run", "-d", "-i", "-t", "--name", name, "--restart", "always", "busybox", "/bin/sh")
testExecResize := func() error {
data := map[string]interface{}{
"AttachStdin": true,
"Cmd": []string{"/bin/sh"},
}
uri := fmt.Sprintf("/containers/%s/exec", name)
res, body, err := request.Post(uri, request.JSONBody(data))
if err != nil {
return err
}
if res.StatusCode != http.StatusCreated {
return fmt.Errorf("POST %s is expected to return %d, got %d", uri, http.StatusCreated, res.StatusCode)
}
buf, err := request.ReadBody(body)
c.Assert(err, checker.IsNil)
out := map[string]string{}
err = json.Unmarshal(buf, &out)
if err != nil {
return fmt.Errorf("ExecCreate returned invalid json. Error: %q", err.Error())
}
execID := out["Id"]
if len(execID) < 1 {
return fmt.Errorf("ExecCreate got invalid execID")
}
payload := bytes.NewBufferString(`{"Tty":true}`)
conn, _, err := sockRequestHijack("POST", fmt.Sprintf("/exec/%s/start", execID), payload, "application/json", daemonHost())
if err != nil {
return fmt.Errorf("Failed to start the exec: %q", err.Error())
}
defer conn.Close()
_, rc, err := request.Post(fmt.Sprintf("/exec/%s/resize?h=24&w=80", execID), request.ContentType("text/plain"))
// It's probably a panic of the daemon if io.ErrUnexpectedEOF is returned.
if err == io.ErrUnexpectedEOF {
return fmt.Errorf("The daemon might have crashed.")
}
if err == nil {
rc.Close()
}
// We only interested in the io.ErrUnexpectedEOF error, so we return nil otherwise.
return nil
}
// The panic happens when daemon.ContainerExecStart is called but the
// container.Exec is not called.
// Because the panic is not 100% reproducible, we send the requests concurrently
// to increase the probability that the problem is triggered.
var (
n = 10
ch = make(chan error, n)
wg sync.WaitGroup
)
for i := 0; i < n; i++ {
wg.Add(1)
go func() {
defer wg.Done()
if err := testExecResize(); err != nil {
ch <- err
}
}()
}
wg.Wait()
select {
case err := <-ch:
c.Fatal(err.Error())
default:
}
}<|fim▁end|> | testRequires(c, DaemonIsLinux) |
<|file_name|>rcode.rs<|end_file_name|><|fim▁begin|>//! DNS response codes and extended response codes.
//!
//! The original DNS specification in [RFC 1035] specified four bits of the
//! message header as response code. The type [Rcode] defined herein
//! represents these codes. Later, [RFC 2671] (now [RFC 6891]) added eight
//! bits to the response code to be transmitted as part of the OPT
//! pseudo-resource record. To make matters even worse, the TSIG and TKEY
//! records defined by [RFC 2845] and [RFC 2930] use a 16 bit error code.
//! All of these codes share the same definition space. Even so, we have
//! separate types for each of these.
//!
//! [Rcode]: enum.Rcode.html
//! [RFC 2671]: https://tools.ietf.org/html/rfc2671
//! [RFC 2845]: https://tools.ietf.org/html/rfc2845
//! [RFC 2930]: https://tools.ietf.org/html/rfc2930
//! [RFC 6891]: https://tools.ietf.org/html/rfc6891
//!
use std::cmp;
use std::fmt;
use std::hash;
//------------ Rcode --------------------------------------------------------
/// DNS Response Codes.
///
/// The response code of a response indicates what happened on the server
/// when trying to answer the query. The code is a 4 bit value and part of
/// the header of a DNS message.
///
/// This response code was defined as part of [RFC 1035]. Later, [RFC 2671]
/// defined an extended response code of 12 bits using the lower four bits
/// from the header and eight additional bits stored in the OPT
/// pseudo-record. The type [OptRcode] represents this extended response
/// code. A third response code, now 16 bit wide, was defined for the
/// transaction authentication mechanism (TSIG) in [RFC 2845] and is
/// represented by [TsigRcode].
///
/// All three codes share the same name space. Their values are defined in
/// one registry, [IANA DNS RCODEs].
///
/// [OptRcode]: enum.OptRcode.html
/// [TsigRcode]: enum.TsigRcode.html
/// [IANA DNS RCODEs]: http://www.iana.org/assignments/dns-parameters/dns-parameters.xhtml#dns-parameters-6
/// [RFC 1035]: https://tools.ietf.org/html/rfc1035
/// [RFC 2671]: https://tools.ietf.org/html/rfc2671
#[derive(Clone, Copy, Debug)]
pub enum Rcode {
/// No error condition.
///
/// (Otherwise known as success.)
///
/// Defined in [RFC 1035].
///
/// [RFC 1035]: https://tools.ietf.org/html/rfc1035
NoError,
/// Format error.
///
/// The name server was unable to interpret the query.
///
/// Defined in [RFC 1035].
///
/// [RFC 1035]: https://tools.ietf.org/html/rfc1035
FormErr,
/// Server failure.
///
/// The name server was unable to process this query due to a problem
/// with the name server.
///
/// Defined in [RFC 1035].
///
/// [RFC 1035]: https://tools.ietf.org/html/rfc1035
ServFail,
/// Name error.
///
/// The domain name given in the query does not exist at the name server.
///
/// Defined in [RFC 1035].
///
/// [RFC 1035]: https://tools.ietf.org/html/rfc1035
NXDomain,
/// Not implemented.
///
/// The name server does not support the requested kind of query.
///
/// Defined in [RFC 1035].
///
/// [RFC 1035]: https://tools.ietf.org/html/rfc1035
NotImp,
/// Query refused.
///
/// The name server refused to perform the operation requested by the
/// query for policy reasons.
///
/// Defined in [RFC 1035].
///
/// [RFC 1035]: https://tools.ietf.org/html/rfc1035
Refused,
/// Name exists when it should not.
///
/// Returned for an UPDATE query when a domain requested to not exist
/// does in fact exist.
///
/// Returned when resolving a DNAME redirection when the resulting name
/// exceeds the length of 255 octets.
///
/// Defined in [RFC 2136] for the UPDATE query and [RFC 6672] for DNAME
/// redirection.
///
/// [RFC 2136]: https://tools.ietf.org/html/rfc2136
/// [RFC 6672]: https://tools.ietf.org/html/rfc6672
YXDomain,
/// RR set exists when it should not.
///
/// Returned for an UPDATE query when an RRset requested to not exist
/// does in fact exist.
///
/// Defined in [RFC 2136].
///
/// [RFC 2136]: https://tools.ietf.org/html/rfc2136
YXRRSet,
/// RR set that should exist does not.
///
/// Returned for an UPDATE query when an RRset requested to exist
/// does not.
///
/// Defined in [RFC 2136].
///
/// [RFC 2136]: https://tools.ietf.org/html/rfc2136
NXRRSet,
/// Server not authoritative for zone or client not authorized.
///
/// Returned for an UPDATE query when the server is not an authoritative
/// name server for the requested domain.
///
/// Returned for queries using TSIG when authorisation failed.
///
/// Defined in [RFC 2136] for UPDATE and [RFC 2845] for TSIG.
///
/// [RFC 2136]: https://tools.ietf.org/html/rfc2136
/// [RFC 2845]: https://tools.ietf.org/html/rfc2845
NotAuth,
/// Name not contained in zone.
///
/// A name used in the prerequisite or update section is not within the
/// zone given in the zone section.
///
/// Defined in [RFC 2136].
///
/// [RFC 2136]: https://tools.ietf.org/html/rfc2136
NotZone,
/// A raw, integer rcode value.
///
/// When converting to an `u8`, only the lower four bits are used.
Int(u8)
}
impl Rcode {
/// Creates an rcode from an integer.
///
/// Only the lower four bits of `value` are considered.
pub fn from_int(value: u8) -> Rcode {
use self::Rcode::*;
match value & 0x0F {
0 => NoError,
1 => FormErr,
2 => ServFail,
3 => NXDomain,
4 => NotImp,
5 => Refused,
6 => YXDomain,
7 => YXRRSet,
8 => NXRRSet,
9 => NotAuth,
10 => NotZone,
value => Int(value)
}
}
/// Returns the integer value for this rcode.
pub fn to_int(self) -> u8 {
use self::Rcode::*;
match self {
NoError => 0,
FormErr => 1,
ServFail => 2,
NXDomain => 3,
NotImp => 4,
Refused => 5,
YXDomain => 6,
YXRRSet => 7,
NXRRSet => 8,
NotAuth => 9,
NotZone => 10,
Int(value) => value & 0x0F
}
}
}
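// Illustrative example (not from the original source): `from_int` masks
// everything but the lower four bits, so values differing only in the
// upper bits map to the same rcode:
//
//     assert_eq!(Rcode::from_int(3), Rcode::NXDomain);
//     assert_eq!(Rcode::from_int(0x13), Rcode::NXDomain); // upper bits ignored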
//--- From
impl From<u8> for Rcode {
fn from(value: u8) -> Rcode { Rcode::from_int(value) }
}
impl From<Rcode> for u8 {
fn from(value: Rcode) -> u8 { value.to_int() }
}
//--- Display
impl fmt::Display for Rcode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::Rcode::*;
match *self {
NoError => "NOERROR".fmt(f),
FormErr => "FORMERR".fmt(f),
ServFail => "SERVFAIL".fmt(f),
NXDomain => "NXDOMAIN".fmt(f),
NotImp => "NOTIMP".fmt(f),
Refused => "REFUSED".fmt(f),
YXDomain => "YXDOMAIN".fmt(f),
YXRRSet => "YXRRSET".fmt(f),
NXRRSet => "NXRRSET".fmt(f),
NotAuth => "NOAUTH".fmt(f),
NotZone => "NOTZONE".fmt(f),
Int(i) => {
match Rcode::from_int(i) {
Rcode::Int(i) => i.fmt(f),
value => value.fmt(f)
}
}
}
}
}
//--- PartialEq and Eq
impl cmp::PartialEq for Rcode {
fn eq(&self, other: &Rcode) -> bool {
self.to_int() == other.to_int()<|fim▁hole|>
impl cmp::PartialEq<u8> for Rcode {
fn eq(&self, other: &u8) -> bool {
self.to_int() == *other
}
}
impl cmp::PartialEq<Rcode> for u8 {
fn eq(&self, other: &Rcode) -> bool {
*self == other.to_int()
}
}
impl cmp::Eq for Rcode { }
//--- PartialOrd and Ord
impl cmp::PartialOrd for Rcode {
fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
self.to_int().partial_cmp(&other.to_int())
}
}
impl cmp::PartialOrd<u8> for Rcode {
fn partial_cmp(&self, other: &u8) -> Option<cmp::Ordering> {
self.to_int().partial_cmp(other)
}
}
impl cmp::PartialOrd<Rcode> for u8 {
fn partial_cmp(&self, other: &Rcode) -> Option<cmp::Ordering> {
self.partial_cmp(&other.to_int())
}
}
impl cmp::Ord for Rcode {
fn cmp(&self, other: &Rcode) -> cmp::Ordering {
self.to_int().cmp(&other.to_int())
}
}
//--- Hash
impl hash::Hash for Rcode {
fn hash<H: hash::Hasher>(&self, state: &mut H) {
self.to_int().hash(state)
}
}
//------------ OptRcode -----------------------------------------------------
/// Extended DNS Response Codes for OPT records.
///
/// Originally, the response code embedded in the header of each DNS
/// message was four bits long. This code, defined in [RFC 1035], is
/// represented by the [Rcode] type. The extension mechanism for DNS
/// initially defined in [RFC 2671] and updated by [RFC 6891] added eight
/// more bits to be stored in the OPT pseudo-resource record. This type
/// represents the complete 12 bit extended response code.
///
/// There is a third, 16 bit wide response code for transaction
/// authentication (TSIG) defined in [RFC 2845] and represented by the
/// [`TsigRcode`] type. The code mostly shares the same name space except
/// for an unfortunate collision between the BADVERS and BADSIG values.
/// Because of this, we decided to have separate types.
///
/// The values for all three response code types are defined in
/// the [IANA DNS RCODEs] registry.
///
/// [Rcode]: enum.Rcode.html
/// [`TsigRcode`]: enum.TsigRcode.html
/// [IANA DNS RCODEs]: http://www.iana.org/assignments/dns-parameters/dns-parameters.xhtml#dns-parameters-6
/// [RFC 2671]: https://tools.ietf.org/html/rfc2671
/// [RFC 2845]: https://tools.ietf.org/html/rfc2845
/// [RFC 2930]: https://tools.ietf.org/html/rfc2930
/// [RFC 6891]: https://tools.ietf.org/html/rfc6891
#[derive(Clone, Copy, Debug)]
pub enum OptRcode {
/// No error condition.
///
/// (Otherwise known as success.)
///
/// Defined in [RFC 1035].
///
/// [RFC 1035]: https://tools.ietf.org/html/rfc1035
NoError,
/// Format error.
///
/// The name server was unable to interpret the query.
///
/// Defined in [RFC 1035].
///
/// [RFC 1035]: https://tools.ietf.org/html/rfc1035
FormErr,
/// Server failure.
///
/// The name server was unable to process this query due to a problem
/// with the name server.
///
/// Defined in [RFC 1035].
///
/// [RFC 1035]: https://tools.ietf.org/html/rfc1035
ServFail,
/// Name error.
///
/// The domain name given in the query does not exist at the name server.
///
/// Defined in [RFC 1035].
///
/// [RFC 1035]: https://tools.ietf.org/html/rfc1035
NXDomain,
/// Not implemented.
///
/// The name server does not support the requested kind of query.
///
/// Defined in [RFC 1035].
///
/// [RFC 1035]: https://tools.ietf.org/html/rfc1035
NotImp,
/// Query refused.
///
/// The name server refused to perform the operation requested by the
/// query for policy reasons.
///
/// Defined in [RFC 1035].
///
/// [RFC 1035]: https://tools.ietf.org/html/rfc1035
Refused,
/// Name exists when it should not.
///
/// Returned for an UPDATE query when a domain requested to not exist
/// does in fact exist.
///
/// Returned when resolving a DNAME redirection when the resulting name
/// exceeds the length of 255 octets.
///
/// Defined in [RFC 2136] for the UPDATE query and [RFC 6672] for DNAME
/// redirection.
///
/// [RFC 2136]: https://tools.ietf.org/html/rfc2136
/// [RFC 6672]: https://tools.ietf.org/html/rfc6672
YXDomain,
/// RR set exists when it should not.
///
/// Returned for an UPDATE query when an RRset requested to not exist
/// does in fact exist.
///
/// Defined in [RFC 2136].
///
/// [RFC 2136]: https://tools.ietf.org/html/rfc2136
YXRRSet,
/// RR set that should exist does not.
///
/// Returned for an UPDATE query when an RRset requested to exist
/// does not.
///
/// Defined in [RFC 2136].
///
/// [RFC 2136]: https://tools.ietf.org/html/rfc2136
NXRRSet,
/// Server not authoritative for zone or client not authorized.
///
/// Returned for an UPDATE query when the server is not an authoritative
/// name server for the requested domain.
///
/// Returned for queries using TSIG when authorisation failed.
///
/// Defined in [RFC 2136] for UPDATE and [RFC 2845] for TSIG.
///
/// [RFC 2136]: https://tools.ietf.org/html/rfc2136
/// [RFC 2845]: https://tools.ietf.org/html/rfc2845
NotAuth,
/// Name not contained in zone.
///
/// A name used in the prerequisite or update section is not within the
/// zone given in the zone section.
///
/// Defined in [RFC 2136].
///
/// [RFC 2136]: https://tools.ietf.org/html/rfc2136
NotZone,
/// Bad OPT version.
///
/// A name server does not implement the EDNS version requested in the
/// OPT record.
///
/// Defined in [RFC 6891].
///
/// [RFC 6891]: https://tools.ietf.org/html/rfc6891
BadVers,
// XXX We will not define the values from the TSIG and TKEY RFCs,
    // unless they are used in OPT records, too?
/// Bad or missing server cookie.
///
/// The request contained a COOKIE option either without a server cookie
/// or with a server cookie that did not validate.
///
/// Defined in [RFC 7873].
///
/// [RFC 7873]: https://tools.ietf.org/html/rfc7873
BadCookie,
/// A raw, integer rcode value.
///
/// When converting to a 12 bit code, the upper four bits are simply
/// ignored.
Int(u16)
}
impl OptRcode {
/// Creates an rcode from an integer.
///
/// Only the lower twelve bits of `value` are considered.
pub fn from_int(value: u16) -> OptRcode {
use self::OptRcode::*;
match value & 0x0FFF {
0 => NoError,
1 => FormErr,
2 => ServFail,
3 => NXDomain,
4 => NotImp,
5 => Refused,
6 => YXDomain,
7 => YXRRSet,
8 => NXRRSet,
9 => NotAuth,
10 => NotZone,
16 => BadVers,
23 => BadCookie,
value => Int(value)
}
}
/// Returns the integer value for this rcode.
pub fn to_int(self) -> u16 {
use self::OptRcode::*;
match self {
NoError => 0,
FormErr => 1,
ServFail => 2,
NXDomain => 3,
NotImp => 4,
Refused => 5,
YXDomain => 6,
YXRRSet => 7,
NXRRSet => 8,
NotAuth => 9,
NotZone => 10,
BadVers => 16,
BadCookie => 23,
            Int(value) => value & 0x0FFF
}
}
/// Creates an extended rcode value from its parts.
pub fn from_parts(rcode: Rcode, ext: u8) -> OptRcode {
if ext == 0 {
rcode.into()
}
else {
OptRcode::from_int((ext as u16) << 4 | (rcode.to_int() as u16))
}
}
/// Returns the two parts of an extended rcode value.
pub fn to_parts(self) -> (Rcode, u8) {
let res = self.to_int();
        (Rcode::from_int(res as u8), (res >> 4) as u8)
}
/// Returns the rcode part of the extended rcode.
pub fn rcode(&self) -> Rcode {
self.to_parts().0
}
/// Returns the extended octet of the extended rcode.
pub fn ext(&self) -> u8 {
self.to_parts().1
}
}
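// Illustrative example (not from the original source): BADVERS (16) splits
// into the 4 bit header part 0 (NOERROR) and the extension octet 1, and
// `from_parts` reassembles the same value:
//
//     let (rcode, ext) = OptRcode::BadVers.to_parts();
//     assert_eq!((rcode.to_int(), ext), (0, 1));
//     assert_eq!(OptRcode::from_parts(rcode, ext).to_int(), 16);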
//--- From
impl From<u16> for OptRcode {
fn from(value: u16) -> OptRcode { OptRcode::from_int(value) }
}
impl From<OptRcode> for u16 {
fn from(value: OptRcode) -> u16 { value.to_int() }
}
impl From<Rcode> for OptRcode {
fn from(value: Rcode) -> OptRcode { OptRcode::from_parts(value, 0) }
}
//--- Display
impl fmt::Display for OptRcode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::OptRcode::*;
match *self {
NoError => "NOERROR".fmt(f),
FormErr => "FORMERR".fmt(f),
ServFail => "SERVFAIL".fmt(f),
NXDomain => "NXDOMAIN".fmt(f),
NotImp => "NOTIMP".fmt(f),
Refused => "REFUSED".fmt(f),
YXDomain => "YXDOMAIN".fmt(f),
YXRRSet => "YXRRSET".fmt(f),
NXRRSet => "NXRRSET".fmt(f),
NotAuth => "NOAUTH".fmt(f),
NotZone => "NOTZONE".fmt(f),
BadVers => "BADVER".fmt(f),
BadCookie => "BADCOOKIE".fmt(f),
Int(i) => {
match OptRcode::from_int(i) {
Int(i) => i.fmt(f),
value => value.fmt(f)
}
}
}
}
}
//------------ TsigRcode ----------------------------------------------------
/// Response codes for transaction authentication (TSIG).
///
/// TSIG and TKEY resource records contain a 16 bit wide error field whose
/// values are an extension of the standard DNS [`Rcode`]. While it was
/// intended to also share the same space with the extended response codes
/// used by EDNS (see [`OptRcode`]), both used the value 16. To
/// distinguish between the two uses of this value, we have two separate
/// types.
///
/// The values for all three response code types are defined in
/// the [IANA DNS RCODEs] registry.
///
/// [`Rcode`]: enum.Rcode.html
/// [`OptRcode`]: enum.OptRcode.html
/// [IANA DNS RCODEs]: http://www.iana.org/assignments/dns-parameters/dns-parameters.xhtml#dns-parameters-6
#[derive(Clone, Copy, Debug)]
pub enum TsigRcode {
/// No error condition.
///
/// (Otherwise known as success.)
///
/// Defined in [RFC 1035].
///
/// [RFC 1035]: https://tools.ietf.org/html/rfc1035
NoError,
/// Format error.
///
/// The name server was unable to interpret the query.
///
/// Defined in [RFC 1035].
///
/// [RFC 1035]: https://tools.ietf.org/html/rfc1035
FormErr,
/// Server failure.
///
/// The name server was unable to process this query due to a problem
/// with the name server.
///
/// Defined in [RFC 1035].
///
/// [RFC 1035]: https://tools.ietf.org/html/rfc1035
ServFail,
/// Name error.
///
/// The domain name given in the query does not exist at the name server.
///
/// Defined in [RFC 1035].
///
/// [RFC 1035]: https://tools.ietf.org/html/rfc1035
NXDomain,
/// Not implemented.
///
/// The name server does not support the requested kind of query.
///
/// Defined in [RFC 1035].
///
/// [RFC 1035]: https://tools.ietf.org/html/rfc1035
NotImp,
/// Query refused.
///
/// The name server refused to perform the operation requested by the
/// query for policy reasons.
///
/// Defined in [RFC 1035].
///
/// [RFC 1035]: https://tools.ietf.org/html/rfc1035
Refused,
/// Name exists when it should not.
///
/// Returned for an UPDATE query when a domain requested to not exist
/// does in fact exist.
///
/// Returned when resolving a DNAME redirection when the resulting name
/// exceeds the length of 255 octets.
///
/// Defined in [RFC 2136] for the UPDATE query and [RFC 6672] for DNAME
/// redirection.
///
/// [RFC 2136]: https://tools.ietf.org/html/rfc2136
/// [RFC 6672]: https://tools.ietf.org/html/rfc6672
YXDomain,
/// RR set exists when it should not.
///
/// Returned for an UPDATE query when an RRset requested to not exist
/// does in fact exist.
///
/// Defined in [RFC 2136].
///
/// [RFC 2136]: https://tools.ietf.org/html/rfc2136
YXRRSet,
/// RR set that should exist does not.
///
/// Returned for an UPDATE query when an RRset requested to exist
/// does not.
///
/// Defined in [RFC 2136].
///
/// [RFC 2136]: https://tools.ietf.org/html/rfc2136
NXRRSet,
/// Server not authoritative for zone or client not authorized.
///
/// Returned for an UPDATE query when the server is not an authoritative
/// name server for the requested domain.
///
/// Returned for queries using TSIG when authorisation failed.
///
/// Defined in [RFC 2136] for UPDATE and [RFC 2845] for TSIG.
///
/// [RFC 2136]: https://tools.ietf.org/html/rfc2136
/// [RFC 2845]: https://tools.ietf.org/html/rfc2845
NotAuth,
/// Name not contained in zone.
///
/// A name used in the prerequisite or update section is not within the
/// zone given in the zone section.
///
/// Defined in [RFC 2136].
///
/// [RFC 2136]: https://tools.ietf.org/html/rfc2136
NotZone,
/// TSIG signature failure.
///
/// The TSIG signature fails to verify.
///
/// Defined in [RFC 2845].
///
/// [RFC 2845]: https://tools.ietf.org/html/rfc2845
BadSig,
/// Key not recognized.
///
/// The server did not recognize the key used for generating the
/// signature.
///
/// Defined in [RFC 2845].
///
/// [RFC 2845]: https://tools.ietf.org/html/rfc2845
BadKey,
/// Signature out of time window.
///
/// The server time was outside the time interval specified by the
/// request.
///
/// Defined in [RFC 2845].
///
/// [RFC 2845]: https://tools.ietf.org/html/rfc2845
BadTime,
/// Bad TKEY mode.
///
/// The mode field in a TKEY resource record contained a mode not
/// supported by the server.
///
/// Defined in [RFC 2930].
///
/// [RFC 2930]: https://tools.ietf.org/html/rfc2930
BadMode,
/// Duplicate key name.
///
/// In TKEY records, when establishing a new key, the name used already
/// exists at the server or when deleting a key, a key of this name does
/// not exist.
///
/// Defined in [RFC 2930].
///
/// [RFC 2930]: https://tools.ietf.org/html/rfc2930
BadName,
/// Algorithm not supported.
///
/// The value is defined in [RFC 2930] but never actually explained.
/// Presumably, it will be returned when the algorithm field of a TKEY
/// record contains a value not supported by the server.
///
/// [RFC 2930]: https://tools.ietf.org/html/rfc2930
BadAlg,
/// Bad truncation.
///
/// A TSIG record was received with a MAC too short for the local
/// policy in force.
///
/// Defined in [RFC 4635].
///
/// [RFC 4635]: https://tools.ietf.org/html/rfc4635
BadTrunc,
/// Bad or missing server cookie.
///
/// The request contained a COOKIE option either without a server cookie
/// or with a server cookie that did not validate.
///
/// Defined in [RFC 7873].
///
/// [RFC 7873]: https://tools.ietf.org/html/rfc7873
BadCookie,
/// A raw, integer rcode value.
///
/// When converting to a 12 bit code, the upper four bits are simply
/// ignored.
Int(u16)
}
impl TsigRcode {
/// Creates an rcode from an integer.
pub fn from_int(value: u16) -> TsigRcode {
use self::TsigRcode::*;
match value {
0 => NoError,
1 => FormErr,
2 => ServFail,
3 => NXDomain,
4 => NotImp,
5 => Refused,
6 => YXDomain,
7 => YXRRSet,
8 => NXRRSet,
9 => NotAuth,
10 => NotZone,
16 => BadSig,
17 => BadKey,
18 => BadTime,
19 => BadMode,
20 => BadName,
21 => BadAlg,
22 => BadTrunc,
23 => BadCookie,
value => Int(value)
}
}
/// Returns the integer value for this rcode.
pub fn to_int(self) -> u16 {
use self::TsigRcode::*;
match self {
NoError => 0,
FormErr => 1,
ServFail => 2,
NXDomain => 3,
NotImp => 4,
Refused => 5,
YXDomain => 6,
YXRRSet => 7,
NXRRSet => 8,
NotAuth => 9,
NotZone => 10,
BadSig => 16,
BadKey => 17,
BadTime => 18,
BadMode => 19,
BadName => 20,
BadAlg => 21,
BadTrunc => 22,
BadCookie => 23,
            Int(value) => value & 0x0FFF
}
}
}
//--- From
impl From<u16> for TsigRcode {
fn from(value: u16) -> TsigRcode { TsigRcode::from_int(value) }
}
impl From<TsigRcode> for u16 {
fn from(value: TsigRcode) -> u16 { value.to_int() }
}
impl From<Rcode> for TsigRcode {
fn from(value: Rcode) -> TsigRcode {
TsigRcode::from_int(value.to_int() as u16)
}
}
impl From<OptRcode> for TsigRcode {
fn from(value: OptRcode) -> TsigRcode {
TsigRcode::from_int(value.to_int())
}
}
//--- Display
impl fmt::Display for TsigRcode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::TsigRcode::*;
match *self {
NoError => "NOERROR".fmt(f),
FormErr => "FORMERR".fmt(f),
ServFail => "SERVFAIL".fmt(f),
NXDomain => "NXDOMAIN".fmt(f),
NotImp => "NOTIMP".fmt(f),
Refused => "REFUSED".fmt(f),
YXDomain => "YXDOMAIN".fmt(f),
YXRRSet => "YXRRSET".fmt(f),
NXRRSet => "NXRRSET".fmt(f),
NotAuth => "NOAUTH".fmt(f),
NotZone => "NOTZONE".fmt(f),
BadSig => "BADSIG".fmt(f),
BadKey => "BADKEY".fmt(f),
BadTime => "BADTIME".fmt(f),
BadMode => "BADMODE".fmt(f),
BadName => "BADNAME".fmt(f),
BadAlg => "BADALG".fmt(f),
BadTrunc => "BADTRUNC".fmt(f),
BadCookie => "BADCOOKIE".fmt(f),
Int(i) => {
match TsigRcode::from_int(i) {
Int(i) => i.fmt(f),
value => value.fmt(f)
}
}
}
}
}<|fim▁end|> | }
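
//--- Illustrative sketch
//
// A brief sanity check of the conversion and display behaviour documented
// above. This is an added sketch, not part of the original module.
#[cfg(test)]
mod tsig_rcode_sketch {
    use super::TsigRcode;

    #[test]
    fn conversions_and_display() {
        // BADSIG is defined as value 16 and round-trips through `u16`.
        assert_eq!(TsigRcode::from_int(16).to_int(), 16);
        assert_eq!(format!("{}", TsigRcode::from_int(16)), "BADSIG");

        // An unassigned value falls through to the `Int` variant and
        // survives the 12 bit mask applied by `to_int`.
        assert_eq!(TsigRcode::from_int(100).to_int(), 100);

        // The `From` impls delegate to `from_int` and `to_int`.
        assert_eq!(u16::from(TsigRcode::from(9u16)), 9);
    }
}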
} |
<|file_name|>count.ts<|end_file_name|><|fim▁begin|>import { expectTypeOf } from "expect-type";
import { Model, Op } from 'sequelize';
class MyModel extends Model {}
expectTypeOf(MyModel.count()).toEqualTypeOf<Promise<number>>();
expectTypeOf(MyModel.count({ group: 'tag' })).toEqualTypeOf<Promise<{ [key: string]: number }>>();
expectTypeOf(MyModel.count({ col: 'tag', distinct: true })).toEqualTypeOf<Promise<number>>();<|fim▁hole|> [Op.gte]: new Date()
}
},
useMaster: false
})).toEqualTypeOf<Promise<number>>();<|fim▁end|> | expectTypeOf(MyModel.count({
where: {
updatedAt: { |
<|file_name|>CLAClassifierRegion.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
This file implements the CLA Classifier region. See the comments in the class
definition of CLAClassifierRegion for a description.
"""
from PyRegion import PyRegion
from nupic.algorithms.cla_classifier_factory import CLAClassifierFactory
###############################################################################
class CLAClassifierRegion(PyRegion):
"""
CLAClassifierRegion implements a CLA specific classifier that accepts a binary
input from the level below (the "activationPattern") and information from the
sensor and encoders (the "classification") describing the input to the system
at that time step.
  When learning, for every bit in the activation pattern, it records a history of the
classification each time that bit was active. The history is bounded by a
maximum allowed age so that old entries are thrown away.
For inference, it takes an ensemble approach. For every active bit in the
activationPattern, it looks up the most likely classification(s) from the
history stored for that bit and then votes across these to get the resulting
classification(s).
The caller can choose to tell the region that the classifications for
iteration N+K should be aligned with the activationPattern for iteration N.
This results in the classifier producing predictions for K steps in advance.
Any number of different K's can be specified, allowing the classifier to learn
and infer multi-step predictions for a number of steps in advance.
"""
###############################################################################
@classmethod
def getSpec(cls):
ns = dict(
description=CLAClassifierRegion.__doc__,
singleNodeOnly=True,
# The inputs and outputs are not used in this region because they are
# either sparse vectors or dictionaries and hence don't fit the "vector
# of real" input/output pattern.
# There is a custom compute() function provided that accepts the
# inputs and outputs.
inputs=dict(
categoryIn=dict(
description='Category of the input sample',
dataType='Real32',
count=1,
required=True,
regionLevel=True,
isDefaultInput=False,
requireSplitterMap=False),
bottomUpIn=dict(
description='Belief values over children\'s groups',
dataType='Real32',
count=0,
required=True,
regionLevel=False,
isDefaultInput=True,
requireSplitterMap=False),
),
outputs=dict(),
parameters=dict(
learningMode=dict(
description='Boolean (0/1) indicating whether or not a region '
'is in learning mode.',
dataType='UInt32',
count=1,
constraints='bool',
defaultValue=1,
accessMode='ReadWrite'),
inferenceMode=dict(
description='Boolean (0/1) indicating whether or not a region '
'is in inference mode.',
dataType='UInt32',
count=1,
constraints='bool',
defaultValue=0,
accessMode='ReadWrite'),
steps=dict(
description='Comma separated list of the desired steps of '
'prediction that the classifier should learn',
dataType="Byte",
count=0,
constraints='',
defaultValue='1',
accessMode='Create'),
alpha=dict(
description='The alpha used to compute running averages of the '
'bucket duty cycles for each activation pattern bit. A lower '
'alpha results in longer term memory',
dataType="Real32",
count=1,
constraints='',
defaultValue=0.001,
accessMode='Create'),
implementation=dict(
description='The classifier implementation to use.',
accessMode='ReadWrite',
dataType='Byte',
count=0,
constraints='enum: py, cpp'),
clVerbosity=dict(
description='An integer that controls the verbosity level, '
'0 means no verbose output, increasing integers '
'provide more verbosity.',
dataType='UInt32',
count=1,
constraints='',
defaultValue=0 ,
accessMode='ReadWrite'),
),
commands=dict()
)
return ns
###############################################################################
def __init__(self,
steps='1',
alpha=0.001,
clVerbosity=0,
implementation=None,
):
# Convert the steps designation to a list
self.steps = steps
self.stepsList = eval("[%s]" % (steps))
self.alpha = alpha
self.verbosity = clVerbosity
# Initialize internal structures
self._claClassifier = CLAClassifierFactory.create(
steps=self.stepsList,
alpha=self.alpha,
verbosity=self.verbosity,
implementation=implementation,
)
self.learningMode = True
self.inferenceMode = False
self._initEphemerals()
###############################################################################
def _initEphemerals(self):
pass<|fim▁hole|>
###############################################################################
def initialize(self, dims, splitterMaps):
pass
###############################################################################
def clear(self):
self._claClassifier.clear()
###############################################################################
def getParameter(self, name, index=-1):
"""
Get the value of the parameter.
@param name -- the name of the parameter to retrieve, as defined
by the Node Spec.
"""
# If any spec parameter name is the same as an attribute, this call
# will get it automatically, e.g. self.learningMode
return PyRegion.getParameter(self, name, index)
###############################################################################
def setParameter(self, name, index, value):
"""
Set the value of the parameter.
@param name -- the name of the parameter to update, as defined
by the Node Spec.
@param value -- the value to which the parameter is to be set.
"""
if name == "learningMode":
self.learningMode = bool(int(value))
elif name == "inferenceMode":
self.inferenceMode = bool(int(value))
else:
return PyRegion.setParameter(self, name, index, value)
###############################################################################
def reset(self):
pass
###############################################################################
def compute(self, inputs, outputs):
"""
Process one input sample.
This method is called by the runtime engine.
We don't use this method in this region because the inputs and outputs don't
fit the standard "vector of reals" used by the engine. Instead, call
the customCompute() method directly
"""
pass
###############################################################################
def customCompute(self, recordNum, patternNZ, classification):
"""
Process one input sample.
This method is called by outer loop code outside the nupic-engine. We
use this instead of the nupic engine compute() because our inputs and
outputs aren't fixed size vectors of reals.
Parameters:
--------------------------------------------------------------------
patternNZ: list of the active indices from the output below
classification: dict of the classification information:
bucketIdx: index of the encoder bucket
actValue: actual value going into the encoder
retval: dict containing inference results, one entry for each step in
self.steps. The key is the number of steps, the value is an
array containing the relative likelihood for each bucketIdx
starting from bucketIdx 0.
for example:
{1 : [0.1, 0.3, 0.2, 0.7]
4 : [0.2, 0.4, 0.3, 0.5]}
"""
return self._claClassifier.compute( recordNum=recordNum,
patternNZ=patternNZ,
classification=classification,
learn = self.learningMode,
infer = self.inferenceMode)
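  # Illustrative call (hypothetical values, not part of the original file):
  #
  #   region = CLAClassifierRegion(steps='1', alpha=0.001)
  #   region.setParameter('learningMode', -1, 1)
  #   region.setParameter('inferenceMode', -1, 1)
  #   result = region.customCompute(
  #       recordNum=0,
  #       patternNZ=[3, 7, 12],  # active bits from the level below
  #       classification={'bucketIdx': 2, 'actValue': 3.5})
  #   # e.g. result == {1: [0.05, 0.10, 0.70, 0.15]}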
###############################################################################
if __name__ == '__main__':
from nupic.engine import Network
n = Network()
classifier = n.addRegion(
'classifier',
'py.CLAClassifierRegion',
'{ steps: "1,2", maxAge: 1000}'
)<|fim▁end|> | |
<|file_name|>script_task.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The script task is the task that owns the DOM in memory, runs JavaScript, and spawns parsing
//! and layout tasks. It's in charge of processing events for all same-origin pages in a frame
//! tree, and manages the entire lifetime of pages in the frame tree from initial request to
//! teardown.
//!
//! Page loads follow a two-step process. When a request for a new page load is received, the
//! network request is initiated and the relevant data pertaining to the new page is stashed.
//! While the non-blocking request is ongoing, the script task is free to process further events,
//! noting when they pertain to ongoing loads (such as resizes/viewport adjustments). When the
//! initial response is received for an ongoing load, the second phase starts - the frame tree
//! entry is created, along with the Window and Document objects, and the appropriate parser
//! takes over the response body. Once parsing is complete, the document lifecycle for loading
//! a page runs its course and the script task returns to processing events in the main event
//! loop.
#![allow(unsafe_code)]
use document_loader::{LoadType, DocumentLoader, NotifierData};
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::DocumentBinding::{DocumentMethods, DocumentReadyState};
use dom::bindings::codegen::InheritTypes::{ElementCast, EventTargetCast, NodeCast, EventCast};
use dom::bindings::conversions::FromJSValConvertible;
use dom::bindings::conversions::StringificationBehavior;
use dom::bindings::js::{JS, RootCollection, trace_roots};
use dom::bindings::js::{RootCollectionPtr, Root, RootedReference};
use dom::bindings::refcounted::{LiveDOMReferences, Trusted, TrustedReference, trace_refcounted_objects};
use dom::bindings::structuredclone::StructuredCloneData;
use dom::bindings::trace::{JSTraceable, trace_traceables, RootedVec};
use dom::bindings::utils::{WRAP_CALLBACKS, DOM_CALLBACKS};
use dom::document::{Document, IsHTMLDocument, DocumentHelpers, DocumentProgressHandler,
DocumentProgressTask, DocumentSource, MouseEventType};
use dom::element::{Element, AttributeHandlers};
use dom::event::{EventHelpers, EventBubbles, EventCancelable};
use dom::htmliframeelement::HTMLIFrameElementHelpers;
use dom::uievent::UIEvent;
use dom::node::{Node, NodeHelpers, NodeDamage, window_from_node};
use dom::servohtmlparser::{ServoHTMLParser, ParserContext};
use dom::window::{Window, WindowHelpers, ScriptHelpers, ReflowReason};
use dom::worker::TrustedWorkerAddress;
use parse::html::{ParseContext, parse_html};
use layout_interface::{self, NewLayoutTaskInfo, ScriptLayoutChan, LayoutChan, ReflowGoal};
use layout_interface::{ReflowQueryType};
use network_listener::NetworkListener;
use page::{Page, IterablePage, Frame};
use timers::TimerId;
use devtools;
use webdriver_handlers;
use devtools_traits::{DevtoolsControlPort, DevtoolsPageInfo, DevtoolScriptControlMsg};
use devtools_traits::{ScriptToDevtoolsControlMsg, TimelineMarker, TimelineMarkerType};
use devtools_traits::{TracingMetadata};
use script_traits::CompositorEvent::{MouseDownEvent, MouseUpEvent};
use script_traits::CompositorEvent::{MouseMoveEvent, KeyEvent};
use script_traits::CompositorEvent::{ResizeEvent, ClickEvent};
use script_traits::{CompositorEvent, MouseButton};
use script_traits::{ConstellationControlMsg, ScriptControlChan};
use script_traits::{NewLayoutInfo, OpaqueScriptLayoutChannel};
use script_traits::{ScriptState, ScriptTaskFactory};
use msg::compositor_msg::{LayerId, ScriptListener};
use msg::constellation_msg::{ConstellationChan, FocusType};
use msg::constellation_msg::{LoadData, PipelineId, SubpageId, MozBrowserEvent, WorkerId};
use msg::constellation_msg::{Failure, WindowSizeData, PipelineExitType};
use msg::constellation_msg::Msg as ConstellationMsg;
use msg::webdriver_msg::WebDriverScriptCommand;
use net_traits::{ResourceTask, LoadConsumer, ControlMsg, Metadata};
use net_traits::LoadData as NetLoadData;
use net_traits::image_cache_task::{ImageCacheChan, ImageCacheTask, ImageCacheResult};
use net_traits::storage_task::StorageTask;
use profile_traits::mem::{self, Report, Reporter, ReporterRequest, ReportKind, ReportsChan};
use string_cache::Atom;
use util::str::DOMString;
use util::task::spawn_named_with_send_on_failure;
use util::task_state;
use euclid::Rect;
use euclid::point::Point2D;
use hyper::header::{LastModified, Headers};
use ipc_channel::ipc::{self, IpcSender};
use ipc_channel::router::ROUTER;
use js::glue::CollectServoSizes;
use js::jsapi::{JS_SetWrapObjectCallbacks, JS_AddExtraGCRootsTracer, DisableIncrementalGC};
use js::jsapi::{JSContext, JSRuntime, JSTracer};
use js::jsapi::{JS_GetRuntime, JS_SetGCCallback, JSGCStatus, JSAutoRequest, SetDOMCallbacks};
use js::jsapi::{SetDOMProxyInformation, DOMProxyShadowsResult, HandleObject, HandleId, RootedValue};
use js::jsval::UndefinedValue;
use js::rust::Runtime;
use url::Url;
use libc;
use std::any::Any;
use std::borrow::ToOwned;
use std::cell::{Cell, RefCell};
use std::collections::HashSet;
use std::mem as std_mem;
use std::option::Option;
use std::ptr;
use std::rc::Rc;
use std::result::Result;
use std::sync::{Arc, Mutex};
use std::sync::mpsc::{channel, Sender, Receiver, Select};
use time::Tm;
use hyper::header::{ContentType, HttpDate};
use hyper::mime::{Mime, TopLevel, SubLevel};
thread_local!(pub static STACK_ROOTS: Cell<Option<RootCollectionPtr>> = Cell::new(None));
thread_local!(static SCRIPT_TASK_ROOT: RefCell<Option<*const ScriptTask>> = RefCell::new(None));
unsafe extern fn trace_rust_roots(tr: *mut JSTracer, _data: *mut libc::c_void) {
SCRIPT_TASK_ROOT.with(|root| {
if let Some(script_task) = *root.borrow() {
(*script_task).trace(tr);
}
});
trace_traceables(tr);
trace_roots(tr);
}
/// A document load that is in the process of fetching the requested resource. Contains
/// data that will need to be present when the document and frame tree entry are created,
/// but is only easily available at initiation of the load and on a push basis (so some
/// data will be updated according to future resize events, viewport changes, etc.)
#[derive(JSTraceable)]
struct InProgressLoad {
/// The pipeline which requested this load.
pipeline_id: PipelineId,
/// The parent pipeline and child subpage associated with this load, if any.
parent_info: Option<(PipelineId, SubpageId)>,
/// The current window size associated with this pipeline.
window_size: Option<WindowSizeData>,
/// Channel to the layout task associated with this pipeline.
layout_chan: LayoutChan,
    /// The current viewport clipping rectangle applying to this pipeline, if any.
clip_rect: Option<Rect<f32>>,
/// The requested URL of the load.
url: Url,
}
impl InProgressLoad {
/// Create a new InProgressLoad object.
fn new(id: PipelineId,
parent_info: Option<(PipelineId, SubpageId)>,
layout_chan: LayoutChan,
window_size: Option<WindowSizeData>,
url: Url) -> InProgressLoad {
InProgressLoad {
pipeline_id: id,
parent_info: parent_info,
layout_chan: layout_chan,
window_size: window_size,
clip_rect: None,
url: url,
}
}
}
#[derive(Copy, Clone)]
pub enum TimerSource {
FromWindow(PipelineId),
FromWorker
}
pub trait Runnable {
fn handler(self: Box<Self>);
}
pub trait MainThreadRunnable {
fn handler(self: Box<Self>, script_task: &ScriptTask);
}
/// Messages used to control script event loops, such as ScriptTask and
/// DedicatedWorkerGlobalScope.
pub enum ScriptMsg {
/// Acts on a fragment URL load on the specified pipeline (only dispatched
/// to ScriptTask).
TriggerFragment(PipelineId, String),
/// Begins a content-initiated load on the specified pipeline (only
/// dispatched to ScriptTask).
Navigate(PipelineId, LoadData),
/// Fires a JavaScript timeout
/// TimerSource must be FromWindow when dispatched to ScriptTask and
    /// must be FromWorker when dispatched to a DedicatedWorkerGlobalScope
FireTimer(TimerSource, TimerId),
/// Notifies the script that a window associated with a particular pipeline
/// should be closed (only dispatched to ScriptTask).
ExitWindow(PipelineId),
/// Message sent through Worker.postMessage (only dispatched to
/// DedicatedWorkerGlobalScope).
DOMMessage(StructuredCloneData),
/// Generic message that encapsulates event handling.
RunnableMsg(Box<Runnable+Send>),
/// Generic message for running tasks in the ScriptTask
MainThreadRunnableMsg(Box<MainThreadRunnable+Send>),
/// A DOM object's last pinned reference was removed (dispatched to all tasks).
RefcountCleanup(TrustedReference),
/// Notify a document that all pending loads are complete.
DocumentLoadsComplete(PipelineId),
/// Requests that the script task measure its memory usage. The results are sent back via the
/// supplied channel.
CollectReports(ReportsChan),
}
/// A cloneable interface for communicating with an event loop.
pub trait ScriptChan {
/// Send a message to the associated event loop.
fn send(&self, msg: ScriptMsg) -> Result<(), ()>;
/// Clone this handle.
fn clone(&self) -> Box<ScriptChan+Send>;
}
/// An interface for receiving ScriptMsg values in an event loop. Used for synchronous DOM
/// APIs that need to abstract over multiple kinds of event loops (worker/main thread) with
/// different Receiver interfaces.
pub trait ScriptPort {
fn recv(&self) -> ScriptMsg;
}
impl ScriptPort for Receiver<ScriptMsg> {
fn recv(&self) -> ScriptMsg {
self.recv().unwrap()
}
}
impl ScriptPort for Receiver<(TrustedWorkerAddress, ScriptMsg)> {
fn recv(&self) -> ScriptMsg {
self.recv().unwrap().1
}
}
/// Encapsulates internal communication within the script task.
#[derive(JSTraceable)]
pub struct NonWorkerScriptChan(pub Sender<ScriptMsg>);
impl ScriptChan for NonWorkerScriptChan {
fn send(&self, msg: ScriptMsg) -> Result<(), ()> {
let NonWorkerScriptChan(ref chan) = *self;
return chan.send(msg).map_err(|_| ());
}
fn clone(&self) -> Box<ScriptChan+Send> {
let NonWorkerScriptChan(ref chan) = *self;
box NonWorkerScriptChan((*chan).clone())
}
}
impl NonWorkerScriptChan {
/// Creates a new script chan.
pub fn new() -> (Receiver<ScriptMsg>, Box<NonWorkerScriptChan>) {
let (chan, port) = channel();
(port, box NonWorkerScriptChan(chan))
}
}
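// Illustrative usage sketch of the cloneable `ScriptChan` abstraction
// (`SomeRunnable` is a hypothetical `Runnable` impl; this sketch is not
// part of the original file):
//
//     let (port, chan) = NonWorkerScriptChan::new();
//     let handle: Box<ScriptChan + Send> = chan.clone();
//     handle.send(ScriptMsg::RunnableMsg(box SomeRunnable)).unwrap();
//     // the event loop later drains `port` and dispatches the message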
pub struct StackRootTLS;
impl StackRootTLS {
pub fn new(roots: &RootCollection) -> StackRootTLS {
STACK_ROOTS.with(|ref r| {
r.set(Some(RootCollectionPtr(roots as *const _)))
});
StackRootTLS
}
}
impl Drop for StackRootTLS {
fn drop(&mut self) {
STACK_ROOTS.with(|ref r| r.set(None));
}
}
/// The script task for a frame tree: it owns the DOM for all same-origin pages in
/// the tree, runs their JavaScript, and manages their lifecycle from load to teardown.
#[derive(JSTraceable)]
// ScriptTask instances are rooted on creation, so this is okay
#[allow(unrooted_must_root)]
pub struct ScriptTask {
/// A handle to the information pertaining to page layout
page: DOMRefCell<Option<Rc<Page>>>,
/// A list of data pertaining to loads that have not yet received a network response
incomplete_loads: DOMRefCell<Vec<InProgressLoad>>,
/// A handle to the image cache task.
image_cache_task: ImageCacheTask,
/// A handle to the resource task.
resource_task: ResourceTask,
/// A handle to the storage task.
storage_task: StorageTask,
/// The port on which the script task receives messages (load URL, exit, etc.)
port: Receiver<ScriptMsg>,
/// A channel to hand out to script task-based entities that need to be able to enqueue
/// events in the event queue.
chan: NonWorkerScriptChan,
/// A channel to hand out to tasks that need to respond to a message from the script task.
control_chan: ScriptControlChan,
/// The port on which the constellation and layout tasks can communicate with the
/// script task.
control_port: Receiver<ConstellationControlMsg>,
/// For communicating load url messages to the constellation
constellation_chan: ConstellationChan,
/// A handle to the compositor for communicating ready state messages.
compositor: DOMRefCell<ScriptListener>,
/// The port on which we receive messages from the image cache
image_cache_port: Receiver<ImageCacheResult>,
/// The channel on which the image cache can send messages to ourself.
image_cache_channel: ImageCacheChan,
/// For providing contact with the memory profiler.
mem_profiler_chan: mem::ProfilerChan,
/// For providing instructions to an optional devtools server.
devtools_chan: Option<IpcSender<ScriptToDevtoolsControlMsg>>,
/// For receiving commands from an optional devtools server. Will be ignored if
/// no such server exists.
devtools_port: DevtoolsControlPort,
devtools_sender: IpcSender<DevtoolScriptControlMsg>,
/// For sending timeline markers. Will be ignored if
/// no devtools server
devtools_markers: RefCell<HashSet<TimelineMarkerType>>,
devtools_marker_sender: RefCell<Option<IpcSender<TimelineMarker>>>,
/// The JavaScript runtime.
js_runtime: Rc<Runtime>,
mouse_over_targets: DOMRefCell<Vec<JS<Node>>>,
/// List of pipelines that have been owned and closed by this script task.
closed_pipelines: RefCell<HashSet<PipelineId>>,
}
/// In the event of task failure, all data on the stack runs its destructor. However, there
/// are no reachable, owning pointers to the DOM memory, so it never gets freed by default
/// when the script task fails. The ScriptMemoryFailsafe uses the destructor bomb pattern
/// to forcibly tear down the JS compartments for pages associated with the failing ScriptTask.
struct ScriptMemoryFailsafe<'a> {
owner: Option<&'a ScriptTask>,
}
impl<'a> ScriptMemoryFailsafe<'a> {
fn neuter(&mut self) {
self.owner = None;
}
fn new(owner: &'a ScriptTask) -> ScriptMemoryFailsafe<'a> {
ScriptMemoryFailsafe {
owner: Some(owner),
}
}
}
impl<'a> Drop for ScriptMemoryFailsafe<'a> {
#[allow(unrooted_must_root)]
fn drop(&mut self) {
match self.owner {
Some(owner) => {
unsafe {
let page = owner.page.borrow_for_script_deallocation();
for page in page.iter() {
let window = page.window();
window.r().clear_js_runtime_for_script_deallocation();
}
}
}
None => (),
}
}
}
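
// A self-contained sketch (not Servo code) of the destructor bomb pattern
// used by `ScriptMemoryFailsafe`: emergency cleanup runs when the guard is
// dropped, unless the success path explicitly neuters it first.
#[cfg(test)]
mod failsafe_pattern_sketch {
    use std::cell::Cell;

    struct Bomb<'a> {
        owner: Option<&'a Cell<bool>>,
    }

    impl<'a> Bomb<'a> {
        fn neuter(&mut self) {
            self.owner = None;
        }
    }

    impl<'a> Drop for Bomb<'a> {
        fn drop(&mut self) {
            // The real pattern would perform emergency teardown here.
            if let Some(flag) = self.owner {
                flag.set(true);
            }
        }
    }

    #[test]
    fn drop_fires_unless_neutered() {
        let torn_down = Cell::new(false);
        {
            let _bomb = Bomb { owner: Some(&torn_down) };
            // failure path: guard dropped while still armed
        }
        assert!(torn_down.get());

        let torn_down = Cell::new(false);
        {
            let mut bomb = Bomb { owner: Some(&torn_down) };
            bomb.neuter(); // success path skips the emergency teardown
        }
        assert!(!torn_down.get());
    }
}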
impl ScriptTaskFactory for ScriptTask {
fn create_layout_channel(_phantom: Option<&mut ScriptTask>) -> OpaqueScriptLayoutChannel {
let (chan, port) = channel();
ScriptLayoutChan::new(chan, port)
}
fn clone_layout_channel(_phantom: Option<&mut ScriptTask>, pair: &OpaqueScriptLayoutChannel) -> Box<Any+Send> {
box pair.sender() as Box<Any+Send>
}
fn create(_phantom: Option<&mut ScriptTask>,
id: PipelineId,
parent_info: Option<(PipelineId, SubpageId)>,
compositor: ScriptListener,
layout_chan: &OpaqueScriptLayoutChannel,
control_chan: ScriptControlChan,
control_port: Receiver<ConstellationControlMsg>,
constellation_chan: ConstellationChan,
failure_msg: Failure,
resource_task: ResourceTask,
storage_task: StorageTask,
image_cache_task: ImageCacheTask,
mem_profiler_chan: mem::ProfilerChan,
devtools_chan: Option<IpcSender<ScriptToDevtoolsControlMsg>>,
window_size: Option<WindowSizeData>,
load_data: LoadData) {
let ConstellationChan(const_chan) = constellation_chan.clone();
let (script_chan, script_port) = channel();
let layout_chan = LayoutChan(layout_chan.sender());
spawn_named_with_send_on_failure(format!("ScriptTask {:?}", id), task_state::SCRIPT, move || {
let roots = RootCollection::new();
let _stack_roots_tls = StackRootTLS::new(&roots);
let chan = NonWorkerScriptChan(script_chan);
let channel_for_reporter = chan.clone();
let script_task = ScriptTask::new(compositor,
script_port,
chan,
control_chan,
control_port,
constellation_chan,
resource_task,
storage_task,
image_cache_task,
mem_profiler_chan.clone(),
devtools_chan);
SCRIPT_TASK_ROOT.with(|root| {
*root.borrow_mut() = Some(&script_task as *const _);
});
let mut failsafe = ScriptMemoryFailsafe::new(&script_task);
let new_load = InProgressLoad::new(id, parent_info, layout_chan, window_size,
load_data.url.clone());
script_task.start_page_load(new_load, load_data);
// Register this task as a memory reporter.
let reporter_name = format!("script-reporter-{}", id.0);
let (reporter_sender, reporter_receiver) = ipc::channel().unwrap();
ROUTER.add_route(reporter_receiver.to_opaque(), box move |reporter_request| {
                // Just injects an appropriate event into the script task's queue.
let reporter_request: ReporterRequest = reporter_request.to().unwrap();
channel_for_reporter.send(ScriptMsg::CollectReports(
reporter_request.reports_channel)).unwrap()
});
let reporter = Reporter(reporter_sender);
let msg = mem::ProfilerMsg::RegisterReporter(reporter_name.clone(), reporter);
mem_profiler_chan.send(msg);
script_task.start();
// Unregister this task as a memory reporter.
let msg = mem::ProfilerMsg::UnregisterReporter(reporter_name);
mem_profiler_chan.send(msg);
// This must always be the very last operation performed before the task completes
failsafe.neuter();
}, ConstellationMsg::Failure(failure_msg), const_chan);
}
}
unsafe extern "C" fn debug_gc_callback(_rt: *mut JSRuntime, status: JSGCStatus, _data: *mut libc::c_void) {
match status {
JSGCStatus::JSGC_BEGIN => task_state::enter(task_state::IN_GC),
JSGCStatus::JSGC_END => task_state::exit(task_state::IN_GC),
}
}
unsafe extern "C" fn shadow_check_callback(_cx: *mut JSContext,
_object: HandleObject, _id: HandleId) -> DOMProxyShadowsResult {
// XXX implement me
return DOMProxyShadowsResult::ShadowCheckFailed;
}
impl ScriptTask {
pub fn page_fetch_complete(id: PipelineId, subpage: Option<SubpageId>, metadata: Metadata)
-> Option<Root<ServoHTMLParser>> {
SCRIPT_TASK_ROOT.with(|root| {
let script_task = unsafe { &*root.borrow().unwrap() };
script_task.handle_page_fetch_complete(id, subpage, metadata)
})
}
pub fn parsing_complete(id: PipelineId) {
SCRIPT_TASK_ROOT.with(|root| {
let script_task = unsafe { &*root.borrow().unwrap() };
script_task.handle_parsing_complete(id);
});
}
pub fn process_event(msg: ScriptMsg) {
SCRIPT_TASK_ROOT.with(|root| {
if let Some(script_task) = *root.borrow() {
let script_task = unsafe { &*script_task };
script_task.handle_msg_from_script(msg);
}
});
}
/// Creates a new script task.
pub fn new(compositor: ScriptListener,
port: Receiver<ScriptMsg>,
chan: NonWorkerScriptChan,
control_chan: ScriptControlChan,
control_port: Receiver<ConstellationControlMsg>,
constellation_chan: ConstellationChan,
resource_task: ResourceTask,
storage_task: StorageTask,
image_cache_task: ImageCacheTask,
mem_profiler_chan: mem::ProfilerChan,
devtools_chan: Option<IpcSender<ScriptToDevtoolsControlMsg>>)
-> ScriptTask {
let runtime = ScriptTask::new_rt_and_cx();
unsafe {
JS_SetWrapObjectCallbacks(runtime.rt(),
&WRAP_CALLBACKS);
}
// Ask the router to proxy IPC messages from the devtools to us.
let (ipc_devtools_sender, ipc_devtools_receiver) = ipc::channel().unwrap();
let devtools_port = ROUTER.route_ipc_receiver_to_new_mpsc_receiver(ipc_devtools_receiver);
// Ask the router to proxy IPC messages from the image cache task to us.
let (ipc_image_cache_channel, ipc_image_cache_port) = ipc::channel().unwrap();
let image_cache_port =
ROUTER.route_ipc_receiver_to_new_mpsc_receiver(ipc_image_cache_port);
ScriptTask {
page: DOMRefCell::new(None),
incomplete_loads: DOMRefCell::new(vec!()),
image_cache_task: image_cache_task,
image_cache_channel: ImageCacheChan(ipc_image_cache_channel),
image_cache_port: image_cache_port,
resource_task: resource_task,
storage_task: storage_task,
port: port,
chan: chan,
control_chan: control_chan,
control_port: control_port,
constellation_chan: constellation_chan,
compositor: DOMRefCell::new(compositor),
mem_profiler_chan: mem_profiler_chan,
devtools_chan: devtools_chan,
devtools_port: devtools_port,
devtools_sender: ipc_devtools_sender,
devtools_markers: RefCell::new(HashSet::new()),
devtools_marker_sender: RefCell::new(None),
js_runtime: Rc::new(runtime),
mouse_over_targets: DOMRefCell::new(vec!()),
closed_pipelines: RefCell::new(HashSet::new()),
}
}
pub fn new_rt_and_cx() -> Runtime {
LiveDOMReferences::initialize();
let runtime = Runtime::new();
unsafe {
JS_AddExtraGCRootsTracer(runtime.rt(), Some(trace_rust_roots), ptr::null_mut());
JS_AddExtraGCRootsTracer(runtime.rt(), Some(trace_refcounted_objects), ptr::null_mut());
}
// Needed for debug assertions about whether GC is running.
if cfg!(debug_assertions) {
unsafe {
JS_SetGCCallback(runtime.rt(), Some(debug_gc_callback), ptr::null_mut());
}
}
unsafe {
SetDOMProxyInformation(ptr::null(), 0, Some(shadow_check_callback));
SetDOMCallbacks(runtime.rt(), &DOM_CALLBACKS);
// Pre barriers aren't working correctly at the moment
DisableIncrementalGC(runtime.rt());
}
runtime
}
// Return the root page in the frame tree. Panics if it doesn't exist.
pub fn root_page(&self) -> Rc<Page> {
self.page.borrow().as_ref().unwrap().clone()
}
pub fn get_cx(&self) -> *mut JSContext {
self.js_runtime.cx()
}
/// Starts the script task. After calling this method, the script task will loop receiving
/// messages on its port.
pub fn start(&self) {
while self.handle_msgs() {
// Go on...
}
}
/// Handle incoming control messages.
fn handle_msgs(&self) -> bool {
// Handle pending resize events.
// Gather them first to avoid a double mut borrow on self.
let mut resizes = vec!();
{
let page = self.page.borrow();
if let Some(page) = page.as_ref() {
for page in page.iter() {
// Only process a resize if layout is idle.
let window = page.window();
if window.r().layout_is_idle() {
let resize_event = window.r().steal_resize_event();
match resize_event {
Some(size) => resizes.push((window.r().pipeline(), size)),
None => ()
}
}
}
}
}
for (id, size) in resizes.into_iter() {
self.handle_event(id, ResizeEvent(size));
}
enum MixedMessage {
FromConstellation(ConstellationControlMsg),
FromScript(ScriptMsg),
FromDevtools(DevtoolScriptControlMsg),
FromImageCache(ImageCacheResult),
}
// Store new resizes, and gather all other events.
let mut sequential = vec!();
// Receive at least one message so we don't spinloop.
let mut event = {
let sel = Select::new();
let mut port1 = sel.handle(&self.port);
let mut port2 = sel.handle(&self.control_port);
let mut port3 = sel.handle(&self.devtools_port);
let mut port4 = sel.handle(&self.image_cache_port);
unsafe {
port1.add();
port2.add();
if self.devtools_chan.is_some() {
port3.add();
}
port4.add();
}
let ret = sel.wait();
if ret == port1.id() {
MixedMessage::FromScript(self.port.recv().unwrap())
} else if ret == port2.id() {
MixedMessage::FromConstellation(self.control_port.recv().unwrap())
} else if ret == port3.id() {
MixedMessage::FromDevtools(self.devtools_port.recv().unwrap())
} else if ret == port4.id() {
MixedMessage::FromImageCache(self.image_cache_port.recv().unwrap())
} else {
panic!("unexpected select result")
}
};
// Squash any pending resize, reflow, and mouse-move events in the queue.
let mut mouse_move_event_index = None;
loop {
match event {
// This has to be handled before the ResizeMsg below,
// otherwise the page may not have been added to the
// child list yet, causing the find() to fail.
MixedMessage::FromConstellation(ConstellationControlMsg::AttachLayout(
new_layout_info)) => {
self.handle_new_layout(new_layout_info);
}
MixedMessage::FromConstellation(ConstellationControlMsg::Resize(id, size)) => {
self.handle_resize(id, size);
}
MixedMessage::FromConstellation(ConstellationControlMsg::Viewport(id, rect)) => {
self.handle_viewport(id, rect);
}
MixedMessage::FromConstellation(ConstellationControlMsg::SendEvent(
_,
MouseMoveEvent(_))) => {
match mouse_move_event_index {
None => {
mouse_move_event_index = Some(sequential.len());
sequential.push(event);
}
Some(index) => {
sequential[index] = event
}
}
}
_ => {
sequential.push(event);
}
}
// If any of our input sources has an event pending, we'll perform another iteration
// and check for more resize events. If there are no events pending, we'll move
// on and execute the sequential non-resize events we've seen.
match self.control_port.try_recv() {
Err(_) => match self.port.try_recv() {
Err(_) => match self.devtools_port.try_recv() {
Err(_) => match self.image_cache_port.try_recv() {
Err(_) => break,
Ok(ev) => event = MixedMessage::FromImageCache(ev),
},
Ok(ev) => event = MixedMessage::FromDevtools(ev),
},
Ok(ev) => event = MixedMessage::FromScript(ev),
},
Ok(ev) => event = MixedMessage::FromConstellation(ev),
}
}
// Process the gathered events.
for msg in sequential.into_iter() {
match msg {
MixedMessage::FromConstellation(ConstellationControlMsg::ExitPipeline(id, exit_type)) => {
if self.handle_exit_pipeline_msg(id, exit_type) {
return false
}
},
MixedMessage::FromConstellation(inner_msg) => self.handle_msg_from_constellation(inner_msg),
MixedMessage::FromScript(inner_msg) => self.handle_msg_from_script(inner_msg),
MixedMessage::FromDevtools(inner_msg) => self.handle_msg_from_devtools(inner_msg),
MixedMessage::FromImageCache(inner_msg) => self.handle_msg_from_image_cache(inner_msg),
}
}
// Issue batched reflows on any pages that require it (e.g. if images loaded)
// TODO(gw): In the future we could probably batch other types of reflows
// into this loop too, but for now it's only images.
let page = self.page.borrow();
if let Some(page) = page.as_ref() {
for page in page.iter() {
let window = page.window();
let pending_reflows = window.r().get_pending_reflow_count();
if pending_reflows > 0 {
window.r().reflow(ReflowGoal::ForDisplay,
ReflowQueryType::NoQuery,
ReflowReason::ImageLoaded);
}
}
}
true
}
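    // Compact sketch of the squashing strategy above (illustrative only, not
    // part of the original file): the first mouse move is pushed and its
    // index remembered; each later mouse move overwrites that slot, so at
    // most one (the latest) mouse-move event survives per batch.
    //
    //     incoming: [click, move(1), key, move(2)]
    //     queued:   [click, move(2), key]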
fn handle_msg_from_constellation(&self, msg: ConstellationControlMsg) {
match msg {
ConstellationControlMsg::AttachLayout(_) =>
panic!("should have handled AttachLayout already"),
ConstellationControlMsg::Navigate(pipeline_id, subpage_id, load_data) =>
self.handle_navigate(pipeline_id, Some(subpage_id), load_data),
ConstellationControlMsg::SendEvent(id, event) =>
self.handle_event(id, event),
ConstellationControlMsg::ReflowComplete(id, reflow_id) =>
self.handle_reflow_complete_msg(id, reflow_id),
ConstellationControlMsg::ResizeInactive(id, new_size) =>
self.handle_resize_inactive_msg(id, new_size),
ConstellationControlMsg::Viewport(..) =>
panic!("should have handled Viewport already"),
ConstellationControlMsg::Resize(..) =>
panic!("should have handled Resize already"),
ConstellationControlMsg::ExitPipeline(..) =>
panic!("should have handled ExitPipeline already"),
ConstellationControlMsg::GetTitle(pipeline_id) =>
self.handle_get_title_msg(pipeline_id),
ConstellationControlMsg::Freeze(pipeline_id) =>
self.handle_freeze_msg(pipeline_id),
ConstellationControlMsg::Thaw(pipeline_id) =>
self.handle_thaw_msg(pipeline_id),
ConstellationControlMsg::MozBrowserEvent(parent_pipeline_id,
subpage_id,
event) =>
self.handle_mozbrowser_event_msg(parent_pipeline_id,
subpage_id,
event),
ConstellationControlMsg::UpdateSubpageId(containing_pipeline_id,
old_subpage_id,
new_subpage_id) =>
self.handle_update_subpage_id(containing_pipeline_id, old_subpage_id, new_subpage_id),
ConstellationControlMsg::FocusIFrame(containing_pipeline_id, subpage_id) =>
self.handle_focus_iframe_msg(containing_pipeline_id, subpage_id),
ConstellationControlMsg::WebDriverScriptCommand(pipeline_id, msg) =>
self.handle_webdriver_msg(pipeline_id, msg),
ConstellationControlMsg::TickAllAnimations(pipeline_id) =>
self.handle_tick_all_animations(pipeline_id),
ConstellationControlMsg::StylesheetLoadComplete(id, url, responder) => {
responder.respond();
self.handle_resource_loaded(id, LoadType::Stylesheet(url));
}
ConstellationControlMsg::GetCurrentState(sender, pipeline_id) => {
let state = self.handle_get_current_state(pipeline_id);
sender.send(state).unwrap();
}
}
}
fn handle_msg_from_script(&self, msg: ScriptMsg) {
match msg {
ScriptMsg::Navigate(id, load_data) =>
self.handle_navigate(id, None, load_data),
ScriptMsg::TriggerFragment(id, fragment) =>
self.trigger_fragment(id, fragment),
ScriptMsg::FireTimer(TimerSource::FromWindow(id), timer_id) =>
self.handle_fire_timer_msg(id, timer_id),
ScriptMsg::FireTimer(TimerSource::FromWorker, _) =>
panic!("Worker timeouts must not be sent to script task"),
ScriptMsg::ExitWindow(id) =>
self.handle_exit_window_msg(id),
ScriptMsg::DOMMessage(..) =>
panic!("unexpected message"),
ScriptMsg::RunnableMsg(runnable) =>
runnable.handler(),
ScriptMsg::MainThreadRunnableMsg(runnable) =>
runnable.handler(self),
ScriptMsg::RefcountCleanup(addr) =>
LiveDOMReferences::cleanup(addr),
ScriptMsg::DocumentLoadsComplete(id) =>
self.handle_loads_complete(id),
ScriptMsg::CollectReports(reports_chan) =>
self.collect_reports(reports_chan),
}
}
fn handle_msg_from_devtools(&self, msg: DevtoolScriptControlMsg) {
let page = self.root_page();
match msg {
DevtoolScriptControlMsg::EvaluateJS(id, s, reply) =>
devtools::handle_evaluate_js(&page, id, s, reply),
DevtoolScriptControlMsg::GetRootNode(id, reply) =>
devtools::handle_get_root_node(&page, id, reply),
DevtoolScriptControlMsg::GetDocumentElement(id, reply) =>
devtools::handle_get_document_element(&page, id, reply),
DevtoolScriptControlMsg::GetChildren(id, node_id, reply) =>
devtools::handle_get_children(&page, id, node_id, reply),
DevtoolScriptControlMsg::GetLayout(id, node_id, reply) =>
devtools::handle_get_layout(&page, id, node_id, reply),
DevtoolScriptControlMsg::GetCachedMessages(pipeline_id, message_types, reply) =>
devtools::handle_get_cached_messages(pipeline_id, message_types, reply),
DevtoolScriptControlMsg::ModifyAttribute(id, node_id, modifications) =>
devtools::handle_modify_attribute(&page, id, node_id, modifications),
DevtoolScriptControlMsg::WantsLiveNotifications(pipeline_id, to_send) =>
devtools::handle_wants_live_notifications(&page, pipeline_id, to_send),
DevtoolScriptControlMsg::SetTimelineMarkers(_pipeline_id, marker_types, reply) =>
devtools::handle_set_timeline_markers(&page, self, marker_types, reply),
DevtoolScriptControlMsg::DropTimelineMarkers(_pipeline_id, marker_types) =>
devtools::handle_drop_timeline_markers(&page, self, marker_types),
DevtoolScriptControlMsg::RequestAnimationFrame(pipeline_id, callback) =>
devtools::handle_request_animation_frame(&page, pipeline_id, callback),
}
}
fn handle_msg_from_image_cache(&self, msg: ImageCacheResult) {
msg.responder.unwrap().respond(msg.image_response);
}
fn handle_webdriver_msg(&self, pipeline_id: PipelineId, msg: WebDriverScriptCommand) {
let page = self.root_page();
match msg {
WebDriverScriptCommand::ExecuteScript(script, reply) =>
webdriver_handlers::handle_execute_script(&page, pipeline_id, script, reply),
WebDriverScriptCommand::FindElementCSS(selector, reply) =>
webdriver_handlers::handle_find_element_css(&page, pipeline_id, selector, reply),
WebDriverScriptCommand::FindElementsCSS(selector, reply) =>
webdriver_handlers::handle_find_elements_css(&page, pipeline_id, selector, reply),
WebDriverScriptCommand::GetActiveElement(reply) =>
webdriver_handlers::handle_get_active_element(&page, pipeline_id, reply),
WebDriverScriptCommand::GetElementTagName(node_id, reply) =>
webdriver_handlers::handle_get_name(&page, pipeline_id, node_id, reply),
WebDriverScriptCommand::GetElementText(node_id, reply) =>
webdriver_handlers::handle_get_text(&page, pipeline_id, node_id, reply),
WebDriverScriptCommand::GetFrameId(frame_id, reply) =>
webdriver_handlers::handle_get_frame_id(&page, pipeline_id, frame_id, reply),
WebDriverScriptCommand::GetUrl(reply) =>
webdriver_handlers::handle_get_url(&page, pipeline_id, reply),
WebDriverScriptCommand::GetTitle(reply) =>
webdriver_handlers::handle_get_title(&page, pipeline_id, reply),
WebDriverScriptCommand::ExecuteAsyncScript(script, reply) =>
webdriver_handlers::handle_execute_async_script(&page, pipeline_id, script, reply),
}
}
fn handle_resize(&self, id: PipelineId, size: WindowSizeData) {
let page = self.page.borrow();
if let Some(ref page) = page.as_ref() {
if let Some(ref page) = page.find(id) {
let window = page.window();
window.r().set_resize_event(size);
return;
}
}
let mut loads = self.incomplete_loads.borrow_mut();
if let Some(ref mut load) = loads.iter_mut().find(|load| load.pipeline_id == id) {
load.window_size = Some(size);
return;
}
panic!("resize sent to nonexistent pipeline");
}
fn handle_viewport(&self, id: PipelineId, rect: Rect<f32>) {
let page = self.page.borrow();
if let Some(page) = page.as_ref() {
if let Some(ref inner_page) = page.find(id) {
let window = inner_page.window();
if window.r().set_page_clip_rect_with_new_viewport(rect) {
let page = get_page(page, id);
self.rebuild_and_force_reflow(&*page, ReflowReason::Viewport);
}
return;
}
}
let mut loads = self.incomplete_loads.borrow_mut();
if let Some(ref mut load) = loads.iter_mut().find(|load| load.pipeline_id == id) {
load.clip_rect = Some(rect);
return;
}
panic!("Page rect message sent to nonexistent pipeline");
}
    /// Handles a notification that a resource load (e.g. a stylesheet) for a document has completed.
fn handle_resource_loaded(&self, pipeline: PipelineId, load: LoadType) {
let page = get_page(&self.root_page(), pipeline);
let doc = page.document();
doc.r().finish_load(load);
}
/// Get the current state of a given pipeline.
fn handle_get_current_state(&self, pipeline_id: PipelineId) -> ScriptState {
// Check if the main page load is still pending
let loads = self.incomplete_loads.borrow();
if let Some(_) = loads.iter().find(|load| load.pipeline_id == pipeline_id) {
return ScriptState::DocumentLoading;
}
// If not in pending loads, the page should exist by now.
let page = self.root_page();
let page = page.find(pipeline_id).expect("GetCurrentState sent to nonexistent pipeline");
let doc = page.document();
// Check if document load event has fired. If the document load
// event has fired, this also guarantees that the first reflow
// has been kicked off. Since the script task does a join with
// layout, this ensures there are no race conditions that can occur
// between load completing and the first layout completing.
let load_pending = doc.r().ReadyState() != DocumentReadyState::Complete;
if load_pending {
return ScriptState::DocumentLoading;
}
        // Check whether the html element has the reftest-wait attribute present.
// See http://testthewebforward.org/docs/reftests.html
let html_element = doc.r().GetDocumentElement();
let reftest_wait = html_element.r().map_or(false, |elem| elem.has_class(&Atom::from_slice("reftest-wait")));
if reftest_wait {
return ScriptState::DocumentLoading;
}
return ScriptState::DocumentLoaded;
}
fn handle_new_layout(&self, new_layout_info: NewLayoutInfo) {
let NewLayoutInfo {
containing_pipeline_id,
new_pipeline_id,
subpage_id,
load_data,
paint_chan,
failure,
pipeline_port,
layout_shutdown_chan,
} = new_layout_info;
let layout_pair = ScriptTask::create_layout_channel(None::<&mut ScriptTask>);
let layout_chan = LayoutChan(*ScriptTask::clone_layout_channel(
None::<&mut ScriptTask>,
&layout_pair).downcast::<Sender<layout_interface::Msg>>().unwrap());
let layout_creation_info = NewLayoutTaskInfo {
id: new_pipeline_id,
url: load_data.url.clone(),
is_parent: false,
layout_pair: layout_pair,
pipeline_port: pipeline_port,
constellation_chan: self.constellation_chan.clone(),
failure: failure,
paint_chan: paint_chan,
script_chan: self.control_chan.0.clone(),
image_cache_task: self.image_cache_task.clone(),
layout_shutdown_chan: layout_shutdown_chan,
};
let page = self.root_page();
let parent_page = page.find(containing_pipeline_id).expect("ScriptTask: received a layout
whose parent has a PipelineId which does not correspond to a pipeline in the script
task's page tree. This is a bug.");
let parent_window = parent_page.window();
// Tell layout to actually spawn the task.
parent_window.layout_chan()
.0
.send(layout_interface::Msg::CreateLayoutTask(layout_creation_info))
.unwrap();
// Kick off the fetch for the new resource.
let new_load = InProgressLoad::new(new_pipeline_id, Some((containing_pipeline_id, subpage_id)),
layout_chan, parent_window.r().window_size(),
load_data.url.clone());
self.start_page_load(new_load, load_data);
}
fn handle_loads_complete(&self, pipeline: PipelineId) {
let page = get_page(&self.root_page(), pipeline);
let doc = page.document();
let doc = doc.r();
if doc.loader().is_blocked() {
return;
}
doc.mut_loader().inhibit_events();
// https://html.spec.whatwg.org/multipage/#the-end step 7
let addr: Trusted<Document> = Trusted::new(self.get_cx(), doc, self.chan.clone());
let handler = box DocumentProgressHandler::new(addr.clone(), DocumentProgressTask::Load);
self.chan.send(ScriptMsg::RunnableMsg(handler)).unwrap();
let ConstellationChan(ref chan) = self.constellation_chan;
chan.send(ConstellationMsg::LoadComplete(pipeline)).unwrap();
}
pub fn get_reports(cx: *mut JSContext, path_seg: String) -> Vec<Report> {
let mut reports = vec![];
unsafe {
let rt = JS_GetRuntime(cx);
let mut stats = ::std::mem::zeroed();
if CollectServoSizes(rt, &mut stats) {
let mut report = |mut path_suffix, kind, size| {
let mut path = path![path_seg, "js"];
path.append(&mut path_suffix);
reports.push(Report {
path: path,
kind: kind,
size: size as usize,
})
};
// A note about possibly confusing terminology: the JS GC "heap" is allocated via
// mmap/VirtualAlloc, which means it's not on the malloc "heap", so we use
// `ExplicitNonHeapSize` as its kind.
report(path!["gc-heap", "used"],
ReportKind::ExplicitNonHeapSize,
stats.gcHeapUsed);
report(path!["gc-heap", "unused"],
ReportKind::ExplicitNonHeapSize,
stats.gcHeapUnused);
report(path!["gc-heap", "admin"],
ReportKind::ExplicitNonHeapSize,
stats.gcHeapAdmin);
report(path!["gc-heap", "decommitted"],
ReportKind::ExplicitNonHeapSize,
stats.gcHeapDecommitted);
// SpiderMonkey uses the system heap, not jemalloc.
report(path!["malloc-heap"],
ReportKind::ExplicitSystemHeapSize,
stats.mallocHeap);
report(path!["non-heap"],
ReportKind::ExplicitNonHeapSize,
stats.nonHeap);
}
}
reports
}
fn collect_reports(&self, reports_chan: ReportsChan) {
let mut urls = vec![];
for it_page in self.root_page().iter() {
urls.push(it_page.document().url().serialize());
}
let path_seg = format!("url({})", urls.connect(", "));
let reports = ScriptTask::get_reports(self.get_cx(), path_seg);
reports_chan.send(reports);
}
/// Handles a timer that fired.
fn handle_fire_timer_msg(&self, id: PipelineId, timer_id: TimerId) {
let page = self.root_page();
let page = page.find(id).expect("ScriptTask: received fire timer msg for a
pipeline ID not associated with this script task. This is a bug.");
let window = page.window();
window.r().handle_fire_timer(timer_id);
}
/// Handles freeze message
fn handle_freeze_msg(&self, id: PipelineId) {
let page = self.root_page();
let page = page.find(id).expect("ScriptTask: received freeze msg for a
pipeline ID not associated with this script task. This is a bug.");
let window = page.window();
window.r().freeze();
}
/// Handles thaw message
fn handle_thaw_msg(&self, id: PipelineId) {
// We should only get this message when moving in history, so all pages requested
// should exist.
let page = self.root_page().find(id).unwrap();
let needed_reflow = page.set_reflow_status(false);
if needed_reflow {
self.rebuild_and_force_reflow(&*page, ReflowReason::CachedPageNeededReflow);
}
let window = page.window();
window.r().thaw();
}
fn handle_focus_iframe_msg(&self,
parent_pipeline_id: PipelineId,
subpage_id: SubpageId) {
let borrowed_page = self.root_page();
let page = borrowed_page.find(parent_pipeline_id).unwrap();
let doc = page.document();
let frame_element = doc.find_iframe(subpage_id);
if let Some(ref frame_element) = frame_element {
let element = ElementCast::from_ref(frame_element.r());
doc.r().begin_focus_transaction();
doc.r().request_focus(element);
doc.r().commit_focus_transaction(FocusType::Parent);
}
}
/// Handles a mozbrowser event, for example see:
/// https://developer.mozilla.org/en-US/docs/Web/Events/mozbrowserloadstart
fn handle_mozbrowser_event_msg(&self,
parent_pipeline_id: PipelineId,
subpage_id: SubpageId,
event: MozBrowserEvent) {
let borrowed_page = self.root_page();
let frame_element = borrowed_page.find(parent_pipeline_id).and_then(|page| {
let doc = page.document();
doc.find_iframe(subpage_id)
});
if let Some(ref frame_element) = frame_element {
frame_element.r().dispatch_mozbrowser_event(event);
}
}
fn handle_update_subpage_id(&self,
containing_pipeline_id: PipelineId,
old_subpage_id: SubpageId,
new_subpage_id: SubpageId) {
let borrowed_page = self.root_page();
let frame_element = borrowed_page.find(containing_pipeline_id).and_then(|page| {
let doc = page.document();
doc.find_iframe(old_subpage_id)
});
frame_element.r().unwrap().update_subpage_id(new_subpage_id);
}
/// Handles a notification that reflow completed.
fn handle_reflow_complete_msg(&self, pipeline_id: PipelineId, reflow_id: u32) {
debug!("Script: Reflow {:?} complete for {:?}", reflow_id, pipeline_id);
let page = self.root_page();
match page.find(pipeline_id) {
Some(page) => {
let window = page.window();
window.r().handle_reflow_complete_msg(reflow_id);
}
None => {
assert!(self.closed_pipelines.borrow().contains(&pipeline_id));
}
}
}
/// Window was resized, but this script was not active, so don't reflow yet
fn handle_resize_inactive_msg(&self, id: PipelineId, new_size: WindowSizeData) {
let page = self.root_page();
let page = page.find(id).expect("Received resize message for PipelineId not associated
with a page in the page tree. This is a bug.");
let window = page.window();
window.r().set_window_size(new_size);
page.set_reflow_status(true);
}
/// We have gotten a window.close from script, which we pass on to the compositor.
/// We do not shut down the script task now, because the compositor will ask the
/// constellation to shut down the pipeline, which will clean everything up
/// normally. If we do exit, we will tear down the DOM nodes, possibly at a point
/// where layout is still accessing them.
fn handle_exit_window_msg(&self, _: PipelineId) {
debug!("script task handling exit window msg");
// TODO(tkuehn): currently there is only one window,
// so this can afford to be naive and just shut down the
// compositor. In the future it'll need to be smarter.
self.compositor.borrow_mut().close();
}
/// We have received notification that the response associated with a load has completed.
/// Kick off the document and frame tree creation process using the result.
fn handle_page_fetch_complete(&self, id: PipelineId, subpage: Option<SubpageId>,
metadata: Metadata) -> Option<Root<ServoHTMLParser>> {
let idx = self.incomplete_loads.borrow().iter().position(|load| {
load.pipeline_id == id && load.parent_info.map(|info| info.1) == subpage
});
// The matching in progress load structure may not exist if
// the pipeline exited before the page load completed.
match idx {
Some(idx) => {
let load = self.incomplete_loads.borrow_mut().remove(idx);
Some(self.load(metadata, load))
}
None => {
assert!(self.closed_pipelines.borrow().contains(&id));
None
}
}
}
/// Handles a request for the window title.
fn handle_get_title_msg(&self, pipeline_id: PipelineId) {
let page = get_page(&self.root_page(), pipeline_id);
let document = page.document();
document.r().send_title_to_compositor();
}
/// Handles a request to exit the script task and shut down layout.
/// Returns true if the script task should shut down and false otherwise.
fn handle_exit_pipeline_msg(&self, id: PipelineId, exit_type: PipelineExitType) -> bool {
self.closed_pipelines.borrow_mut().insert(id);
// Check if the exit message is for an in progress load.
let idx = self.incomplete_loads.borrow().iter().position(|load| {
load.pipeline_id == id
});
if let Some(idx) = idx {
let load = self.incomplete_loads.borrow_mut().remove(idx);
// Tell the layout task to begin shutting down, and wait until it
// processed this message.
let (response_chan, response_port) = channel();
let LayoutChan(chan) = load.layout_chan;
if chan.send(layout_interface::Msg::PrepareToExit(response_chan)).is_ok() {
debug!("shutting down layout for page {:?}", id);
response_port.recv().unwrap();
chan.send(layout_interface::Msg::ExitNow(exit_type)).ok();
}
let has_pending_loads = self.incomplete_loads.borrow().len() > 0;
let has_root_page = self.page.borrow().is_some();
// Exit if no pending loads and no root page
return !has_pending_loads && !has_root_page;
}
// If root is being exited, shut down all pages
let page = self.root_page();
let window = page.window();
if window.r().pipeline() == id {
debug!("shutting down layout for root page {:?}", id);
shut_down_layout(&page, exit_type);
return true;
}
// Otherwise, find just the matching page and exit all sub-pages.
if let Some(ref mut child_page) = page.remove(id) {
shut_down_layout(&*child_page, exit_type);
}
return false;
}
/// Handles when the layout task finishes all animations in one tick.
fn handle_tick_all_animations(&self, id: PipelineId) {
let page = get_page(&self.root_page(), id);
let document = page.document();
document.r().invoke_animation_callbacks();
}
/// The entry point to document loading. Defines bindings, sets up the window and document
/// objects, parses HTML and CSS, and kicks off initial layout.
fn load(&self, metadata: Metadata, incomplete: InProgressLoad) -> Root<ServoHTMLParser> {
let final_url = metadata.final_url.clone();
debug!("ScriptTask: loading {} on page {:?}", incomplete.url.serialize(), incomplete.pipeline_id);
// We should either be initializing a root page or loading a child page of an
// existing one.
let root_page_exists = self.page.borrow().is_some();
let frame_element = incomplete.parent_info.and_then(|(parent_id, subpage_id)| {
// The root page may not exist yet, if the parent of this frame
// exists in a different script task.
let borrowed_page = self.page.borrow();
// In the case a parent id exists but the matching page
// cannot be found, this means the page exists in a different
// script task (due to origin) so it shouldn't be returned.
// TODO: window.parent will continue to return self in that
// case, which is wrong. We should be returning an object that
// denies access to most properties (per
// https://github.com/servo/servo/issues/3939#issuecomment-62287025).
borrowed_page.as_ref().and_then(|borrowed_page| {
borrowed_page.find(parent_id).and_then(|page| {
let doc = page.document();
doc.find_iframe(subpage_id)
})
})
});
// Create a new frame tree entry.
let page = Rc::new(Page::new(incomplete.pipeline_id));
if !root_page_exists {
// We have a new root frame tree.
*self.page.borrow_mut() = Some(page.clone());
} else if let Some((parent, _)) = incomplete.parent_info {
// We have a new child frame.
let parent_page = self.root_page();
// TODO(gw): This find will fail when we are sharing script tasks
// between cross origin iframes in the same TLD.
parent_page.find(parent).expect("received load for subpage with missing parent");
parent_page.children.borrow_mut().push(page.clone());
}
enum PageToRemove {
Root,
Child(PipelineId),
}
struct AutoPageRemover<'a> {
page: PageToRemove,
script_task: &'a ScriptTask,
neutered: bool,
}
impl<'a> AutoPageRemover<'a> {
fn new(script_task: &'a ScriptTask, page: PageToRemove) -> AutoPageRemover<'a> {
AutoPageRemover {
page: page,
script_task: script_task,
neutered: false,
}
}
fn neuter(&mut self) {
self.neutered = true;
}
}
impl<'a> Drop for AutoPageRemover<'a> {
fn drop(&mut self) {
if !self.neutered {
match self.page {
PageToRemove::Root => *self.script_task.page.borrow_mut() = None,
PageToRemove::Child(id) => {
self.script_task.root_page().remove(id).unwrap();
}
}
}
}
}
let page_to_remove = if !root_page_exists {
PageToRemove::Root
} else {
PageToRemove::Child(incomplete.pipeline_id)
};
let mut page_remover = AutoPageRemover::new(self, page_to_remove);
// Create the window and document objects.
let window = Window::new(self.js_runtime.clone(),
page.clone(),
self.chan.clone(),
self.image_cache_channel.clone(),
self.control_chan.clone(),
self.compositor.borrow_mut().dup(),
self.image_cache_task.clone(),
self.resource_task.clone(),
self.storage_task.clone(),
self.mem_profiler_chan.clone(),
self.devtools_chan.clone(),
self.constellation_chan.clone(),
incomplete.layout_chan,
incomplete.pipeline_id,
incomplete.parent_info,
incomplete.window_size);
let last_modified: Option<DOMString> = metadata.headers.as_ref().and_then(|headers| {
headers.get().map(|&LastModified(HttpDate(ref tm))| dom_last_modified(tm))
});
let content_type = match metadata.content_type {
Some(ContentType(Mime(TopLevel::Text, SubLevel::Plain, _))) => Some("text/plain".to_owned()),
_ => None
};
let notifier_data = NotifierData {
script_chan: self.chan.clone(),
pipeline: page.pipeline(),
};
let loader = DocumentLoader::new_with_task(self.resource_task.clone(),
Some(notifier_data),
Some(incomplete.url.clone()));
let document = Document::new(window.r(),
Some(final_url.clone()),
IsHTMLDocument::HTMLDocument,
content_type,
last_modified,
DocumentSource::FromParser,
loader);
let frame_element = frame_element.r().map(|elem| ElementCast::from_ref(elem));
window.r().init_browsing_context(document.r(), frame_element);
// Create the root frame
page.set_frame(Some(Frame {
document: JS::from_rooted(&document),
window: JS::from_rooted(&window),
}));
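// javascript: URLs have nothing to fetch; evaluate the script now and use
// its string result (if any) as the input to the HTML parser below.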
let is_javascript = incomplete.url.scheme == "javascript";
let parse_input = if is_javascript {
let _ar = JSAutoRequest::new(self.get_cx());
let evalstr = incomplete.url.non_relative_scheme_data().unwrap();
let mut jsval = RootedValue::new(self.get_cx(), UndefinedValue());
window.r().evaluate_js_on_global_with_result(evalstr, jsval.handle_mut());
let strval = FromJSValConvertible::from_jsval(self.get_cx(), jsval.handle(),
StringificationBehavior::Empty);
strval.unwrap_or("".to_owned())
} else {
"".to_owned()
};
parse_html(document.r(), parse_input, &final_url,
ParseContext::Owner(Some(incomplete.pipeline_id)));
page_remover.neuter();
document.r().get_current_parser().unwrap()
}
fn notify_devtools(&self, title: DOMString, url: Url, ids: (PipelineId, Option<WorkerId>)) {
match self.devtools_chan {
None => {}
Some(ref chan) => {
let page_info = DevtoolsPageInfo {
title: title,
url: url,
};
chan.send(ScriptToDevtoolsControlMsg::NewGlobal(
ids,
self.devtools_sender.clone(),
page_info)).unwrap();
}
}
}
fn scroll_fragment_point(&self, pipeline_id: PipelineId, node: &Element) {
let node = NodeCast::from_ref(node);
let rect = node.get_bounding_content_box();
let point = Point2D::new(rect.origin.x.to_f32_px(), rect.origin.y.to_f32_px());
// FIXME(#2003, pcwalton): This is pretty bogus when multiple layers are involved.
// Really what needs to happen is that this needs to go through layout to ask which
// layer the element belongs to, and have it send the scroll message to the
// compositor.
self.compositor.borrow_mut().scroll_fragment_point(pipeline_id, LayerId::null(), point);
}
/// Reflows non-incrementally, rebuilding the entire layout tree in the process.
fn rebuild_and_force_reflow(&self, page: &Page, reason: ReflowReason) {
let document = page.document();
document.r().dirty_all_nodes();
let window = window_from_node(document.r());
window.r().reflow(ReflowGoal::ForDisplay, ReflowQueryType::NoQuery, reason);
}
/// This is the main entry point for receiving and dispatching DOM events.
///
/// TODO: Actually perform DOM event dispatch.
fn handle_event(&self, pipeline_id: PipelineId, event: CompositorEvent) {
match event {
ResizeEvent(new_size) => {
let _marker;
if self.need_emit_timeline_marker(TimelineMarkerType::DOMEvent) {
_marker = AutoDOMEventMarker::new(self);
}
self.handle_resize_event(pipeline_id, new_size);
}
ClickEvent(button, point) => {
self.handle_mouse_event(pipeline_id, MouseEventType::Click, button, point);
}
MouseDownEvent(button, point) => {
self.handle_mouse_event(pipeline_id, MouseEventType::MouseDown, button, point);
}
MouseUpEvent(button, point) => {
self.handle_mouse_event(pipeline_id, MouseEventType::MouseUp, button, point);
}
MouseMoveEvent(point) => {
let _marker;
if self.need_emit_timeline_marker(TimelineMarkerType::DOMEvent) {
_marker = AutoDOMEventMarker::new(self);
}
let page = get_page(&self.root_page(), pipeline_id);
let document = page.document();
// Temporarily steal the list of targets the mouse is currently over, so
// that we can pass it to handle_mouse_move_event() in a safe RootedVec
// container.
let mut mouse_over_targets = RootedVec::new();
std_mem::swap(&mut *self.mouse_over_targets.borrow_mut(), &mut *mouse_over_targets);
document.r().handle_mouse_move_event(self.js_runtime.rt(), point, &mut mouse_over_targets);
std_mem::swap(&mut *self.mouse_over_targets.borrow_mut(), &mut *mouse_over_targets);
}
KeyEvent(key, state, modifiers) => {
let _marker;
if self.need_emit_timeline_marker(TimelineMarkerType::DOMEvent) {
_marker = AutoDOMEventMarker::new(self);
}
let page = get_page(&self.root_page(), pipeline_id);
let document = page.document();
document.r().dispatch_key_event(
key, state, modifiers, &mut *self.compositor.borrow_mut());
}
}
}
fn handle_mouse_event(&self,
pipeline_id: PipelineId,
mouse_event_type: MouseEventType,
button: MouseButton,
point: Point2D<f32>) {
let _marker;
if self.need_emit_timeline_marker(TimelineMarkerType::DOMEvent) {
_marker = AutoDOMEventMarker::new(self);
}
let page = get_page(&self.root_page(), pipeline_id);
let document = page.document();
document.r().handle_mouse_event(self.js_runtime.rt(), button, point, mouse_event_type);
}
/// https://html.spec.whatwg.org/multipage/#navigating-across-documents
/// The entry point for content to notify that a new load has been requested
/// for the given pipeline (specifically the "navigate" algorithm).
fn handle_navigate(&self, pipeline_id: PipelineId, subpage_id: Option<SubpageId>, load_data: LoadData) {
match subpage_id {
Some(subpage_id) => {
let borrowed_page = self.root_page();
let iframe = borrowed_page.find(pipeline_id).and_then(|page| {
let doc = page.document();
doc.find_iframe(subpage_id)
});
if let Some(iframe) = iframe.r() {
iframe.navigate_child_browsing_context(load_data.url);
}
}
None => {
let ConstellationChan(ref const_chan) = self.constellation_chan;
const_chan.send(ConstellationMsg::LoadUrl(pipeline_id, load_data)).unwrap();
}
}
}
/// The entry point for content to notify that a fragment URL has been requested
/// for the given pipeline.
fn trigger_fragment(&self, pipeline_id: PipelineId, fragment: String) {
let page = get_page(&self.root_page(), pipeline_id);
let document = page.document();
match document.r().find_fragment_node(fragment) {
Some(ref node) => {
self.scroll_fragment_point(pipeline_id, node.r());
}
None => {}
}
}
fn handle_resize_event(&self, pipeline_id: PipelineId, new_size: WindowSizeData) {
let page = get_page(&self.root_page(), pipeline_id);
let window = page.window();
window.r().set_window_size(new_size);
window.r().force_reflow(ReflowGoal::ForDisplay,
ReflowQueryType::NoQuery,
ReflowReason::WindowResize);
let document = page.document();
let fragment_node = window.r().steal_fragment_name()
.and_then(|name| document.r().find_fragment_node(name));
match fragment_node {
Some(ref node) => self.scroll_fragment_point(pipeline_id, node.r()),
None => {}
}
// http://dev.w3.org/csswg/cssom-view/#resizing-viewports
// https://dvcs.w3.org/hg/dom3events/raw-file/tip/html/DOM3-Events.html#event-type-resize
let uievent = UIEvent::new(window.r(),
"resize".to_owned(), EventBubbles::DoesNotBubble,
EventCancelable::NotCancelable, Some(window.r()),
0i32);
let event = EventCast::from_ref(uievent.r());
let wintarget = EventTargetCast::from_ref(window.r());
event.fire(wintarget);
}
/// Initiate a non-blocking fetch for a specified resource. Stores the InProgressLoad
/// argument until a notification is received that the fetch is complete.
fn start_page_load(&self, incomplete: InProgressLoad, mut load_data: LoadData) {
let id = incomplete.pipeline_id.clone();
let subpage = incomplete.parent_info.clone().map(|p| p.1);
let script_chan = self.chan.clone();
let resource_task = self.resource_task.clone();
let context = Arc::new(Mutex::new(ParserContext::new(id, subpage, script_chan.clone(),
load_data.url.clone())));
let listener = box NetworkListener {
context: context,
script_chan: script_chan.clone(),
};
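// javascript: URLs carry no network resource; request about:blank instead
// and evaluate the script once the document is created (see load()).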
if load_data.url.scheme == "javascript" {
load_data.url = Url::parse("about:blank").unwrap();
}
resource_task.send(ControlMsg::Load(NetLoadData {
url: load_data.url,
method: load_data.method,
headers: Headers::new(),
preserved_headers: load_data.headers,
data: load_data.data,
cors: None,
pipeline_id: Some(id),
}, LoadConsumer::Listener(listener))).unwrap();
self.incomplete_loads.borrow_mut().push(incomplete);<|fim▁hole|> fn need_emit_timeline_marker(&self, timeline_type: TimelineMarkerType) -> bool {
self.devtools_markers.borrow().contains(&timeline_type)
}
fn emit_timeline_marker(&self, marker: TimelineMarker) {
let sender = self.devtools_marker_sender.borrow();
let sender = sender.as_ref().expect("There is no marker sender");
sender.send(marker).unwrap();
}
pub fn set_devtools_timeline_marker(&self,
marker: TimelineMarkerType,
reply: IpcSender<TimelineMarker>) {
*self.devtools_marker_sender.borrow_mut() = Some(reply);
self.devtools_markers.borrow_mut().insert(marker);
}
pub fn drop_devtools_timeline_markers(&self) {
self.devtools_markers.borrow_mut().clear();
*self.devtools_marker_sender.borrow_mut() = None;
}
fn handle_parsing_complete(&self, id: PipelineId) {
let parent_page = self.root_page();
let page = match parent_page.find(id) {
Some(page) => page,
None => return,
};
let document = page.document();
let final_url = document.r().url();
document.r().set_ready_state(DocumentReadyState::Interactive);
// Kick off the initial reflow of the page.
debug!("kicking off initial reflow of {:?}", final_url);
document.r().disarm_reflow_timeout();
document.r().content_changed(NodeCast::from_ref(document.r()),
NodeDamage::OtherNodeDamage);
let window = window_from_node(document.r());
window.r().reflow(ReflowGoal::ForDisplay, ReflowQueryType::NoQuery, ReflowReason::FirstLoad);
// No more reflow required
page.set_reflow_status(false);
// https://html.spec.whatwg.org/multipage/#the-end step 4
let addr: Trusted<Document> = Trusted::new(self.get_cx(), document.r(), self.chan.clone());
let handler = box DocumentProgressHandler::new(addr, DocumentProgressTask::DOMContentLoaded);
self.chan.send(ScriptMsg::RunnableMsg(handler)).unwrap();
window.r().set_fragment_name(final_url.fragment.clone());
// Notify devtools that a new script global exists.
self.notify_devtools(document.r().Title(), final_url, (id, None));
}
}
impl Drop for ScriptTask {
fn drop(&mut self) {
SCRIPT_TASK_ROOT.with(|root| {
*root.borrow_mut() = None;
});
}
}
struct AutoDOMEventMarker<'a> {
script_task: &'a ScriptTask
}
impl<'a> AutoDOMEventMarker<'a> {
fn new(script_task: &'a ScriptTask) -> AutoDOMEventMarker<'a> {
let marker = TimelineMarker::new("DOMEvent".to_owned(), TracingMetadata::IntervalStart);
script_task.emit_timeline_marker(marker);
AutoDOMEventMarker {
script_task: script_task
}
}
}
impl<'a> Drop for AutoDOMEventMarker<'a> {
fn drop(&mut self) {
let marker = TimelineMarker::new("DOMEvent".to_owned(), TracingMetadata::IntervalEnd);
self.script_task.emit_timeline_marker(marker);
}
}
/// Shuts down layout for the given page tree.
fn shut_down_layout(page_tree: &Rc<Page>, exit_type: PipelineExitType) {
let mut channels = vec!();
for page in page_tree.iter() {
// Tell the layout task to begin shutting down, and wait until it
// has processed this message.
let (response_chan, response_port) = channel();
let window = page.window();
let LayoutChan(chan) = window.r().layout_chan();
if chan.send(layout_interface::Msg::PrepareToExit(response_chan)).is_ok() {
channels.push(chan);
response_port.recv().unwrap();
}
}
// Drop our references to the JSContext and DOM objects.
for page in page_tree.iter() {
let window = page.window();
window.r().clear_js_runtime();
// Sever the connection between the global and the DOM tree
page.set_frame(None);
}
// Destroy the layout task. If there were node leaks, layout will now crash safely.
for chan in channels.into_iter() {
chan.send(layout_interface::Msg::ExitNow(exit_type)).ok();
}
}
pub fn get_page(page: &Rc<Page>, pipeline_id: PipelineId) -> Rc<Page> {
page.find(pipeline_id).expect("ScriptTask: received an event \
message for a layout channel that is not associated with this script task.\
This is a bug.")
}
fn dom_last_modified(tm: &Tm) -> String {
tm.to_local().strftime("%m/%d/%Y %H:%M:%S").unwrap().to_string()
}<|fim▁end|> | }
|
<|file_name|>events.directive.ts<|end_file_name|><|fim▁begin|>'use strict';<|fim▁hole|> templateUrl: 'components/events/events.html',
restrict: 'E',
scope: {
calHeight: '=height',
showTitle: '=showTitle'
},
controller: 'EventsController',
controllerAs: 'event'
}));<|fim▁end|> |
angular.module('knowShareVanApp')
.directive('events', () => ({ |
<|file_name|>ILocalDebugClient.ts<|end_file_name|><|fim▁begin|>/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License').
* You may not use this file except in compliance with the License.
* A copy of the License is located at<|fim▁hole|> *
* or in the 'license' file accompanying this file. This file is distributed
* on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
import WebSocket from 'ws';
export interface ILocalDebugClient {
connectedEvent(): void;
messageEvent(data: WebSocket.Data): void;
errorEvent(event: WebSocket.ErrorEvent): void;
closeEvent(event: WebSocket.CloseEvent): void;
sendResponse(responseString: string): void;
}<|fim▁end|> | * http://www.apache.org/licenses/LICENSE-2.0 |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var express = require('express');
var http = require('http');
var app = express();
var bodyParser = require('body-parser');
var methodOverride = require('method-override');
var router = require('./server/router');
var morgan = require('morgan');
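// Middleware is registered before the router so every route sees parsed
// bodies, method overrides and request logging.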
app.use(bodyParser.urlencoded({extended: true}));
// parse application/json
app.use(bodyParser.json());
app.use(methodOverride());
app.use(morgan('dev', { immediate: true }));
app.use(router());
http.createServer(app).listen(9999, function() {
console.log('Server up: http://localhost:' + 9999);<|fim▁hole|><|fim▁end|> | }); |
<|file_name|>person.py<|end_file_name|><|fim▁begin|>"""Provides all the generic data related to the personal information."""
from typing import Tuple
BLOOD_GROUPS = (
"O+",
"A+",
"B+",
"AB+",
"O−",
"A−",
"B−",
"AB−",
)
GENDER_SYMBOLS: Tuple[str, str, str] = (
"♂",
"♀",
"⚲",
)
USERNAMES = [
"aaa",
"aaron",
"abandoned",
"abc",
"aberdeen",
"abilities",
"ability",
"able",
"aboriginal",
"abortion",
"about",
"above",
"abraham",
"abroad",
"abs",
"absence",
"absent",
"absolute",
"absolutely",
"absorption",
"abstract",
"abstracts",
"abu",
"abuse",
"academic",
"academics",
"academy",
"acc",
"accent",
"accept",
"acceptable",
"acceptance",
"accepted",
"accepting",
"accepts",
"access",
"accessed",
"accessibility",
"accessible",
"accessing",
"accessories",
"accessory",
"accident",
"accidents",
"accommodate",
"accommodation",
"accommodations",
"accompanied",
"accompanying",
"accomplish",
"accomplished",
"accordance",
"according",
"accordingly",
"account",
"accountability",
"accounting",
"accounts",
"accreditation",
"accredited",
"accuracy",
"accurate",
"accurately",
"accused",
"acdbentity",
"ace",
"acer",
"achieve",
"achieved",
"achievement",
"achievements",
"achieving",
"acid",
"acids",
"acknowledge",
"acknowledged",
"acm",
"acne",
"acoustic",
"acquire",
"acquired",
"acquisition",
"acquisitions",
"acre",
"acres",
"acrobat",
"across",
"acrylic",
"act",
"acting",
"action",
"actions",
"activated",
"activation",
"active",
"actively",
"activists",
"activities",
"activity",
"actor",
"actors",
"actress",
"acts",
"actual",
"actually",
"acute",
"ada",
"adam",
"adams",
"adaptation",
"adapted",
"adapter",
"adapters",
"adaptive",
"adaptor",
"add",
"added",
"addiction",
"adding",
"addition",
"additional",
"additionally",
"additions",
"address",
"addressed",
"addresses",
"addressing",
"adds",
"adelaide",
"adequate",
"adidas",
"adipex",
"adjacent",
"adjust",
"adjustable",
"adjusted",
"adjustment",
"adjustments",
"admin",
"administered",
"administration",
"administrative",
"administrator",
"administrators",
"admission",
"admissions",
"admit",
"admitted",
"adobe",
"adolescent",
"adopt",
"adopted",
"adoption",
"adrian",
"ads",
"adsl",
"adult",
"adults",
"advance",
"advanced",
"advancement",
"advances",
"advantage",
"advantages",
"adventure",
"adventures",
"adverse",
"advert",
"advertise",
"advertisement",
"advertisements",
"advertiser",
"advertisers",
"advertising",
"advice",
"advise",
"advised",
"advisor",
"advisors",
"advisory",
"advocacy",
"advocate",
"adware",
"aerial",
"aerospace",
"affair",
"affairs",
"affect",
"affected",
"affecting",
"affects",
"affiliate",
"affiliated",
"affiliates",
"affiliation",
"afford",
"affordable",
"afghanistan",
"afraid",
"africa",
"african",
"after",
"afternoon",
"afterwards",
"again",
"against",
"age",
"aged",
"agencies",
"agency",
"agenda",
"agent",
"agents",
"ages",
"aggregate",
"aggressive",
"aging",
"ago",
"agree",
"agreed",
"agreement",
"agreements",
"agrees",
"agricultural",
"agriculture",
"ahead",
"aid",
"aids",
"aim",
"aimed",
"aims",
"air",
"aircraft",
"airfare",
"airline",
"airlines",
"airplane",
"airport",
"airports",
"aka",
"ala",
"alabama",
"alan",
"alarm",
"alaska",
"albania",
"albany",
"albert",
"alberta",
"album",
"albums",
"albuquerque",
"alcohol",
"alert",
"alerts",
"alex",
"alexander",
"alexandria",
"alfred",
"algebra",
"algeria",
"algorithm",
"algorithms",
"ali",
"alias",
"alice",
"alien",
"align",
"alignment",
"alike",
"alive",
"all",
"allah",
"allan",
"alleged",
"allen",
"allergy",
"alliance",
"allied",
"allocated",
"allocation",
"allow",
"allowance",
"allowed",
"allowing",
"allows",
"alloy",
"almost",
"alone",
"along",
"alot",
"alpha",
"alphabetical",
"alpine",
"already",
"also",
"alt",
"alter",
"altered",
"alternate",
"alternative",
"alternatively",
"alternatives",
"although",
"alto",
"aluminium",
"aluminum",
"alumni",
"always",
"amanda",
"amateur",
"amazing",
"amazon",
"ambassador",
"amber",
"ambien",
"ambient",
"amd",
"amend",
"amended",
"amendment",
"amendments",
"amenities",
"america",
"american",
"americans",
"americas",
"amino",
"among",
"amongst",
"amount",
"amounts",
"amp",
"ampland",
"amplifier",
"amsterdam",
"amy",
"ana",
"anaheim",
"analog",
"analysis",
"analyst",
"analysts",
"analytical",
"analyze",
"analyzed",
"analyzes",
"anatomy",
"anchor",
"ancient",
"and",
"andale",
"anderson",
"andorra",
"andrea",
"andreas",
"andrew",
"andrews",
"andy",
"angel",
"angela",
"angeles",
"angels",
"anger",
"angle",
"angola",
"angry",
"animal",
"animals",
"animated",
"animation",
"anime",
"ann",
"anna",
"anne",
"annex",
"annie",
"anniversary",
"annotated",
"annotation",
"announce",
"announced",
"announcement",
"announcements",
"announces",
"annoying",
"annual",
"annually",
"anonymous",
"another",
"answer",
"answered",
"answering",
"answers",
"ant",
"antarctica",
"antenna",
"anthony",
"anthropology",
"anti",
"antibodies",
"antibody",
"anticipated",
"antigua",
"antique",
"antiques",
"antivirus",
"antonio",
"anxiety",
"any",
"anybody",
"anymore",
"anyone",
"anything",
"anytime",
"anyway",
"anywhere",
"aol",
"apache",
"apart",
"apartment",
"apartments",
"api",
"apnic",
"apollo",
"app",
"apparatus",
"apparel",
"apparent",
"apparently",
"appeal",
"appeals",
"appear",
"appearance",
"appeared",
"appearing",
"appears",
"appendix",
"apple",
"appliance",
"appliances",
"applicable",
"applicant",
"applicants",
"application",
"applications",
"applied",
"applies",
"apply",
"applying",
"appointed",
"appointment",
"appointments",
"appraisal",
"appreciate",
"appreciated",
"appreciation",
"approach",
"approaches",
"appropriate",
"appropriations",
"approval",
"approve",
"approved",
"approx",
"approximate",
"approximately",
"apps",
"apr",
"april",
"apt",
"aqua",
"aquarium",
"aquatic",
"arab",
"arabia",
"arabic",
"arbitrary",
"arbitration",
"arbor",
"arc",
"arcade",
"arch",
"architect",
"architects",
"architectural",
"architecture",
"archive",
"archived",
"archives",
"arctic",
"are",
"area",
"areas",
"arena",
"arg",
"argentina",
"argue",
"argued",
"argument",
"arguments",
"arise",
"arising",
"arizona",
"arkansas",
"arlington",
"arm",
"armed",
"armenia",
"armor",
"arms",
"armstrong",
"army",
"arnold",
"around",
"arrange",
"arranged",
"arrangement",
"arrangements",
"array",
"arrest",
"arrested",
"arrival",
"arrivals",
"arrive",
"arrived",
"arrives",
"arrow",
"art",
"arthritis",
"arthur",
"article",
"articles",
"artificial",
"artist",
"artistic",
"artists",
"arts",
"artwork",
"aruba",
"asbestos",
"ascii",
"ash",
"ashley",
"asia",
"asian",
"aside",
"asin",
"ask",
"asked",
"asking",
"asks",
"asn",
"asp",
"aspect",
"aspects",
"assault",
"assembled",
"assembly",
"assess",
"assessed",
"assessing",
"assessment",
"assessments",
"asset",
"assets",
"assign",
"assigned",
"assignment",
"assignments",
"assist",
"assistance",
"assistant",
"assisted",
"assists",
"associate",
"associated",
"associates",
"association",
"associations",
"assume",
"assumed",
"assumes",
"assuming",
"assumption",
"assumptions",
"assurance",
"assure",
"assured",
"asthma",
"astrology",
"astronomy",
"asus",
"asylum",
"ata",
"ate",
"athens",
"athletes",
"athletic",
"athletics",
"ati",
"atlanta",
"atlantic",
"atlas",
"atm",
"atmosphere",
"atmospheric",
"atom",
"atomic",
"attach",
"attached",
"attachment",
"attachments",
"attack",
"attacked",
"attacks",
"attempt",
"attempted",
"attempting",
"attempts",
"attend",
"attendance",
"attended",
"attending",
"attention",
"attitude",
"attitudes",
"attorney",
"attorneys",
"attract",
"attraction",
"attractions",
"attractive",
"attribute",
"attributes",
"auburn",
"auckland",
"auction",
"auctions",
"aud",
"audi",
"audience",
"audio",
"audit",
"auditor",
"aug",
"august",
"aurora",
"aus",
"austin",
"australia",
"australian",
"austria",
"authentic",
"authentication",
"author",
"authorities",
"authority",
"authorization",
"authorized",
"authors",
"auto",
"automated",
"automatic",
"automatically",
"automation",
"automobile",
"automobiles",
"automotive",
"autos",
"autumn",
"availability",
"available",
"avatar",
"ave",
"avenue",
"average",
"avg",
"avi",
"aviation",
"avoid",
"avoiding",
"avon",
"award",
"awarded",
"awards",
"aware",
"awareness",
"away",
"awesome",
"awful",
"axis",
"aye",
"azerbaijan",
"babe",
"babes",
"babies",
"baby",
"bachelor",
"back",
"backed",
"background",
"backgrounds",
"backing",
"backup",
"bacon",
"bacteria",
"bacterial",
"bad",
"badge",
"badly",
"bag",
"baghdad",
"bags",
"bahamas",
"bahrain",
"bailey",
"baker",
"baking",
"balance",
"balanced",
"bald",
"bali",
"ball",
"ballet",
"balloon",
"ballot",
"baltimore",
"ban",
"banana",
"band",
"bands",
"bandwidth",
"bang",
"bangkok",
"bangladesh",
"bank",
"banking",
"bankruptcy",
"banks",
"banned",
"banner",
"banners",
"baptist",
"bar",
"barbados",
"barbara",
"barbie",
"barcelona",
"bare",
"barely",
"bargain",
"bargains",
"barn",
"barnes",
"barrel",
"barrier",
"barriers",
"barry",
"bars",
"base",
"baseball",
"based",
"baseline",
"basement",
"basename",
"bases",
"basic",
"basically",
"basics",
"basin",
"basis",
"basket",
"basketball",
"baskets",
"bass",
"bat",
"batch",
"bath",
"bathroom",
"bathrooms",
"baths",
"batman",
"batteries",
"battery",
"battle",
"battlefield",
"bay",
"bbc",
"bbs",
"beach",
"beaches",
"beads",
"beam",
"bean",
"beans",
"bear",
"bearing",
"bears",
"beast",
"beastality",
"beat",
"beatles",
"beats",
"beautiful",
"beautifully",
"beauty",
"beaver",
"became",
"because",
"become",
"becomes",
"becoming",
"bed",
"bedding",
"bedford",
"bedroom",
"bedrooms",
"beds",
"bee",
"beef",
"been",
"beer",
"before",
"began",
"begin",
"beginner",
"beginners",
"beginning",
"begins",
"begun",
"behalf",
"behavior",
"behavioral",
"behind",
"beijing",
"being",
"beings",
"belarus",
"belfast",
"belgium",
"belief",
"beliefs",
"believe",
"believed",
"believes",
"belize",
"belkin",
"bell",
"belle",
"belly",
"belong",
"belongs",
"below",
"belt",
"belts",
"ben",
"bench",
"benchmark",
"bend",
"beneath",
"beneficial",
"benefit",
"benefits",
"benjamin",
"bennett",
"bent",
"benz",
"berkeley",
"berlin",
"bermuda",
"bernard",
"berry",
"beside",
"besides",
"best",
"bestsellers",
"bet",
"beta",
"beth",
"better",
"betting",
"betty",
"between",
"beverage",
"beverages",
"beverly",
"beyond",
"bhutan",
"bias",
"bible",
"biblical",
"bibliographic",
"bibliography",
"bicycle",
"bid",
"bidder",
"bidding",
"bids",
"big",
"bigger",
"biggest",
"bike",
"bikes",
"bikini",
"bill",
"billing",
"billion",
"bills",
"billy",
"bin",
"binary",
"bind",
"binding",
"bingo",
"bio",
"biodiversity",
"biographies",
"biography",
"biol",
"biological",
"biology",
"bios",
"biotechnology",
"bird",
"birds",
"birmingham",
"birth",
"birthday",
"bishop",
"bit",
"bite",
"bits",
"biz",
"bizarre",
"bizrate",
"black",
"blackberry",
"blackjack",
"blacks",
"blade",
"blades",
"blah",
"blair",
"blake",
"blame",
"blank",
"blanket",
"blast",
"bleeding",
"blend",
"bless",
"blessed",
"blind",
"blink",
"block",
"blocked",
"blocking",
"blocks",
"blog",
"blogger",
"bloggers",
"blogging",
"blogs",
"blond",
"blonde",
"blood",
"bloom",
"bloomberg",
"blow",
"blowing",
"blue",
"blues",
"bluetooth",
"blvd",
"bmw",
"board",
"boards",
"boat",
"boating",
"boats",
"bob",
"bobby",
"boc",
"bodies",
"body",
"bold",
"bolivia",
"bolt",
"bomb",
"bon",
"bond",
"bonds",
"bone",
"bones",
"bonus",
"book",
"booking",
"bookings",
"bookmark",
"bookmarks",
"books",
"bookstore",
"bool",
"boolean",
"boom",
"boost",
"boot",
"booth",
"boots",
"booty",
"border",
"borders",
"bored",
"boring",
"born",
"borough",
"bosnia",
"boss",
"boston",
"both",
"bother",
"botswana",
"bottle",
"bottles",
"bottom",
"bought",
"boulder",
"boulevard",
"bound",
"boundaries",
"boundary",
"bouquet",
"boutique",
"bow",
"bowl",
"bowling",
"box",
"boxed",
"boxes",
"boxing",
"boy",
"boys",
"bra",
"bracelet",
"bracelets",
"bracket",
"brad",
"bradford",
"bradley",
"brain",
"brake",
"brakes",
"branch",
"branches",
"brand",
"brandon",
"brands",
"bras",
"brass",
"brave",
"brazil",
"brazilian",
"breach",
"bread",
"break",
"breakdown",
"breakfast",
"breaking",
"breaks",
"breast",
"breath",
"breathing",
"breed",
"breeding",
"breeds",
"brian",
"brick",
"bridal",
"bride",
"bridge",
"bridges",
"brief",
"briefing",
"briefly",
"briefs",
"bright",
"brighton",
"brilliant",
"bring",
"bringing",
"brings",
"brisbane",
"bristol",
"britain",
"britannica",
"british",
"britney",
"broad",
"broadband",
"broadcast",
"broadcasting",
"broader",
"broadway",
"brochure",
"brochures",
"broke",
"broken",
"broker",
"brokers",
"bronze",
"brook",
"brooklyn",
"brooks",
"brother",
"brothers",
"brought",
"brown",
"browse",
"browser",
"browsers",
"browsing",
"bruce",
"brunei",
"brunette",
"brunswick",
"brush",
"brussels",
"brutal",
"bryan",
"bryant",
"bubble",
"buck",
"bucks",
"budapest",
"buddy",
"budget",
"budgets",
"buf",
"buffalo",
"buffer",
"bufing",
"bug",
"bugs",
"build",
"builder",
"builders",
"building",
"buildings",
"builds",
"built",
"bulgaria",
"bulgarian",
"bulk",
"bull",
"bullet",
"bulletin",
"bumper",
"bunch",
"bundle",
"bunny",
"burden",
"bureau",
"buried",
"burke",
"burlington",
"burn",
"burner",
"burning",
"burns",
"burst",
"burton",
"bus",
"buses",
"bush",
"business",
"businesses",
"busy",
"but",
"butler",
"butter",
"butterfly",
"button",
"buttons",
"butts",
"buy",
"buyer",
"buyers",
"buying",
"buys",
"buzz",
"bye",
"byte",
"bytes",
"cab",
"cabin",
"cabinet",
"cabinets",
"cable",
"cables",
"cache",
"cached",
"cad",
"cadillac",
"cafe",
"cage",
"cake",
"cakes",
"cal",
"calcium",
"calculate",
"calculated",
"calculation",
"calculations",
"calculator",
"calculators",
"calendar",
"calendars",
"calgary",
"calibration",
"california",
"call",
"called",
"calling",
"calls",
"calm",
"calvin",
"cam",
"cambodia",
"cambridge",
"camcorder",
"camcorders",
"came",
"camel",
"camera",
"cameras",
"cameron",
"cameroon",
"camp",
"campaign",
"campaigns",
"campbell",
"camping",
"camps",
"campus",
"cams",
"can",
"canada",
"canadian",
"canal",
"canberra",
"cancel",
"cancellation",
"cancelled",
"cancer",
"candidate",
"candidates",
"candle",
"candles",
"candy",
"cannon",
"canon",
"cant",
"canvas",
"canyon",
"cap",
"capabilities",
"capability",
"capable",
"capacity",
"cape",
"capital",
"capitol",
"caps",
"captain",
"capture",
"captured",
"car",
"carb",
"carbon",
"card",
"cardiac",
"cardiff",
"cardiovascular",
"cards",
"care",
"career",
"careers",
"careful",
"carefully",
"carey",
"cargo",
"caribbean",
"caring",
"carl",
"carlo",
"carlos",
"carmen",
"carnival",
"carol",
"carolina",
"caroline",
"carpet",
"carried",
"carrier",
"carriers",
"carries",
"carroll",
"carry",
"carrying",
"cars",
"cart",
"carter",
"cartoon",
"cartoons",
"cartridge",
"cartridges",
"cas",
"casa",
"case",
"cases",
"casey",
"cash",
"cashiers",
"casino",
"casinos",
"casio",
"cassette",
"cast",
"casting",
"castle",
"casual",
"cat",
"catalog",
"catalogs",
"catalogue",
"catalyst",
"catch",
"categories",
"category",
"catering",
"cathedral",
"catherine",
"catholic",
"cats",
"cattle",
"caught",
"cause",
"caused",
"causes",
"causing",
"caution",
"cave",
"cayman",
"cbs",
"ccd",
"cdna",
"cds",
"cdt",
"cedar",
"ceiling",
"celebrate",
"celebration",
"celebrities",
"celebrity",
"celebs",
"cell",
"cells",
"cellular",
"celtic",
"cement",
"cemetery",
"census",
"cent",
"center",
"centered",
"centers",
"central",
"centre",
"centres",
"cents",
"centuries",
"century",
"ceo",
"ceramic",
"ceremony",
"certain",
"certainly",
"certificate",
"certificates",
"certification",
"certified",
"cet",
"cfr",
"cgi",
"chad",
"chain",
"chains",
"chair",
"chairman",
"chairs",
"challenge",
"challenged",
"challenges",
"challenging",
"chamber",
"chambers",
"champagne",
"champion",
"champions",
"championship",
"championships",
"chan",
"chance",
"chancellor",
"chances",
"change",
"changed",
"changelog",
"changes",
"changing",
"channel",
"channels",
"chaos",
"chapel",
"chapter",
"chapters",
"char",
"character",
"characteristic",
"characteristics",
"characterization",
"characterized",
"characters",
"charge",
"charged",
"charger",
"chargers",
"charges",
"charging",
"charitable",
"charity",
"charles",
"charleston",
"charlie",
"charlotte",
"charm",
"charming",
"charms",
"chart",
"charter",
"charts",
"chase",
"chassis",
"chat",
"cheap",
"cheaper",
"cheapest",
"cheat",
"cheats",
"check",
"checked",
"checking",
"checklist",
"checkout",
"checks",
"cheers",
"cheese",
"chef",
"chelsea",
"chem",
"chemical",
"chemicals",
"chemistry",
"chen",
"cheque",
"cherry",
"chess",
"chest",
"chester",
"chevrolet",
"chevy",
"chi",
"chicago",
"chick",
"chicken",
"chicks",
"chief",
"child",
"childhood",
"children",
"childrens",
"chile",
"china",
"chinese",
"chip",
"chips",
"cho",
"chocolate",
"choice",
"choices",
"choir",
"cholesterol",
"choose",
"choosing",
"chorus",
"chose",
"chosen",
"chris",
"christ",
"christian",
"christianity",
"christians",
"christina",
"christine",
"christmas",
"christopher",
"chrome",
"chronic",
"chronicle",
"chronicles",
"chrysler",
"chubby",
"chuck",
"church",
"churches",
"cia",
"cialis",
"ciao",
"cigarette",
"cigarettes",
"cincinnati",
"cindy",
"cinema",
"cingular",
"cio",
"cir",
"circle",
"circles",
"circuit",
"circuits",
"circular",
"circulation",
"circumstances",
"circus",
"cisco",
"citation",
"citations",
"cite",
"cited",
"cities",
"citizen",
"citizens",
"citizenship",
"city",
"citysearch",
"civic",
"civil",
"civilian",
"civilization",
"claim",
"claimed",
"claims",
"claire",
"clan",
"clara",
"clarity",
"clark",
"clarke",
"class",
"classes",
"classic",
"classical",
"classics",
"classification",
"classified",
"classifieds",
"classroom",
"clause",
"clay",
"clean",
"cleaner",
"cleaners",
"cleaning",
"cleanup",
"clear",
"clearance",
"cleared",
"clearing",
"clearly",
"clerk",
"cleveland",
"click",
"clicking",
"clicks",
"client",
"clients",
"cliff",
"climate",
"climb",
"climbing",
"clinic",
"clinical",
"clinics",
"clinton",
"clip",
"clips",
"clock",
"clocks",
"clone",
"close",
"closed",
"closely",
"closer",
"closes",
"closest",
"closing",
"closure",
"cloth",
"clothes",
"clothing",
"cloud",
"clouds",
"cloudy",
"club",
"clubs",
"cluster",
"clusters",
"cms",
"cnet",
"cnn",
"coach",
"coaches",
"coaching",
"coal",
"coalition",
"coast",
"coastal",
"coat",
"coated",
"coating",
"cocktail",
"cod",
"code",
"codes",
"coding",
"coffee",
"cognitive",
"cohen",
"coin",
"coins",
"col",
"cold",
"cole",
"coleman",
"colin",
"collaboration",
"collaborative",
"collapse",
"collar",
"colleague",
"colleagues",
"collect",
"collectables",
"collected",
"collectible",
"collectibles",
"collecting",
"collection",
"collections",
"collective",
"collector",
"collectors",
"college",
"colleges",
"collins",
"cologne",
"colombia",
"colon",
"colonial",
"colony",
"color",
"colorado",
"colored",
"colors",
"columbia",
"columbus",
"column",
"columnists",
"columns",
"com",
"combat",
"combination",
"combinations",
"combine",
"combined",
"combines",
"combining",
"combo",
"come",
"comedy",
"comes",
"comfort",
"comfortable",
"comic",
"comics",
"coming",
"comm",
"command",
"commander",
"commands",
"comment",
"commentary",
"commented",
"comments",
"commerce",
"commercial",
"commission",
"commissioner",
"commissioners",
"commissions",
"commit",
"commitment",
"commitments",
"committed",
"committee",
"committees",
"commodities",
"commodity",
"common",
"commonly",
"commons",
"commonwealth",
"communicate",
"communication",
"communications",
"communist",
"communities",
"community",
"comp",
"compact",
"companies",
"companion",
"company",
"compaq",
"comparable",
"comparative",
"compare",
"compared",
"comparing",
"comparison",
"comparisons",
"compatibility",
"compatible",
"compensation",
"compete",
"competent",
"competing",
"competition",
"competitions",
"competitive",
"competitors",
"compilation",
"compile",
"compiled",
"compiler",
"complaint",
"complaints",
"complement",
"complete",
"completed",
"completely",
"completing",
"completion",
"complex",
"complexity",
"compliance",
"compliant",
"complicated",
"complications",
"complimentary",
"comply",
"component",
"components",
"composed",
"composer",
"composite",
"composition",
"compound",
"compounds",
"comprehensive",
"compressed",
"compression",
"compromise",
"computation",
"computational",
"compute",
"computed",
"computer",
"computers",
"computing",
"con",
"concentrate",
"concentration",
"concentrations",
"concept",
"concepts",
"conceptual",
"concern",
"concerned",
"concerning",
"concerns",
"concert",
"concerts",
"conclude",
"concluded",
"conclusion",
"conclusions",
"concord",
"concrete",
"condition",
"conditional",
"conditioning",
"conditions",
"condo",
"condos",
"conduct",
"conducted",
"conducting",
"conf",
"conference",
"conferences",
"conferencing",
"confidence",
"confident",
"confidential",
"confidentiality",
"config",
"configuration",
"configurations",
"configure",
"configured",
"configuring",
"confirm",
"confirmation",
"confirmed",
"conflict",
"conflicts",
"confused",
"confusion",
"congo",
"congratulations",
"congress",
"congressional",
"conjunction",
"connect",
"connected",
"connecticut",
"connecting",
"connection",
"connections",
"connectivity",
"connector",
"connectors",
"cons",
"conscious",
"consciousness",
"consecutive",
"consensus",
"consent",
"consequence",
"consequences",
"consequently",
"conservation",
"conservative",
"consider",
"considerable",
"consideration",
"considerations",
"considered",
"considering",
"considers",
"consist",
"consistency",
"consistent",
"consistently",
"consisting",
"consists",
"console",
"consoles",
"consolidated",
"consolidation",
"consortium",
"conspiracy",
"const",
"constant",
"constantly",
"constitute",
"constitutes",
"constitution",
"constitutional",
"constraint",
"constraints",
"construct",
"constructed",
"construction",
"consult",
"consultancy",
"consultant",
"consultants",
"consultation",
"consulting",
"consumer",
"consumers",
"consumption",
"contact",
"contacted",
"contacting",
"contacts",
"contain",
"contained",
"container",
"containers",
"containing",
"contains",
"contamination",
"contemporary",
"content",
"contents",
"contest",
"contests",
"context",
"continent",
"continental",
"continually",
"continue",
"continued",
"continues",
"continuing",
"continuity",
"continuous",
"continuously",
"contract",
"contracting",
"contractor",
"contractors",
"contracts",
"contrary",
"contrast",
"contribute",
"contributed",
"contributing",
"contribution",
"contributions",
"contributor",
"contributors",
"control",
"controlled",
"controller",
"controllers",
"controlling",
"controls",
"controversial",
"controversy",
"convenience",
"convenient",
"convention",
"conventional",
"conventions",
"convergence",
"conversation",
"conversations",
"conversion",
"convert",
"converted",
"converter",
"convertible",
"convicted",
"conviction",
"convinced",
"cook",
"cookbook",
"cooked",
"cookie",
"cookies",
"cooking",
"cool",
"cooler",
"cooling",
"cooper",
"cooperation",
"cooperative",
"coordinate",
"coordinated",
"coordinates",
"coordination",
"coordinator",
"cop",
"cope",
"copied",
"copies",
"copper",
"copy",
"copying",
"copyright",
"copyrighted",
"copyrights",
"coral",
"cord",
"cordless",
"core",
"cork",
"corn",
"cornell",
"corner",
"corners",
"cornwall",
"corp",
"corporate",
"corporation",
"corporations",
"corps",
"corpus",
"correct",
"corrected",
"correction",
"corrections",
"correctly",
"correlation",
"correspondence",
"corresponding",
"corruption",
"cos",
"cosmetic",
"cosmetics",
"cost",
"costa",
"costs",
"costume",
"costumes",
"cottage",
"cottages",
"cotton",
"could",
"council",
"councils",
"counsel",
"counseling",
"count",
"counted",
"counter",
"counters",
"counties",
"counting",
"countries",
"country",
"counts",
"county",
"couple",
"coupled",
"couples",
"coupon",
"coupons",
"courage",
"courier",
"course",
"courses",
"court",
"courtesy",
"courts",
"cove",
"cover",
"coverage",
"covered",
"covering",
"covers",
"cow",
"cowboy",
"cpu",
"crack",
"cradle",
"craft",
"crafts",
"craig",
"craps",
"crash",
"crawford",
"crazy",
"cream",
"create",
"created",
"creates",
"creating",
"creation",
"creations",
"creative",
"creativity",
"creator",
"creature",
"creatures",
"credit",
"credits",
"creek",
"crest",
"crew",
"cricket",
"crime",
"crimes",
"criminal",
"crisis",
"criteria",
"criterion",
"critical",
"criticism",
"critics",
"crm",
"croatia",
"crop",
"crops",
"cross",
"crossing",
"crossword",
"crowd",
"crown",
"crucial",
"crude",
"cruise",
"cruises",
"cruz",
"cry",
"crystal",
"css",
"cst",
"ctrl",
"cuba",
"cube",
"cubic",
"cuisine",
"cult",
"cultural",
"culture",
"cultures",
"cumulative",
"cup",
"cups",
"cure",
"curious",
"currencies",
"currency",
"current",
"currently",
"curriculum",
"cursor",
"curtis",
"curve",
"curves",
"custody",
"custom",
"customer",
"customers",
"customize",
"customized",
"customs",
"cut",
"cute",
"cuts",
"cutting",
"cvs",
"cyber",
"cycle",
"cycles",
"cycling",
"cylinder",
"cyprus",
"czech",
"dad",
"daddy",
"daily",
"dairy",
"daisy",
"dakota",
"dale",
"dallas",
"dam",
"damage",
"damaged",
"damages",
"dame",
"dan",
"dana",
"dance",
"dancing",
"danger",
"dangerous",
"daniel",
"danish",
"danny",
"dans",
"dare",
"dark",
"darkness",
"darwin",
"das",
"dash",
"dat",
"data",
"database",
"databases",
"date",
"dated",
"dates",
"dating",
"daughter",
"daughters",
"dave",
"david",
"davidson",
"davis",
"dawn",
"day",
"days",
"dayton",
"ddr",
"dead",
"deadline",
"deadly",
"deaf",
"deal",
"dealer",
"dealers",
"dealing",
"deals",
"dealt",
"dealtime",
"dean",
"dear",
"death",
"deaths",
"debate",
"debian",
"deborah",
"debt",
"debug",
"debut",
"dec",
"decade",
"decades",
"december",
"decent",
"decide",
"decided",
"decimal",
"decision",
"decisions",
"deck",
"declaration",
"declare",
"declared",
"decline",
"declined",
"decor",
"decorating",
"decorative",
"decrease",
"decreased",
"dedicated",
"dee",
"deemed",
"deep",
"deeper",
"deeply",
"deer",
"def",
"default",
"defeat",
"defects",
"defence",
"defend",
"defendant",
"defense",
"defensive",
"deferred",
"deficit",
"define",
"defined",
"defines",
"defining",
"definitely",
"definition",
"definitions",
"degree",
"degrees",
"del",
"delaware",
"delay",
"delayed",
"delays",
"delegation",
"delete",
"deleted",
"delhi",
"delicious",
"delight",
"deliver",
"delivered",
"delivering",
"delivers",
"delivery",
"dell",
"delta",
"deluxe",
"dem",
"demand",
"demanding",
"demands",
"demo",
"democracy",
"democrat",
"democratic",
"democrats",
"demographic",
"demonstrate",
"demonstrated",
"demonstrates",
"demonstration",
"den",
"denial",
"denied",
"denmark",
"dennis",
"dense",
"density",
"dental",
"dentists",
"denver",
"deny",
"department",
"departmental",
"departments",
"departure",
"depend",
"dependence",
"dependent",
"depending",
"depends",
"deployment",
"deposit",
"deposits",
"depot",
"depression",
"dept",
"depth",
"deputy",
"der",
"derby",
"derek",
"derived",
"des",
"descending",
"describe",
"described",
"describes",
"describing",
"description",
"descriptions",
"desert",
"deserve",
"design",
"designated",
"designation",
"designed",
"designer",
"designers",
"designing",
"designs",
"desirable",
"desire",
"desired",
"desk",
"desktop",
"desktops",
"desperate",
"despite",
"destination",
"destinations",
"destiny",
"destroy",
"destroyed",
"destruction",
"detail",
"detailed",
"details",
"detect",
"detected",
"detection",
"detective",
"detector",
"determination",
"determine",
"determined",
"determines",
"determining",
"detroit",
"deutsch",
"deutsche",
"deutschland",
"dev",
"devel",
"develop",
"developed",
"developer",
"developers",
"developing",
"development",
"developmental",
"developments",
"develops",
"deviant",
"deviation",
"device",
"devices",
"devil",
"devon",
"devoted",
"diabetes",
"diagnosis",
"diagnostic",
"diagram",
"dial",
"dialog",
"dialogue",
"diameter",
"diamond",
"diamonds",
"diana",
"diane",
"diary",
"dice",
"dicke",
"dictionaries",
"dictionary",
"did",
"die",
"died",
"diego",
"dies",
"diesel",
"diet",
"dietary",
"diff",
"differ",
"difference",
"differences",
"different",
"differential",
"differently",
"difficult",
"difficulties",
"difficulty",
"diffs",
"dig",
"digest",
"digit",
"digital",
"dim",
"dimension",
"dimensional",
"dimensions",
"dining",
"dinner",
"dip",
"diploma",
"dir",
"direct",
"directed",
"direction",
"directions",
"directive",
"directly",
"director",
"directories",
"directors",
"directory",
"dirt",
"dirty",
"dis",
"disabilities",
"disability",
"disable",
"disabled",
"disagree",
"disappointed",
"disaster",
"disc",
"discharge",
"disciplinary",
"discipline",
"disciplines",
"disclaimer",
"disclaimers",
"disclose",
"disclosure",
"disco",
"discount",
"discounted",
"discounts",
"discover",
"discovered",
"discovery",
"discrete",
"discretion",
"discrimination",
"discs",
"discuss",
"discussed",
"discusses",
"discussing",
"discussion",
"discussions",
"disease",
"diseases",
"dish",
"dishes",
"disk",
"disks",
"disney",
"disorder",
"disorders",
"dispatch",
"dispatched",
"display",
"displayed",
"displaying",
"displays",
"disposal",
"disposition",
"dispute",
"disputes",
"dist",
"distance",
"distances",
"distant",
"distinct",
"distinction",
"distinguished",
"distribute",
"distributed",
"distribution",
"distributions",
"distributor",
"distributors",
"district",
"districts",
"disturbed",
"div",
"dive",
"diverse",
"diversity",
"divide",
"divided",
"dividend",
"divine",
"diving",
"division",
"divisions",
"divorce",
"divx",
"diy",
"dna",
"dns",
"doc",
"dock",
"docs",
"doctor",
"doctors",
"doctrine",
"document",
"documentary",
"documentation",
"documented",
"documents",
"dod",
"dodge",
"doe",
"does",
"dog",
"dogs",
"doing",
"doll",
"dollar",
"dollars",
"dolls",
"dom",
"domain",
"domains",
"dome",
"domestic",
"dominant",
"dominican",
"don",
"donald",
"donate",
"donated",
"donation",
"donations",
"done",
"donna",
"donor",
"donors",
"dont",
"doom",
"door",
"doors",
"dos",
"dosage",
"dose",
"dot",
"double",
"doubt",
"doug",
"douglas",
"dover",
"dow",
"down",
"download",
"downloadable",
"downloaded",
"downloading",
"downloads",
"downtown",
"dozen",
"dozens",
"dpi",
"draft",
"drag",
"dragon",
"drain",
"drainage",
"drama",
"dramatic",
"dramatically",
"draw",
"drawing",
"drawings",
"drawn",
"draws",
"dream",
"dreams",
"dress",
"dressed",
"dresses",
"dressing",
"drew",
"dried",
"drill",
"drilling",
"drink",
"drinking",
"drinks",
"drive",
"driven",
"driver",
"drivers",
"drives",
"driving",
"drop",
"dropped",
"drops",
"drove",
"drug",
"drugs",
"drum",
"drums",
"drunk",
"dry",
"dryer",
"dsc",
"dsl",
"dts",
"dual",
"dubai",
"dublin",
"duck",
"dude",
"due",
"dui",
"duke",
"dumb",
"dump",
"duncan",
"duo",
"duplicate",
"durable",
"duration",
"durham",
"during",
"dust",
"dutch",
"duties",
"duty",
"dvd",
"dvds",
"dying",
"dylan",
"dynamic",
"dynamics",
"each",
"eagle",
"eagles",
"ear",
"earl",
"earlier",
"earliest",
"early",
"earn",
"earned",
"earning",
"earnings",
"earrings",
"ears",
"earth",
"earthquake",
"ease",
"easier",
"easily",
"east",
"easter",
"eastern",
"easy",
"eat",
"eating",
"eau",
"ebay",
"ebony",
"ebook",
"ebooks",
"echo",
"eclipse",
"eco",
"ecological",
"ecology",
"ecommerce",
"economic",
"economics",
"economies",
"economy",
"ecuador",
"eddie",
"eden",
"edgar",
"edge",
"edges",
"edinburgh",
"edit",
"edited",
"editing",
"edition",
"editions",
"editor",
"editorial",
"editorials",
"editors",
"edmonton",
"eds",
"edt",
"educated",
"education",
"educational",
"educators",
"edward",
"edwards",
"effect",
"effective",
"effectively",
"effectiveness",
"effects",
"efficiency",
"efficient",
"efficiently",
"effort",
"efforts",
"egg",
"eggs",
"egypt",
"egyptian",
"eight",
"either",
"elder",
"elderly",
"elect",
"elected",
"election",
"elections",
"electoral",
"electric",
"electrical",
"electricity",
"electro",
"electron",
"electronic",
"electronics",
"elegant",
"element",
"elementary",
"elements",
"elephant",
"elevation",
"eleven",
"eligibility",
"eligible",
"eliminate",
"elimination",
"elite",
"elizabeth",
"ellen",
"elliott",
"ellis",
"else",
"elsewhere",
"elvis",
"emacs",
"email",
"emails",
"embassy",
"embedded",
"emerald",
"emergency",
"emerging",
"emily",
"eminem",
"emirates",
"emission",
"emissions",
"emma",
"emotional",
"emotions",
"emperor",
"emphasis",
"empire",
"empirical",
"employ",
"employed",
"employee",
"employees",
"employer",
"employers",
"employment",
"empty",
"enable",
"enabled",
"enables",
"enabling",
"enb",
"enclosed",
"enclosure",
"encoding",
"encounter",
"encountered",
"encourage",
"encouraged",
"encourages",
"encouraging",
"encryption",
"encyclopedia",
"end",
"endangered",
"ended",
"endif",
"ending",
"endless",
"endorsed",
"endorsement",
"ends",
"enemies",
"enemy",
"energy",
"enforcement",
"eng",
"engage",
"engaged",
"engagement",
"engaging",
"engine",
"engineer",
"engineering",
"engineers",
"engines",
"england",
"english",
"enhance",
"enhanced",
"enhancement",
"enhancements",
"enhancing",
"enjoy",
"enjoyed",
"enjoying",
"enlarge",
"enlargement",
"enormous",
"enough",
"enquiries",
"enquiry",
"enrolled",
"enrollment",
"ensemble",
"ensure",
"ensures",
"ensuring",
"ent",
"enter",
"entered",
"entering",
"enterprise",
"enterprises",
"enters",
"entertaining",
"entertainment",
"entire",
"entirely",
"entities",
"entitled",
"entity",
"entrance",
"entrepreneur",
"entrepreneurs",
"entries",
"entry",
"envelope",
"environment",
"environmental",
"environments",
"enzyme",
"eos",
"epa",
"epic",
"epinions",
"episode",
"episodes",
"epson",
"equal",
"equality",
"equally",
"equation",
"equations",
"equilibrium",
"equipment",
"equipped",
"equity",
"equivalent",
"era",
"eric",
"ericsson",
"erik",
"erotica",
"erp",
"error",
"errors",
"escape",
"escorts",
"especially",
"espn",
"essay",
"essays",
"essence",
"essential",
"essentially",
"essentials",
"essex",
"est",
"establish",
"established",
"establishing",
"establishment",
"estate",
"estates",
"estimate",
"estimated",
"estimates",
"estimation",
"estonia",
"etc",
"eternal",
"ethernet",
"ethical",
"ethics",
"ethiopia",
"ethnic",
"eugene",
"eur",
"euro",
"europe",
"european",
"euros",
"eva",
"eval",
"evaluate",
"evaluated",
"evaluating",
"evaluation",
"evaluations",
"evanescence",
"evans",
"eve",
"even",
"evening",
"event",
"events",
"eventually",
"ever",
"every",
"everybody",
"everyday",
"everyone",
"everything",
"everywhere",
"evidence",
"evident",
"evil",
"evolution",
"exact",
"exactly",
"exam",
"examination",
"examinations",
"examine",
"examined",
"examines",
"examining",
"example",
"examples",
"exams",
"exceed",
"excel",
"excellence",
"excellent",
"except",
"exception",
"exceptional",
"exceptions",
"excerpt",
"excess",
"excessive",
"exchange",
"exchanges",
"excited",
"excitement",
"exciting",
"exclude",
"excluded",
"excluding",
"exclusion",
"exclusive",
"exclusively",
"excuse",
"exec",
"execute",
"executed",
"execution",
"executive",
"executives",
"exempt",
"exemption",
"exercise",
"exercises",
"exhaust",
"exhibit",
"exhibition",
"exhibitions",
"exhibits",
"exist",
"existed",
"existence",
"existing",
"exists",
"exit",
"exotic",
"exp",
"expand",
"expanded",
"expanding",
"expansion",
"expansys",
"expect",
"expectations",
"expected",
"expects",
"expedia",
"expenditure",
"expenditures",
"expense",
"expenses",
"expensive",
"experience",
"experienced",
"experiences",
"experiencing",
"experiment",
"experimental",
"experiments",
"expert",
"expertise",
"experts",
"expiration",
"expired",
"expires",
"explain",
"explained",
"explaining",
"explains",
"explanation",
"explicit",
"explicitly",
"exploration",
"explore",
"explorer",
"exploring",
"explosion",
"expo",
"export",
"exports",
"exposed",
"exposure",
"express",
"expressed",
"expression",
"expressions",
"ext",
"extend",
"extended",
"extending",
"extends",
"extension",
"extensions",
"extensive",
"extent",
"exterior",
"external",
"extra",
"extract",
"extraction",
"extraordinary",
"extras",
"extreme",
"extremely",
"eye",
"eyed",
"eyes",
"fabric",
"fabrics",
"fabulous",
"face",
"faced",
"faces",
"facial",
"facilitate",
"facilities",
"facility",
"facing",
"fact",
"factor",
"factors",
"factory",
"facts",
"faculty",
"fail",
"failed",
"failing",
"fails",
"failure",
"failures",
"fair",
"fairfield",
"fairly",
"fairy",
"faith",
"fake",
"fall",
"fallen",
"falling",
"falls",
"false",
"fame",
"familiar",
"families",
"family",
"famous",
"fan",
"fancy",
"fans",
"fantastic",
"fantasy",
"faq",
"faqs",
"far",
"fare",
"fares",
"farm",
"farmer",
"farmers",
"farming",
"farms",
"fascinating",
"fashion",
"fast",
"faster",
"fastest",
"fat",
"fatal",
"fate",
"father",
"fathers",
"fatty",
"fault",
"favor",
"favorite",
"favorites",
"favors",
"fax",
"fbi",
"fcc",
"fda",
"fear",
"fears",
"feat",
"feature",
"featured",
"features",
"featuring",
"feb",
"february",
"fed",
"federal",
"federation",
"fee",
"feed",
"feedback",
"feeding",
"feeds",
"feel",
"feeling",
"feelings",
"feels",
"fees",
"feet",
"fell",
"fellow",
"fellowship",
"felt",
"female",
"females",
"fence",
"feof",
"ferrari",
"ferry",
"festival",
"festivals",
"fetish",
"fever",
"few",
"fewer",
"fiber",
"fibre",
"fiction",
"field",
"fields",
"fifteen",
"fifth",
"fifty",
"fig",
"fight",
"fighter",
"fighters",
"fighting",
"figure",
"figured",
"figures",
"fiji",
"file",
"filed",
"filename",
"files",
"filing",
"fill",
"filled",
"filling",
"film",
"filme",
"films",
"filter",
"filtering",
"filters",
"fin",
"final",
"finally",
"finals",
"finance",
"finances",
"financial",
"financing",
"find",
"findarticles",
"finder",
"finding",
"findings",
"findlaw",
"finds",
"fine",
"finest",
"finger",
"fingers",
"finish",
"finished",
"finishing",
"finite",
"finland",
"finnish",
"fioricet",
"fire",
"fired",
"firefox",
"fireplace",
"fires",
"firewall",
"firewire",
"firm",
"firms",
"firmware",
"first",
"fiscal",
"fish",
"fisher",
"fisheries",
"fishing",
"fist",
"fit",
"fitness",
"fits",
"fitted",
"fitting",
"five",
"fix",
"fixed",
"fixes",
"fixtures",
"flag",
"flags",
"flame",
"flash",
"flashers",
"flashing",
"flat",
"flavor",
"fleece",
"fleet",
"flesh",
"flex",
"flexibility",
"flexible",
"flickr",
"flight",
"flights",
"flip",
"float",
"floating",
"flood",
"floor",
"flooring",
"floors",
"floppy",
"floral",
"florence",
"florida",
"florist",
"florists",
"flour",
"flow",
"flower",
"flowers",
"flows",
"floyd",
"flu",
"fluid",
"flush",
"flux",
"fly",
"flyer",
"flying",
"foam",
"focal",
"focus",
"focused",
"focuses",
"focusing",
"fog",
"fold",
"folder",
"folders",
"folding",
"folk",
"folks",
"follow",
"followed",
"following",
"follows",
"font",
"fonts",
"foo",
"food",
"foods",
"fool",
"foot",
"footage",
"football",
"footwear",
"for",
"forbes",
"forbidden",
"force",
"forced",
"forces",
"ford",
"forecast",
"forecasts",
"foreign",
"forest",
"forestry",
"forests",
"forever",
"forge",
"forget",
"forgot",
"forgotten",
"fork",
"form",
"formal",
"format",
"formation",
"formats",
"formatting",
"formed",
"former",
"formerly",
"forming",
"forms",
"formula",
"fort",
"forth",
"fortune",
"forty",
"forum",
"forums",
"forward",
"forwarding",
"fossil",
"foster",
"foto",
"fotos",
"fought",
"foul",
"found",
"foundation",
"foundations",
"founded",
"founder",
"fountain",
"four",
"fourth",
"fox",
"fraction",
"fragrance",
"fragrances",
"frame",
"framed",
"frames",
"framework",
"framing",
"france",
"franchise",
"francis",
"francisco",
"frank",
"frankfurt",
"franklin",
"fraser",
"fraud",
"fred",
"frederick",
"free",
"freebsd",
"freedom",
"freelance",
"freely",
"freeware",
"freeze",
"freight",
"french",
"frequencies",
"frequency",
"frequent",
"frequently",
"fresh",
"fri",
"friday",
"fridge",
"friend",
"friendly",
"friends",
"friendship",
"frog",
"from",
"front",
"frontier",
"frontpage",
"frost",
"frozen",
"fruit",
"fruits",
"ftp",
"fuel",
"fuji",
"fujitsu",
"full",
"fully",
"fun",
"function",
"functional",
"functionality",
"functioning",
"functions",
"fund",
"fundamental",
"fundamentals",
"funded",
"funding",
"fundraising",
"funds",
"funeral",
"funk",
"funky",
"funny",
"fur",
"furnished",
"furnishings",
"furniture",
"further",
"furthermore",
"fusion",
"future",
"futures",
"fuzzy",
"fwd",
"gabriel",
"gadgets",
"gage",
"gain",
"gained",
"gains",
"galaxy",
"gale",
"galleries",
"gallery",
"gambling",
"game",
"gamecube",
"games",
"gamespot",
"gaming",
"gamma",
"gang",
"gap",
"gaps",
"garage",
"garbage",
"garcia",
"garden",
"gardening",
"gardens",
"garlic",
"garmin",
"gary",
"gas",
"gasoline",
"gate",
"gates",
"gateway",
"gather",
"gathered",
"gathering",
"gauge",
"gave",
"gay",
"gays",
"gazette",
"gba",
"gbp",
"gcc",
"gdp",
"gear",
"geek",
"gel",
"gem",
"gen",
"gender",
"gene",
"genealogy",
"general",
"generally",
"generate",
"generated",
"generates",
"generating",
"generation",
"generations",
"generator",
"generators",
"generic",
"generous",
"genes",
"genesis",
"genetic",
"genetics",
"geneva",
"genius",
"genome",
"genre",
"genres",
"gentle",
"gentleman",
"gently",
"genuine",
"geo",
"geographic",
"geographical",
"geography",
"geological",
"geology",
"geometry",
"george",
"georgia",
"gerald",
"german",
"germany",
"get",
"gets",
"getting",
"ghana",
"ghost",
"ghz",
"giant",
"giants",
"gibraltar",
"gibson",
"gif",
"gift",
"gifts",
"gig",
"gilbert",
"girl",
"girlfriend",
"girls",
"gis",
"give",
"given",
"gives",
"giving",
"glad",
"glance",
"glasgow",
"glass",
"glasses",
"glen",
"glenn",
"global",
"globe",
"glory",
"glossary",
"gloves",
"glow",
"glucose",
"gmbh",
"gmc",
"gmt",
"gnome",
"gnu",
"goal",
"goals",
"goat",
"gods",
"goes",
"going",
"gold",
"golden",
"golf",
"gone",
"gonna",
"good",
"goods",
"google",
"gordon",
"gore",
"gorgeous",
"gospel",
"gossip",
"got",
"gothic",
"goto",
"gotta",
"gotten",
"gourmet",
"governance",
"governing",
"government",
"governmental",
"governments",
"governor",
"gpl",
"gps",
"grab",
"grace",
"grad",
"grade",
"grades",
"gradually",
"graduate",
"graduated",
"graduates",
"graduation",
"graham",
"grain",
"grammar",
"grams",
"grand",
"grande",
"granny",
"grant",
"granted",
"grants",
"graph",
"graphic",
"graphical",
"graphics",
"graphs",
"gras",
"grass",
"grateful",
"gratis",
"gratuit",
"grave",
"gravity",
"gray",
"great",
"greater",
"greatest",
"greatly",
"greece",
"greek",
"green",
"greene",
"greenhouse",
"greensboro",
"greeting",
"greetings",
"greg",
"gregory",
"grenada",
"grew",
"grey",
"grid",
"griffin",
"grill",
"grip",
"grocery",
"groove",
"gross",
"ground",
"grounds",
"groundwater",
"group",
"groups",
"grove",
"grow",
"growing",
"grown",
"grows",
"growth",
"gsm",
"gst",
"gtk",
"guam",
"guarantee",
"guaranteed",
"guarantees",
"guard",
"guardian",
"guards",
"guatemala",
"guess",
"guest",
"guestbook",
"guests",
"gui",
"guidance",
"guide",
"guided",
"guidelines",
"guides",
"guild",
"guilty",
"guinea",
"guitar",
"guitars",
"gulf",
"gun",
"guns",
"guru",
"guy",
"guyana",
"guys",
"gym",
"gzip",
"habitat",
"habits",
"hack",
"hacker",
"had",
"hair",
"hairy",
"haiti",
"half",
"halifax",
"hall",
"halloween",
"halo",
"ham",
"hamburg",
"hamilton",
"hammer",
"hampshire",
"hampton",
"hand",
"handbags",
"handbook",
"handed",
"handheld",
"handhelds",
"handle",
"handled",
"handles",
"handling",
"handmade",
"hands",
"handy",
"hang",
"hanging",
"hans",
"hansen",
"happen",
"happened",
"happening",
"happens",
"happiness",
"happy",
"harassment",
"harbor",
"hard",
"hardcover",
"harder",
"hardly",
"hardware",
"hardwood",
"harley",
"harm",
"harmful",
"harmony",
"harold",
"harper",
"harris",
"harrison",
"harry",
"hart",
"hartford",
"harvard",
"harvest",
"harvey",
"has",
"hash",
"hat",
"hate",
"hats",
"have",
"haven",
"having",
"hawaii",
"hawaiian",
"hawk",
"hay",
"hayes",
"hazard",
"hazardous",
"hazards",
"hdtv",
"head",
"headed",
"header",
"headers",
"heading",
"headline",
"headlines",
"headphones",
"headquarters",
"heads",
"headset",
"healing",
"health",
"healthcare",
"healthy",
"hear",
"heard",
"hearing",
"hearings",
"heart",
"hearts",
"heat",
"heated",
"heater",
"heath",
"heather",
"heating",
"heaven",
"heavily",
"heavy",
"hebrew",
"heel",
"height",
"heights",
"held",
"helen",
"helena",
"helicopter",
"hello",
"helmet",
"help",
"helped",
"helpful",
"helping",
"helps",
"hence",
"henderson",
"henry",
"hepatitis",
"her",
"herald",
"herb",
"herbal",
"herbs",
"here",
"hereby",
"herein",
"heritage",
"hero",
"heroes",
"herself",
"hewlett",
"hey",
"hidden",
"hide",
"hierarchy",
"high",
"higher",
"highest",
"highland",
"highlight",
"highlighted",
"highlights",
"highly",
"highs",
"highway",
"highways",
"hiking",
"hill",
"hills",
"hilton",
"him",
"himself",
"hindu",
"hint",
"hints",
"hip",
"hire",
"hired",
"hiring",
"his",
"hispanic",
"hist",
"historic",
"historical",
"history",
"hit",
"hitachi",
"hits",
"hitting",
"hiv",
"hobbies",
"hobby",
"hockey",
"hold",
"holdem",
"holder",
"holders",
"holding",
"holdings",
"holds",
"hole",
"holes",
"holiday",
"holidays",
"holland",
"hollow",
"holly",
"hollywood",
"holmes",
"holocaust",
"holy",
"home",
"homeland",
"homeless",
"homepage",
"homes",
"hometown",
"homework",
"hon",
"honda",
"honduras",
"honest",
"honey",
"hong",
"honolulu",
"honor",
"honors",
"hood",
"hook",
"hop",
"hope",
"hoped",
"hopefully",
"hopes",
"hoping",
"hopkins",
"horizon",
"horizontal",
"hormone",
"horn",
"horrible",
"horror",
"horse",
"horses",
"hose",
"hospital",
"hospitality",
"hospitals",
"host",
"hosted",
"hostel",
"hostels",
"hosting",
"hosts",
"hot",
"hotel",
"hotels",
"hotmail",
"hottest",
"hour",
"hourly",
"hours",
"house",
"household",
"households",
"houses",
"housewares",
"housewives",
"housing",
"houston",
"how",
"howard",
"however",
"howto",
"href",
"hrs",
"html",
"http",
"hub",
"hudson",
"huge",
"hugh",
"hughes",
"hugo",
"hull",
"human",
"humanitarian",
"humanities",
"humanity",
"humans",
"humidity",
"humor",
"hundred",
"hundreds",
"hung",
"hungarian",
"hungary",
"hunger",
"hungry",
"hunt",
"hunter",
"hunting",
"huntington",
"hurricane",
"hurt",
"husband",
"hwy",
"hybrid",
"hydraulic",
"hydrocodone",
"hydrogen",
"hygiene",
"hypothesis",
"hypothetical",
"hyundai",
"ian",
"ibm",
"ice",
"iceland",
"icon",
"icons",
"icq",
"ict",
"idaho",
"ide",
"idea",
"ideal",
"ideas",
"identical",
"identification",
"identified",
"identifier",
"identifies",
"identify",
"identifying",
"identity",
"idle",
"idol",
"ids",
"ieee",
"ignore",
"ignored",
"iii",
"ill",
"illegal",
"illinois",
"illness",
"illustrated",
"illustration",
"illustrations",
"image",
"images",
"imagination",
"imagine",
"imaging",
"img",
"immediate",
"immediately",
"immigrants",
"immigration",
"immune",
"immunology",
"impact",
"impacts",
"impaired",
"imperial",
"implement",
"implementation",
"implemented",
"implementing",
"implications",
"implied",
"implies",
"import",
"importance",
"important",
"importantly",
"imported",
"imports",
"impose",
"imposed",
"impossible",
"impressed",
"impression",
"impressive",
"improve",
"improved",
"improvement",
"improvements",
"improving",
"inappropriate",
"inbox",
"inc",
"incentive",
"incentives",
"inch",
"inches",
"incidence",
"incident",
"incidents",
"incl",
"include",
"included",
"includes",
"including",
"inclusion",
"inclusive",
"income",
"incoming",
"incomplete",
"incorporate",
"incorporated",
"incorrect",
"increase",
"increased",
"increases",
"increasing",
"increasingly",
"incredible",
"incurred",
"ind",
"indeed",
"independence",
"independent",
"independently",
"index",
"indexed",
"indexes",
"india",
"indian",
"indiana",
"indianapolis",
"indians",
"indicate",
"indicated",
"indicates",
"indicating",
"indication",
"indicator",
"indicators",
"indices",
"indie",
"indigenous",
"indirect",
"individual",
"individually",
"individuals",
"indonesia",
"indonesian",
"indoor",
"induced",
"induction",
"industrial",
"industries",
"industry",
"inexpensive",
"inf",
"infant",
"infants",
"infected",
"infection",
"infections",
"infectious",
"infinite",
"inflation",
"influence",
"influenced",
"influences",
"info",
"inform",
"informal",
"information",
"informational",
"informative",
"informed",
"infrared",
"infrastructure",
"infringement",
"ing",
"ingredients",
"inherited",
"initial",
"initially",
"initiated",
"initiative",
"initiatives",
"injection",
"injured",
"injuries",
"injury",
"ink",
"inkjet",
"inline",
"inn",
"inner",
"innocent",
"innovation",
"innovations",
"innovative",
"inns",
"input",
"inputs",
"inquire",
"inquiries",
"inquiry",
"ins",
"insects",
"insert",
"inserted",
"insertion",
"inside",
"insider",
"insight",
"insights",
"inspection",
"inspections",
"inspector",
"inspiration",
"inspired",
"install",
"installation",
"installations",
"installed",
"installing",
"instance",
"instances",
"instant",
"instantly",
"instead",
"institute",
"institutes",
"institution",
"institutional",
"institutions",
"instruction",
"instructional",
"instructions",
"instructor",
"instructors",
"instrument",
"instrumental",
"instrumentation",
"instruments",
"insulation",
"insulin",
"insurance",
"insured",
"int",
"intake",
"integer",
"integral",
"integrate",
"integrated",
"integrating",
"integration",
"integrity",
"intel",
"intellectual",
"intelligence",
"intelligent",
"intend",
"intended",
"intense",
"intensity",
"intensive",
"intent",
"intention",
"inter",
"interact",
"interaction",
"interactions",
"interactive",
"interest",
"interested",
"interesting",
"interests",
"interface",
"interfaces",
"interference",
"interim",
"interior",
"intermediate",
"internal",
"international",
"internationally",
"internet",
"internship",
"interpretation",
"interpreted",
"interracial",
"intersection",
"interstate",
"interval",
"intervals",
"intervention",
"interventions",
"interview",
"interviews",
"intimate",
"intl",
"into",
"intranet",
"intro",
"introduce",
"introduced",
"introduces",
"introducing",
"introduction",
"introductory",
"invalid",
"invasion",
"invention",
"inventory",
"invest",
"investigate",
"investigated",
"investigation",
"investigations",
"investigator",
"investigators",
"investing",
"investment",
"investments",
"investor",
"investors",
"invisible",
"invision",
"invitation",
"invitations",
"invite",
"invited",
"invoice",
"involve",
"involved",
"involvement",
"involves",
"involving",
"ion",
"iowa",
"ipaq",
"ipod",
"ips",
"ira",
"iran",
"iraq",
"iraqi",
"irc",
"ireland",
"irish",
"iron",
"irrigation",
"irs",
"isa",
"isaac",
"isbn",
"islam",
"islamic",
"island",
"islands",<|fim▁hole|> "isle",
"iso",
"isolated",
"isolation",
"isp",
"israel",
"israeli",
"issn",
"issue",
"issued",
"issues",
"ist",
"istanbul",
"italia",
"italian",
"italiano",
"italic",
"italy",
"item",
"items",
"its",
"itself",
"itunes",
"ivory",
"jack",
"jacket",
"jackets",
"jackie",
"jackson",
"jacksonville",
"jacob",
"jade",
"jaguar",
"jail",
"jake",
"jam",
"jamaica",
"james",
"jamie",
"jan",
"jane",
"janet",
"january",
"japan",
"japanese",
"jar",
"jason",
"java",
"javascript",
"jay",
"jazz",
"jean",
"jeans",
"jeep",
"jeff",
"jefferson",
"jeffrey",
"jelsoft",
"jennifer",
"jenny",
"jeremy",
"jerry",
"jersey",
"jerusalem",
"jesse",
"jessica",
"jesus",
"jet",
"jets",
"jewel",
"jewellery",
"jewelry",
"jewish",
"jews",
"jill",
"jim",
"jimmy",
"joan",
"job",
"jobs",
"joe",
"joel",
"john",
"johnny",
"johns",
"johnson",
"johnston",
"join",
"joined",
"joining",
"joins",
"joint",
"joke",
"jokes",
"jon",
"jonathan",
"jones",
"jordan",
"jose",
"joseph",
"josh",
"joshua",
"journal",
"journalism",
"journalist",
"journalists",
"journals",
"journey",
"joy",
"joyce",
"jpeg",
"jpg",
"juan",
"judge",
"judges",
"judgment",
"judicial",
"judy",
"juice",
"jul",
"julia",
"julian",
"julie",
"july",
"jump",
"jumping",
"jun",
"junction",
"june",
"jungle",
"junior",
"junk",
"jurisdiction",
"jury",
"just",
"justice",
"justify",
"justin",
"juvenile",
"jvc",
"kai",
"kansas",
"karaoke",
"karen",
"karl",
"karma",
"kate",
"kathy",
"katie",
"katrina",
"kay",
"kazakhstan",
"kde",
"keen",
"keep",
"keeping",
"keeps",
"keith",
"kelkoo",
"kelly",
"ken",
"kennedy",
"kenneth",
"kenny",
"keno",
"kent",
"kentucky",
"kenya",
"kept",
"kernel",
"kerry",
"kevin",
"key",
"keyboard",
"keyboards",
"keys",
"keyword",
"keywords",
"kick",
"kid",
"kidney",
"kids",
"kijiji",
"kill",
"killed",
"killer",
"killing",
"kills",
"kilometers",
"kim",
"kinase",
"kind",
"kinda",
"kinds",
"king",
"kingdom",
"kings",
"kingston",
"kirk",
"kiss",
"kissing",
"kit",
"kitchen",
"kits",
"kitty",
"klein",
"knee",
"knew",
"knife",
"knight",
"knights",
"knit",
"knitting",
"knives",
"knock",
"know",
"knowing",
"knowledge",
"knowledgestorm",
"known",
"knows",
"kodak",
"kong",
"korea",
"korean",
"kruger",
"kurt",
"kuwait",
"kyle",
"lab",
"label",
"labeled",
"labels",
"labor",
"laboratories",
"laboratory",
"labs",
"lace",
"lack",
"ladder",
"laden",
"ladies",
"lady",
"lafayette",
"laid",
"lake",
"lakes",
"lamb",
"lambda",
"lamp",
"lamps",
"lan",
"lancaster",
"lance",
"land",
"landing",
"lands",
"landscape",
"landscapes",
"lane",
"lanes",
"lang",
"language",
"languages",
"lanka",
"laos",
"lap",
"laptop",
"laptops",
"large",
"largely",
"larger",
"largest",
"larry",
"las",
"laser",
"last",
"lasting",
"lat",
"late",
"lately",
"later",
"latest",
"latex",
"latin",
"latina",
"latinas",
"latino",
"latitude",
"latter",
"latvia",
"lauderdale",
"laugh",
"laughing",
"launch",
"launched",
"launches",
"laundry",
"laura",
"lauren",
"law",
"lawn",
"lawrence",
"laws",
"lawsuit",
"lawyer",
"lawyers",
"lay",
"layer",
"layers",
"layout",
"lazy",
"lbs",
"lcd",
"lead",
"leader",
"leaders",
"leadership",
"leading",
"leads",
"leaf",
"league",
"lean",
"learn",
"learned",
"learners",
"learning",
"lease",
"leasing",
"least",
"leather",
"leave",
"leaves",
"leaving",
"lebanon",
"lecture",
"lectures",
"led",
"lee",
"leeds",
"left",
"leg",
"legacy",
"legal",
"legally",
"legend",
"legendary",
"legends",
"legislation",
"legislative",
"legislature",
"legitimate",
"legs",
"leisure",
"lemon",
"len",
"lender",
"lenders",
"lending",
"length",
"lens",
"lenses",
"leo",
"leon",
"leonard",
"leone",
"les",
"lesbian",
"lesbians",
"leslie",
"less",
"lesser",
"lesson",
"lessons",
"let",
"lets",
"letter",
"letters",
"letting",
"leu",
"level",
"levels",
"levitra",
"levy",
"lewis",
"lexington",
"lexmark",
"lexus",
"liabilities",
"liability",
"liable",
"lib",
"liberal",
"liberia",
"liberty",
"librarian",
"libraries",
"library",
"libs",
"licence",
"license",
"licensed",
"licenses",
"licensing",
"licking",
"lid",
"lie",
"liechtenstein",
"lies",
"life",
"lifestyle",
"lifetime",
"lift",
"light",
"lightbox",
"lighter",
"lighting",
"lightning",
"lights",
"lightweight",
"like",
"liked",
"likelihood",
"likely",
"likes",
"likewise",
"lil",
"lime",
"limit",
"limitation",
"limitations",
"limited",
"limiting",
"limits",
"limousines",
"lincoln",
"linda",
"lindsay",
"line",
"linear",
"lined",
"lines",
"lingerie",
"link",
"linked",
"linking",
"links",
"linux",
"lion",
"lions",
"lip",
"lips",
"liquid",
"lisa",
"list",
"listed",
"listen",
"listening",
"listing",
"listings",
"listprice",
"lists",
"lit",
"lite",
"literacy",
"literally",
"literary",
"literature",
"lithuania",
"litigation",
"little",
"live",
"livecam",
"lived",
"liver",
"liverpool",
"lives",
"livestock",
"living",
"liz",
"llc",
"lloyd",
"llp",
"load",
"loaded",
"loading",
"loads",
"loan",
"loans",
"lobby",
"loc",
"local",
"locale",
"locally",
"locate",
"located",
"location",
"locations",
"locator",
"lock",
"locked",
"locking",
"locks",
"lodge",
"lodging",
"log",
"logan",
"logged",
"logging",
"logic",
"logical",
"login",
"logistics",
"logitech",
"logo",
"logos",
"logs",
"lol",
"london",
"lone",
"lonely",
"long",
"longer",
"longest",
"longitude",
"look",
"looked",
"looking",
"looks",
"looksmart",
"lookup",
"loop",
"loops",
"loose",
"lopez",
"lord",
"los",
"lose",
"losing",
"loss",
"losses",
"lost",
"lot",
"lots",
"lottery",
"lotus",
"lou",
"loud",
"louis",
"louise",
"louisiana",
"louisville",
"lounge",
"love",
"loved",
"lovely",
"lover",
"lovers",
"loves",
"loving",
"low",
"lower",
"lowest",
"lows",
"ltd",
"lucas",
"lucia",
"luck",
"lucky",
"lucy",
"luggage",
"luis",
"luke",
"lunch",
"lung",
"luther",
"luxembourg",
"luxury",
"lycos",
"lying",
"lynn",
"lyric",
"lyrics",
"mac",
"macedonia",
"machine",
"machinery",
"machines",
"macintosh",
"macro",
"macromedia",
"mad",
"madagascar",
"made",
"madison",
"madness",
"madonna",
"madrid",
"mae",
"mag",
"magazine",
"magazines",
"magic",
"magical",
"magnet",
"magnetic",
"magnificent",
"magnitude",
"mai",
"maiden",
"mail",
"mailed",
"mailing",
"mailman",
"mails",
"mailto",
"main",
"maine",
"mainland",
"mainly",
"mainstream",
"maintain",
"maintained",
"maintaining",
"maintains",
"maintenance",
"major",
"majority",
"make",
"maker",
"makers",
"makes",
"makeup",
"making",
"malawi",
"malaysia",
"maldives",
"male",
"males",
"mali",
"mall",
"malpractice",
"malta",
"mambo",
"man",
"manage",
"managed",
"management",
"manager",
"managers",
"managing",
"manchester",
"mandate",
"mandatory",
"manga",
"manhattan",
"manitoba",
"manner",
"manor",
"manual",
"manually",
"manuals",
"manufacture",
"manufactured",
"manufacturer",
"manufacturers",
"manufacturing",
"many",
"map",
"maple",
"mapping",
"maps",
"mar",
"marathon",
"marble",
"marc",
"march",
"marco",
"marcus",
"mardi",
"margaret",
"margin",
"maria",
"mariah",
"marie",
"marijuana",
"marilyn",
"marina",
"marine",
"mario",
"marion",
"maritime",
"mark",
"marked",
"marker",
"markers",
"market",
"marketing",
"marketplace",
"markets",
"marking",
"marks",
"marriage",
"married",
"marriott",
"mars",
"marsh",
"marshall",
"mart",
"martha",
"martial",
"martin",
"marvel",
"mary",
"maryland",
"mas",
"mask",
"mason",
"mass",
"massachusetts",
"massage",
"massive",
"master",
"mastercard",
"masters",
"mat",
"match",
"matched",
"matches",
"matching",
"mate",
"material",
"materials",
"maternity",
"math",
"mathematical",
"mathematics",
"mating",
"matrix",
"mats",
"matt",
"matter",
"matters",
"matthew",
"mattress",
"mature",
"maui",
"mauritius",
"max",
"maximize",
"maximum",
"may",
"maybe",
"mayor",
"mazda",
"mba",
"mcdonald",
"meal",
"meals",
"mean",
"meaning",
"meaningful",
"means",
"meant",
"meanwhile",
"measure",
"measured",
"measurement",
"measurements",
"measures",
"measuring",
"meat",
"mechanical",
"mechanics",
"mechanism",
"mechanisms",
"med",
"medal",
"media",
"median",
"mediawiki",
"medicaid",
"medical",
"medicare",
"medication",
"medications",
"medicine",
"medicines",
"medieval",
"meditation",
"mediterranean",
"medium",
"medline",
"meet",
"meeting",
"meetings",
"meets",
"meetup",
"mega",
"mel",
"melbourne",
"melissa",
"mem",
"member",
"members",
"membership",
"membrane",
"memo",
"memorabilia",
"memorial",
"memories",
"memory",
"memphis",
"men",
"mens",
"ment",
"mental",
"mention",
"mentioned",
"mentor",
"menu",
"menus",
"mercedes",
"merchandise",
"merchant",
"merchants",
"mercury",
"mercy",
"mere",
"merely",
"merge",
"merger",
"merit",
"merry",
"mesa",
"mesh",
"mess",
"message",
"messages",
"messaging",
"messenger",
"met",
"meta",
"metabolism",
"metadata",
"metal",
"metallic",
"metallica",
"metals",
"meter",
"meters",
"method",
"methodology",
"methods",
"metres",
"metric",
"metro",
"metropolitan",
"mexican",
"mexico",
"meyer",
"mhz",
"mia",
"miami",
"mic",
"mice",
"michael",
"michel",
"michelle",
"michigan",
"micro",
"microphone",
"microsoft",
"microwave",
"mid",
"middle",
"midi",
"midlands",
"midnight",
"midwest",
"might",
"mighty",
"migration",
"mike",
"mil",
"milan",
"mild",
"mile",
"mileage",
"miles",
"military",
"milk",
"mill",
"millennium",
"miller",
"million",
"millions",
"mills",
"milton",
"milwaukee",
"mime",
"min",
"mind",
"minds",
"mine",
"mineral",
"minerals",
"mines",
"mini",
"miniature",
"minimal",
"minimize",
"minimum",
"mining",
"minister",
"ministers",
"ministries",
"ministry",
"minneapolis",
"minnesota",
"minolta",
"minor",
"minority",
"mins",
"mint",
"minus",
"minute",
"minutes",
"miracle",
"mirror",
"mirrors",
"misc",
"miscellaneous",
"miss",
"missed",
"missile",
"missing",
"mission",
"missions",
"mississippi",
"missouri",
"mistake",
"mistakes",
"mistress",
"mit",
"mitchell",
"mitsubishi",
"mix",
"mixed",
"mixer",
"mixing",
"mixture",
"mlb",
"mls",
"mobile",
"mobiles",
"mobility",
"mod",
"mode",
"model",
"modeling",
"modelling",
"models",
"modem",
"modems",
"moderate",
"moderator",
"moderators",
"modern",
"modes",
"modification",
"modifications",
"modified",
"modify",
"mods",
"modular",
"module",
"modules",
"moisture",
"mold",
"moldova",
"molecular",
"molecules",
"mom",
"moment",
"moments",
"momentum",
"moms",
"mon",
"monaco",
"monday",
"monetary",
"money",
"mongolia",
"monica",
"monitor",
"monitored",
"monitoring",
"monitors",
"monkey",
"mono",
"monroe",
"monster",
"monsters",
"montana",
"monte",
"montgomery",
"month",
"monthly",
"months",
"montreal",
"mood",
"moon",
"moore",
"moral",
"more",
"moreover",
"morgan",
"morning",
"morocco",
"morris",
"morrison",
"mortality",
"mortgage",
"mortgages",
"moscow",
"moses",
"moss",
"most",
"mostly",
"motel",
"motels",
"mother",
"motherboard",
"mothers",
"motion",
"motivated",
"motivation",
"motor",
"motorcycle",
"motorcycles",
"motorola",
"motors",
"mount",
"mountain",
"mountains",
"mounted",
"mounting",
"mounts",
"mouse",
"mouth",
"move",
"moved",
"movement",
"movements",
"movers",
"moves",
"movie",
"movies",
"moving",
"mozambique",
"mozilla",
"mpeg",
"mpegs",
"mpg",
"mph",
"mrna",
"mrs",
"msg",
"msgid",
"msgstr",
"msie",
"msn",
"mtv",
"much",
"mud",
"mug",
"multi",
"multimedia",
"multiple",
"mumbai",
"munich",
"municipal",
"municipality",
"murder",
"murphy",
"murray",
"muscle",
"muscles",
"museum",
"museums",
"music",
"musical",
"musician",
"musicians",
"muslim",
"muslims",
"must",
"mustang",
"mutual",
"muze",
"myanmar",
"myers",
"myrtle",
"myself",
"mysimon",
"myspace",
"mysql",
"mysterious",
"mystery",
"myth",
"nail",
"nails",
"naked",
"nam",
"name",
"named",
"namely",
"names",
"namespace",
"namibia",
"nancy",
"nano",
"naples",
"narrative",
"narrow",
"nasa",
"nascar",
"nasdaq",
"nashville",
"nasty",
"nat",
"nathan",
"nation",
"national",
"nationally",
"nations",
"nationwide",
"native",
"nato",
"natural",
"naturally",
"naturals",
"nature",
"naughty",
"nav",
"naval",
"navigate",
"navigation",
"navigator",
"navy",
"nba",
"nbc",
"ncaa",
"near",
"nearby",
"nearest",
"nearly",
"nebraska",
"nec",
"necessarily",
"necessary",
"necessity",
"neck",
"necklace",
"need",
"needed",
"needle",
"needs",
"negative",
"negotiation",
"negotiations",
"neighbor",
"neighborhood",
"neighbors",
"neil",
"neither",
"nelson",
"neo",
"neon",
"nepal",
"nerve",
"nervous",
"nest",
"nested",
"net",
"netherlands",
"netscape",
"network",
"networking",
"networks",
"neural",
"neutral",
"nevada",
"never",
"nevertheless",
"new",
"newark",
"newbie",
"newcastle",
"newer",
"newest",
"newfoundland",
"newly",
"newman",
"newport",
"news",
"newsletter",
"newsletters",
"newspaper",
"newspapers",
"newton",
"next",
"nextel",
"nfl",
"nhl",
"nhs",
"niagara",
"nicaragua",
"nice",
"nicholas",
"nick",
"nickel",
"nickname",
"nicole",
"niger",
"nigeria",
"night",
"nightlife",
"nightmare",
"nights",
"nike",
"nikon",
"nil",
"nine",
"nintendo",
"nirvana",
"nissan",
"nitrogen",
"noble",
"nobody",
"node",
"nodes",
"noise",
"nokia",
"nominated",
"nomination",
"nominations",
"non",
"none",
"nonprofit",
"noon",
"nor",
"norfolk",
"norm",
"normal",
"normally",
"norman",
"north",
"northeast",
"northern",
"northwest",
"norton",
"norway",
"norwegian",
"nose",
"not",
"note",
"notebook",
"notebooks",
"noted",
"notes",
"nothing",
"notice",
"noticed",
"notices",
"notification",
"notifications",
"notified",
"notify",
"notion",
"notre",
"nottingham",
"nov",
"nova",
"novel",
"novels",
"novelty",
"november",
"now",
"nowhere",
"nsw",
"ntsc",
"nuclear",
"nudist",
"nuke",
"null",
"number",
"numbers",
"numeric",
"numerical",
"numerous",
"nurse",
"nursery",
"nurses",
"nursing",
"nut",
"nutrition",
"nutritional",
"nuts",
"nutten",
"nvidia",
"nyc",
"nylon",
"oak",
"oakland",
"oaks",
"oasis",
"obesity",
"obituaries",
"obj",
"object",
"objective",
"objectives",
"objects",
"obligation",
"obligations",
"observation",
"observations",
"observe",
"observed",
"observer",
"obtain",
"obtained",
"obtaining",
"obvious",
"obviously",
"occasion",
"occasional",
"occasionally",
"occasions",
"occupation",
"occupational",
"occupations",
"occupied",
"occur",
"occurred",
"occurrence",
"occurring",
"occurs",
"ocean",
"oclc",
"oct",
"october",
"odd",
"odds",
"oecd",
"oem",
"off",
"offense",
"offensive",
"offer",
"offered",
"offering",
"offerings",
"offers",
"office",
"officer",
"officers",
"offices",
"official",
"officially",
"officials",
"offline",
"offset",
"offshore",
"often",
"ohio",
"oil",
"oils",
"okay",
"oklahoma",
"old",
"older",
"oldest",
"olive",
"oliver",
"olympic",
"olympics",
"olympus",
"omaha",
"oman",
"omega",
"omissions",
"once",
"one",
"ones",
"ongoing",
"onion",
"online",
"only",
"ons",
"ontario",
"onto",
"ooo",
"oops",
"open",
"opened",
"opening",
"openings",
"opens",
"opera",
"operate",
"operated",
"operates",
"operating",
"operation",
"operational",
"operations",
"operator",
"operators",
"opinion",
"opinions",
"opponent",
"opponents",
"opportunities",
"opportunity",
"opposed",
"opposite",
"opposition",
"opt",
"optical",
"optics",
"optimal",
"optimization",
"optimize",
"optimum",
"option",
"optional",
"options",
"oracle",
"oral",
"orange",
"orbit",
"orchestra",
"order",
"ordered",
"ordering",
"orders",
"ordinance",
"ordinary",
"oregon",
"org",
"organ",
"organic",
"organisation",
"organisations",
"organisms",
"organization",
"organizational",
"organizations",
"organize",
"organized",
"organizer",
"organizing",
"oriental",
"orientation",
"oriented",
"origin",
"original",
"originally",
"origins",
"orlando",
"orleans",
"oscar",
"other",
"others",
"otherwise",
"ottawa",
"ought",
"our",
"ours",
"ourselves",
"out",
"outcome",
"outcomes",
"outdoor",
"outdoors",
"outer",
"outlet",
"outlets",
"outline",
"outlined",
"outlook",
"output",
"outputs",
"outreach",
"outside",
"outsourcing",
"outstanding",
"oval",
"oven",
"over",
"overall",
"overcome",
"overhead",
"overnight",
"overseas",
"overview",
"owen",
"own",
"owned",
"owner",
"owners",
"ownership",
"owns",
"oxford",
"oxide",
"oxygen",
"ozone",
"pac",
"pace",
"pacific",
"pack",
"package",
"packages",
"packaging",
"packard",
"packed",
"packet",
"packets",
"packing",
"packs",
"pad",
"pads",
"page",
"pages",
"paid",
"pain",
"painful",
"paint",
"paintball",
"painted",
"painting",
"paintings",
"pair",
"pairs",
"pakistan",
"pal",
"palace",
"pale",
"palestine",
"palestinian",
"palm",
"palmer",
"pam",
"pamela",
"pan",
"panama",
"panasonic",
"panel",
"panels",
"panic",
"pants",
"pantyhose",
"paper",
"paperback",
"paperbacks",
"papers",
"papua",
"par",
"para",
"parade",
"paradise",
"paragraph",
"paragraphs",
"paraguay",
"parallel",
"parameter",
"parameters",
"parcel",
"parent",
"parental",
"parenting",
"parents",
"paris",
"parish",
"park",
"parker",
"parking",
"parks",
"parliament",
"parliamentary",
"part",
"partial",
"partially",
"participant",
"participants",
"participate",
"participated",
"participating",
"participation",
"particle",
"particles",
"particular",
"particularly",
"parties",
"partition",
"partly",
"partner",
"partners",
"partnership",
"partnerships",
"parts",
"party",
"pas",
"paso",
"pass",
"passage",
"passed",
"passenger",
"passengers",
"passes",
"passing",
"passion",
"passive",
"passport",
"password",
"passwords",
"past",
"pasta",
"paste",
"pastor",
"pat",
"patch",
"patches",
"patent",
"patents",
"path",
"pathology",
"paths",
"patient",
"patients",
"patio",
"patricia",
"patrick",
"patrol",
"pattern",
"patterns",
"paul",
"pavilion",
"paxil",
"pay",
"payable",
"payday",
"paying",
"payment",
"payments",
"paypal",
"payroll",
"pays",
"pci",
"pcs",
"pct",
"pda",
"pdas",
"pdf",
"pdt",
"peace",
"peaceful",
"peak",
"pearl",
"peas",
"pediatric",
"pee",
"peeing",
"peer",
"peers",
"pen",
"penalties",
"penalty",
"pencil",
"pendant",
"pending",
"penetration",
"penguin",
"peninsula",
"penn",
"pennsylvania",
"penny",
"pens",
"pension",
"pensions",
"pentium",
"people",
"peoples",
"pepper",
"per",
"perceived",
"percent",
"percentage",
"perception",
"perfect",
"perfectly",
"perform",
"performance",
"performances",
"performed",
"performer",
"performing",
"performs",
"perfume",
"perhaps",
"period",
"periodic",
"periodically",
"periods",
"peripheral",
"peripherals",
"perl",
"permalink",
"permanent",
"permission",
"permissions",
"permit",
"permits",
"permitted",
"perry",
"persian",
"persistent",
"person",
"personal",
"personality",
"personalized",
"personally",
"personals",
"personnel",
"persons",
"perspective",
"perspectives",
"perth",
"peru",
"pest",
"pet",
"pete",
"peter",
"petersburg",
"peterson",
"petite",
"petition",
"petroleum",
"pets",
"pgp",
"phantom",
"pharmaceutical",
"pharmaceuticals",
"pharmacies",
"pharmacology",
"pharmacy",
"phase",
"phases",
"phd",
"phenomenon",
"phentermine",
"phi",
"phil",
"philadelphia",
"philip",
"philippines",
"philips",
"phillips",
"philosophy",
"phoenix",
"phone",
"phones",
"photo",
"photograph",
"photographer",
"photographers",
"photographic",
"photographs",
"photography",
"photos",
"photoshop",
"php",
"phpbb",
"phrase",
"phrases",
"phys",
"physical",
"physically",
"physician",
"physicians",
"physics",
"physiology",
"piano",
"pic",
"pichunter",
"pick",
"picked",
"picking",
"picks",
"pickup",
"picnic",
"pics",
"picture",
"pictures",
"pie",
"piece",
"pieces",
"pierce",
"pierre",
"pig",
"pike",
"pill",
"pillow",
"pills",
"pilot",
"pin",
"pine",
"ping",
"pink",
"pins",
"pioneer",
"pipe",
"pipeline",
"pipes",
"pirates",
"pit",
"pitch",
"pittsburgh",
"pix",
"pixel",
"pixels",
"pizza",
"place",
"placed",
"placement",
"places",
"placing",
"plain",
"plains",
"plaintiff",
"plan",
"plane",
"planes",
"planet",
"planets",
"planned",
"planner",
"planners",
"planning",
"plans",
"plant",
"plants",
"plasma",
"plastic",
"plastics",
"plate",
"plates",
"platform",
"platforms",
"platinum",
"play",
"playback",
"played",
"player",
"players",
"playing",
"playlist",
"plays",
"playstation",
"plaza",
"plc",
"pleasant",
"please",
"pleased",
"pleasure",
"pledge",
"plenty",
"plot",
"plots",
"plug",
"plugin",
"plugins",
"plumbing",
"plus",
"plymouth",
"pmc",
"pmid",
"pocket",
"pockets",
"pod",
"podcast",
"podcasts",
"poem",
"poems",
"poet",
"poetry",
"point",
"pointed",
"pointer",
"pointing",
"points",
"poison",
"pokemon",
"poker",
"poland",
"polar",
"pole",
"police",
"policies",
"policy",
"polish",
"polished",
"political",
"politicians",
"politics",
"poll",
"polls",
"pollution",
"polo",
"poly",
"polyester",
"polymer",
"polyphonic",
"pond",
"pontiac",
"pool",
"pools",
"poor",
"pop",
"pope",
"popular",
"popularity",
"population",
"populations",
"por",
"porcelain",
"pork",
"porsche",
"port",
"portable",
"portal",
"porter",
"portfolio",
"portion",
"portions",
"portland",
"portrait",
"portraits",
"ports",
"portsmouth",
"portugal",
"portuguese",
"pos",
"pose",
"posing",
"position",
"positioning",
"positions",
"positive",
"possess",
"possession",
"possibilities",
"possibility",
"possible",
"possibly",
"post",
"postage",
"postal",
"postcard",
"postcards",
"posted",
"poster",
"posters",
"posting",
"postings",
"postposted",
"posts",
"pot",
"potato",
"potatoes",
"potential",
"potentially",
"potter",
"pottery",
"poultry",
"pound",
"pounds",
"pour",
"poverty",
"powder",
"powell",
"power",
"powered",
"powerful",
"powerpoint",
"powers",
"powerseller",
"ppc",
"ppm",
"practical",
"practice",
"practices",
"practitioner",
"practitioners",
"prague",
"prairie",
"praise",
"pray",
"prayer",
"prayers",
"pre",
"preceding",
"precious",
"precipitation",
"precise",
"precisely",
"precision",
"predict",
"predicted",
"prediction",
"predictions",
"prefer",
"preference",
"preferences",
"preferred",
"prefers",
"prefix",
"pregnancy",
"pregnant",
"preliminary",
"premier",
"premiere",
"premises",
"premium",
"prep",
"prepaid",
"preparation",
"prepare",
"prepared",
"preparing",
"prerequisite",
"prescribed",
"prescription",
"presence",
"present",
"presentation",
"presentations",
"presented",
"presenting",
"presently",
"presents",
"preservation",
"preserve",
"president",
"presidential",
"press",
"pressed",
"pressing",
"pressure",
"preston",
"pretty",
"prev",
"prevent",
"preventing",
"prevention",
"preview",
"previews",
"previous",
"previously",
"price",
"priced",
"prices",
"pricing",
"pride",
"priest",
"primarily",
"primary",
"prime",
"prince",
"princess",
"princeton",
"principal",
"principle",
"principles",
"print",
"printable",
"printed",
"printer",
"printers",
"printing",
"prints",
"prior",
"priorities",
"priority",
"prison",
"prisoner",
"prisoners",
"privacy",
"private",
"privilege",
"privileges",
"prix",
"prize",
"prizes",
"pro",
"probability",
"probably",
"probe",
"problem",
"problems",
"proc",
"procedure",
"procedures",
"proceed",
"proceeding",
"proceedings",
"proceeds",
"process",
"processed",
"processes",
"processing",
"processor",
"processors",
"procurement",
"produce",
"produced",
"producer",
"producers",
"produces",
"producing",
"product",
"production",
"productions",
"productive",
"productivity",
"products",
"profession",
"professional",
"professionals",
"professor",
"profile",
"profiles",
"profit",
"profits",
"program",
"programme",
"programmer",
"programmers",
"programmes",
"programming",
"programs",
"progress",
"progressive",
"prohibited",
"project",
"projected",
"projection",
"projector",
"projectors",
"projects",
"prominent",
"promise",
"promised",
"promises",
"promising",
"promo",
"promote",
"promoted",
"promotes",
"promoting",
"promotion",
"promotional",
"promotions",
"prompt",
"promptly",
"proof",
"propecia",
"proper",
"properly",
"properties",
"property",
"prophet",
"proportion",
"proposal",
"proposals",
"propose",
"proposed",
"proposition",
"proprietary",
"pros",
"prospect",
"prospective",
"prospects",
"prostate",
"prostores",
"prot",
"protect",
"protected",
"protecting",
"protection",
"protective",
"protein",
"proteins",
"protest",
"protocol",
"protocols",
"prototype",
"proud",
"proudly",
"prove",
"proved",
"proven",
"provide",
"provided",
"providence",
"provider",
"providers",
"provides",
"providing",
"province",
"provinces",
"provincial",
"provision",
"provisions",
"proxy",
"prozac",
"psi",
"psp",
"pst",
"psychiatry",
"psychological",
"psychology",
"pts",
"pty",
"pub",
"public",
"publication",
"publications",
"publicity",
"publicly",
"publish",
"published",
"publisher",
"publishers",
"publishing",
"pubmed",
"pubs",
"puerto",
"pull",
"pulled",
"pulling",
"pulse",
"pump",
"pumps",
"punch",
"punishment",
"punk",
"pupils",
"puppy",
"purchase",
"purchased",
"purchases",
"purchasing",
"pure",
"purple",
"purpose",
"purposes",
"purse",
"pursuant",
"pursue",
"pursuit",
"push",
"pushed",
"pushing",
"put",
"puts",
"putting",
"puzzle",
"puzzles",
"pvc",
"python",
"qatar",
"qld",
"qty",
"quad",
"qualification",
"qualifications",
"qualified",
"qualify",
"qualifying",
"qualities",
"quality",
"quantitative",
"quantities",
"quantity",
"quantum",
"quarter",
"quarterly",
"quarters",
"que",
"quebec",
"queen",
"queens",
"queensland",
"queries",
"query",
"quest",
"question",
"questionnaire",
"questions",
"queue",
"qui",
"quick",
"quickly",
"quiet",
"quilt",
"quit",
"quite",
"quiz",
"quizzes",
"quotations",
"quote",
"quoted",
"quotes",
"rabbit",
"race",
"races",
"rachel",
"racial",
"racing",
"rack",
"racks",
"radar",
"radiation",
"radical",
"radio",
"radios",
"radius",
"rage",
"raid",
"rail",
"railroad",
"railway",
"rain",
"rainbow",
"raise",
"raised",
"raises",
"raising",
"raleigh",
"rally",
"ralph",
"ram",
"ran",
"ranch",
"rand",
"random",
"randy",
"range",
"ranger",
"rangers",
"ranges",
"ranging",
"rank",
"ranked",
"ranking",
"rankings",
"ranks",
"rap",
"rapid",
"rapidly",
"rapids",
"rare",
"rarely",
"rat",
"rate",
"rated",
"rates",
"rather",
"rating",
"ratings",
"ratio",
"rational",
"ratios",
"rats",
"raw",
"ray",
"raymond",
"rays",
"rca",
"reach",
"reached",
"reaches",
"reaching",
"reaction",
"reactions",
"read",
"reader",
"readers",
"readily",
"reading",
"readings",
"reads",
"ready",
"real",
"realistic",
"reality",
"realize",
"realized",
"really",
"realm",
"realtor",
"realtors",
"realty",
"rear",
"reason",
"reasonable",
"reasonably",
"reasoning",
"reasons",
"rebate",
"rebates",
"rebecca",
"rebel",
"rebound",
"rec",
"recall",
"receipt",
"receive",
"received",
"receiver",
"receivers",
"receives",
"receiving",
"recent",
"recently",
"reception",
"receptor",
"receptors",
"recipe",
"recipes",
"recipient",
"recipients",
"recognition",
"recognize",
"recognized",
"recommend",
"recommendation",
"recommendations",
"recommended",
"recommends",
"reconstruction",
"record",
"recorded",
"recorder",
"recorders",
"recording",
"recordings",
"records",
"recover",
"recovered",
"recovery",
"recreation",
"recreational",
"recruiting",
"recruitment",
"recycling",
"red",
"redeem",
"redhead",
"reduce",
"reduced",
"reduces",
"reducing",
"reduction",
"reductions",
"reed",
"reef",
"reel",
"ref",
"refer",
"reference",
"referenced",
"references",
"referral",
"referrals",
"referred",
"referring",
"refers",
"refinance",
"refine",
"refined",
"reflect",
"reflected",
"reflection",
"reflections",
"reflects",
"reform",
"reforms",
"refresh",
"refrigerator",
"refugees",
"refund",
"refurbished",
"refuse",
"refused",
"reg",
"regard",
"regarded",
"regarding",
"regardless",
"regards",
"reggae",
"regime",
"region",
"regional",
"regions",
"register",
"registered",
"registrar",
"registration",
"registry",
"regression",
"regular",
"regularly",
"regulated",
"regulation",
"regulations",
"regulatory",
"rehab",
"rehabilitation",
"reid",
"reject",
"rejected",
"relate",
"related",
"relates",
"relating",
"relation",
"relations",
"relationship",
"relationships",
"relative",
"relatively",
"relatives",
"relax",
"relaxation",
"relay",
"release",
"released",
"releases",
"relevance",
"relevant",
"reliability",
"reliable",
"reliance",
"relief",
"religion",
"religions",
"religious",
"reload",
"relocation",
"rely",
"relying",
"remain",
"remainder",
"remained",
"remaining",
"remains",
"remark",
"remarkable",
"remarks",
"remedies",
"remedy",
"remember",
"remembered",
"remind",
"reminder",
"remix",
"remote",
"removable",
"removal",
"remove",
"removed",
"removing",
"renaissance",
"render",
"rendered",
"rendering",
"renew",
"renewable",
"renewal",
"reno",
"rent",
"rental",
"rentals",
"rep",
"repair",
"repairs",
"repeat",
"repeated",
"replace",
"replaced",
"replacement",
"replacing",
"replica",
"replication",
"replied",
"replies",
"reply",
"report",
"reported",
"reporter",
"reporters",
"reporting",
"reports",
"repository",
"represent",
"representation",
"representations",
"representative",
"representatives",
"represented",
"representing",
"represents",
"reprint",
"reprints",
"reproduce",
"reproduced",
"reproduction",
"reproductive",
"republic",
"republican",
"republicans",
"reputation",
"request",
"requested",
"requesting",
"requests",
"require",
"required",
"requirement",
"requirements",
"requires",
"requiring",
"res",
"rescue",
"research",
"researcher",
"researchers",
"reseller",
"reservation",
"reservations",
"reserve",
"reserved",
"reserves",
"reservoir",
"reset",
"residence",
"resident",
"residential",
"residents",
"resist",
"resistance",
"resistant",
"resolution",
"resolutions",
"resolve",
"resolved",
"resort",
"resorts",
"resource",
"resources",
"respect",
"respected",
"respective",
"respectively",
"respiratory",
"respond",
"responded",
"respondent",
"respondents",
"responding",
"response",
"responses",
"responsibilities",
"responsibility",
"responsible",
"rest",
"restaurant",
"restaurants",
"restoration",
"restore",
"restored",
"restrict",
"restricted",
"restriction",
"restrictions",
"restructuring",
"result",
"resulted",
"resulting",
"results",
"resume",
"resumes",
"retail",
"retailer",
"retailers",
"retain",
"retained",
"retention",
"retired",
"retirement",
"retreat",
"retrieval",
"retrieve",
"retrieved",
"retro",
"return",
"returned",
"returning",
"returns",
"reunion",
"reuters",
"rev",
"reveal",
"revealed",
"reveals",
"revelation",
"revenge",
"revenue",
"revenues",
"reverse",
"review",
"reviewed",
"reviewer",
"reviewing",
"reviews",
"revised",
"revision",
"revisions",
"revolution",
"revolutionary",
"reward",
"rewards",
"reynolds",
"rfc",
"rhode",
"rhythm",
"ribbon",
"rica",
"rice",
"rich",
"richard",
"richards",
"richardson",
"richmond",
"rick",
"ricky",
"rico",
"rid",
"ride",
"rider",
"riders",
"rides",
"ridge",
"riding",
"right",
"rights",
"rim",
"ring",
"rings",
"ringtone",
"ringtones",
"rio",
"rip",
"ripe",
"rise",
"rising",
"risk",
"risks",
"river",
"rivers",
"riverside",
"rna",
"road",
"roads",
"rob",
"robbie",
"robert",
"roberts",
"robertson",
"robin",
"robinson",
"robot",
"robots",
"robust",
"rochester",
"rock",
"rocket",
"rocks",
"rocky",
"rod",
"roger",
"rogers",
"roland",
"role",
"roles",
"roll",
"rolled",
"roller",
"rolling",
"rolls",
"rom",
"roman",
"romance",
"romania",
"romantic",
"rome",
"ron",
"ronald",
"roof",
"room",
"roommate",
"roommates",
"rooms",
"root",
"roots",
"rope",
"rosa",
"rose",
"roses",
"ross",
"roster",
"rotary",
"rotation",
"rouge",
"rough",
"roughly",
"roulette",
"round",
"rounds",
"route",
"router",
"routers",
"routes",
"routine",
"routines",
"routing",
"rover",
"row",
"rows",
"roy",
"royal",
"royalty",
"rpg",
"rpm",
"rrp",
"rss",
"rubber",
"ruby",
"rug",
"rugby",
"rugs",
"rule",
"ruled",
"rules",
"ruling",
"run",
"runner",
"running",
"runs",
"runtime",
"rural",
"rush",
"russell",
"russia",
"russian",
"ruth",
"rwanda",
"ryan",
"sacramento",
"sacred",
"sacrifice",
"sad",
"saddam",
"safari",
"safe",
"safely",
"safer",
"safety",
"sage",
"sagem",
"said",
"sail",
"sailing",
"saint",
"saints",
"sake",
"salad",
"salaries",
"salary",
"sale",
"salem",
"sales",
"sally",
"salmon",
"salon",
"salt",
"salvador",
"salvation",
"sam",
"samba",
"same",
"samoa",
"sample",
"samples",
"sampling",
"samsung",
"samuel",
"san",
"sand",
"sandra",
"sandwich",
"sandy",
"sans",
"santa",
"sanyo",
"sao",
"sap",
"sapphire",
"sara",
"sarah",
"sas",
"saskatchewan",
"sat",
"satellite",
"satin",
"satisfaction",
"satisfactory",
"satisfied",
"satisfy",
"saturday",
"saturn",
"sauce",
"saudi",
"savage",
"savannah",
"save",
"saved",
"saver",
"saves",
"saving",
"savings",
"saw",
"say",
"saying",
"says",
"sbjct",
"scale",
"scales",
"scan",
"scanned",
"scanner",
"scanners",
"scanning",
"scared",
"scary",
"scenario",
"scenarios",
"scene",
"scenes",
"scenic",
"schedule",
"scheduled",
"schedules",
"scheduling",
"schema",
"scheme",
"schemes",
"scholar",
"scholars",
"scholarship",
"scholarships",
"school",
"schools",
"sci",
"science",
"sciences",
"scientific",
"scientist",
"scientists",
"scoop",
"scope",
"score",
"scored",
"scores",
"scoring",
"scotia",
"scotland",
"scott",
"scottish",
"scout",
"scratch",
"screen",
"screening",
"screens",
"screensaver",
"screensavers",
"screenshot",
"screenshots",
"screw",
"script",
"scripting",
"scripts",
"scroll",
"scsi",
"scuba",
"sculpture",
"sea",
"seafood",
"seal",
"sealed",
"sean",
"search",
"searched",
"searches",
"searching",
"seas",
"season",
"seasonal",
"seasons",
"seat",
"seating",
"seats",
"seattle",
"sec",
"second",
"secondary",
"seconds",
"secret",
"secretariat",
"secretary",
"secrets",
"section",
"sections",
"sector",
"sectors",
"secure",
"secured",
"securely",
"securities",
"security",
"see",
"seed",
"seeds",
"seeing",
"seek",
"seeker",
"seekers",
"seeking",
"seeks",
"seem",
"seemed",
"seems",
"seen",
"sees",
"sega",
"segment",
"segments",
"select",
"selected",
"selecting",
"selection",
"selections",
"selective",
"self",
"sell",
"seller",
"sellers",
"selling",
"sells",
"semester",
"semi",
"semiconductor",
"seminar",
"seminars",
"sen",
"senate",
"senator",
"senators",
"send",
"sender",
"sending",
"sends",
"senegal",
"senior",
"seniors",
"sense",
"sensitive",
"sensitivity",
"sensor",
"sensors",
"sent",
"sentence",
"sentences",
"seo",
"sep",
"separate",
"separated",
"separately",
"separation",
"sept",
"september",
"seq",
"sequence",
"sequences",
"ser",
"serbia",
"serial",
"series",
"serious",
"seriously",
"serum",
"serve",
"served",
"server",
"servers",
"serves",
"service",
"services",
"serving",
"session",
"sessions",
"set",
"sets",
"setting",
"settings",
"settle",
"settled",
"settlement",
"setup",
"seven",
"seventh",
"several",
"severe",
"sewing",
"sexual",
"sexuality",
"sexually",
"shade",
"shades",
"shadow",
"shadows",
"shaft",
"shake",
"shakespeare",
"shakira",
"shall",
"shame",
"shanghai",
"shannon",
"shape",
"shaped",
"shapes",
"share",
"shared",
"shareholders",
"shares",
"shareware",
"sharing",
"shark",
"sharon",
"sharp",
"shaved",
"shaw",
"she",
"shed",
"sheep",
"sheer",
"sheet",
"sheets",
"sheffield",
"shelf",
"shell",
"shelter",
"shepherd",
"sheriff",
"sherman",
"shield",
"shift",
"shine",
"ship",
"shipment",
"shipments",
"shipped",
"shipping",
"ships",
"shirt",
"shirts",
"shock",
"shoe",
"shoes",
"shoot",
"shooting",
"shop",
"shopper",
"shoppers",
"shopping",
"shops",
"shopzilla",
"shore",
"short",
"shortcuts",
"shorter",
"shortly",
"shorts",
"shot",
"shots",
"should",
"shoulder",
"show",
"showcase",
"showed",
"shower",
"showers",
"showing",
"shown",
"shows",
"showtimes",
"shut",
"shuttle",
"sic",
"sick",
"side",
"sides",
"sie",
"siemens",
"sierra",
"sig",
"sight",
"sigma",
"sign",
"signal",
"signals",
"signature",
"signatures",
"signed",
"significance",
"significant",
"significantly",
"signing",
"signs",
"signup",
"silence",
"silent",
"silicon",
"silk",
"silly",
"silver",
"sim",
"similar",
"similarly",
"simon",
"simple",
"simplified",
"simply",
"simpson",
"simpsons",
"sims",
"simulation",
"simulations",
"simultaneously",
"sin",
"since",
"sing",
"singapore",
"singer",
"singh",
"singing",
"single",
"singles",
"sink",
"sip",
"sir",
"sister",
"sisters",
"sit",
"site",
"sitemap",
"sites",
"sitting",
"situated",
"situation",
"situations",
"six",
"sixth",
"size",
"sized",
"sizes",
"skating",
"ski",
"skiing",
"skill",
"skilled",
"skills",
"skin",
"skins",
"skip",
"skirt",
"skirts",
"sku",
"sky",
"skype",
"slave",
"sleep",
"sleeping",
"sleeps",
"sleeve",
"slide",
"slides",
"slideshow",
"slight",
"slightly",
"slim",
"slip",
"slope",
"slot",
"slots",
"slovak",
"slovakia",
"slovenia",
"slow",
"slowly",
"small",
"smaller",
"smallest",
"smart",
"smell",
"smile",
"smilies",
"smith",
"smithsonian",
"smoke",
"smoking",
"smooth",
"sms",
"smtp",
"snake",
"snap",
"snapshot",
"snow",
"snowboard",
"soa",
"soap",
"soc",
"soccer",
"social",
"societies",
"society",
"sociology",
"socket",
"socks",
"sodium",
"sofa",
"soft",
"softball",
"software",
"soil",
"sol",
"solar",
"solaris",
"sold",
"soldier",
"soldiers",
"sole",
"solely",
"solid",
"solo",
"solomon",
"solution",
"solutions",
"solve",
"solved",
"solving",
"soma",
"somalia",
"some",
"somebody",
"somehow",
"someone",
"somerset",
"something",
"sometimes",
"somewhat",
"somewhere",
"son",
"song",
"songs",
"sonic",
"sons",
"sony",
"soon",
"soonest",
"sophisticated",
"sorry",
"sort",
"sorted",
"sorts",
"sought",
"soul",
"souls",
"sound",
"sounds",
"soundtrack",
"soup",
"source",
"sources",
"south",
"southampton",
"southeast",
"southern",
"southwest",
"soviet",
"sox",
"spa",
"space",
"spaces",
"spain",
"spam",
"span",
"spanish",
"spank",
"spanking",
"sparc",
"spare",
"spas",
"spatial",
"speak",
"speaker",
"speakers",
"speaking",
"speaks",
"spears",
"spec",
"special",
"specialist",
"specialists",
"specialized",
"specializing",
"specially",
"specials",
"specialties",
"specialty",
"species",
"specific",
"specifically",
"specification",
"specifications",
"specifics",
"specified",
"specifies",
"specify",
"specs",
"spectacular",
"spectrum",
"speech",
"speeches",
"speed",
"speeds",
"spell",
"spelling",
"spencer",
"spend",
"spending",
"spent",
"sperm",
"sphere",
"spice",
"spider",
"spies",
"spin",
"spine",
"spirit",
"spirits",
"spiritual",
"spirituality",
"split",
"spoke",
"spoken",
"spokesman",
"sponsor",
"sponsored",
"sponsors",
"sponsorship",
"sport",
"sporting",
"sports",
"spot",
"spotlight",
"spots",
"spouse",
"spray",
"spread",
"spreading",
"spring",
"springer",
"springfield",
"springs",
"sprint",
"spy",
"spyware",
"sql",
"squad",
"square",
"src",
"sri",
"ssl",
"stability",
"stable",
"stack",
"stadium",
"staff",
"staffing",
"stage",
"stages",
"stainless",
"stake",
"stakeholders",
"stamp",
"stamps",
"stan",
"stand",
"standard",
"standards",
"standing",
"standings",
"stands",
"stanford",
"stanley",
"star",
"starring",
"stars",
"starsmerchant",
"start",
"started",
"starter",
"starting",
"starts",
"startup",
"stat",
"state",
"stated",
"statement",
"statements",
"states",
"statewide",
"static",
"stating",
"station",
"stationery",
"stations",
"statistical",
"statistics",
"stats",
"status",
"statute",
"statutes",
"statutory",
"stay",
"stayed",
"staying",
"stays",
"std",
"ste",
"steady",
"steal",
"steam",
"steel",
"steering",
"stem",
"step",
"stephanie",
"stephen",
"steps",
"stereo",
"sterling",
"steve",
"steven",
"stevens",
"stewart",
"stick",
"sticker",
"stickers",
"sticks",
"sticky",
"still",
"stock",
"stockholm",
"stockings",
"stocks",
"stolen",
"stomach",
"stone",
"stones",
"stood",
"stop",
"stopped",
"stopping",
"stops",
"storage",
"store",
"stored",
"stores",
"stories",
"storm",
"story",
"str",
"straight",
"strain",
"strand",
"strange",
"stranger",
"strap",
"strategic",
"strategies",
"strategy",
"stream",
"streaming",
"streams",
"street",
"streets",
"strength",
"strengthen",
"strengthening",
"strengths",
"stress",
"stretch",
"strict",
"strictly",
"strike",
"strikes",
"striking",
"string",
"strings",
"strip",
"stripes",
"strips",
"stroke",
"strong",
"stronger",
"strongly",
"struck",
"struct",
"structural",
"structure",
"structured",
"structures",
"struggle",
"stuart",
"stuck",
"stud",
"student",
"students",
"studied",
"studies",
"studio",
"studios",
"study",
"studying",
"stuff",
"stuffed",
"stunning",
"stupid",
"style",
"styles",
"stylish",
"stylus",
"sub",
"subaru",
"subcommittee",
"subdivision",
"subject",
"subjective",
"subjects",
"sublime",
"sublimedirectory",
"submission",
"submissions",
"submit",
"submitted",
"submitting",
"subscribe",
"subscriber",
"subscribers",
"subscription",
"subscriptions",
"subsection",
"subsequent",
"subsequently",
"subsidiaries",
"subsidiary",
"substance",
"substances",
"substantial",
"substantially",
"substitute",
"subtle",
"suburban",
"succeed",
"success",
"successful",
"successfully",
"such",
"sucking",
"sudan",
"sudden",
"suddenly",
"sue",
"suffer",
"suffered",
"suffering",
"sufficient",
"sufficiently",
"sugar",
"suggest",
"suggested",
"suggesting",
"suggestion",
"suggestions",
"suggests",
"suicide",
"suit",
"suitable",
"suite",
"suited",
"suites",
"suits",
"sullivan",
"sum",
"summaries",
"summary",
"summer",
"summit",
"sun",
"sunday",
"sunglasses",
"sunny",
"sunrise",
"sunset",
"sunshine",
"super",
"superb",
"superintendent",
"superior",
"supervision",
"supervisor",
"supervisors",
"supplement",
"supplemental",
"supplements",
"supplied",
"supplier",
"suppliers",
"supplies",
"supply",
"support",
"supported",
"supporters",
"supporting",
"supports",
"suppose",
"supposed",
"supreme",
"sur",
"sure",
"surely",
"surf",
"surface",
"surfaces",
"surfing",
"surge",
"surgeon",
"surgeons",
"surgery",
"surgical",
"surname",
"surplus",
"surprise",
"surprised",
"surprising",
"surrey",
"surround",
"surrounded",
"surrounding",
"surveillance",
"survey",
"surveys",
"survival",
"survive",
"survivor",
"survivors",
"susan",
"suse",
"suspect",
"suspected",
"suspended",
"suspension",
"sussex",
"sustainability",
"sustainable",
"sustained",
"suzuki",
"swap",
"swaziland",
"sweden",
"swedish",
"sweet",
"swift",
"swim",
"swimming",
"swing",
"swingers",
"swiss",
"switch",
"switched",
"switches",
"switching",
"switzerland",
"sword",
"sydney",
"symantec",
"symbol",
"symbols",
"sympathy",
"symphony",
"symposium",
"symptoms",
"sync",
"syndicate",
"syndication",
"syndrome",
"synopsis",
"syntax",
"synthesis",
"synthetic",
"syracuse",
"syria",
"sys",
"system",
"systematic",
"systems",
"tab",
"table",
"tables",
"tablet",
"tablets",
"tabs",
"tackle",
"tactics",
"tag",
"tagged",
"tags",
"tahoe",
"tail",
"taiwan",
"take",
"taken",
"takes",
"taking",
"tale",
"talent",
"talented",
"tales",
"talk",
"talked",
"talking",
"talks",
"tall",
"tamil",
"tampa",
"tan",
"tank",
"tanks",
"tanzania",
"tap",
"tape",
"tapes",
"tar",
"target",
"targeted",
"targets",
"tariff",
"task",
"tasks",
"taste",
"tattoo",
"taught",
"tax",
"taxation",
"taxes",
"taxi",
"taylor",
"tba",
"tcp",
"tea",
"teach",
"teacher",
"teachers",
"teaches",
"teaching",
"team",
"teams",
"tear",
"tears",
"tech",
"technical",
"technician",
"technique",
"techniques",
"techno",
"technological",
"technologies",
"technology",
"techrepublic",
"ted",
"teddy",
"tee",
"teen",
"teenage",
"teens",
"teeth",
"tel",
"telecharger",
"telecom",
"telecommunications",
"telephone",
"telephony",
"telescope",
"television",
"televisions",
"tell",
"telling",
"tells",
"temp",
"temperature",
"temperatures",
"template",
"templates",
"temple",
"temporal",
"temporarily",
"temporary",
"ten",
"tenant",
"tend",
"tender",
"tennessee",
"tennis",
"tension",
"tent",
"term",
"terminal",
"terminals",
"termination",
"terminology",
"terms",
"terrace",
"terrain",
"terrible",
"territories",
"territory",
"terror",
"terrorism",
"terrorist",
"terrorists",
"terry",
"test",
"testament",
"tested",
"testimonials",
"testimony",
"testing",
"tests",
"tex",
"texas",
"text",
"textbook",
"textbooks",
"textile",
"textiles",
"texts",
"texture",
"tft",
"tgp",
"thai",
"thailand",
"than",
"thank",
"thanks",
"thanksgiving",
"that",
"thats",
"the",
"theater",
"theaters",
"theatre",
"thee",
"theft",
"thehun",
"their",
"them",
"theme",
"themes",
"themselves",
"then",
"theology",
"theorem",
"theoretical",
"theories",
"theory",
"therapeutic",
"therapist",
"therapy",
"there",
"thereafter",
"thereby",
"therefore",
"thereof",
"thermal",
"thesaurus",
"these",
"thesis",
"theta",
"they",
"thick",
"thickness",
"thin",
"thing",
"things",
"think",
"thinking",
"thinkpad",
"thinks",
"third",
"thirty",
"this",
"thomas",
"thompson",
"thomson",
"thong",
"thongs",
"thorough",
"thoroughly",
"those",
"thou",
"though",
"thought",
"thoughts",
"thousand",
"thousands",
"thread",
"threaded",
"threads",
"threat",
"threatened",
"threatening",
"threats",
"three",
"threshold",
"thriller",
"throat",
"through",
"throughout",
"throw",
"throwing",
"thrown",
"throws",
"thru",
"thu",
"thumb",
"thumbnail",
"thumbnails",
"thumbs",
"thumbzilla",
"thunder",
"thursday",
"thus",
"thy",
"ticket",
"tickets",
"tide",
"tie",
"tied",
"tier",
"ties",
"tiffany",
"tiger",
"tigers",
"tight",
"til",
"tile",
"tiles",
"till",
"tim",
"timber",
"time",
"timeline",
"timely",
"timer",
"times",
"timing",
"timothy",
"tin",
"tiny",
"tion",
"tions",
"tip",
"tips",
"tire",
"tired",
"tires",
"tissue",
"titanium",
"titans",
"title",
"titled",
"titles",
"titten",
"tmp",
"tobacco",
"tobago",
"today",
"todd",
"toddler",
"toe",
"together",
"toilet",
"token",
"tokyo",
"told",
"tolerance",
"toll",
"tom",
"tomato",
"tomatoes",
"tommy",
"tomorrow",
"ton",
"tone",
"toner",
"tones",
"tongue",
"tonight",
"tons",
"tony",
"too",
"took",
"tool",
"toolbar",
"toolbox",
"toolkit",
"tools",
"tooth",
"top",
"topic",
"topics",
"tops",
"toronto",
"torture",
"toshiba",
"total",
"totally",
"totals",
"touch",
"touched",
"tough",
"tour",
"touring",
"tourism",
"tourist",
"tournament",
"tournaments",
"tours",
"toward",
"towards",
"tower",
"towers",
"town",
"towns",
"township",
"toxic",
"toy",
"toyota",
"toys",
"trace",
"track",
"trackback",
"trackbacks",
"tracked",
"tracker",
"tracking",
"tracks",
"tract",
"tractor",
"tracy",
"trade",
"trademark",
"trademarks",
"trader",
"trades",
"trading",
"tradition",
"traditional",
"traditions",
"traffic",
"tragedy",
"trail",
"trailer",
"trailers",
"trails",
"train",
"trained",
"trainer",
"trainers",
"training",
"trains",
"tramadol",
"trance",
"trans",
"transaction",
"transactions",
"transcript",
"transcription",
"transcripts",
"transexual",
"transexuales",
"transfer",
"transferred",
"transfers",
"transform",
"transformation",
"transit",
"transition",
"translate",
"translated",
"translation",
"translations",
"translator",
"transmission",
"transmit",
"transmitted",
"transparency",
"transparent",
"transport",
"transportation",
"transsexual",
"trap",
"trash",
"trauma",
"travel",
"traveler",
"travelers",
"traveling",
"traveller",
"travelling",
"travels",
"travesti",
"travis",
"tray",
"treasure",
"treasurer",
"treasures",
"treasury",
"treat",
"treated",
"treating",
"treatment",
"treatments",
"treaty",
"tree",
"trees",
"trek",
"trembl",
"tremendous",
"trend",
"trends",
"treo",
"tri",
"trial",
"trials",
"triangle",
"tribal",
"tribe",
"tribes",
"tribunal",
"tribune",
"tribute",
"trick",
"tricks",
"tried",
"tries",
"trigger",
"trim",
"trinidad",
"trinity",
"trio",
"trip",
"tripadvisor",
"triple",
"trips",
"triumph",
"trivia",
"troops",
"tropical",
"trouble",
"troubleshooting",
"trout",
"troy",
"truck",
"trucks",
"true",
"truly",
"trunk",
"trust",
"trusted",
"trustee",
"trustees",
"trusts",
"truth",
"try",
"trying",
"tsunami",
"tub",
"tube",
"tubes",
"tucson",
"tue",
"tuesday",
"tuition",
"tulsa",
"tumor",
"tune",
"tuner",
"tunes",
"tuning",
"tunisia",
"tunnel",
"turbo",
"turkey",
"turkish",
"turn",
"turned",
"turner",
"turning",
"turns",
"turtle",
"tutorial",
"tutorials",
"tvs",
"twelve",
"twenty",
"twice",
"twiki",
"twin",
"twins",
"twist",
"twisted",
"two",
"tyler",
"type",
"types",
"typical",
"typically",
"typing",
"uganda",
"ugly",
"ukraine",
"ultimate",
"ultimately",
"ultra",
"ultram",
"una",
"unable",
"unauthorized",
"unavailable",
"uncertainty",
"uncle",
"und",
"undefined",
"under",
"undergraduate",
"underground",
"underlying",
"understand",
"understanding",
"understood",
"undertake",
"undertaken",
"underwear",
"undo",
"une",
"unemployment",
"unexpected",
"unfortunately",
"uni",
"unified",
"uniform",
"union",
"unions",
"uniprotkb",
"unique",
"unit",
"united",
"units",
"unity",
"univ",
"universal",
"universe",
"universities",
"university",
"unix",
"unknown",
"unless",
"unlike",
"unlikely",
"unlimited",
"unlock",
"unnecessary",
"unsigned",
"unsubscribe",
"until",
"untitled",
"unto",
"unusual",
"unwrap",
"upc",
"upcoming",
"update",
"updated",
"updates",
"updating",
"upgrade",
"upgrades",
"upgrading",
"upload",
"uploaded",
"upon",
"upper",
"ups",
"upset",
"urban",
"urge",
"urgent",
"uri",
"url",
"urls",
"uruguay",
"urw",
"usa",
"usage",
"usb",
"usc",
"usd",
"usda",
"use",
"used",
"useful",
"user",
"username",
"users",
"uses",
"usgs",
"using",
"usps",
"usr",
"usual",
"usually",
"utah",
"utc",
"utilities",
"utility",
"utilization",
"utilize",
"utils",
"uzbekistan",
"vacancies",
"vacation",
"vacations",
"vaccine",
"vacuum",
"val",
"valentine",
"valid",
"validation",
"validity",
"valium",
"valley",
"valuable",
"valuation",
"value",
"valued",
"values",
"valve",
"valves",
"vampire",
"van",
"vancouver",
"vanilla",
"var",
"variable",
"variables",
"variance",
"variation",
"variations",
"varied",
"varies",
"varieties",
"variety",
"various",
"vary",
"varying",
"vast",
"vat",
"vatican",
"vault",
"vbulletin",
"vcr",
"vector",
"vegas",
"vegetable",
"vegetables",
"vegetarian",
"vegetation",
"vehicle",
"vehicles",
"velocity",
"velvet",
"vendor",
"vendors",
"venezuela",
"venice",
"venture",
"ventures",
"venue",
"venues",
"ver",
"verbal",
"verde",
"verification",
"verified",
"verify",
"verizon",
"vermont",
"vernon",
"verse",
"version",
"versions",
"versus",
"vertex",
"vertical",
"very",
"verzeichnis",
"vessel",
"vessels",
"veteran",
"veterans",
"veterinary",
"vhs",
"via",
"vic",
"vice",
"victim",
"victims",
"victor",
"victoria",
"victorian",
"victory",
"vid",
"video",
"videos",
"vids",
"vienna",
"vietnam",
"vietnamese",
"view",
"viewed",
"viewer",
"viewers",
"viewing",
"viewpicture",
"views",
"vii",
"viii",
"viking",
"villa",
"village",
"villages",
"villas",
"vincent",
"vintage",
"vinyl",
"violation",
"violations",
"violence",
"violent",
"violin",
"vip",
"viral",
"virgin",
"virginia",
"virtual",
"virtually",
"virtue",
"virus",
"viruses",
"visa",
"visibility",
"visible",
"vision",
"visit",
"visited",
"visiting",
"visitor",
"visitors",
"visits",
"vista",
"visual",
"vital",
"vitamin",
"vitamins",
"vocabulary",
"vocal",
"vocals",
"vocational",
"voice",
"voices",
"void",
"voip",
"vol",
"volkswagen",
"volleyball",
"volt",
"voltage",
"volume",
"volumes",
"voluntary",
"volunteer",
"volunteers",
"volvo",
"von",
"vote",
"voted",
"voters",
"votes",
"voting",
"voyeurweb",
"voyuer",
"vpn",
"vsnet",
"vulnerability",
"vulnerable",
"wage",
"wages",
"wagner",
"wagon",
"wait",
"waiting",
"waiver",
"wake",
"wal",
"wales",
"walk",
"walked",
"walker",
"walking",
"walks",
"wall",
"wallace",
"wallet",
"wallpaper",
"wallpapers",
"walls",
"walnut",
"walt",
"walter",
"wan",
"wanna",
"want",
"wanted",
"wanting",
"wants",
"war",
"warcraft",
"ward",
"ware",
"warehouse",
"warm",
"warming",
"warned",
"warner",
"warning",
"warnings",
"warrant",
"warranties",
"warranty",
"warren",
"warrior",
"warriors",
"wars",
"was",
"wash",
"washer",
"washing",
"washington",
"waste",
"watch",
"watched",
"watches",
"watching",
"water",
"waterproof",
"waters",
"watershed",
"watson",
"watt",
"watts",
"wav",
"wave",
"waves",
"wax",
"way",
"wayne",
"ways",
"weak",
"wealth",
"weapon",
"weapons",
"wear",
"wearing",
"weather",
"web",
"webcam",
"webcams",
"webcast",
"weblog",
"weblogs",
"webmaster",
"webmasters",
"webpage",
"webshots",
"website",
"websites",
"webster",
"wed",
"wedding",
"weddings",
"wednesday",
"weed",
"week",
"weekend",
"weekends",
"weekly",
"weeks",
"weight",
"weighted",
"weights",
"weird",
"welcome",
"welding",
"welfare",
"well",
"wellington",
"wellness",
"wells",
"welsh",
"wendy",
"went",
"were",
"wesley",
"west",
"western",
"westminster",
"wet",
"whale",
"what",
"whatever",
"whats",
"wheat",
"wheel",
"wheels",
"when",
"whenever",
"where",
"whereas",
"wherever",
"whether",
"which",
"while",
"whilst",
"white",
"who",
"whole",
"wholesale",
"whom",
"whose",
"why",
"wichita",
"wicked",
"wide",
"widely",
"wider",
"widescreen",
"widespread",
"width",
"wife",
"wifi",
"wiki",
"wikipedia",
"wild",
"wilderness",
"wildlife",
"wiley",
"will",
"william",
"williams",
"willing",
"willow",
"wilson",
"win",
"wind",
"window",
"windows",
"winds",
"windsor",
"wine",
"wines",
"wing",
"wings",
"winner",
"winners",
"winning",
"wins",
"winston",
"winter",
"wire",
"wired",
"wireless",
"wires",
"wiring",
"wisconsin",
"wisdom",
"wise",
"wish",
"wishes",
"wishing",
"wishlist",
"wit",
"witch",
"with",
"withdrawal",
"within",
"without",
"witness",
"witnesses",
"wives",
"wizard",
"wma",
"wolf",
"woman",
"women",
"womens",
"won",
"wonder",
"wonderful",
"wondering",
"wood",
"wooden",
"woods",
"wool",
"worcester",
"word",
"wordpress",
"words",
"work",
"worked",
"worker",
"workers",
"workflow",
"workforce",
"working",
"workout",
"workplace",
"works",
"workshop",
"workshops",
"workstation",
"world",
"worldcat",
"worlds",
"worldwide",
"worm",
"worn",
"worried",
"worry",
"worse",
"worship",
"worst",
"worth",
"worthy",
"would",
"wound",
"wow",
"wrap",
"wrapped",
"wrapping",
"wrestling",
"wright",
"wrist",
"write",
"writer",
"writers",
"writes",
"writing",
"writings",
"written",
"wrong",
"wrote",
"wto",
"www",
"wyoming",
"xanax",
"xbox",
"xerox",
"xhtml",
"xml",
"yacht",
"yahoo",
"yale",
"yamaha",
"yang",
"yard",
"yards",
"yarn",
"yea",
"yeah",
"year",
"yearly",
"years",
"yeast",
"yellow",
"yemen",
"yen",
"yes",
"yesterday",
"yet",
"yield",
"yields",
"yoga",
"york",
"yorkshire",
"you",
"young",
"younger",
"your",
"yours",
"yourself",
"youth",
"yrs",
"yugoslavia",
"yukon",
"zambia",
"zdnet",
"zealand",
"zen",
"zero",
"zimbabwe",
"zinc",
"zip",
"zoloft",
"zone",
"zones",
"zoning",
"zoo",
"zoom",
"zope",
"zshops",
"zum",
"zus",
]<|fim▁end|> | |
<|file_name|>initializers_test.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for initializers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tf_slim
from tf_slim.layers import initializers
from tf_slim.layers import regularizers
# pylint: disable=g-direct-tensorflow-import
from tensorflow.python.client import session
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
class InitializerTest(test.TestCase):
def test_xavier_wrong_dtype(self):
with self.assertRaisesRegexp(
TypeError, 'Cannot create initializer for non-floating point type.'):
initializers.xavier_initializer(dtype=dtypes.int32)
self.assertIsNone(regularizers.l1_regularizer(0.)(None))
def _test_xavier(self, initializer, shape, variance, uniform):
with session.Session() as sess:
var = variable_scope.get_variable(
name='test',
shape=shape,
dtype=dtypes.float32,
initializer=initializer(
uniform=uniform, seed=1))
sess.run(variables.global_variables_initializer())
values = var.eval()
self.assertAllClose(np.var(values), variance, 1e-3, 1e-3)
def test_xavier_uniform(self):
self._test_xavier(initializers.xavier_initializer, [100, 40],
2. / (100. + 40.), True)
def test_xavier_normal(self):
self._test_xavier(initializers.xavier_initializer, [100, 40],
2. / (100. + 40.), False)
def test_xavier_scalar(self):
self._test_xavier(initializers.xavier_initializer, [], 0.0, True)
def test_xavier_conv2d_uniform(self):
self._test_xavier(tf_slim.xavier_initializer_conv2d, [100, 40, 5, 7],
2. / (100. * 40 * (5 + 7)), True)
def test_xavier_conv2d_normal(self):
self._test_xavier(tf_slim.xavier_initializer_conv2d, [100, 40, 5, 7],
2. / (100. * 40 * (5 + 7)), False)
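# Reference for the expected values above (annotation, not part of the original
# test): Xavier/Glorot initialization targets Var(w) = 2 / (fan_in + fan_out).
# For a dense [100, 40] kernel that is 2 / (100 + 40); for a conv2d kernel of
# shape [h, w, c_in, c_out], fan_in + fan_out = h * w * (c_in + c_out), giving
# 2 / (100 * 40 * (5 + 7)) for the [100, 40, 5, 7] cases.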
class VarianceScalingInitializerTest(test.TestCase):
def test_wrong_dtype(self):
with self.assertRaisesRegexp(
TypeError, 'Cannot create initializer for non-floating point type.'):
initializers.variance_scaling_initializer(dtype=dtypes.int32)
initializer = initializers.variance_scaling_initializer()
with self.assertRaisesRegexp(
TypeError, 'Cannot create initializer for non-floating point type.'):
initializer([], dtype=dtypes.int32)
def _test_variance(self, initializer, shape, variance, factor, mode, uniform):
with ops.Graph().as_default() as g:
with self.session(graph=g) as sess:
var = variable_scope.get_variable(
name='test',
shape=shape,
dtype=dtypes.float32,
initializer=initializer(
factor=factor, mode=mode, uniform=uniform, seed=1))
sess.run(variables.global_variables_initializer())
values = var.eval()
self.assertAllClose(np.var(values), variance, 1e-3, 1e-3)
def test_fan_in(self):
for uniform in [False, True]:
self._test_variance(
initializers.variance_scaling_initializer,
shape=[100, 40],
variance=2. / 100.,
factor=2.0,
mode='FAN_IN',
uniform=uniform)
def test_fan_out(self):
for uniform in [False, True]:
self._test_variance(
initializers.variance_scaling_initializer,
shape=[100, 40],
variance=2. / 40.,
factor=2.0,
mode='FAN_OUT',
uniform=uniform)
def test_fan_avg(self):
for uniform in [False, True]:
self._test_variance(<|fim▁hole|> factor=2.0,
mode='FAN_AVG',
uniform=uniform)
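# Annotation (ours): variance_scaling_initializer draws from a distribution with
# Var(w) = factor / n, where n is fan_in, fan_out, or (fan_in + fan_out) / 2
# depending on mode; e.g. factor=2.0 with mode='FAN_AVG' on shape [100, 40]
# gives Var(w) = 2.0 / ((100 + 40) / 2) = 4 / 140.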
def test_conv2d_fan_in(self):
for uniform in [False, True]:
self._test_variance(
initializers.variance_scaling_initializer,
shape=[100, 40, 5, 7],
variance=2. / (100. * 40. * 5.),
factor=2.0,
mode='FAN_IN',
uniform=uniform)
def test_conv2d_fan_out(self):
for uniform in [False, True]:
self._test_variance(
initializers.variance_scaling_initializer,
shape=[100, 40, 5, 7],
variance=2. / (100. * 40. * 7.),
factor=2.0,
mode='FAN_OUT',
uniform=uniform)
def test_conv2d_fan_avg(self):
for uniform in [False, True]:
self._test_variance(
initializers.variance_scaling_initializer,
shape=[100, 40, 5, 7],
variance=2. / (100. * 40. * (5. + 7.)),
factor=2.0,
mode='FAN_AVG',
uniform=uniform)
def test_xavier_uniform(self):
self._test_variance(
initializers.variance_scaling_initializer,
shape=[100, 40],
variance=2. / (100. + 40.),
factor=1.0,
mode='FAN_AVG',
uniform=True)
def test_xavier_normal(self):
self._test_variance(
initializers.variance_scaling_initializer,
shape=[100, 40],
variance=2. / (100. + 40.),
factor=1.0,
mode='FAN_AVG',
uniform=False)
def test_xavier_scalar(self):
self._test_variance(
initializers.variance_scaling_initializer,
shape=[],
variance=0.0,
factor=1.0,
mode='FAN_AVG',
uniform=False)
def test_xavier_conv2d_uniform(self):
self._test_variance(
initializers.variance_scaling_initializer,
shape=[100, 40, 5, 7],
variance=2. / (100. * 40. * (5. + 7.)),
factor=1.0,
mode='FAN_AVG',
uniform=True)
def test_xavier_conv2d_normal(self):
self._test_variance(
initializers.variance_scaling_initializer,
shape=[100, 40, 5, 7],
variance=2. / (100. * 40. * (5. + 7.)),
factor=1.0,
mode='FAN_AVG',
uniform=False)  # normal (non-uniform) variant, mirroring test_xavier_normal
def test_1d_shape_fan_in(self):
for uniform in [False, True]:
self._test_variance(
initializers.variance_scaling_initializer,
shape=[100],
variance=2. / 100.,
factor=2.0,
mode='FAN_IN',
uniform=uniform)
def test_1d_shape_fan_out(self):
for uniform in [False, True]:
self._test_variance(
initializers.variance_scaling_initializer,
shape=[100],
variance=2. / 100.,
factor=2.0,
mode='FAN_OUT',
uniform=uniform)
def test_1d_shape_fan_avg(self):
for uniform in [False, True]:
self._test_variance(
initializers.variance_scaling_initializer,
shape=[100],
variance=4. / (100. + 100.),
factor=2.0,
mode='FAN_AVG',
uniform=uniform)
if __name__ == '__main__':
test.main()<|fim▁end|> | initializers.variance_scaling_initializer,
shape=[100, 40],
variance=4. / (100. + 40.), |
<|file_name|>bootstrap.js<|end_file_name|><|fim▁begin|>/*!
* Bootstrap v3.3.7 (http://getbootstrap.com)
* Copyright 2011-2016 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
*/
/*!
* Generated using the Bootstrap Customizer (http://getbootstrap.com/customize/?id=bdc334eb7ee1e998de2a85b5f46ddcac)
* Config saved to config.json and https://gist.github.com/bdc334eb7ee1e998de2a85b5f46ddcac
*/
if (typeof jQuery === 'undefined') {
throw new Error('Bootstrap\'s JavaScript requires jQuery')
}
+function ($) {
'use strict';
var version = $.fn.jquery.split(' ')[0].split('.')
if ((version[0] < 2 && version[1] < 9) || (version[0] == 1 && version[1] == 9 && version[2] < 1) || (version[0] > 3)) {
throw new Error('Bootstrap\'s JavaScript requires jQuery version 1.9.1 or higher, but lower than version 4')
}
}(jQuery);
/* ========================================================================
* Bootstrap: alert.js v3.3.7
* http://getbootstrap.com/javascript/#alerts
* ========================================================================
* Copyright 2011-2016 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// ALERT CLASS DEFINITION
// ======================
var dismiss = '[data-dismiss="alert"]'
var Alert = function (el) {
$(el).on('click', dismiss, this.close)
}
Alert.VERSION = '3.3.7'
Alert.TRANSITION_DURATION = 150
Alert.prototype.close = function (e) {
var $this = $(this)
var selector = $this.attr('data-target')
if (!selector) {
selector = $this.attr('href')
selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7
}
var $parent = $(selector === '#' ? [] : selector)
if (e) e.preventDefault()
if (!$parent.length) {
$parent = $this.closest('.alert')
}
$parent.trigger(e = $.Event('close.bs.alert'))
if (e.isDefaultPrevented()) return
$parent.removeClass('in')
function removeElement() {
// detach from parent, fire event then clean up data
$parent.detach().trigger('closed.bs.alert').remove()
}
$.support.transition && $parent.hasClass('fade') ?
$parent
.one('bsTransitionEnd', removeElement)
.emulateTransitionEnd(Alert.TRANSITION_DURATION) :
removeElement()
}
// ALERT PLUGIN DEFINITION
// =======================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.alert')
if (!data) $this.data('bs.alert', (data = new Alert(this)))
if (typeof option == 'string') data[option].call($this)
})
}
var old = $.fn.alert
$.fn.alert = Plugin
$.fn.alert.Constructor = Alert
// ALERT NO CONFLICT
// =================
$.fn.alert.noConflict = function () {
$.fn.alert = old
return this
}
// ALERT DATA-API
// ==============
$(document).on('click.bs.alert.data-api', dismiss, Alert.prototype.close)
}(jQuery);
/* ========================================================================
* Bootstrap: button.js v3.3.7
* http://getbootstrap.com/javascript/#buttons
* ========================================================================
* Copyright 2011-2016 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// BUTTON PUBLIC CLASS DEFINITION
// ==============================
var Button = function (element, options) {
this.$element = $(element)
this.options = $.extend({}, Button.DEFAULTS, options)
this.isLoading = false
}
Button.VERSION = '3.3.7'
<|fim▁hole|> }
Button.prototype.setState = function (state) {
var d = 'disabled'
var $el = this.$element
var val = $el.is('input') ? 'val' : 'html'
var data = $el.data()
state += 'Text'
if (data.resetText == null) $el.data('resetText', $el[val]())
// push to event loop to allow forms to submit
setTimeout($.proxy(function () {
$el[val](data[state] == null ? this.options[state] : data[state])
if (state == 'loadingText') {
this.isLoading = true
$el.addClass(d).attr(d, d).prop(d, true)
} else if (this.isLoading) {
this.isLoading = false
$el.removeClass(d).removeAttr(d).prop(d, false)
}
}, this), 0)
}
Button.prototype.toggle = function () {
var changed = true
var $parent = this.$element.closest('[data-toggle="buttons"]')
if ($parent.length) {
var $input = this.$element.find('input')
if ($input.prop('type') == 'radio') {
if ($input.prop('checked')) changed = false
$parent.find('.active').removeClass('active')
this.$element.addClass('active')
} else if ($input.prop('type') == 'checkbox') {
if (($input.prop('checked')) !== this.$element.hasClass('active')) changed = false
this.$element.toggleClass('active')
}
$input.prop('checked', this.$element.hasClass('active'))
if (changed) $input.trigger('change')
} else {
this.$element.attr('aria-pressed', !this.$element.hasClass('active'))
this.$element.toggleClass('active')
}
}
// BUTTON PLUGIN DEFINITION
// ========================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.button')
var options = typeof option == 'object' && option
if (!data) $this.data('bs.button', (data = new Button(this, options)))
if (option == 'toggle') data.toggle()
else if (option) data.setState(option)
})
}
var old = $.fn.button
$.fn.button = Plugin
$.fn.button.Constructor = Button
// BUTTON NO CONFLICT
// ==================
$.fn.button.noConflict = function () {
$.fn.button = old
return this
}
// BUTTON DATA-API
// ===============
$(document)
.on('click.bs.button.data-api', '[data-toggle^="button"]', function (e) {
var $btn = $(e.target).closest('.btn')
Plugin.call($btn, 'toggle')
if (!($(e.target).is('input[type="radio"], input[type="checkbox"]'))) {
// Prevent double click on radios, and the double selections (so cancellation) on checkboxes
e.preventDefault()
// The target component still receive the focus
if ($btn.is('input,button')) $btn.trigger('focus')
else $btn.find('input:visible,button:visible').first().trigger('focus')
}
})
.on('focus.bs.button.data-api blur.bs.button.data-api', '[data-toggle^="button"]', function (e) {
$(e.target).closest('.btn').toggleClass('focus', /^focus(in)?$/.test(e.type))
})
}(jQuery);
/* ========================================================================
* Bootstrap: modal.js v3.3.7
* http://getbootstrap.com/javascript/#modals
* ========================================================================
* Copyright 2011-2016 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// MODAL CLASS DEFINITION
// ======================
var Modal = function (element, options) {
this.options = options
this.$body = $(document.body)
this.$element = $(element)
this.$dialog = this.$element.find('.modal-dialog')
this.$backdrop = null
this.isShown = null
this.originalBodyPad = null
this.scrollbarWidth = 0
this.ignoreBackdropClick = false
if (this.options.remote) {
this.$element
.find('.modal-content')
.load(this.options.remote, $.proxy(function () {
this.$element.trigger('loaded.bs.modal')
}, this))
}
}
Modal.VERSION = '3.3.7'
Modal.TRANSITION_DURATION = 300
Modal.BACKDROP_TRANSITION_DURATION = 150
Modal.DEFAULTS = {
backdrop: true,
keyboard: true,
show: true
}
Modal.prototype.toggle = function (_relatedTarget) {
return this.isShown ? this.hide() : this.show(_relatedTarget)
}
Modal.prototype.show = function (_relatedTarget) {
var that = this
var e = $.Event('show.bs.modal', { relatedTarget: _relatedTarget })
this.$element.trigger(e)
if (this.isShown || e.isDefaultPrevented()) return
this.isShown = true
this.checkScrollbar()
this.setScrollbar()
this.$body.addClass('modal-open')
this.escape()
this.resize()
this.$element.on('click.dismiss.bs.modal', '[data-dismiss="modal"]', $.proxy(this.hide, this))
this.$dialog.on('mousedown.dismiss.bs.modal', function () {
that.$element.one('mouseup.dismiss.bs.modal', function (e) {
if ($(e.target).is(that.$element)) that.ignoreBackdropClick = true
})
})
this.backdrop(function () {
var transition = $.support.transition && that.$element.hasClass('fade')
if (!that.$element.parent().length) {
that.$element.appendTo(that.$body) // don't move modals dom position
}
that.$element
.show()
.scrollTop(0)
that.adjustDialog()
if (transition) {
that.$element[0].offsetWidth // force reflow
}
that.$element.addClass('in')
that.enforceFocus()
var e = $.Event('shown.bs.modal', { relatedTarget: _relatedTarget })
transition ?
that.$dialog // wait for modal to slide in
.one('bsTransitionEnd', function () {
that.$element.trigger('focus').trigger(e)
})
.emulateTransitionEnd(Modal.TRANSITION_DURATION) :
that.$element.trigger('focus').trigger(e)
})
}
Modal.prototype.hide = function (e) {
if (e) e.preventDefault()
e = $.Event('hide.bs.modal')
this.$element.trigger(e)
if (!this.isShown || e.isDefaultPrevented()) return
this.isShown = false
this.escape()
this.resize()
$(document).off('focusin.bs.modal')
this.$element
.removeClass('in')
.off('click.dismiss.bs.modal')
.off('mouseup.dismiss.bs.modal')
this.$dialog.off('mousedown.dismiss.bs.modal')
$.support.transition && this.$element.hasClass('fade') ?
this.$element
.one('bsTransitionEnd', $.proxy(this.hideModal, this))
.emulateTransitionEnd(Modal.TRANSITION_DURATION) :
this.hideModal()
}
Modal.prototype.enforceFocus = function () {
$(document)
.off('focusin.bs.modal') // guard against infinite focus loop
.on('focusin.bs.modal', $.proxy(function (e) {
if (document !== e.target &&
this.$element[0] !== e.target &&
!this.$element.has(e.target).length) {
this.$element.trigger('focus')
}
}, this))
}
Modal.prototype.escape = function () {
if (this.isShown && this.options.keyboard) {
this.$element.on('keydown.dismiss.bs.modal', $.proxy(function (e) {
e.which == 27 && this.hide()
}, this))
} else if (!this.isShown) {
this.$element.off('keydown.dismiss.bs.modal')
}
}
Modal.prototype.resize = function () {
if (this.isShown) {
$(window).on('resize.bs.modal', $.proxy(this.handleUpdate, this))
} else {
$(window).off('resize.bs.modal')
}
}
Modal.prototype.hideModal = function () {
var that = this
this.$element.hide()
this.backdrop(function () {
that.$body.removeClass('modal-open')
that.resetAdjustments()
that.resetScrollbar()
that.$element.trigger('hidden.bs.modal')
})
}
Modal.prototype.removeBackdrop = function () {
this.$backdrop && this.$backdrop.remove()
this.$backdrop = null
}
Modal.prototype.backdrop = function (callback) {
var that = this
var animate = this.$element.hasClass('fade') ? 'fade' : ''
if (this.isShown && this.options.backdrop) {
var doAnimate = $.support.transition && animate
this.$backdrop = $(document.createElement('div'))
.addClass('modal-backdrop ' + animate)
.appendTo(this.$body)
this.$element.on('click.dismiss.bs.modal', $.proxy(function (e) {
if (this.ignoreBackdropClick) {
this.ignoreBackdropClick = false
return
}
if (e.target !== e.currentTarget) return
this.options.backdrop == 'static'
? this.$element[0].focus()
: this.hide()
}, this))
if (doAnimate) this.$backdrop[0].offsetWidth // force reflow
this.$backdrop.addClass('in')
if (!callback) return
doAnimate ?
this.$backdrop
.one('bsTransitionEnd', callback)
.emulateTransitionEnd(Modal.BACKDROP_TRANSITION_DURATION) :
callback()
} else if (!this.isShown && this.$backdrop) {
this.$backdrop.removeClass('in')
var callbackRemove = function () {
that.removeBackdrop()
callback && callback()
}
$.support.transition && this.$element.hasClass('fade') ?
this.$backdrop
.one('bsTransitionEnd', callbackRemove)
.emulateTransitionEnd(Modal.BACKDROP_TRANSITION_DURATION) :
callbackRemove()
} else if (callback) {
callback()
}
}
// these following methods are used to handle overflowing modals
Modal.prototype.handleUpdate = function () {
this.adjustDialog()
}
Modal.prototype.adjustDialog = function () {
var modalIsOverflowing = this.$element[0].scrollHeight > document.documentElement.clientHeight
this.$element.css({
paddingLeft: !this.bodyIsOverflowing && modalIsOverflowing ? this.scrollbarWidth : '',
paddingRight: this.bodyIsOverflowing && !modalIsOverflowing ? this.scrollbarWidth : ''
})
}
Modal.prototype.resetAdjustments = function () {
this.$element.css({
paddingLeft: '',
paddingRight: ''
})
}
Modal.prototype.checkScrollbar = function () {
var fullWindowWidth = window.innerWidth
if (!fullWindowWidth) { // workaround for missing window.innerWidth in IE8
var documentElementRect = document.documentElement.getBoundingClientRect()
fullWindowWidth = documentElementRect.right - Math.abs(documentElementRect.left)
}
this.bodyIsOverflowing = document.body.clientWidth < fullWindowWidth
this.scrollbarWidth = this.measureScrollbar()
}
Modal.prototype.setScrollbar = function () {
var bodyPad = parseInt((this.$body.css('padding-right') || 0), 10)
this.originalBodyPad = document.body.style.paddingRight || ''
if (this.bodyIsOverflowing) this.$body.css('padding-right', bodyPad + this.scrollbarWidth)
}
Modal.prototype.resetScrollbar = function () {
this.$body.css('padding-right', this.originalBodyPad)
}
Modal.prototype.measureScrollbar = function () { // thx walsh
var scrollDiv = document.createElement('div')
scrollDiv.className = 'modal-scrollbar-measure'
this.$body.append(scrollDiv)
var scrollbarWidth = scrollDiv.offsetWidth - scrollDiv.clientWidth
this.$body[0].removeChild(scrollDiv)
return scrollbarWidth
}
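// Annotation (ours): the throwaway div is styled by the .modal-scrollbar-measure
// CSS rule (assumed: fixed size with overflow: scroll), so offsetWidth minus
// clientWidth is exactly the native scrollbar width; setScrollbar() then adds
// that width as body padding so content does not jump when scrolling locks.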
// MODAL PLUGIN DEFINITION
// =======================
function Plugin(option, _relatedTarget) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.modal')
var options = $.extend({}, Modal.DEFAULTS, $this.data(), typeof option == 'object' && option)
if (!data) $this.data('bs.modal', (data = new Modal(this, options)))
if (typeof option == 'string') data[option](_relatedTarget)
else if (options.show) data.show(_relatedTarget)
})
}
var old = $.fn.modal
$.fn.modal = Plugin
$.fn.modal.Constructor = Modal
// MODAL NO CONFLICT
// =================
$.fn.modal.noConflict = function () {
$.fn.modal = old
return this
}
// MODAL DATA-API
// ==============
$(document).on('click.bs.modal.data-api', '[data-toggle="modal"]', function (e) {
var $this = $(this)
var href = $this.attr('href')
var $target = $($this.attr('data-target') || (href && href.replace(/.*(?=#[^\s]+$)/, ''))) // strip for ie7
var option = $target.data('bs.modal') ? 'toggle' : $.extend({ remote: !/#/.test(href) && href }, $target.data(), $this.data())
if ($this.is('a')) e.preventDefault()
$target.one('show.bs.modal', function (showEvent) {
if (showEvent.isDefaultPrevented()) return // only register focus restorer if modal will actually get shown
$target.one('hidden.bs.modal', function () {
$this.is(':visible') && $this.trigger('focus')
})
})
Plugin.call($target, option, this)
})
}(jQuery);
/* ========================================================================
* Bootstrap: collapse.js v3.3.7
* http://getbootstrap.com/javascript/#collapse
* ========================================================================
* Copyright 2011-2016 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
/* jshint latedef: false */
+function ($) {
'use strict';
// COLLAPSE PUBLIC CLASS DEFINITION
// ================================
var Collapse = function (element, options) {
this.$element = $(element)
this.options = $.extend({}, Collapse.DEFAULTS, options)
this.$trigger = $('[data-toggle="collapse"][href="#' + element.id + '"],' +
'[data-toggle="collapse"][data-target="#' + element.id + '"]')
this.transitioning = null
if (this.options.parent) {
this.$parent = this.getParent()
} else {
this.addAriaAndCollapsedClass(this.$element, this.$trigger)
}
if (this.options.toggle) this.toggle()
}
Collapse.VERSION = '3.3.7'
Collapse.TRANSITION_DURATION = 350
Collapse.DEFAULTS = {
toggle: true
}
Collapse.prototype.dimension = function () {
var hasWidth = this.$element.hasClass('width')
return hasWidth ? 'width' : 'height'
}
Collapse.prototype.show = function () {
if (this.transitioning || this.$element.hasClass('in')) return
var activesData
var actives = this.$parent && this.$parent.children('.panel').children('.in, .collapsing')
if (actives && actives.length) {
activesData = actives.data('bs.collapse')
if (activesData && activesData.transitioning) return
}
var startEvent = $.Event('show.bs.collapse')
this.$element.trigger(startEvent)
if (startEvent.isDefaultPrevented()) return
if (actives && actives.length) {
Plugin.call(actives, 'hide')
activesData || actives.data('bs.collapse', null)
}
var dimension = this.dimension()
this.$element
.removeClass('collapse')
.addClass('collapsing')[dimension](0)
.attr('aria-expanded', true)
this.$trigger
.removeClass('collapsed')
.attr('aria-expanded', true)
this.transitioning = 1
var complete = function () {
this.$element
.removeClass('collapsing')
.addClass('collapse in')[dimension]('')
this.transitioning = 0
this.$element
.trigger('shown.bs.collapse')
}
if (!$.support.transition) return complete.call(this)
var scrollSize = $.camelCase(['scroll', dimension].join('-'))
this.$element
.one('bsTransitionEnd', $.proxy(complete, this))
.emulateTransitionEnd(Collapse.TRANSITION_DURATION)[dimension](this.$element[0][scrollSize])
}
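// Annotation (ours): $.camelCase(['scroll', dimension].join('-')) yields
// scrollHeight or scrollWidth, so the panel animates from 0 up to its full
// content size; complete() then clears the inline dimension once the CSS
// transition (real or emulated) finishes.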
Collapse.prototype.hide = function () {
if (this.transitioning || !this.$element.hasClass('in')) return
var startEvent = $.Event('hide.bs.collapse')
this.$element.trigger(startEvent)
if (startEvent.isDefaultPrevented()) return
var dimension = this.dimension()
this.$element[dimension](this.$element[dimension]())[0].offsetHeight
this.$element
.addClass('collapsing')
.removeClass('collapse in')
.attr('aria-expanded', false)
this.$trigger
.addClass('collapsed')
.attr('aria-expanded', false)
this.transitioning = 1
var complete = function () {
this.transitioning = 0
this.$element
.removeClass('collapsing')
.addClass('collapse')
.trigger('hidden.bs.collapse')
}
if (!$.support.transition) return complete.call(this)
this.$element
[dimension](0)
.one('bsTransitionEnd', $.proxy(complete, this))
.emulateTransitionEnd(Collapse.TRANSITION_DURATION)
}
Collapse.prototype.toggle = function () {
this[this.$element.hasClass('in') ? 'hide' : 'show']()
}
Collapse.prototype.getParent = function () {
return $(this.options.parent)
.find('[data-toggle="collapse"][data-parent="' + this.options.parent + '"]')
.each($.proxy(function (i, element) {
var $element = $(element)
this.addAriaAndCollapsedClass(getTargetFromTrigger($element), $element)
}, this))
.end()
}
Collapse.prototype.addAriaAndCollapsedClass = function ($element, $trigger) {
var isOpen = $element.hasClass('in')
$element.attr('aria-expanded', isOpen)
$trigger
.toggleClass('collapsed', !isOpen)
.attr('aria-expanded', isOpen)
}
function getTargetFromTrigger($trigger) {
var href
var target = $trigger.attr('data-target')
|| (href = $trigger.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '') // strip for ie7
return $(target)
}
// COLLAPSE PLUGIN DEFINITION
// ==========================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.collapse')
var options = $.extend({}, Collapse.DEFAULTS, $this.data(), typeof option == 'object' && option)
if (!data && options.toggle && /show|hide/.test(option)) options.toggle = false
if (!data) $this.data('bs.collapse', (data = new Collapse(this, options)))
if (typeof option == 'string') data[option]()
})
}
var old = $.fn.collapse
$.fn.collapse = Plugin
$.fn.collapse.Constructor = Collapse
// COLLAPSE NO CONFLICT
// ====================
$.fn.collapse.noConflict = function () {
$.fn.collapse = old
return this
}
// COLLAPSE DATA-API
// =================
$(document).on('click.bs.collapse.data-api', '[data-toggle="collapse"]', function (e) {
var $this = $(this)
if (!$this.attr('data-target')) e.preventDefault()
var $target = getTargetFromTrigger($this)
var data = $target.data('bs.collapse')
var option = data ? 'toggle' : $this.data()
Plugin.call($target, option)
})
}(jQuery);
/* ========================================================================
* Bootstrap: transition.js v3.3.7
* http://getbootstrap.com/javascript/#transitions
* ========================================================================
* Copyright 2011-2016 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// CSS TRANSITION SUPPORT (Shoutout: http://www.modernizr.com/)
// ============================================================
function transitionEnd() {
var el = document.createElement('bootstrap')
var transEndEventNames = {
WebkitTransition : 'webkitTransitionEnd',
MozTransition : 'transitionend',
OTransition : 'oTransitionEnd otransitionend',
transition : 'transitionend'
}
for (var name in transEndEventNames) {
if (el.style[name] !== undefined) {
return { end: transEndEventNames[name] }
}
}
return false // explicit for ie8 ( ._.)
}
// http://blog.alexmaccaw.com/css-transitions
$.fn.emulateTransitionEnd = function (duration) {
var called = false
var $el = this
$(this).one('bsTransitionEnd', function () { called = true })
var callback = function () { if (!called) $($el).trigger($.support.transition.end) }
setTimeout(callback, duration)
return this
}
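// Annotation (ours): emulateTransitionEnd is a safety net -- if the browser
// never fires transitionend (element hidden mid-animation, transitions
// disabled), the fallback timer triggers the event manually, so one-shot
// handlers like $el.one('bsTransitionEnd', done) still run exactly once.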
$(function () {
$.support.transition = transitionEnd()
if (!$.support.transition) return
$.event.special.bsTransitionEnd = {
bindType: $.support.transition.end,
delegateType: $.support.transition.end,
handle: function (e) {
if ($(e.target).is(this)) return e.handleObj.handler.apply(this, arguments)
}
}
})
}(jQuery);<|fim▁end|> | Button.DEFAULTS = {
loadingText: 'loading...' |
<|file_name|>kraken_taxonomy_report.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Reports a summary of Kraken's results
# and optionally creates a newick Tree
# Copyright (c) 2016 Daniel Blankenberg
# Licensed under the Academic Free License version 3.0
# https://github.com/blankenberg/Kraken-Taxonomy-Report
from __future__ import print_function
import optparse
import os
import re
import sys
__VERSION__ = '0.0.2'
__URL__ = "https://github.com/blankenberg/Kraken-Taxonomy-Report"
# Rank names were pulled from ncbi nodes.dmp on 02/02/2016
# cat nodes.dmp | cut -f 5 | sort | uniq
# "root" is added manually
NO_RANK_NAME = "no rank"
RANK_NAMES = [ NO_RANK_NAME,
"root",
"superkingdom",
"kingdom",
"subkingdom",
"superphylum",
"phylum",
"subphylum",
"superclass",
"class",
"subclass",
"infraclass",
"superorder",
"order",
"suborder",
"infraorder",
"parvorder",
"superfamily",
"family",
"subfamily",
"tribe",
"subtribe",
"genus",
"subgenus",
"species group",
"species subgroup",
"species",
"subspecies",
"varietas",
"forma" ]<|fim▁hole|># root (root) -> cellular organisms (no rank) -> bacteria (superkingdom)
RANK_NAME_TO_INTS = dict( [ (y, x) for (x, y) in enumerate( RANK_NAMES ) ] )
RANK_NAMES_INTS = range( len( RANK_NAMES ) )
NO_RANK_INT = RANK_NAMES.index( NO_RANK_NAME )
NO_RANK_CODE = 'n'
PRIMARY_RANK_NAMES = [ 'species', 'genus', 'family', 'order', 'class', 'phylum', 'kingdom' ]
RANK_INT_TO_CODE = {}
for name in PRIMARY_RANK_NAMES:
RANK_INT_TO_CODE[ RANK_NAMES.index( name ) ] = name[0]
RANK_INT_TO_CODE[ RANK_NAMES.index( 'superkingdom' ) ] = 'd'
PRIMARY_RANK_NAMES.append( 'superkingdom' )
NAME_STUB = "%s__%s"
NAME_RE = re.compile( r"(\t| |\||\.;)" )  # raw string avoids invalid-escape warnings; pattern unchanged
NAME_REPL = "_"
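# Illustrative note (ours): because RANK_NAMES runs from least to most specific,
# rank comparisons reduce to integer comparisons, e.g.
#   RANK_NAME_TO_INTS['phylum'] < RANK_NAME_TO_INTS['genus'] < RANK_NAME_TO_INTS['species']
# The --cluster handling below relies on this when it tests
# rank_map[child_id] <= options.cluster.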
def get_kraken_db_path( db ):
assert db, ValueError( "You must provide a kraken database" )
k_db_path = os.getenv('KRAKEN_DB_PATH', None )
if k_db_path:
db = os.path.join( k_db_path, db )
return db
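# Example (hypothetical values): with KRAKEN_DB_PATH=/data/kraken and --db minikraken,
# taxonomy is loaded from /data/kraken/minikraken/taxonomy/{names.dmp,nodes.dmp}.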
def load_taxonomy( db_path, sanitize_names=False ):
child_lists = {}
name_map = {}
rank_map = {}
names = {} # Store names here to look for duplicates (id, True/False name fixed)
with open( os.path.join( db_path, "taxonomy/names.dmp" ) ) as fh:
for line in fh:
line = line.rstrip( "\n\r" )
if line.endswith( "\t|" ):
line = line[:-2]
fields = line.split( "\t|\t" )
node_id = fields[0]
name = fields[1]
if sanitize_names:
name = NAME_RE.sub( NAME_REPL, name )
name_type = fields[3]
if name_type == "scientific name":
if name in names:
print( 'Warning: name "%s" found at node "%s" but already exists originally for node "%s".' % ( name, node_id, names[name][0] ), file=sys.stderr )
new_name = "%s_%s" % ( name, node_id )
print( 'Transforming node "%s" named "%s" to "%s".' % ( node_id, name, new_name ), file=sys.stderr )
assert new_name not in names, 'Transformed Name "%s" already exists. Cannot recover at this time.' % new_name
if not names[name][1]:
orig_new_name = "%s_%s" % ( name, names[name][0] )
print( 'Transforming node "%s" named "%s" to "%s".' % ( names[name][0], name, orig_new_name ), file=sys.stderr )
assert orig_new_name not in names, 'Transformed Name "%s" already exists. Cannot recover at this time.' % orig_new_name
name_map[names[name][0]] = orig_new_name
names[name] = ( names[name][0], True )
name = new_name
else:
names[name] = ( node_id, False )
name_map[ node_id ] = name
with open( os.path.join( db_path, "taxonomy/nodes.dmp" ) ) as fh:
for line in fh:
line = line.rstrip( "\n\r" )
fields = line.split( "\t|\t" )
node_id = fields[0]
parent_id = fields[1]
rank = RANK_NAME_TO_INTS.get( fields[2].lower(), None )
if rank is None:
# This should never happen, unless new taxonomy ranks are created
print( 'Unrecognized rank: Node "%s" is "%s", setting to "%s"' % ( node_id, fields[2], NO_RANK_NAME ), file=sys.stderr )
rank = NO_RANK_INT
if node_id == '1':
parent_id = '0'
if parent_id not in child_lists:
child_lists[ parent_id ] = []
child_lists[ parent_id ].append( node_id )
rank_map[node_id] = rank
return ( child_lists, name_map, rank_map )
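# Example of the duplicate handling above (toy data, not from a real dump): if
# nodes 10 and 20 both carry the scientific name 'Buchnera', the loader renames
# them 'Buchnera_10' and 'Buchnera_20' so name-keyed output stays unambiguous.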
def dfs_summation( node, counts, child_lists ):
children = child_lists.get( node, None )
if children:
for child in children:
dfs_summation( child, counts, child_lists )
counts[ node ] = counts.get( node, 0 ) + counts.get( child, 0 )
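# Worked example (toy data, not from the script): given
# child_lists = {'1': ['2'], '2': ['3']} and counts = {'2': 1, '3': 5},
# dfs_summation('1', counts, child_lists) leaves counts == {'3': 5, '2': 6, '1': 6},
# i.e. every taxon ends up carrying its whole clade's total.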
def dfs_report( node, file_data, hit_taxa, rank_map, name_map, child_lists, output_lines, options, name=None, tax=None ):
rank_int = rank_map[node]
code = RANK_INT_TO_CODE.get( rank_int, NO_RANK_CODE )
if ( code != NO_RANK_CODE or options.intermediate ) and ( options.show_zeros or node in hit_taxa):
if name is None:
name = ""
else:
name = "%s|" % name
if tax is None:
tax = ''
else:
tax = "%s;" % tax
sanitized_name = name_map[ node ]
name_stub = NAME_STUB % ( code, sanitized_name )
name = name + name_stub
tax = tax + name_stub
if options.name_id:
output = node
elif options.name_long:
output = name
else:
output = sanitized_name
for val in file_data:
output = "%s\t%i" % ( output, val.get( node, 0 ) )
if options.show_rank:
output = "%s\t%s" % ( output, RANK_NAMES[ rank_int ] )
if options.taxonomy:
output = "%s\t%s" % ( output, tax )
output_lines[ rank_int ].append( output )
children = child_lists.get( node )
if children:
for child in children:
dfs_report( child, file_data, hit_taxa, rank_map, name_map, child_lists, output_lines, options, name=name, tax=tax )
def write_tree( child_lists, name_map, rank_map, options, branch_length=1 ):
# Uses Biopython, only load if making tree
import Bio.Phylo
from Bio.Phylo import BaseTree
def _get_name( node_id ):
if options.name_id:
return node_id
return name_map[node_id]
nodes = {}
root_node_id = child_lists["0"][0]
nodes[root_node_id] = BaseTree.Clade( name=_get_name( root_node_id), branch_length=branch_length )
def recurse_children( parent_id ):
if options.cluster is not None and rank_map[parent_id] == options.cluster:
# Short circuit if we found our rank, prevents 'hanging' no ranks from being output
# e.g. clustering by "species" (Escherichia coli), but have "no rank" below (Escherichia coli K-12) in test_db
return
if parent_id not in nodes:
nodes[parent_id] = BaseTree.Clade( name=_get_name( parent_id ), branch_length=branch_length )
for child_id in child_lists.get( parent_id, [] ):
if options.cluster is None or ( rank_map[child_id] <= options.cluster ):
if child_id not in nodes:
nodes[child_id] = BaseTree.Clade(name=_get_name( child_id ), branch_length=branch_length)
nodes[parent_id].clades.append(nodes[child_id])
recurse_children( child_id )
recurse_children( root_node_id )
tree = BaseTree.Tree(root=nodes[root_node_id])
Bio.Phylo.write( [tree], options.output_tree, 'newick' )
def __main__():
parser = optparse.OptionParser( usage="%prog [options] file1 file...fileN" )
parser.add_option( '-v', '--version', dest='version', action='store_true', default=False, help='print version and exit' )
parser.add_option( '', '--show-zeros', dest='show_zeros', action='store_true', default=False, help='Show empty nodes' )
parser.add_option( '', '--header-line', dest='header_line', action='store_true', default=False, help='Provide a header on output' )
parser.add_option( '', '--intermediate', dest='intermediate', action='store_true', default=False, help='Intermediate Ranks' )
parser.add_option( '', '--name-id', dest='name_id', action='store_true', default=False, help='Use Taxa ID instead of Name' )
parser.add_option( '', '--name-long', dest='name_long', action='store_true', default=False, help='Use Long taxa ID instead of base name' )
parser.add_option( '', '--taxonomy', dest='taxonomy', action='store_true', default=False, help='Output taxonomy in last column' )
parser.add_option( '', '--cluster', dest='cluster', action='store', type="string", default=None, help='Cluster counts to specified rank' )
parser.add_option( '', '--summation', dest='summation', action='store_true', default=False, help='Add summation of child counts to each taxa' )
parser.add_option( '', '--sanitize-names', dest='sanitize_names', action='store_true', default=False, help=r'Replace special chars (\t| |\||\.;) with underscore (_)' )
parser.add_option( '', '--show-rank', dest='show_rank', action='store_true', default=False, help='Output column with Rank name' )
parser.add_option( '', '--db', dest='db', action='store', type="string", default=None, help='Name of Kraken database' )
parser.add_option( '', '--output', dest='output', action='store', type="string", default=None, help='Name of output file' )
parser.add_option( '', '--output-tree', dest='output_tree', action='store', type="string", default=None, help='Name of output file to place newick tree' )
(options, args) = parser.parse_args()
if options.version:
print( "Kraken Taxonomy Report (%s) version %s" % ( __URL__, __VERSION__ ), file=sys.stderr )
sys.exit()
if not args:
print( parser.get_usage(), file=sys.stderr )
sys.exit()
if options.cluster:
cluster_name = options.cluster.lower()
cluster = RANK_NAME_TO_INTS.get( cluster_name, None )
assert cluster is not None, ValueError( '"%s" is not a valid rank for clustering.' % options.cluster )
if cluster_name not in PRIMARY_RANK_NAMES:
assert options.intermediate, ValueError( 'You cannot cluster by "%s", unless you enable intermediate ranks.' % options.cluster )
ranks_to_report = [ cluster ]
options.cluster = cluster
# When clustering we need to do summatation
options.summation = True
else:
options.cluster = None # make empty string into None
ranks_to_report = RANK_NAMES_INTS
if options.output:
output_fh = open(options.output, 'w')
else:
output_fh = sys.stdout
db_path = get_kraken_db_path( options.db )
( child_lists, name_map, rank_map ) = load_taxonomy( db_path, sanitize_names=options.sanitize_names )
file_data = []
hit_taxa = []
for input_filename in args:
taxo_counts = {}
with open( input_filename ) as fh:
for line in fh:
fields = line.split( "\t" )
taxo_counts[ fields[2] ] = taxo_counts.get( fields[2], 0 ) + 1
clade_counts = taxo_counts.copy() # fixme remove copying?
if options.summation:
dfs_summation( '1', clade_counts, child_lists )
for key, value in clade_counts.items():
if value and key not in hit_taxa:
hit_taxa.append( key )
file_data.append( clade_counts )
if options.header_line:
output_fh.write( "#ID\t" )
output_fh.write( "\t".join( args ) )
if options.show_rank:
output_fh.write( "\trank" )
if options.taxonomy:
output_fh.write( "\ttaxonomy" )
output_fh.write( '\n' )
output_lines = dict( [ ( x, [] ) for x in RANK_NAMES_INTS ] )
dfs_report( '1', file_data, hit_taxa, rank_map, name_map, child_lists, output_lines, options, name=None, tax=None )
for rank_int in ranks_to_report:
for line in output_lines.get( rank_int, [] ):
output_fh.write( line )
output_fh.write( '\n' )
if options.output:
    output_fh.close()  # close the report file; the input handles were already closed by their `with` blocks
if options.output_tree:
write_tree( child_lists, name_map, rank_map, options )
if __name__ == "__main__":
__main__()<|fim▁end|> | # NB: We put 'no rank' at top of list for generating trees, due to e.g. |
<|file_name|>vendor.js<|end_file_name|><|fim▁begin|>window.jQuery = window.$ = require('jquery');<|fim▁hole|>require('file?name=[name].[ext]!bootstrap/dist/css/bootstrap.css.map');
require('yii2-pjax');
require('yii');
require('yii.validation');
require('yii.activeForm');
require('yii.gridView');
require('yii.captcha');<|fim▁end|> | require('bootstrap');
require('bootstrap/dist/js/bootstrap.js');
require('bootstrap/dist/css/bootstrap.css'); |
<|file_name|>36784000.jsonp.js<|end_file_name|><|fim▁begin|><|fim▁hole|>jsonp({"cep":"36784000","cidade":"Dona Euz\u00e9bia","uf":"MG","estado":"Minas Gerais"});<|fim▁end|> | |
<|file_name|>io.cpp<|end_file_name|><|fim▁begin|>// -*- Mode:C++ -*-
/**************************************************************************************************/
/* */
/* Copyright (C) 2015 University of Hull */
/* */
/**************************************************************************************************/
/* */
/* module : platform/glx/io.cpp */
/* project : */
/* description: */
/* */
/**************************************************************************************************/
// include i/f header
#include "platform/glx/io.hpp"
// includes, system
#include <boost/io/ios_state.hpp> // boost::io::ios_all_save
#include <iomanip> // std::hex
#include <ostream> // std::ostream
// includes, project
//#include <>
#define UKACHULLDCS_USE_TRACE
#undef UKACHULLDCS_USE_TRACE
#include <support/trace.hpp>
// internal unnamed namespace
namespace {
// types, internal (class, enum, struct, union, typedef)
// variables, internal
// functions, internal
} // namespace {
namespace platform {
<|fim▁hole|> // variables, exported
// functions, exported
std::ostream&
operator<<(std::ostream& os, Display const& a)
{
std::ostream::sentry const cerberus(os);
if (cerberus) {
boost::io::ios_all_saver const ias(os);
os << '['
<< "@" << std::hex << reinterpret_cast<void const*>(&a)
<< ']';
}
return os;
}
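// Annotation (ours): std::ostream::sentry verifies the stream is in a good
// state before any output, and boost::io::ios_all_saver restores the formatting
// flags changed by std::hex when it leaves scope, so callers never observe
// leaked stream state; the operators below repeat the same pattern.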
std::ostream&
operator<<(std::ostream& os, GLXContext const& a)
{
std::ostream::sentry const cerberus(os);
if (cerberus) {
boost::io::ios_all_saver const ias(os);
os << '['
<< "@" << std::hex << reinterpret_cast<void const*>(a)
<< ']';
}
return os;
}
std::ostream&
operator<<(std::ostream& os, XVisualInfo const& a)
{
std::ostream::sentry const cerberus(os);
if (cerberus) {
boost::io::ios_all_saver const ias(os);
os << '['
<< "@" << std::hex << reinterpret_cast<void const*>(&a)
<< ']';
}
return os;
}
} // namespace glx {
} // namespace platform {<|fim▁end|> | namespace glx {
|
<|file_name|>Events.d.ts<|end_file_name|><|fim▁begin|>declare enum Events {
AUDIO_LEVEL = 'statistics.audioLevel',
BEFORE_DISPOSED = 'statistics.before_disposed',
BYTE_SENT_STATS = 'statistics.byte_sent_stats',<|fim▁hole|> LONG_TASKS_STATS = 'statistics.long_tasks_stats'
}<|fim▁end|> | CONNECTION_STATS = 'statistics.connectionstats', |
<|file_name|>ete_extract.py<|end_file_name|><|fim▁begin|># #START_LICENSE###########################################################
#
#
# This file is part of the Environment for Tree Exploration program
# (ETE). http://etetoolkit.org
#
# ETE is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ETE is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ETE. If not, see <http://www.gnu.org/licenses/>.
#
#
# ABOUT THE ETE PACKAGE
# =====================
#
# ETE is distributed under the GPL copyleft license (2008-2015).
#
# If you make use of ETE in published work, please cite:
#
# Jaime Huerta-Cepas, Joaquin Dopazo and Toni Gabaldon.
# ETE: a python Environment for Tree Exploration. Jaime BMC
# Bioinformatics 2010,:24doi:10.1186/1471-2105-11-24
#
# Note that extra references to the specific methods implemented in
# the toolkit may be available in the documentation.
#
# More info at http://etetoolkit.org. Contact: [email protected]
#
#
# #END_LICENSE#############################################################
from __future__ import absolute_import
from __future__ import print_function
from .common import src_tree_iterator
DESC = ""
def populate_args(extract_args_p):
extract_args = extract_args_p.add_argument_group('TREE EDIT OPTIONS')
extract_args.add_argument("--orthologs", dest="orthologs",
nargs="*",
help="")
extract_args.add_argument("--duplications", dest="duplications",
action="store_true",
help="")
def run(args):<|fim▁hole|> from .. import Tree, PhyloTree
for nw in src_tree_iterator(args):
if args.orthologs is not None:
t = PhyloTree(nw)
for e in t.get_descendant_evol_events():
print(e.in_seqs, e.out_seqs)<|fim▁end|> | |
<|file_name|>SnapshotOptions.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express<|fim▁hole|> * the License.
*/
package org.apache.geode.cache.snapshot;
import java.io.Serializable;
import org.apache.geode.internal.cache.snapshot.SnapshotFileMapper;
/**
* Provides a way to configure the behavior of snapshot operations. The default options are:
* <dl>
* <dt>filter</dt>
* <dd>null</dd>
* </dl>
*
* @param <K> the cache entry key type
* @param <V> the cache entry value type
*
* @since GemFire 7.0
*/
public interface SnapshotOptions<K, V> extends Serializable {
/**
* Defines the available snapshot file formats.
*
* @since GemFire 7.0
*/
enum SnapshotFormat {
/** an optimized binary format specific to GemFire */
GEMFIRE
}
/**
* Sets a filter to apply to snapshot entries. Entries that are accepted by the filter will be
* included in import and export operations.
*
* @param filter the filter to apply, or null to remove the filter
* @return the snapshot options
*/
SnapshotOptions<K, V> setFilter(SnapshotFilter<K, V> filter);
/**
* Returns the filter to be applied to snapshot entries. Entries that are accepted by the filter
* will be included in import and export operations.
*
* @return the filter, or null if the filter is not set
*/
SnapshotFilter<K, V> getFilter();
/**
* Sets whether to invoke callbacks when loading a snapshot. The default is false.
*
* @param invokeCallbacks
*
* @return the snapshot options
*/
SnapshotOptions<K, V> invokeCallbacks(boolean invokeCallbacks);
/**
* Returns whether loading a snapshot causes callbacks to be invoked
*
* @return whether loading a snapshot causes callbacks to be invoked
*/
boolean shouldInvokeCallbacks();
/**
* Returns true if the snapshot operation will proceed in parallel.
*
* @return true if the parallel mode has been enabled
*
* @since Geode 1.3
*/
boolean isParallelMode();
/**
* Enables parallel mode for snapshot export, which will cause each member of a partitioned region
* to save its local data set (ignoring redundant copies) to a separate snapshot file.
*
* <p>
* Parallelizing snapshot operations may yield significant performance improvements for large data
* sets. This is particularly true when each member is writing to separate physical disks.
* <p>
* This flag is ignored for replicated regions.
*
* @param parallel true if the snapshot operations will be performed in parallel
* @return the snapshot options
*
* @see SnapshotFileMapper
*
* @since Geode 1.3
*/
SnapshotOptions<K, V> setParallelMode(boolean parallel);
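// Hypothetical usage sketch (our addition, assuming the usual Geode snapshot
// API; not part of this file):
//
//   RegionSnapshotService<String, Order> svc = region.getSnapshotService();
//   SnapshotOptions<String, Order> opts = svc.createOptions()
//       .setParallelMode(true)                 // one snapshot file per member
//       .invokeCallbacks(false)                // default: skip listeners on load
//       .setFilter(entry -> entry.getValue() != null);
//   svc.save(new File("orders.gfd"), SnapshotFormat.GEMFIRE, opts);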
}<|fim▁end|> | * or implied. See the License for the specific language governing permissions and limitations under |
<|file_name|>normalize_multiline_doc_attribute.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>///This comment
///is split
///on multiple lines
fn foo() {}
/// B1
///
/// A1
fn bar() {}<|fim▁end|> | // rustfmt-unstable: true
// rustfmt-normalize_doc_attributes: true
|
<|file_name|>SpongeDirectionalProcessor.java<|end_file_name|><|fim▁begin|>/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.data.processor.block;
import static org.spongepowered.common.data.DataTransactionBuilder.fail;
import com.google.common.base.Optional;
import net.minecraft.block.state.IBlockState;
import net.minecraft.util.BlockPos;
import net.minecraft.world.World;
import org.spongepowered.api.block.BlockState;
import org.spongepowered.api.data.DataHolder;
import org.spongepowered.api.data.DataPriority;
import org.spongepowered.api.data.DataTransactionResult;
import org.spongepowered.api.data.DataView;
import org.spongepowered.api.data.manipulator.block.DirectionalData;
import org.spongepowered.api.service.persistence.InvalidDataException;
import org.spongepowered.common.data.DataTransactionBuilder;
import org.spongepowered.common.data.SpongeBlockProcessor;
import org.spongepowered.common.data.SpongeDataProcessor;
import org.spongepowered.common.data.manipulator.block.SpongeDirectionalData;
import org.spongepowered.common.interfaces.block.IMixinBlockDirectional;
public class SpongeDirectionalProcessor implements SpongeDataProcessor<DirectionalData>, SpongeBlockProcessor<DirectionalData> {
@Override
public Optional<DirectionalData> fillData(DataHolder dataHolder, DirectionalData manipulator, DataPriority priority) {
return Optional.absent();
}
@Override
public DataTransactionResult setData(DataHolder dataHolder, DirectionalData manipulator, DataPriority priority) {
return DataTransactionBuilder.successNoData();
}
@Override
public boolean remove(DataHolder dataHolder) {
return false;
}
@Override
public Optional<DirectionalData> build(DataView container) throws InvalidDataException {
return Optional.absent();
}
@Override
public DirectionalData create() {
return new SpongeDirectionalData();
}
@Override
public Optional<DirectionalData> createFrom(DataHolder dataHolder) {
return Optional.absent();
}
@Override
public Optional<DirectionalData> fromBlockPos(final World world, final BlockPos blockPos) {
IBlockState blockState = world.getBlockState(blockPos);
if (blockState.getBlock() instanceof IMixinBlockDirectional) {
return Optional.of(((IMixinBlockDirectional) blockState.getBlock()).getDirectionalData(blockState));
}
return Optional.absent();
}
@Override
public DataTransactionResult setData(World world, BlockPos blockPos, DirectionalData manipulator, DataPriority priority) {
IBlockState blockState = world.getBlockState(blockPos);
if (blockState.getBlock() instanceof IMixinBlockDirectional) {
return ((IMixinBlockDirectional) blockState.getBlock()).setDirectionalData(manipulator, world, blockPos, priority);
}
return fail(manipulator);
}
@Override
public boolean remove(World world, BlockPos blockPos) {
IBlockState blockState = world.getBlockState(blockPos);
if (blockState.getBlock() instanceof IMixinBlockDirectional) {
// ((IMixinBlockDirectional) blockState.getBlock()).resetDirectionData(blockState);
return true;
}
return false;<|fim▁hole|> return Optional.absent();
}
@Override
public Optional<DirectionalData> createFrom(IBlockState blockState) {
if (blockState.getBlock() instanceof IMixinBlockDirectional) {
return Optional.of(((IMixinBlockDirectional) blockState.getBlock()).getDirectionalData(blockState));
}
return Optional.absent();
}
@Override
public Optional<DirectionalData> getFrom(DataHolder dataHolder) {
return Optional.absent();
}
}<|fim▁end|> | }
@Override
public Optional<BlockState> removeFrom(IBlockState blockState) { |
<|file_name|>example.js<|end_file_name|><|fim▁begin|>$(function() {
function cellValue(val) {
return { sortValue: val, displayValue: val.toString() };
}
var yAxis = [{ id: 'store', name: 'Store' }, { id: 'clerk', name: 'Clerk' }];
var keyfigures = [{ id: 'nocustomers', name: 'Customers' }, { id: 'turnover', name: 'Turnover' }];
// normally one would request data here from the server using yAxis and keyfigures
// but to keep things simple, we type in the result below
var reportState = new ReportState({ useExpandCollapse: true });
reportState.dimensionsY = _.map(yAxis, function(e) { return e.id; });
reportState.serverData = [
{ type: 'row', values: [cellValue('Copenhagen'), cellValue(''), cellValue(210), cellValue(43100)] },
{ type: 'row', values: [cellValue('Stockholm'), cellValue(''), cellValue(120), cellValue(22100)] },
{ type: 'row', values: [cellValue('Berlin'), cellValue(''), cellValue(743), cellValue(50032)] },
{ type: 'grandtotal', values: [cellValue('Grand total'), cellValue(''), cellValue(1073), cellValue(115232)] }
];
var allYAxisValues = reportBuilder.getYAxisValues(reportState.serverData, yAxis);
reportState.drawNewData = function(data) {
// this also simulates a server backend returning new results
reportState.serverData = [];
if (_.any(reportState.expandedCells['store'], function(e) { return e == 'store:Copenhagen'; }))
{
reportState.serverData.push({ type: 'row', values: [cellValue('Copenhagen'), cellValue('Stine'), cellValue(110), cellValue(33100)] });
reportState.serverData.push({ type: 'row', values: [cellValue('Copenhagen'), cellValue('Dorthe'), cellValue(100), cellValue(10000)] });
reportState.serverData.push({ type: 'subtotal', values: [cellValue('Copenhagen'), cellValue(''), cellValue(210), cellValue(43100)] });
}
else
reportState.serverData.push({ type: 'row', values: [cellValue('Copenhagen'), cellValue(''), cellValue(210), cellValue(43100)] });
if (_.any(reportState.expandedCells['store'], function(e) { return e == 'store:Stockholm'; }))
{
reportState.serverData.push({ type: 'row', values: [cellValue('Stockholm'), cellValue('Emma'), cellValue(30), cellValue(2100)] });
reportState.serverData.push({ type: 'row', values: [cellValue('Stockholm'), cellValue('Anne'), cellValue(70), cellValue(18000)] });
reportState.serverData.push({ type: 'row', values: [cellValue('Stockholm'), cellValue('Julia'), cellValue(20), cellValue(2000)] });
reportState.serverData.push({ type: 'subtotal', values: [cellValue('Stockholm'), cellValue(''), cellValue(120), cellValue(22100)] });
}
else
reportState.serverData.push({ type: 'row', values: [cellValue('Stockholm'), cellValue(''), cellValue(120), cellValue(22100)] });
if (_.any(reportState.expandedCells['store'], function(e) { return e == 'store:Berlin'; }))
{
reportState.serverData.push({ type: 'row', values: [cellValue('Berlin'), cellValue('Sandra'), cellValue(93), cellValue(1182)] });
reportState.serverData.push({ type: 'row', values: [cellValue('Berlin'), cellValue('Katharina'), cellValue(100), cellValue(6700)] });
reportState.serverData.push({ type: 'row', values: [cellValue('Berlin'), cellValue('Nadine'), cellValue(120), cellValue(10030)] });<|fim▁hole|> reportState.serverData.push({ type: 'subtotal', values: [cellValue('Berlin'), cellValue(''), cellValue(743), cellValue(50032)] });
}
else
reportState.serverData.push({ type: 'row', values: [cellValue('Berlin'), cellValue(''), cellValue(743), cellValue(50032)] });
reportState.serverData.push({ type: 'grandtotal', values: [cellValue('Grand total'), cellValue(''), cellValue(1073), cellValue(115232)] });
if (reportState.sortRowIndex != -1)
reportState.drawData(reportBuilder.sortExpandedData(reportState.serverData, reportState.dimensionsY, reportState.sortRowIndex, reportState.sortDirection, reportState.expandedCells));
else
reportState.drawData(reportState.serverData);
};
reportState.drawData = function(data) {
reportInterface.drawTable("data", reportState, allYAxisValues, data, yAxis, keyfigures);
reportInterface.addSortHeaders("data", reportState);
reportInterface.addExpandCollapseHeaders("data", reportState);
};
reportState.drawData(reportState.serverData);
});<|fim▁end|> | reportState.serverData.push({ type: 'row', values: [cellValue('Berlin'), cellValue('Julia'), cellValue(430), cellValue(30200)] }); |
<|file_name|>extend.routes.ts<|end_file_name|><|fim▁begin|>import { NgModule } from '@angular/core';
import { RouterModule, Routes } from '@angular/router';
import { ExtendComponent } from './src/components/extend.component';
import { SubExtendComponent } from './src/components/sub-extend.component';
const extendRoutes: Routes = [
{
path: '',
component: ExtendComponent,
},
{
path: 'main',
component: ExtendComponent,
},
{
path: 'sub',
component: SubExtendComponent
},
];<|fim▁hole|> ],
exports: [
RouterModule
]
})
export class ExtendRoutingModule {}<|fim▁end|> | @NgModule({
imports: [
RouterModule.forChild(extendRoutes) |
<|file_name|>jade.min.js<|end_file_name|><|fim▁begin|><|fim▁hole|>size 21785<|fim▁end|> | version https://git-lfs.github.com/spec/v1
oid sha256:dd32b7eaa7daed7371af2e06195ec013df98eb5920727aaf459d9419ef607ff9 |
<|file_name|>xrecord_event_handler.go<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2017 ~ 2018 Deepin Technology Co., Ltd.
*
* Author: jouyouyun <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package shortcuts
import (
"strings"
x "github.com/linuxdeepin/go-x11-client"
"github.com/linuxdeepin/go-x11-client/util/keysyms"
)
type XRecordEventHandler struct {
keySymbols *keysyms.KeySymbols
pressedMods uint16
historyPressedMods uint16
nonModKeyPressed bool
modKeyReleasedCb func(uint8, uint16)
allModKeysReleasedCb func()
}
func NewXRecordEventHandler(keySymbols *keysyms.KeySymbols) *XRecordEventHandler {
return &XRecordEventHandler{
keySymbols: keySymbols,
}
}
//func (h *XRecordEventHandler) logPressedMods(title string) {
// logger.Debug(title, "pressedMods:", Modifiers(h.pressedMods))
//}
func (h *XRecordEventHandler) handleButtonEvent(pressed bool) {
if h.pressedMods > 0 {
h.nonModKeyPressed = true
}
}
func (h *XRecordEventHandler) handleKeyEvent(pressed bool, keycode uint8, state uint16) {
keystr, _ := h.keySymbols.LookupString(x.Keycode(keycode), state)
//var pr string
//if pressed {
// pr = "PRESS"
//} else {
// pr = "RELEASE"
//}
//logger.Debugf("%s keycode: [%d|%s], state: %v\n", pr, keycode, keystr, Modifiers(state))
if pressed {
mod, ok := key2Mod(keystr)
if ok {
h.pressedMods |= mod
h.historyPressedMods |= mod
} else {
//logger.Debug("non-mod key pressed")
if h.pressedMods > 0 {
h.nonModKeyPressed = true
}
}
//h.logPressedMods("pressed")
} else {
// release<|fim▁hole|> return
}
if h.pressedMods == h.historyPressedMods && !h.nonModKeyPressed {
if h.modKeyReleasedCb != nil {
logger.Debugf("modKeyReleased keycode %d historyPressedMods: %s",
keycode, Modifiers(h.historyPressedMods))
h.modKeyReleasedCb(keycode, h.historyPressedMods)
}
}
h.pressedMods &^= mod
//h.logPressedMods("after release")
if h.pressedMods == 0 {
h.historyPressedMods = 0
h.nonModKeyPressed = false
if h.allModKeysReleasedCb != nil {
//logger.Debug("allModKeysReleased")
h.allModKeysReleasedCb()
}
}
}
}
func key2Mod(key string) (uint16, bool) {
key = strings.ToLower(key)
// caps_lock and num_lock
if key == "caps_lock" {
return keysyms.ModMaskCapsLock, true
} else if key == "num_lock" {
return keysyms.ModMaskNumLock, true
}
// control/alt/meta/shift/super _ l/r
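// e.g. (illustrative): "super_l" and "super_r" both map to ModMaskSuper and
// "control_l" to ModMaskControl, while a name without an '_' (such as a plain
// keysym like "a") falls through to the i == -1 check below and yields (0, false).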
i := strings.IndexByte(key, '_')
if i == -1 {
return 0, false
}
match := key[0:i]
switch match {
case "shift":
return keysyms.ModMaskShift, true
case "control":
return keysyms.ModMaskControl, true
case "super":
return keysyms.ModMaskSuper, true
case "alt", "meta":
return keysyms.ModMaskAlt, true
}
return 0, false
}<|fim▁end|> | //h.logPressedMods("before release")
mod, ok := key2Mod(keystr)
if !ok { |
<|file_name|>vtable.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use middle::subst::{SelfSpace, FnSpace};
use middle::traits;
use middle::traits::{SelectionError, OutputTypeParameterMismatch, Overflow, Unimplemented};
use middle::traits::{Obligation, obligation_for_builtin_bound};
use middle::traits::{FulfillmentError, CodeSelectionError, CodeAmbiguity};
use middle::traits::{ObligationCause};
use middle::ty::{mod, Ty};
use middle::typeck::check::{FnCtxt,
structurally_resolved_type};
use middle::typeck::infer;
use std::rc::Rc;
use syntax::ast;
use syntax::codemap::Span;
use util::ppaux::{UserString, Repr, ty_to_string};
pub fn check_object_cast<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
cast_expr: &ast::Expr,
source_expr: &ast::Expr,
target_object_ty: Ty<'tcx>)
{
debug!("check_object_cast(cast_expr={}, target_object_ty={})",
cast_expr.repr(fcx.tcx()),
target_object_ty.repr(fcx.tcx()));
// Look up vtables for the type we're casting to,
// passing in the source and target type. The source
// must be a pointer type suitable to the object sigil,
// e.g.: `&x as &Trait` or `box x as Box<Trait>`
let source_ty = fcx.expr_ty(source_expr);
let source_ty = structurally_resolved_type(fcx, source_expr.span, source_ty);
debug!("source_ty={}", source_ty.repr(fcx.tcx()));
match (&source_ty.sty, &target_object_ty.sty) {
(&ty::ty_uniq(referent_ty), &ty::ty_uniq(object_trait_ty)) => {
let object_trait = object_trait(&object_trait_ty);
// Ensure that if ~T is cast to ~Trait, then T : Trait
push_cast_obligation(fcx, cast_expr, object_trait, referent_ty);
check_object_safety(fcx.tcx(), object_trait, source_expr.span);
}
(&ty::ty_rptr(referent_region, ty::mt { ty: referent_ty,
mutbl: referent_mutbl }),
&ty::ty_rptr(target_region, ty::mt { ty: object_trait_ty,
mutbl: target_mutbl })) =>
{
let object_trait = object_trait(&object_trait_ty);
if !mutability_allowed(referent_mutbl, target_mutbl) {
fcx.tcx().sess.span_err(source_expr.span,
"types differ in mutability");
} else {
// Ensure that if &'a T is cast to &'b Trait, then T : Trait
push_cast_obligation(fcx, cast_expr,
object_trait,
referent_ty);
// Ensure that if &'a T is cast to &'b Trait, then 'b <= 'a
infer::mk_subr(fcx.infcx(),
infer::RelateObjectBound(source_expr.span),
target_region,
referent_region);
check_object_safety(fcx.tcx(), object_trait, source_expr.span);
}
}
(_, &ty::ty_uniq(..)) => {
fcx.ccx.tcx.sess.span_err(
source_expr.span,
format!("can only cast a boxed pointer \
to a boxed object, not a {}",
ty::ty_sort_string(fcx.tcx(), source_ty)).as_slice());
}
(_, &ty::ty_rptr(..)) => {
fcx.ccx.tcx.sess.span_err(
source_expr.span,
format!("can only cast a &-pointer \
to an &-object, not a {}",
ty::ty_sort_string(fcx.tcx(), source_ty)).as_slice());
}
_ => {
fcx.tcx().sess.span_bug(
source_expr.span,
"expected object type");
}
}
fn object_trait<'a, 'tcx>(t: &'a Ty<'tcx>) -> &'a ty::TyTrait<'tcx> {
match t.sty {
ty::ty_trait(ref ty_trait) => &**ty_trait,
_ => panic!("expected ty_trait")
}
}
fn mutability_allowed(a_mutbl: ast::Mutability,
b_mutbl: ast::Mutability)
-> bool {
a_mutbl == b_mutbl ||
(a_mutbl == ast::MutMutable && b_mutbl == ast::MutImmutable)
}
fn push_cast_obligation<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
cast_expr: &ast::Expr,
object_trait: &ty::TyTrait<'tcx>,
referent_ty: Ty<'tcx>) {
let object_trait_ref =
register_object_cast_obligations(fcx,
cast_expr.span,
object_trait,
referent_ty);
// Finally record the object_trait_ref for use during trans
// (it would prob be better not to do this, but it's just kind
// of a pain to have to reconstruct it).
fcx.write_object_cast(cast_expr.id, object_trait_ref);
}
}
// Check that a trait is 'object-safe'. This should be checked whenever a trait object
// is created (by casting or coercion, etc.). A trait is object-safe if all its
// methods are object-safe. A trait method is object-safe if it does not take
// self by value, has no type parameters and does not use the `Self` type, except
// in self position.
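// For illustration (a sketch, not part of this module): under these rules
//     trait Draw { fn draw(&self); }                        // object-safe
//     trait Cmp { fn cmp_to(&self, other: Self) -> bool; }  // `Self` outside self position
//     trait Apply { fn apply<T>(&self, t: T); }             // generic method
// only `Draw` could be used behind `&Draw` or `Box<Draw>`; the other two
// would be rejected by the checks below.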
pub fn check_object_safety<'tcx>(tcx: &ty::ctxt<'tcx>,
object_trait: &ty::TyTrait<'tcx>,
span: Span) {
// Skip the fn_once lang item trait since only the compiler should call
// `call_once` which is the method which takes self by value. What could go
// wrong?
match tcx.lang_items.fn_once_trait() {
Some(def_id) if def_id == object_trait.principal.def_id => return,
_ => {}
}
let trait_items = ty::trait_items(tcx, object_trait.principal.def_id);
let mut errors = Vec::new();
for item in trait_items.iter() {
match *item {
ty::MethodTraitItem(ref m) => {
errors.push(check_object_safety_of_method(tcx, &**m))
}
ty::TypeTraitItem(_) => {}
}
}
let mut errors = errors.iter().flat_map(|x| x.iter()).peekable();
if errors.peek().is_some() {
let trait_name = ty::item_path_str(tcx, object_trait.principal.def_id);
span_err!(tcx.sess, span, E0038,
"cannot convert to a trait object because trait `{}` is not object-safe",
trait_name);
for msg in errors {
tcx.sess.note(msg.as_slice());
}
}
// Returns a vec of error messages. If the vec is empty - no errors!
fn check_object_safety_of_method<'tcx>(tcx: &ty::ctxt<'tcx>,
method: &ty::Method<'tcx>)
-> Vec<String> {
/*!
* There are some limitations to calling functions through an
* object, because (a) the self type is not known
* (that's the whole point of a trait instance, after all, to
* obscure the self type) and (b) the call must go through a
* vtable and hence cannot be monomorphized.
*/
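// Concretely (an illustrative sketch): given `trait T { fn consume(self); }`,
// `consume` cannot be called on a `&T` object, since the erased, unsized
// receiver cannot be moved by value -- reason (a) -- and a generic method
// has no single vtable slot to dispatch through -- reason (b).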
let mut msgs = Vec::new();
let method_name = method.name.repr(tcx);
match method.explicit_self {
ty::ByValueExplicitSelfCategory => { // reason (a) above
msgs.push(format!("cannot call a method (`{}`) with a by-value \
receiver through a trait object", method_name))
}
ty::StaticExplicitSelfCategory => {
// Static methods are always object-safe since they
// can't be called through a trait object
return msgs
}
ty::ByReferenceExplicitSelfCategory(..) |
ty::ByBoxExplicitSelfCategory => {}
}
// reason (a) above
let check_for_self_ty = |ty| {
if ty::type_has_self(ty) {
Some(format!(
"cannot call a method (`{}`) whose type contains \
a self-type (`{}`) through a trait object",
method_name, ty_to_string(tcx, ty)))
} else {
None
}
};
let ref sig = method.fty.sig;
for &input_ty in sig.inputs[1..].iter() {
match check_for_self_ty(input_ty) {
Some(msg) => msgs.push(msg),
_ => {}
}
}
if let ty::FnConverging(result_type) = sig.output {
match check_for_self_ty(result_type) {
Some(msg) => msgs.push(msg),
_ => {}
}
}
if method.generics.has_type_params(FnSpace) {
// reason (b) above
msgs.push(format!("cannot call a generic method (`{}`) through a trait object",
method_name));
}
msgs
}
}
pub fn register_object_cast_obligations<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
span: Span,
object_trait: &ty::TyTrait<'tcx>,
referent_ty: Ty<'tcx>)
-> Rc<ty::TraitRef<'tcx>>
{
// This is just for better error reporting. Kinda goofy. The object type stuff
// needs some refactoring so there is a more convenient type to pass around.
let object_trait_ty =
ty::mk_trait(fcx.tcx(),
object_trait.principal.clone(),
object_trait.bounds);
debug!("register_object_cast_obligations: referent_ty={} object_trait_ty={}",
referent_ty.repr(fcx.tcx()),
object_trait_ty.repr(fcx.tcx()));
// Take the type parameters from the object type, but set
// the Self type (which is unknown, for the object type)
// to be the type we are casting from.
let mut object_substs = object_trait.principal.substs.clone();
assert!(object_substs.self_ty().is_none());
object_substs.types.push(SelfSpace, referent_ty);
// Create the obligation for casting from T to Trait.
let object_trait_ref =
Rc::new(ty::TraitRef { def_id: object_trait.principal.def_id,
substs: object_substs });
let object_obligation =
Obligation::new(
ObligationCause::new(span,
traits::ObjectCastObligation(object_trait_ty)),
object_trait_ref.clone());
fcx.register_obligation(object_obligation);
// Create additional obligations for all the various builtin
// bounds attached to the object cast. (In other words, if the
// object type is Foo+Send, this would create an obligation
// for the Send check.)
for builtin_bound in object_trait.bounds.builtin_bounds.iter() {
let obligation = obligation_for_builtin_bound(
fcx.tcx(),
ObligationCause::new(span,
traits::ObjectCastObligation(object_trait_ty)),
referent_ty,
builtin_bound);
match obligation {
Ok(obligation) => fcx.register_obligation(obligation),
_ => {}
}
}
object_trait_ref
}
pub fn select_all_fcx_obligations_or_error(fcx: &FnCtxt) {
debug!("select_all_fcx_obligations_or_error");
let mut fulfillment_cx = fcx.inh.fulfillment_cx.borrow_mut();
let r = fulfillment_cx.select_all_or_error(fcx.infcx(),
&fcx.inh.param_env,
fcx);
match r {
Ok(()) => { }
Err(errors) => { report_fulfillment_errors(fcx, &errors); }
}
}
fn resolve_trait_ref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, obligation: &Obligation<'tcx>)
-> (Rc<ty::TraitRef<'tcx>>, Ty<'tcx>)
{
let trait_ref =
fcx.infcx().resolve_type_vars_in_trait_ref_if_possible(
&*obligation.trait_ref);
let self_ty =
trait_ref.substs.self_ty().unwrap();
(Rc::new(trait_ref), self_ty)
}
pub fn report_fulfillment_errors<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
errors: &Vec<FulfillmentError<'tcx>>) {
for error in errors.iter() {
report_fulfillment_error(fcx, error);
}
}
pub fn report_fulfillment_error<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
error: &FulfillmentError<'tcx>) {
match error.code {
CodeSelectionError(ref e) => {
report_selection_error(fcx, &error.obligation, e);
}
CodeAmbiguity => {
maybe_report_ambiguity(fcx, &error.obligation);
}
}
}
pub fn report_selection_error<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
obligation: &Obligation<'tcx>,
error: &SelectionError<'tcx>)
{
match *error {
Overflow => {
let (trait_ref, self_ty) = resolve_trait_ref(fcx, obligation);
fcx.tcx().sess.span_err(
obligation.cause.span,
format!(
"overflow evaluating the trait `{}` for the type `{}`",
trait_ref.user_string(fcx.tcx()),
self_ty.user_string(fcx.tcx())).as_slice());
note_obligation_cause(fcx, obligation);
}
Unimplemented => {
let (trait_ref, self_ty) = resolve_trait_ref(fcx, obligation);
if !ty::type_is_error(self_ty) {
fcx.tcx().sess.span_err(
obligation.cause.span,
format!(
"the trait `{}` is not implemented for the type `{}`",
trait_ref.user_string(fcx.tcx()),
self_ty.user_string(fcx.tcx())).as_slice());
note_obligation_cause(fcx, obligation);
}
}
OutputTypeParameterMismatch(ref expected_trait_ref, ref e) => {
let expected_trait_ref =
fcx.infcx().resolve_type_vars_in_trait_ref_if_possible(
&**expected_trait_ref);
let (trait_ref, self_ty) = resolve_trait_ref(fcx, obligation);
if !ty::type_is_error(self_ty) {
fcx.tcx().sess.span_err(
obligation.cause.span,
format!(
"type mismatch: the type `{}` implements the trait `{}`, \
but the trait `{}` is required ({})",
self_ty.user_string(fcx.tcx()),
expected_trait_ref.user_string(fcx.tcx()),
trait_ref.user_string(fcx.tcx()),
ty::type_err_to_str(fcx.tcx(), e)).as_slice());
note_obligation_cause(fcx, obligation);<|fim▁hole|> }
}
}
}
pub fn maybe_report_ambiguity<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
obligation: &Obligation<'tcx>) {
// Unable to successfully determine, probably means
// insufficient type information, but could mean
// ambiguous impls. The latter *ought* to be a
// coherence violation, so we don't report it here.
let (trait_ref, self_ty) = resolve_trait_ref(fcx, obligation);
debug!("maybe_report_ambiguity(trait_ref={}, self_ty={}, obligation={})",
trait_ref.repr(fcx.tcx()),
self_ty.repr(fcx.tcx()),
obligation.repr(fcx.tcx()));
let all_types = &trait_ref.substs.types;
if all_types.iter().any(|&t| ty::type_is_error(t)) {
} else if all_types.iter().any(|&t| ty::type_needs_infer(t)) {
// This is kind of a hack: it frequently happens that some earlier
// error prevents types from being fully inferred, and then we get
// a bunch of uninteresting errors saying something like "<generic
// #0> doesn't implement Sized". It may even be true that we
// could just skip over all checks where the self-ty is an
// inference variable, but I was afraid that there might be an
// inference variable created, registered as an obligation, and
// then never forced by writeback, and hence by skipping here we'd
// be ignoring the fact that we don't KNOW the type works
// out. Though even that would probably be harmless, given that
// we're only talking about builtin traits, which are known to be
// inhabited. But in any case I just threw in this check for
// has_errors() to be sure that compilation isn't happening
// anyway. In that case, why inundate the user.
if !fcx.tcx().sess.has_errors() {
if fcx.ccx.tcx.lang_items.sized_trait()
.map_or(false, |sized_id| sized_id == trait_ref.def_id) {
fcx.tcx().sess.span_err(
obligation.cause.span,
format!(
"unable to infer enough type information about `{}`; type annotations \
required",
self_ty.user_string(fcx.tcx())).as_slice());
} else {
fcx.tcx().sess.span_err(
obligation.cause.span,
format!(
"unable to infer enough type information to \
locate the impl of the trait `{}` for \
the type `{}`; type annotations required",
trait_ref.user_string(fcx.tcx()),
self_ty.user_string(fcx.tcx())).as_slice());
note_obligation_cause(fcx, obligation);
}
}
} else if !fcx.tcx().sess.has_errors() {
// Ambiguity. Coherence should have reported an error.
fcx.tcx().sess.span_bug(
obligation.cause.span,
format!(
"coherence failed to report ambiguity: \
cannot locate the impl of the trait `{}` for \
the type `{}`",
trait_ref.user_string(fcx.tcx()),
self_ty.user_string(fcx.tcx())).as_slice());
}
}
pub fn select_fcx_obligations_where_possible(fcx: &FnCtxt) {
/*! Select as many obligations as we can at present. */
match
fcx.inh.fulfillment_cx
.borrow_mut()
.select_where_possible(fcx.infcx(), &fcx.inh.param_env, fcx)
{
Ok(()) => { }
Err(errors) => { report_fulfillment_errors(fcx, &errors); }
}
}
pub fn select_new_fcx_obligations(fcx: &FnCtxt) {
/*!
* Try to select any fcx obligation that we haven't tried yet,
* in an effort to improve inference. You could just call
* `select_fcx_obligations_where_possible` except that it leads
* to repeated work.
*/
match
fcx.inh.fulfillment_cx
.borrow_mut()
.select_new_obligations(fcx.infcx(), &fcx.inh.param_env, fcx)
{
Ok(()) => { }
Err(errors) => { report_fulfillment_errors(fcx, &errors); }
}
}
fn note_obligation_cause<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
obligation: &Obligation<'tcx>) {
let tcx = fcx.tcx();
let trait_name = ty::item_path_str(tcx, obligation.trait_ref.def_id);
match obligation.cause.code {
traits::MiscObligation => { }
traits::ItemObligation(item_def_id) => {
let item_name = ty::item_path_str(tcx, item_def_id);
tcx.sess.span_note(
obligation.cause.span,
format!(
"the trait `{}` must be implemented because it is required by `{}`",
trait_name,
item_name).as_slice());
}
traits::ObjectCastObligation(object_ty) => {
tcx.sess.span_note(
obligation.cause.span,
format!(
"the trait `{}` must be implemented for the cast \
to the object type `{}`",
trait_name,
fcx.infcx().ty_to_string(object_ty)).as_slice());
}
traits::RepeatVec => {
tcx.sess.span_note(
obligation.cause.span,
"the `Copy` trait is required because the \
repeated element will be copied");
}
traits::VariableType(_) => {
tcx.sess.span_note(
obligation.cause.span,
"all local variables must have a statically known size");
}
traits::ReturnType => {
tcx.sess.span_note(
obligation.cause.span,
"the return type of a function must have a \
statically known size");
}
traits::AssignmentLhsSized => {
tcx.sess.span_note(
obligation.cause.span,
"the left-hand-side of an assignment must have a statically known size");
}
traits::StructInitializerSized => {
tcx.sess.span_note(
obligation.cause.span,
"structs must have a statically known size to be initialized");
}
traits::DropTrait => {
span_note!(tcx.sess, obligation.cause.span,
"cannot implement a destructor on a \
structure or enumeration that does not satisfy Send");
span_help!(tcx.sess, obligation.cause.span,
"use \"#[unsafe_destructor]\" on the implementation \
to force the compiler to allow this");
}
traits::ClosureCapture(var_id, closure_span) => {
let name = ty::local_var_name_str(tcx, var_id);
span_note!(tcx.sess, closure_span,
"the closure that captures `{}` requires that all captured variables \
implement the trait `{}`",
name,
trait_name);
}
traits::FieldSized => {
span_note!(tcx.sess, obligation.cause.span,
"only the last field of a struct or enum variant \
may have a dynamically sized type")
}
}
}<|fim▁end|> | |
<|file_name|>test_xenapi.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test suite for XenAPI."""
import ast
import contextlib
import datetime
import functools
import os
import re
import mox
from nova.compute import aggregate_states
from nova.compute import instance_types
from nova.compute import power_state
from nova.compute import task_states
from nova.compute import utils as compute_utils
from nova.compute import vm_states
from nova import context
from nova import db
from nova import exception
from nova import flags
from nova import log as logging
from nova.openstack.common import importutils
from nova import test
from nova.tests.db import fakes as db_fakes
from nova.tests import fake_network
from nova.tests import fake_utils
from nova.tests.glance import stubs as glance_stubs
from nova.tests.xenapi import stubs
from nova.virt.xenapi import connection as xenapi_conn
from nova.virt.xenapi import fake as xenapi_fake
from nova.virt.xenapi import vm_utils
from nova.virt.xenapi import vmops
from nova.virt.xenapi import volume_utils
LOG = logging.getLogger(__name__)
FLAGS = flags.FLAGS
def stub_vm_utils_with_vdi_attached_here(function, should_return=True):
"""
vm_utils.with_vdi_attached_here needs to be stubbed out because it
calls down to the filesystem to attach a vdi. This provides a
decorator to handle that.
"""
@functools.wraps(function)
def decorated_function(self, *args, **kwargs):
@contextlib.contextmanager
def fake_vdi_attached_here(*args, **kwargs):
fake_dev = 'fakedev'
yield fake_dev
def fake_stream_disk(*args, **kwargs):
pass
def fake_is_vdi_pv(*args, **kwargs):
return should_return
orig_vdi_attached_here = vm_utils.vdi_attached_here
orig_stream_disk = vm_utils._stream_disk
orig_is_vdi_pv = vm_utils._is_vdi_pv
try:
vm_utils.vdi_attached_here = fake_vdi_attached_here
vm_utils._stream_disk = fake_stream_disk
vm_utils._is_vdi_pv = fake_is_vdi_pv
return function(self, *args, **kwargs)
finally:
vm_utils._is_vdi_pv = orig_is_vdi_pv
vm_utils._stream_disk = orig_stream_disk
vm_utils.vdi_attached_here = orig_vdi_attached_here
return decorated_function
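# Illustrative application of the decorator above (it is used exactly this
# way on test_spawn_raw_glance later in this file):
#
#     @stub_vm_utils_with_vdi_attached_here
#     def test_spawn_raw(self):
#         ...  # inside, vm_utils.vdi_attached_here yields 'fakedev'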
class XenAPIVolumeTestCase(test.TestCase):
"""Unit tests for Volume operations."""
def setUp(self):
super(XenAPIVolumeTestCase, self).setUp()
self.user_id = 'fake'
self.project_id = 'fake'
self.context = context.RequestContext(self.user_id, self.project_id)
self.flags(target_host='127.0.0.1',
xenapi_connection_url='test_url',
xenapi_connection_password='test_pass',
firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
db_fakes.stub_out_db_instance_api(self.stubs)
xenapi_fake.reset()
self.instance_values = {'id': 1,
'project_id': self.user_id,
'user_id': 'fake',
'image_ref': 1,
'kernel_id': 2,
'ramdisk_id': 3,
'root_gb': 20,
'instance_type_id': '3', # m1.large
'os_type': 'linux',
'architecture': 'x86-64'}
def _create_volume(self, size='0'):
"""Create a volume object."""
vol = {}
vol['size'] = size
vol['user_id'] = 'fake'
vol['project_id'] = 'fake'
vol['host'] = 'localhost'
vol['availability_zone'] = FLAGS.storage_availability_zone
vol['status'] = "creating"
vol['attach_status'] = "detached"
return db.volume_create(self.context, vol)
@staticmethod
def _make_info():
return {
'driver_volume_type': 'iscsi',
'data': {
'volume_id': 1,
'target_iqn': 'iqn.2010-10.org.openstack:volume-00000001',
'target_portal': '127.0.0.1:3260,fake',
'target_lun': None,
'auth_method': 'CHAP',
'auth_username': 'fake',
'auth_password': 'fake',
}
}
def test_mountpoint_to_number(self):
cases = {
'sda': 0,
'sdp': 15,
'hda': 0,
'hdp': 15,
'vda': 0,
'xvda': 0,
'0': 0,
'10': 10,
'vdq': -1,
'sdq': -1,
'hdq': -1,
'xvdq': -1,
}
for (input, expected) in cases.iteritems():
func = volume_utils.VolumeHelper.mountpoint_to_number
actual = func(input)
self.assertEqual(actual, expected,
'%s yielded %s, not %s' % (input, actual, expected))
def test_parse_volume_info_raise_exception(self):
"""This shows how to test helper classes' methods."""
stubs.stubout_session(self.stubs, stubs.FakeSessionForVolumeTests)
session = xenapi_conn.XenAPISession('test_url', 'root', 'test_pass')
helper = volume_utils.VolumeHelper
helper.XenAPI = session.get_imported_xenapi()
vol = self._create_volume()
# oops, wrong mount point!
self.assertRaises(volume_utils.StorageError,
helper.parse_volume_info,
self._make_info(),
'dev/sd'
)
db.volume_destroy(context.get_admin_context(), vol['id'])
def test_attach_volume(self):
"""This shows how to test Ops classes' methods."""
stubs.stubout_session(self.stubs, stubs.FakeSessionForVolumeTests)
conn = xenapi_conn.get_connection(False)
volume = self._create_volume()
instance = db.instance_create(self.context, self.instance_values)
vm = xenapi_fake.create_vm(instance.name, 'Running')
result = conn.attach_volume(self._make_info(),
instance.name, '/dev/sdc')
# check that the VM has a VBD attached to it
# Get XenAPI record for VBD
vbds = xenapi_fake.get_all('VBD')
vbd = xenapi_fake.get_record('VBD', vbds[0])
vm_ref = vbd['VM']
self.assertEqual(vm_ref, vm)
def test_attach_volume_raise_exception(self):
"""This shows how to test when exceptions are raised."""
stubs.stubout_session(self.stubs,
stubs.FakeSessionForVolumeFailedTests)
conn = xenapi_conn.get_connection(False)
volume = self._create_volume()
instance = db.instance_create(self.context, self.instance_values)
xenapi_fake.create_vm(instance.name, 'Running')
self.assertRaises(exception.VolumeDriverNotFound,
conn.attach_volume,
{'driver_volume_type': 'nonexist'},
instance.name,
'/dev/sdc')
class XenAPIVMTestCase(test.TestCase):
"""Unit tests for VM operations."""
def setUp(self):
super(XenAPIVMTestCase, self).setUp()
self.network = importutils.import_object(FLAGS.network_manager)
self.flags(xenapi_connection_url='test_url',
xenapi_connection_password='test_pass',
instance_name_template='%d',
firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
xenapi_fake.reset()
xenapi_fake.create_local_srs()
xenapi_fake.create_local_pifs()
db_fakes.stub_out_db_instance_api(self.stubs)
xenapi_fake.create_network('fake', FLAGS.flat_network_bridge)
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
stubs.stubout_get_this_vm_uuid(self.stubs)
stubs.stubout_stream_disk(self.stubs)
stubs.stubout_is_vdi_pv(self.stubs)
stubs.stub_out_vm_methods(self.stubs)
glance_stubs.stubout_glance_client(self.stubs)
fake_utils.stub_out_utils_execute(self.stubs)
self.user_id = 'fake'
self.project_id = 'fake'
self.context = context.RequestContext(self.user_id, self.project_id)
self.conn = xenapi_conn.get_connection(False)
def test_init_host(self):
session = xenapi_conn.XenAPISession('test_url', 'root', 'test_pass')
vm = vm_utils.get_this_vm_ref(session)
# Local root disk
vdi0 = xenapi_fake.create_vdi('compute', None)
vbd0 = xenapi_fake.create_vbd(vm, vdi0)
# Instance VDI
vdi1 = xenapi_fake.create_vdi('instance-aaaa', None,
other_config={'nova_instance_uuid': 'aaaa'})
vbd1 = xenapi_fake.create_vbd(vm, vdi1)
# Only looks like instance VDI
vdi2 = xenapi_fake.create_vdi('instance-bbbb', None)
vbd2 = xenapi_fake.create_vbd(vm, vdi2)
self.conn.init_host(None)
self.assertEquals(set(xenapi_fake.get_all('VBD')), set([vbd0, vbd2]))
def test_list_instances_0(self):
instances = self.conn.list_instances()
self.assertEquals(instances, [])
def test_get_rrd_server(self):
self.flags(xenapi_connection_url='myscheme://myaddress/')
server_info = vm_utils.get_rrd_server()
self.assertEqual(server_info[0], 'myscheme')
self.assertEqual(server_info[1], 'myaddress')
def test_get_diagnostics(self):
def fake_get_rrd(host, vm_uuid):
with open('xenapi/vm_rrd.xml') as f:
return re.sub(r'\s', '', f.read())
self.stubs.Set(vm_utils, 'get_rrd', fake_get_rrd)
fake_diagnostics = {
'vbd_xvdb_write': '0.0',
'memory_target': '10961792000.0000',
'memory_internal_free': '3612860.6020',
'memory': '10961792000.0000',
'vbd_xvda_write': '0.0',
'cpu0': '0.0110',
'vif_0_tx': '752.4007',
'vbd_xvda_read': '0.0',
'vif_0_rx': '4837.8805'
}
instance = self._create_instance()
expected = self.conn.get_diagnostics(instance)
self.assertDictMatch(fake_diagnostics, expected)
def test_instance_snapshot_fails_with_no_primary_vdi(self):
def create_bad_vbd(vm_ref, vdi_ref):
vbd_rec = {'VM': vm_ref,
'VDI': vdi_ref,
'userdevice': 'fake',
'currently_attached': False}
vbd_ref = xenapi_fake._create_object('VBD', vbd_rec)
xenapi_fake.after_VBD_create(vbd_ref, vbd_rec)
return vbd_ref
self.stubs.Set(xenapi_fake, 'create_vbd', create_bad_vbd)
stubs.stubout_instance_snapshot(self.stubs)
# Stubbing out firewall driver as previous stub sets alters
# xml rpc result parsing
stubs.stubout_firewall_driver(self.stubs, self.conn)
instance = self._create_instance()
name = "MySnapshot"
self.assertRaises(exception.NovaException, self.conn.snapshot,
self.context, instance, name)
def test_instance_snapshot(self):
stubs.stubout_instance_snapshot(self.stubs)
stubs.stubout_is_snapshot(self.stubs)
# Stubbing out firewall driver as previous stub sets alters
# xml rpc result parsing
stubs.stubout_firewall_driver(self.stubs, self.conn)
instance = self._create_instance()
name = "MySnapshot"
template_vm_ref = self.conn.snapshot(self.context, instance, name)
# Ensure VM was torn down
vm_labels = []
for vm_ref in xenapi_fake.get_all('VM'):
vm_rec = xenapi_fake.get_record('VM', vm_ref)
if not vm_rec["is_control_domain"]:
vm_labels.append(vm_rec["name_label"])
self.assertEquals(vm_labels, [instance.name])
# Ensure VBDs were torn down
vbd_labels = []
for vbd_ref in xenapi_fake.get_all('VBD'):
vbd_rec = xenapi_fake.get_record('VBD', vbd_ref)
vbd_labels.append(vbd_rec["vm_name_label"])
self.assertEquals(vbd_labels, [instance.name])
# Ensure VDIs were torn down
for vdi_ref in xenapi_fake.get_all('VDI'):
vdi_rec = xenapi_fake.get_record('VDI', vdi_ref)
name_label = vdi_rec["name_label"]
self.assert_(not name_label.endswith('snapshot'))
def create_vm_record(self, conn, os_type, name):
instances = conn.list_instances()
self.assertEquals(instances, [name])
# Get Nova record for VM
vm_info = conn.get_info({'name': name})
# Get XenAPI record for VM
vms = [rec for ref, rec
in xenapi_fake.get_all_records('VM').iteritems()
if not rec['is_control_domain']]
vm = vms[0]
self.vm_info = vm_info
self.vm = vm
def check_vm_record(self, conn, check_injection=False):
# Check that m1.large above turned into the right thing.
instance_type = db.instance_type_get_by_name(conn, 'm1.large')
mem_kib = long(instance_type['memory_mb']) << 10
mem_bytes = str(mem_kib << 10)
vcpus = instance_type['vcpus']
self.assertEquals(self.vm_info['max_mem'], mem_kib)
self.assertEquals(self.vm_info['mem'], mem_kib)
self.assertEquals(self.vm['memory_static_max'], mem_bytes)
self.assertEquals(self.vm['memory_dynamic_max'], mem_bytes)
self.assertEquals(self.vm['memory_dynamic_min'], mem_bytes)
self.assertEquals(self.vm['VCPUs_max'], str(vcpus))
self.assertEquals(self.vm['VCPUs_at_startup'], str(vcpus))
# Check that the VM is running according to Nova
self.assertEquals(self.vm_info['state'], power_state.RUNNING)
# Check that the VM is running according to XenAPI.
self.assertEquals(self.vm['power_state'], 'Running')
if check_injection:
xenstore_data = self.vm['xenstore_data']
self.assertEquals(xenstore_data['vm-data/hostname'], 'test')
key = 'vm-data/networking/DEADBEEF0000'
xenstore_value = xenstore_data[key]
tcpip_data = ast.literal_eval(xenstore_value)
self.assertEquals(tcpip_data,
{'broadcast': '192.168.0.255',
'dns': ['192.168.0.1'],
'gateway': '192.168.0.1',
'gateway_v6': 'dead:beef::1',
'ip6s': [{'enabled': '1',
'ip': 'dead:beef::dcad:beff:feef:0',
'netmask': '64'}],
'ips': [{'enabled': '1',
'ip': '192.168.0.100',
'netmask': '255.255.255.0'}],
'dhcp_server': '192.168.0.1',
'label': 'fake',
'mac': 'DE:AD:BE:EF:00:00',
'rxtx_cap': 3})
def check_vm_params_for_windows(self):
self.assertEquals(self.vm['platform']['nx'], 'true')
self.assertEquals(self.vm['HVM_boot_params'], {'order': 'dc'})
self.assertEquals(self.vm['HVM_boot_policy'], 'BIOS order')
# check that these are not set
self.assertEquals(self.vm['PV_args'], '')
self.assertEquals(self.vm['PV_bootloader'], '')
self.assertEquals(self.vm['PV_kernel'], '')
self.assertEquals(self.vm['PV_ramdisk'], '')
def check_vm_params_for_linux(self):
self.assertEquals(self.vm['platform']['nx'], 'false')
self.assertEquals(self.vm['PV_args'], '')
self.assertEquals(self.vm['PV_bootloader'], 'pygrub')
# check that these are not set
self.assertEquals(self.vm['PV_kernel'], '')
self.assertEquals(self.vm['PV_ramdisk'], '')
self.assertEquals(self.vm['HVM_boot_params'], {})
self.assertEquals(self.vm['HVM_boot_policy'], '')
def check_vm_params_for_linux_with_external_kernel(self):
self.assertEquals(self.vm['platform']['nx'], 'false')
self.assertEquals(self.vm['PV_args'], 'root=/dev/xvda1')
self.assertNotEquals(self.vm['PV_kernel'], '')
self.assertNotEquals(self.vm['PV_ramdisk'], '')
# check that these are not set
self.assertEquals(self.vm['HVM_boot_params'], {})
self.assertEquals(self.vm['HVM_boot_policy'], '')
def _list_vdis(self):
url = FLAGS.xenapi_connection_url
username = FLAGS.xenapi_connection_username
password = FLAGS.xenapi_connection_password
session = xenapi_conn.XenAPISession(url, username, password)
return session.call_xenapi('VDI.get_all')
def _check_vdis(self, start_list, end_list):
for vdi_ref in end_list:
if not vdi_ref in start_list:
vdi_rec = xenapi_fake.get_record('VDI', vdi_ref)
# If the cache is turned on then the base disk will be
# there even after the cleanup
if 'other_config' in vdi_rec:
if vdi_rec['other_config']['image-id'] is None:
self.fail('Found unexpected VDI:%s' % vdi_ref)
else:
self.fail('Found unexpected VDI:%s' % vdi_ref)
def _test_spawn(self, image_ref, kernel_id, ramdisk_id,
instance_type_id="3", os_type="linux",
hostname="test", architecture="x86-64", instance_id=1,
check_injection=False,
create_record=True, empty_dns=False):
if create_record:
instance_values = {'id': instance_id,
'project_id': self.project_id,
'user_id': self.user_id,
'image_ref': image_ref,
'kernel_id': kernel_id,
'ramdisk_id': ramdisk_id,
'root_gb': 20,
'instance_type_id': instance_type_id,
'os_type': os_type,
'hostname': hostname,
'architecture': architecture}
instance = db.instance_create(self.context, instance_values)
else:
instance = db.instance_get(self.context, instance_id)
network_info = [({'bridge': 'fa0', 'id': 0,
'injected': True,
'cidr': '192.168.0.0/24',
'cidr_v6': 'dead:beef::1/120',
},
{'broadcast': '192.168.0.255',
'dns': ['192.168.0.1'],
'gateway': '192.168.0.1',
'gateway_v6': 'dead:beef::1',
'ip6s': [{'enabled': '1',
'ip': 'dead:beef::dcad:beff:feef:0',
'netmask': '64'}],
'ips': [{'enabled': '1',
'ip': '192.168.0.100',
'netmask': '255.255.255.0'}],
'dhcp_server': '192.168.0.1',
'label': 'fake',
'mac': 'DE:AD:BE:EF:00:00',
'rxtx_cap': 3})]
if empty_dns:
network_info[0][1]['dns'] = []
# admin_pass isn't part of the DB model, but it does get set as
# an attribute for spawn to use
instance.admin_pass = 'herp'
image_meta = {'id': glance_stubs.FakeGlance.IMAGE_VHD,
'disk_format': 'vhd'}
self.conn.spawn(self.context, instance, image_meta, network_info)
self.create_vm_record(self.conn, os_type, instance['name'])
self.check_vm_record(self.conn, check_injection)
self.assertTrue(instance.os_type)
self.assertTrue(instance.architecture)
def test_spawn_empty_dns(self):
"""Test spawning with an empty dns list"""
self._test_spawn(glance_stubs.FakeGlance.IMAGE_VHD, None, None,
os_type="linux", architecture="x86-64",
empty_dns=True)
self.check_vm_params_for_linux()
def test_spawn_not_enough_memory(self):
self.assertRaises(exception.InsufficientFreeMemory,
self._test_spawn,
1, 2, 3, "4") # m1.xlarge
def test_spawn_fail_cleanup_1(self):
"""Simulates an error while downloading an image.
Verifies that VDIs created are properly cleaned up.
"""
vdi_recs_start = self._list_vdis()
stubs.stubout_fetch_image_glance_disk(self.stubs, raise_failure=True)
self.assertRaises(xenapi_fake.Failure,
self._test_spawn, 1, 2, 3)
# No additional VDI should be found.
vdi_recs_end = self._list_vdis()
self._check_vdis(vdi_recs_start, vdi_recs_end)
def test_spawn_fail_cleanup_2(self):
"""Simulates an error while creating VM record.
It verifies that VDIs created are properly cleaned up.
"""
vdi_recs_start = self._list_vdis()
stubs.stubout_create_vm(self.stubs)
self.assertRaises(xenapi_fake.Failure,
self._test_spawn, 1, 2, 3)
# No additional VDI should be found.
vdi_recs_end = self._list_vdis()
self._check_vdis(vdi_recs_start, vdi_recs_end)
@stub_vm_utils_with_vdi_attached_here
def test_spawn_raw_glance(self):
self._test_spawn(glance_stubs.FakeGlance.IMAGE_RAW, None, None)
self.check_vm_params_for_linux()
def test_spawn_vhd_glance_linux(self):
self._test_spawn(glance_stubs.FakeGlance.IMAGE_VHD, None, None,
os_type="linux", architecture="x86-64")
self.check_vm_params_for_linux()
def test_spawn_vhd_glance_swapdisk(self):
# Change the default host_call_plugin to one that'll return
# a swap disk
orig_func = stubs.FakeSessionForVMTests.host_call_plugin
_host_call_plugin = stubs.FakeSessionForVMTests.host_call_plugin_swap
stubs.FakeSessionForVMTests.host_call_plugin = _host_call_plugin
# Stubbing out firewall driver as previous stub sets a particular
# stub for async plugin calls
stubs.stubout_firewall_driver(self.stubs, self.conn)
try:
# We'll steal the above glance linux test
self.test_spawn_vhd_glance_linux()
finally:
# Make sure to put this back
stubs.FakeSessionForVMTests.host_call_plugin = orig_func
# We should have 2 VBDs.
self.assertEqual(len(self.vm['VBDs']), 2)
# Now test that we have 1.
self.tearDown()
self.setUp()
self.test_spawn_vhd_glance_linux()
self.assertEqual(len(self.vm['VBDs']), 1)
def test_spawn_vhd_glance_windows(self):
self._test_spawn(glance_stubs.FakeGlance.IMAGE_VHD, None, None,
os_type="windows", architecture="i386")
self.check_vm_params_for_windows()
def test_spawn_iso_glance(self):
self._test_spawn(glance_stubs.FakeGlance.IMAGE_ISO, None, None,
os_type="windows", architecture="i386")
self.check_vm_params_for_windows()
def test_spawn_glance(self):
stubs.stubout_fetch_image_glance_disk(self.stubs)
self._test_spawn(glance_stubs.FakeGlance.IMAGE_MACHINE,
glance_stubs.FakeGlance.IMAGE_KERNEL,
glance_stubs.FakeGlance.IMAGE_RAMDISK)
self.check_vm_params_for_linux_with_external_kernel()
def test_spawn_netinject_file(self):
self.flags(flat_injected=True)
db_fakes.stub_out_db_instance_api(self.stubs, injected=True)
self._tee_executed = False
def _tee_handler(cmd, **kwargs):
input = kwargs.get('process_input', None)
self.assertNotEqual(input, None)
config = [line.strip() for line in input.split("\n")]
# Find the start of eth0 configuration and check it
index = config.index('auto eth0')
self.assertEquals(config[index + 1:index + 8], [
'iface eth0 inet static',
'address 192.168.0.100',
'netmask 255.255.255.0',
'broadcast 192.168.0.255',
'gateway 192.168.0.1',
'dns-nameservers 192.168.0.1',
''])
self._tee_executed = True
return '', ''
fake_utils.fake_execute_set_repliers([
# Capture the tee .../etc/network/interfaces command
(r'tee.*interfaces', _tee_handler),
])
self._test_spawn(glance_stubs.FakeGlance.IMAGE_MACHINE,
glance_stubs.FakeGlance.IMAGE_KERNEL,
glance_stubs.FakeGlance.IMAGE_RAMDISK,
check_injection=True)
self.assertTrue(self._tee_executed)
def test_spawn_netinject_xenstore(self):
db_fakes.stub_out_db_instance_api(self.stubs, injected=True)
self._tee_executed = False
def _mount_handler(cmd, *ignore_args, **ignore_kwargs):
# When mounting, create real files under the mountpoint to simulate
# files in the mounted filesystem
# mount point will be the last item of the command list
self._tmpdir = cmd[len(cmd) - 1]
LOG.debug(_('Creating files in %s to simulate guest agent'),
self._tmpdir)
os.makedirs(os.path.join(self._tmpdir, 'usr', 'sbin'))
# Touch the file using open
open(os.path.join(self._tmpdir, 'usr', 'sbin',
'xe-update-networking'), 'w').close()
return '', ''
def _umount_handler(cmd, *ignore_args, **ignore_kwargs):
# Umount would normally make files in the mounted filesystem
# disappear, so do that here
LOG.debug(_('Removing simulated guest agent files in %s'),
self._tmpdir)
os.remove(os.path.join(self._tmpdir, 'usr', 'sbin',
'xe-update-networking'))
os.rmdir(os.path.join(self._tmpdir, 'usr', 'sbin'))
os.rmdir(os.path.join(self._tmpdir, 'usr'))
return '', ''
def _tee_handler(cmd, *ignore_args, **ignore_kwargs):
self._tee_executed = True
return '', ''
fake_utils.fake_execute_set_repliers([
(r'mount', _mount_handler),
(r'umount', _umount_handler),
(r'tee.*interfaces', _tee_handler)])
self._test_spawn(1, 2, 3, check_injection=True)
# tee must not run in this case, where an injection-capable
# guest agent is detected
self.assertFalse(self._tee_executed)
def test_spawn_vlanmanager(self):
self.flags(image_service='nova.image.glance.GlanceImageService',
network_manager='nova.network.manager.VlanManager',
vlan_interface='fake0')
def dummy(*args, **kwargs):
pass
self.stubs.Set(vmops.VMOps, '_create_vifs', dummy)
# Reset network table
xenapi_fake.reset_table('network')
# Instance id = 2 will use vlan network (see db/fakes.py)
ctxt = self.context.elevated()
instance = self._create_instance(2, False)
networks = self.network.db.network_get_all(ctxt)
for network in networks:
self.network.set_network_host(ctxt, network)
self.network.allocate_for_instance(ctxt,
instance_id=2,
instance_uuid="00000000-0000-0000-0000-000000000000",
host=FLAGS.host,
vpn=None,
rxtx_factor=3,
project_id=self.project_id)
self._test_spawn(glance_stubs.FakeGlance.IMAGE_MACHINE,
glance_stubs.FakeGlance.IMAGE_KERNEL,
glance_stubs.FakeGlance.IMAGE_RAMDISK,
instance_id=2,
create_record=False)
# TODO(salvatore-orlando): a complete test here would require
# a check for making sure the bridge for the VM's VIF is
# consistent with bridge specified in nova db
def test_spawn_with_network_qos(self):
self._create_instance()
for vif_ref in xenapi_fake.get_all('VIF'):
vif_rec = xenapi_fake.get_record('VIF', vif_ref)
self.assertEquals(vif_rec['qos_algorithm_type'], 'ratelimit')
self.assertEquals(vif_rec['qos_algorithm_params']['kbps'],
str(3 * 1024))
def test_rescue(self):
instance = self._create_instance()
session = xenapi_conn.XenAPISession('test_url', 'root', 'test_pass')
vm = vm_utils.VMHelper.lookup(session, instance.name)
vbd = xenapi_fake.create_vbd(vm, None)
conn = xenapi_conn.get_connection(False)
image_meta = {'id': glance_stubs.FakeGlance.IMAGE_VHD,
'disk_format': 'vhd'}
conn.rescue(self.context, instance, [], image_meta)
def test_unrescue(self):
instance = self._create_instance()
conn = xenapi_conn.get_connection(False)
# Unrescue expects the original instance to be powered off
conn.power_off(instance)
rescue_vm = xenapi_fake.create_vm(instance.name + '-rescue', 'Running')
conn.unrescue(instance, None)
def test_unrescue_not_in_rescue(self):
instance = self._create_instance()
conn = xenapi_conn.get_connection(False)
# Ensure that it will not unrescue a non-rescued instance.
self.assertRaises(exception.InstanceNotInRescueMode, conn.unrescue,
instance, None)
def test_finish_revert_migration(self):
instance = self._create_instance()
class VMOpsMock():
def __init__(self):
self.finish_revert_migration_called = False
def finish_revert_migration(self, instance):
self.finish_revert_migration_called = True
conn = xenapi_conn.get_connection(False)
conn._vmops = VMOpsMock()
conn.finish_revert_migration(instance, None)
self.assertTrue(conn._vmops.finish_revert_migration_called)
def _create_instance(self, instance_id=1, spawn=True):
"""Creates and spawns a test instance."""
instance_values = {
'id': instance_id,
'project_id': self.project_id,
'user_id': self.user_id,
'image_ref': 1,
'kernel_id': 2,
'ramdisk_id': 3,
'root_gb': 20,
'instance_type_id': '3', # m1.large
'os_type': 'linux',
'architecture': 'x86-64'}
instance = db.instance_create(self.context, instance_values)
network_info = [({'bridge': 'fa0', 'id': 0,
'injected': False,
'cidr': '192.168.0.0/24',
'cidr_v6': 'dead:beef::1/120',
},
{'broadcast': '192.168.0.255',
'dns': ['192.168.0.1'],
'gateway': '192.168.0.1',
'gateway_v6': 'dead:beef::1',
'ip6s': [{'enabled': '1',
'ip': 'dead:beef::dcad:beff:feef:0',
'netmask': '64'}],
'ips': [{'enabled': '1',
'ip': '192.168.0.100',
'netmask': '255.255.255.0'}],
'dhcp_server': '192.168.0.1',
'label': 'fake',
'mac': 'DE:AD:BE:EF:00:00',
'rxtx_cap': 3})]
image_meta = {'id': glance_stubs.FakeGlance.IMAGE_VHD,
'disk_format': 'vhd'}
if spawn:
instance.admin_pass = 'herp'
self.conn.spawn(self.context, instance, image_meta, network_info)
return instance
class XenAPIDiffieHellmanTestCase(test.TestCase):
"""Unit tests for Diffie-Hellman code."""
def setUp(self):
super(XenAPIDiffieHellmanTestCase, self).setUp()
self.alice = vmops.SimpleDH()
self.bob = vmops.SimpleDH()
def test_shared(self):
alice_pub = self.alice.get_public()
bob_pub = self.bob.get_public()
alice_shared = self.alice.compute_shared(bob_pub)
bob_shared = self.bob.compute_shared(alice_pub)
self.assertEquals(alice_shared, bob_shared)
def _test_encryption(self, message):
enc = self.alice.encrypt(message)
self.assertFalse(enc.endswith('\n'))
dec = self.bob.decrypt(enc)
self.assertEquals(dec, message)
def test_encrypt_simple_message(self):
self._test_encryption('This is a simple message.')
def test_encrypt_message_with_newlines_at_end(self):
self._test_encryption('This message has a newline at the end.\n')
def test_encrypt_many_newlines_at_end(self):
self._test_encryption('Message with lotsa newlines.\n\n\n')
def test_encrypt_newlines_inside_message(self):
self._test_encryption('Message\nwith\ninterior\nnewlines.')
def test_encrypt_with_leading_newlines(self):
self._test_encryption('\n\nMessage with leading newlines.')
def test_encrypt_really_long_message(self):
self._test_encryption(''.join(['abcd' for i in xrange(1024)]))
class XenAPIMigrateInstance(test.TestCase):
"""Unit test for verifying migration-related actions."""
def setUp(self):
super(XenAPIMigrateInstance, self).setUp()
self.flags(target_host='127.0.0.1',
xenapi_connection_url='test_url',
xenapi_connection_password='test_pass',
firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
db_fakes.stub_out_db_instance_api(self.stubs)
xenapi_fake.reset()
xenapi_fake.create_network('fake', FLAGS.flat_network_bridge)
self.user_id = 'fake'
self.project_id = 'fake'
self.context = context.RequestContext(self.user_id, self.project_id)
self.instance_values = {'id': 1,
'project_id': self.project_id,
'user_id': self.user_id,
'image_ref': 1,
'kernel_id': None,
'ramdisk_id': None,
'root_gb': 5,
'instance_type_id': '3', # m1.large
'os_type': 'linux',
'architecture': 'x86-64'}
migration_values = {
'source_compute': 'nova-compute',
'dest_compute': 'nova-compute',
'dest_host': '10.127.5.114',
'status': 'post-migrating',
'instance_uuid': '15f23e6a-cc6e-4d22-b651-d9bdaac316f7',
'old_instance_type_id': 5,
'new_instance_type_id': 1
}
self.migration = db.migration_create(
context.get_admin_context(), migration_values)
fake_utils.stub_out_utils_execute(self.stubs)
stubs.stub_out_migration_methods(self.stubs)
stubs.stubout_get_this_vm_uuid(self.stubs)
glance_stubs.stubout_glance_client(self.stubs)
def test_resize_xenserver_6(self):
instance = db.instance_create(self.context, self.instance_values)
called = {'resize': False}
def fake_vdi_resize(*args, **kwargs):
called['resize'] = True
self.stubs.Set(stubs.FakeSessionForVMTests,
"VDI_resize", fake_vdi_resize)
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests,
product_version=(6, 0, 0))
conn = xenapi_conn.get_connection(False)
vdi_ref = xenapi_fake.create_vdi('hurr', 'fake')
vdi_uuid = xenapi_fake.get_record('VDI', vdi_ref)['uuid']
conn._vmops._resize_instance(instance, vdi_uuid)
self.assertEqual(called['resize'], True)
def test_migrate_disk_and_power_off(self):
instance = db.instance_create(self.context, self.instance_values)
xenapi_fake.create_vm(instance.name, 'Running')
instance_type = db.instance_type_get_by_name(self.context, 'm1.large')
conn = xenapi_conn.get_connection(False)
conn.migrate_disk_and_power_off(self.context, instance,
'127.0.0.1', instance_type, None)
def test_migrate_disk_and_power_off_passes_exceptions(self):
instance = db.instance_create(self.context, self.instance_values)
xenapi_fake.create_vm(instance.name, 'Running')
instance_type = db.instance_type_get_by_name(self.context, 'm1.large')
def fake_raise(*args, **kwargs):
raise exception.MigrationError(reason='test failure')
self.stubs.Set(vmops.VMOps, "_migrate_vhd", fake_raise)
conn = xenapi_conn.get_connection(False)
self.assertRaises(exception.MigrationError,
conn.migrate_disk_and_power_off,
self.context, instance,
'127.0.0.1', instance_type, None)
def test_revert_migrate(self):
instance = db.instance_create(self.context, self.instance_values)
self.called = False
self.fake_vm_start_called = False
self.fake_finish_revert_migration_called = False
def fake_vm_start(*args, **kwargs):
self.fake_vm_start_called = True
def fake_vdi_resize(*args, **kwargs):
self.called = True
def fake_finish_revert_migration(*args, **kwargs):
self.fake_finish_revert_migration_called = True
self.stubs.Set(stubs.FakeSessionForVMTests,
"VDI_resize_online", fake_vdi_resize)
self.stubs.Set(vmops.VMOps, '_start', fake_vm_start)
self.stubs.Set(vmops.VMOps, 'finish_revert_migration',
fake_finish_revert_migration)
conn = xenapi_conn.get_connection(False)
network_info = [({'bridge': 'fa0', 'id': 0, 'injected': False},
{'broadcast': '192.168.0.255',
'dns': ['192.168.0.1'],
'gateway': '192.168.0.1',
'gateway_v6': 'dead:beef::1',
'ip6s': [{'enabled': '1',
'ip': 'dead:beef::dcad:beff:feef:0',
'netmask': '64'}],
'ips': [{'enabled': '1',
'ip': '192.168.0.100',
'netmask': '255.255.255.0'}],
'label': 'fake',
'mac': 'DE:AD:BE:EF:00:00',
'rxtx_cap': 3})]
image_meta = {'id': instance.image_ref, 'disk_format': 'vhd'}
base = xenapi_fake.create_vdi('hurr', 'fake')
base_uuid = xenapi_fake.get_record('VDI', base)['uuid']
cow = xenapi_fake.create_vdi('durr', 'fake')
cow_uuid = xenapi_fake.get_record('VDI', cow)['uuid']
conn.finish_migration(self.context, self.migration, instance,
dict(base_copy=base_uuid, cow=cow_uuid),
network_info, image_meta, resize_instance=True)
self.assertEqual(self.called, True)
self.assertEqual(self.fake_vm_start_called, True)
conn.finish_revert_migration(instance, network_info)
self.assertEqual(self.fake_finish_revert_migration_called, True)
def test_finish_migrate(self):
instance = db.instance_create(self.context, self.instance_values)
self.called = False
self.fake_vm_start_called = False
def fake_vm_start(*args, **kwargs):
self.fake_vm_start_called = True
def fake_vdi_resize(*args, **kwargs):
self.called = True
self.stubs.Set(vmops.VMOps, '_start', fake_vm_start)
self.stubs.Set(stubs.FakeSessionForVMTests,
"VDI_resize_online", fake_vdi_resize)
conn = xenapi_conn.get_connection(False)
network_info = [({'bridge': 'fa0', 'id': 0, 'injected': False},
{'broadcast': '192.168.0.255',
'dns': ['192.168.0.1'],
'gateway': '192.168.0.1',
'gateway_v6': 'dead:beef::1',
'ip6s': [{'enabled': '1',
'ip': 'dead:beef::dcad:beff:feef:0',
'netmask': '64'}],
'ips': [{'enabled': '1',
'ip': '192.168.0.100',
'netmask': '255.255.255.0'}],
'label': 'fake',
'mac': 'DE:AD:BE:EF:00:00',
'rxtx_cap': 3})]
image_meta = {'id': instance.image_ref, 'disk_format': 'vhd'}
conn.finish_migration(self.context, self.migration, instance,
dict(base_copy='hurr', cow='durr'),
network_info, image_meta, resize_instance=True)
self.assertEqual(self.called, True)
self.assertEqual(self.fake_vm_start_called, True)
def test_finish_migrate_no_local_storage(self):
tiny_type = instance_types.get_instance_type_by_name('m1.tiny')
tiny_type_id = tiny_type['id']
self.instance_values.update({'instance_type_id': tiny_type_id,
'root_gb': 0})
instance = db.instance_create(self.context, self.instance_values)
def fake_vdi_resize(*args, **kwargs):
raise Exception("This shouldn't be called")
self.stubs.Set(stubs.FakeSessionForVMTests,
"VDI_resize_online", fake_vdi_resize)
conn = xenapi_conn.get_connection(False)
network_info = [({'bridge': 'fa0', 'id': 0, 'injected': False},
{'broadcast': '192.168.0.255',
'dns': ['192.168.0.1'],
'gateway': '192.168.0.1',
'gateway_v6': 'dead:beef::1',
'ip6s': [{'enabled': '1',
'ip': 'dead:beef::dcad:beff:feef:0',
'netmask': '64'}],
'ips': [{'enabled': '1',
'ip': '192.168.0.100',
'netmask': '255.255.255.0'}],
'label': 'fake',
'mac': 'DE:AD:BE:EF:00:00',
'rxtx_cap': 3})]
image_meta = {'id': instance.image_ref, 'disk_format': 'vhd'}
conn.finish_migration(self.context, self.migration, instance,
dict(base_copy='hurr', cow='durr'),
network_info, image_meta, resize_instance=True)
def test_finish_migrate_no_resize_vdi(self):
instance = db.instance_create(self.context, self.instance_values)
def fake_vdi_resize(*args, **kwargs):
raise Exception("This shouldn't be called")
self.stubs.Set(stubs.FakeSessionForVMTests,
"VDI_resize_online", fake_vdi_resize)
conn = xenapi_conn.get_connection(False)
network_info = [({'bridge': 'fa0', 'id': 0, 'injected': False},
{'broadcast': '192.168.0.255',
'dns': ['192.168.0.1'],
'gateway': '192.168.0.1',
'gateway_v6': 'dead:beef::1',
'ip6s': [{'enabled': '1',
'ip': 'dead:beef::dcad:beff:feef:0',
'netmask': '64'}],
'ips': [{'enabled': '1',
'ip': '192.168.0.100',
'netmask': '255.255.255.0'}],
'label': 'fake',
'mac': 'DE:AD:BE:EF:00:00',
'rxtx_cap': 3})]
# Resize instance would be determined by the compute call
image_meta = {'id': instance.image_ref, 'disk_format': 'vhd'}
conn.finish_migration(self.context, self.migration, instance,
dict(base_copy='hurr', cow='durr'),
network_info, image_meta, resize_instance=False)
class XenAPIImageTypeTestCase(test.TestCase):
"""Test ImageType class."""
def test_to_string(self):
"""Can convert from type id to type string."""
self.assertEquals(
vm_utils.ImageType.to_string(vm_utils.ImageType.KERNEL),
vm_utils.ImageType.KERNEL_STR)
def test_from_string(self):
"""Can convert from string to type id."""
self.assertEquals(
vm_utils.ImageType.from_string(vm_utils.ImageType.KERNEL_STR),
vm_utils.ImageType.KERNEL)
class XenAPIDetermineDiskImageTestCase(test.TestCase):
"""Unit tests for code that detects the ImageType."""
def setUp(self):
super(XenAPIDetermineDiskImageTestCase, self).setUp()
glance_stubs.stubout_glance_client(self.stubs)
class FakeInstance(object):
pass
self.fake_instance = FakeInstance()
self.fake_instance.id = 42
self.fake_instance.os_type = 'linux'
self.fake_instance.architecture = 'x86-64'
def assert_disk_type(self, image_meta, expected_disk_type):
actual = vm_utils.VMHelper.determine_disk_image_type(image_meta)
self.assertEqual(expected_disk_type, actual)
def test_machine(self):
image_meta = {'id': 'a', 'disk_format': 'ami'}
self.assert_disk_type(image_meta, vm_utils.ImageType.DISK)
def test_raw(self):
image_meta = {'id': 'a', 'disk_format': 'raw'}
self.assert_disk_type(image_meta, vm_utils.ImageType.DISK_RAW)
def test_vhd(self):
image_meta = {'id': 'a', 'disk_format': 'vhd'}
self.assert_disk_type(image_meta, vm_utils.ImageType.DISK_VHD)
class CompareVersionTestCase(test.TestCase):
def test_less_than(self):
"""Test that cmp_version compares a as less than b"""
self.assertTrue(vmops.cmp_version('1.2.3.4', '1.2.3.5') < 0)
def test_greater_than(self):
"""Test that cmp_version compares a as greater than b"""
self.assertTrue(vmops.cmp_version('1.2.3.5', '1.2.3.4') > 0)
def test_equal(self):
"""Test that cmp_version compares a as equal to b"""
self.assertTrue(vmops.cmp_version('1.2.3.4', '1.2.3.4') == 0)
def test_non_lexical(self):
"""Test that cmp_version compares non-lexically"""
self.assertTrue(vmops.cmp_version('1.2.3.10', '1.2.3.4') > 0)
def test_length(self):
"""Test that cmp_version compares by length as last resort"""
self.assertTrue(vmops.cmp_version('1.2.3', '1.2.3.4') < 0)
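# Illustrative sketch, not part of the original test module: the five tests
# above pin down the cmp_version contract -- split on '.', compare the shared
# parts numerically (so '10' beats '4'), and fall back to comparing lengths
# only when every shared part is equal. The real implementation lives in
# nova.virt.xenapi.vmops; this standalone version merely mirrors the
# behaviour the tests assert.
def _cmp_version_sketch(a, b):
    a_parts = [int(part) for part in a.split('.')]
    b_parts = [int(part) for part in b.split('.')]
    for x, y in zip(a_parts, b_parts):
        if x != y:
            return x - y
    # All shared parts equal: the longer version string compares greater.
    return len(a_parts) - len(b_parts)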
class XenAPIHostTestCase(test.TestCase):
"""Tests HostState, which holds metrics from XenServer that get
reported back to the Schedulers."""
def setUp(self):
super(XenAPIHostTestCase, self).setUp()
self.flags(xenapi_connection_url='test_url',
xenapi_connection_password='test_pass')
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
xenapi_fake.reset()
xenapi_fake.create_local_srs()
self.conn = xenapi_conn.get_connection(False)
def test_host_state(self):
stats = self.conn.get_host_stats()
self.assertEquals(stats['disk_total'], 10000)
self.assertEquals(stats['disk_used'], 20000)
self.assertEquals(stats['host_memory_total'], 10)
self.assertEquals(stats['host_memory_overhead'], 20)
self.assertEquals(stats['host_memory_free'], 30)
self.assertEquals(stats['host_memory_free_computed'], 40)
def _test_host_action(self, method, action, expected=None):
result = method('host', action)
if not expected:
expected = action
self.assertEqual(result, expected)
def test_host_reboot(self):
self._test_host_action(self.conn.host_power_action, 'reboot')
def test_host_shutdown(self):
self._test_host_action(self.conn.host_power_action, 'shutdown')
def test_host_startup(self):
self.assertRaises(NotImplementedError,
self.conn.host_power_action, 'host', 'startup')
def test_host_maintenance_on(self):
self._test_host_action(self.conn.host_maintenance_mode,
True, 'on_maintenance')
def test_host_maintenance_off(self):
self._test_host_action(self.conn.host_maintenance_mode,
False, 'off_maintenance')
def test_set_enable_host_enable(self):
self._test_host_action(self.conn.set_host_enabled, True, 'enabled')
def test_set_enable_host_disable(self):
self._test_host_action(self.conn.set_host_enabled, False, 'disabled')
class XenAPIAutoDiskConfigTestCase(test.TestCase):
def setUp(self):
super(XenAPIAutoDiskConfigTestCase, self).setUp()
self.flags(target_host='127.0.0.1',
xenapi_connection_url='test_url',
xenapi_connection_password='test_pass',
firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
xenapi_fake.reset()
self.conn = xenapi_conn.get_connection(False)
self.user_id = 'fake'
self.project_id = 'fake'
self.instance_values = {'id': 1,
'project_id': self.project_id,
'user_id': self.user_id,
'image_ref': 1,
'kernel_id': 2,
'ramdisk_id': 3,
'root_gb': 20,
'instance_type_id': '3', # m1.large
'os_type': 'linux',
'architecture': 'x86-64'}
self.context = context.RequestContext(self.user_id, self.project_id)
@classmethod
def fake_create_vbd(cls, session, vm_ref, vdi_ref, userdevice,
vbd_type='disk', read_only=False, bootable=True):
pass
self.stubs.Set(vm_utils.VMHelper,
"create_vbd",
fake_create_vbd)
def assertIsPartitionCalled(self, called):
marker = {"partition_called": False}
def fake_resize_part_and_fs(dev, start, old, new):
marker["partition_called"] = True
self.stubs.Set(vm_utils, "_resize_part_and_fs",
fake_resize_part_and_fs)
instance = db.instance_create(self.context, self.instance_values)
disk_image_type = vm_utils.ImageType.DISK_VHD
vm_ref = "blah"
first_vdi_ref = "blah"
vdis = ["blah"]
self.conn._vmops._attach_disks(
instance, disk_image_type, vm_ref, first_vdi_ref, vdis)
self.assertEqual(marker["partition_called"], called)
def test_instance_not_auto_disk_config(self):
"""Should not partition unless instance is marked as
auto_disk_config.
"""
self.instance_values['auto_disk_config'] = False
self.assertIsPartitionCalled(False)
@stub_vm_utils_with_vdi_attached_here
def test_instance_auto_disk_config_doesnt_pass_fail_safes(self):
"""Should not partition unless fail safes pass"""
self.instance_values['auto_disk_config'] = True
def fake_get_partitions(dev):
return [(1, 0, 100, 'ext4'), (2, 100, 200, 'ext4')]
self.stubs.Set(vm_utils, "_get_partitions",
fake_get_partitions)
self.assertIsPartitionCalled(False)
@stub_vm_utils_with_vdi_attached_here
def test_instance_auto_disk_config_passes_fail_safes(self):
"""Should partition if instance is marked as auto_disk_config=True and
virt-layer specific fail-safe checks pass.
"""
self.instance_values['auto_disk_config'] = True
def fake_get_partitions(dev):
return [(1, 0, 100, 'ext4')]
self.stubs.Set(vm_utils, "_get_partitions",
fake_get_partitions)
self.assertIsPartitionCalled(True)
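# Illustrative sketch, not part of the original test module: the tests above
# encode the virt-layer fail-safe they stub out via _get_partitions --
# auto-partitioning is assumed to be safe only when the attached disk holds
# exactly one partition, so a second partition must veto the resize.
def _resize_fail_safe_sketch(partitions):
    return len(partitions) == 1

assert _resize_fail_safe_sketch([(1, 0, 100, 'ext4')])
assert not _resize_fail_safe_sketch([(1, 0, 100, 'ext4'), (2, 100, 200, 'ext4')])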
class XenAPIGenerateLocal(test.TestCase):
"""Test generating of local disks, like swap and ephemeral"""
def setUp(self):
super(XenAPIGenerateLocal, self).setUp()
self.flags(target_host='127.0.0.1',
xenapi_connection_url='test_url',
xenapi_connection_password='test_pass',
xenapi_generate_swap=True,
firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
db_fakes.stub_out_db_instance_api(self.stubs)
xenapi_fake.reset()
self.conn = xenapi_conn.get_connection(False)
self.user_id = 'fake'
self.project_id = 'fake'
self.instance_values = {'id': 1,
'project_id': self.project_id,
'user_id': self.user_id,
'image_ref': 1,
'kernel_id': 2,
'ramdisk_id': 3,
'root_gb': 20,
'instance_type_id': '3', # m1.large
'os_type': 'linux',
'architecture': 'x86-64'}
self.context = context.RequestContext(self.user_id, self.project_id)
@classmethod
def fake_create_vbd(cls, session, vm_ref, vdi_ref, userdevice,
vbd_type='disk', read_only=False, bootable=True):
pass
self.stubs.Set(vm_utils.VMHelper,
"create_vbd",
fake_create_vbd)
def assertCalled(self, instance):
disk_image_type = vm_utils.ImageType.DISK_VHD
vm_ref = "blah"
first_vdi_ref = "blah"
vdis = ["blah"]
self.called = False
self.conn._vmops._attach_disks(instance, disk_image_type,
vm_ref, first_vdi_ref, vdis)
self.assertTrue(self.called)
def test_generate_swap(self):
"""Test swap disk generation."""
instance = db.instance_create(self.context, self.instance_values)
instance = db.instance_update(self.context, instance['id'],
{'instance_type_id': 5})
@classmethod
def fake_generate_swap(cls, *args, **kwargs):
self.called = True
self.stubs.Set(vm_utils.VMHelper, 'generate_swap',
fake_generate_swap)
self.assertCalled(instance)
def test_generate_ephemeral(self):
"""Test ephemeral disk generation."""
instance = db.instance_create(self.context, self.instance_values)
instance = db.instance_update(self.context, instance['id'],
{'instance_type_id': 4})
@classmethod
def fake_generate_ephemeral(cls, *args):
self.called = True
self.stubs.Set(vm_utils.VMHelper, 'generate_ephemeral',
fake_generate_ephemeral)
self.assertCalled(instance)
class XenAPIBWUsageTestCase(test.TestCase):
def setUp(self):
super(XenAPIBWUsageTestCase, self).setUp()
self.stubs.Set(vm_utils.VMHelper, "compile_metrics",
XenAPIBWUsageTestCase._fake_compile_metrics)
self.flags(target_host='127.0.0.1',
xenapi_connection_url='test_url',
xenapi_connection_password='test_pass',
firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
xenapi_fake.reset()
self.conn = xenapi_conn.get_connection(False)
@classmethod
def _fake_compile_metrics(cls, start_time, stop_time=None):
raise exception.CouldNotFetchMetrics()
def test_get_all_bw_usage_in_failure_case(self):
"""Test that get_all_bw_usage returns an empty list when metrics
compilation failed. c.f. bug #910045.
"""
class testinstance(object):
def __init__(self):
self.name = "instance-0001"
self.uuid = "1-2-3-4-5"
result = self.conn.get_all_bw_usage([testinstance()],
datetime.datetime.utcnow())
self.assertEqual(result, [])
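# Illustrative note (not original code): the driver contract exercised here is
# "metrics failure => empty list, not an exception", i.e. roughly
#
#     try:
#         metrics = vm_utils.VMHelper.compile_metrics(start_time)
#     except exception.CouldNotFetchMetrics:
#         return []
#
# which is why stubbing compile_metrics to raise is enough to drive the test.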
# TODO(salvatore-orlando): this class and
# nova.tests.test_libvirt.IPTablesFirewallDriverTestCase share a lot of code.
# Consider abstracting common code in a base class for firewall driver testing.
class XenAPIDom0IptablesFirewallTestCase(test.TestCase):
_in_nat_rules = [
'# Generated by iptables-save v1.4.10 on Sat Feb 19 00:03:19 2011',
'*nat',
':PREROUTING ACCEPT [1170:189210]',
':INPUT ACCEPT [844:71028]',
':OUTPUT ACCEPT [5149:405186]',
':POSTROUTING ACCEPT [5063:386098]',
]
_in_filter_rules = [
'# Generated by iptables-save v1.4.4 on Mon Dec 6 11:54:13 2010',
'*filter',
':INPUT ACCEPT [969615:281627771]',
':FORWARD ACCEPT [0:0]',
':OUTPUT ACCEPT [915599:63811649]',
':nova-block-ipv4 - [0:0]',
'-A INPUT -i virbr0 -p tcp -m tcp --dport 67 -j ACCEPT ',
'-A FORWARD -d 192.168.122.0/24 -o virbr0 -m state --state RELATED'
',ESTABLISHED -j ACCEPT ',
'-A FORWARD -s 192.168.122.0/24 -i virbr0 -j ACCEPT ',
'-A FORWARD -i virbr0 -o virbr0 -j ACCEPT ',
'-A FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable ',
'-A FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable ',
'COMMIT',
'# Completed on Mon Dec 6 11:54:13 2010',
]
_in6_filter_rules = [
'# Generated by ip6tables-save v1.4.4 on Tue Jan 18 23:47:56 2011',
'*filter',
':INPUT ACCEPT [349155:75810423]',
':FORWARD ACCEPT [0:0]',
':OUTPUT ACCEPT [349256:75777230]',
'COMMIT',
'# Completed on Tue Jan 18 23:47:56 2011',
]
def setUp(self):
super(XenAPIDom0IptablesFirewallTestCase, self).setUp()
self.flags(xenapi_connection_url='test_url',
xenapi_connection_password='test_pass',
instance_name_template='%d',
firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
xenapi_fake.reset()
xenapi_fake.create_local_srs()
xenapi_fake.create_local_pifs()
self.user_id = 'mappin'
self.project_id = 'fake'
stubs.stubout_session(self.stubs, stubs.FakeSessionForFirewallTests,
test_case=self)
self.context = context.RequestContext(self.user_id, self.project_id)
self.network = importutils.import_object(FLAGS.network_manager)
self.conn = xenapi_conn.get_connection(False)
self.fw = self.conn._vmops.firewall_driver
def _create_instance_ref(self):
return db.instance_create(self.context,
{'user_id': self.user_id,
'project_id': self.project_id,
'instance_type_id': 1})
def _create_test_security_group(self):
admin_ctxt = context.get_admin_context()
secgroup = db.security_group_create(admin_ctxt,
{'user_id': self.user_id,
'project_id': self.project_id,
'name': 'testgroup',
'description': 'test group'})
db.security_group_rule_create(admin_ctxt,
{'parent_group_id': secgroup['id'],
'protocol': 'icmp',
'from_port': -1,
'to_port': -1,
'cidr': '192.168.11.0/24'})
db.security_group_rule_create(admin_ctxt,
{'parent_group_id': secgroup['id'],
'protocol': 'icmp',
'from_port': 8,
'to_port': -1,
'cidr': '192.168.11.0/24'})
db.security_group_rule_create(admin_ctxt,
{'parent_group_id': secgroup['id'],
'protocol': 'tcp',
'from_port': 80,
'to_port': 81,
'cidr': '192.168.10.0/24'})
return secgroup
def _validate_security_group(self):
in_rules = filter(lambda l: not l.startswith('#'),
self._in_filter_rules)
for rule in in_rules:
if 'nova' not in rule:
self.assertTrue(rule in self._out_rules,
'Rule went missing: %s' % rule)
instance_chain = None
for rule in self._out_rules:
# This is pretty crude, but it'll do for now
# last two octets change
if re.search(r'-d 192\.168\.[0-9]{1,3}\.[0-9]{1,3} -j', rule):
instance_chain = rule.split(' ')[-1]
break
self.assertTrue(instance_chain, "The instance chain wasn't added")
security_group_chain = None
for rule in self._out_rules:
# This is pretty crude, but it'll do for now
if '-A %s -j' % instance_chain in rule:
security_group_chain = rule.split(' ')[-1]
break
self.assertTrue(security_group_chain,
"The security group chain wasn't added")
regex = re.compile('-A .* -j ACCEPT -p icmp -s 192.168.11.0/24')
self.assertTrue(len(filter(regex.match, self._out_rules)) > 0,
"ICMP acceptance rule wasn't added")
regex = re.compile('-A .* -j ACCEPT -p icmp -m icmp --icmp-type 8'
' -s 192.168.11.0/24')
self.assertTrue(len(filter(regex.match, self._out_rules)) > 0,
"ICMP Echo Request acceptance rule wasn't added")
regex = re.compile('-A .* -j ACCEPT -p tcp --dport 80:81'
' -s 192.168.10.0/24')
self.assertTrue(len(filter(regex.match, self._out_rules)) > 0,
"TCP port 80/81 acceptance rule wasn't added")
def test_static_filters(self):
instance_ref = self._create_instance_ref()
src_instance_ref = self._create_instance_ref()
admin_ctxt = context.get_admin_context()
secgroup = self._create_test_security_group()
src_secgroup = db.security_group_create(admin_ctxt,
{'user_id': self.user_id,
'project_id': self.project_id,
'name': 'testsourcegroup',
'description': 'src group'})
db.security_group_rule_create(admin_ctxt,
{'parent_group_id': secgroup['id'],
'protocol': 'tcp',
'from_port': 80,
'to_port': 81,
'group_id': src_secgroup['id']})
db.instance_add_security_group(admin_ctxt, instance_ref['uuid'],
secgroup['id'])
db.instance_add_security_group(admin_ctxt, src_instance_ref['uuid'],
src_secgroup['id'])
instance_ref = db.instance_get(admin_ctxt, instance_ref['id'])
src_instance_ref = db.instance_get(admin_ctxt, src_instance_ref['id'])
network_model = fake_network.fake_get_instance_nw_info(self.stubs,
1, spectacular=True)
fake_network.stub_out_nw_api_get_instance_nw_info(self.stubs,
lambda *a, **kw: network_model)
network_info = compute_utils.legacy_network_info(network_model)
self.fw.prepare_instance_filter(instance_ref, network_info)
self.fw.apply_instance_filter(instance_ref, network_info)
self._validate_security_group()
# Extra test for TCP acceptance rules
for ip in network_model.fixed_ips():
if ip['version'] != 4:
continue
regex = re.compile('-A .* -j ACCEPT -p tcp'
' --dport 80:81 -s %s' % ip['address'])
self.assertTrue(len(filter(regex.match, self._out_rules)) > 0,
"TCP port 80/81 acceptance rule wasn't added")
db.instance_destroy(admin_ctxt, instance_ref['id'])
def test_filters_for_instance_with_ip_v6(self):
self.flags(use_ipv6=True)
network_info = fake_network.fake_get_instance_nw_info(self.stubs, 1)
rulesv4, rulesv6 = self.fw._filters_for_instance("fake", network_info)
self.assertEquals(len(rulesv4), 2)
self.assertEquals(len(rulesv6), 1)
def test_filters_for_instance_without_ip_v6(self):
self.flags(use_ipv6=False)
network_info = fake_network.fake_get_instance_nw_info(self.stubs, 1)
rulesv4, rulesv6 = self.fw._filters_for_instance("fake", network_info)
self.assertEquals(len(rulesv4), 2)
self.assertEquals(len(rulesv6), 0)
def test_multinic_iptables(self):
ipv4_rules_per_addr = 1
ipv4_addr_per_network = 2
ipv6_rules_per_addr = 1
ipv6_addr_per_network = 1
networks_count = 5<|fim▁hole|> instance_ref = self._create_instance_ref()
_get_instance_nw_info = fake_network.fake_get_instance_nw_info
network_info = _get_instance_nw_info(self.stubs,
networks_count,
ipv4_addr_per_network)
ipv4_len = len(self.fw.iptables.ipv4['filter'].rules)
ipv6_len = len(self.fw.iptables.ipv6['filter'].rules)
inst_ipv4, inst_ipv6 = self.fw.instance_rules(instance_ref,
network_info)
self.fw.prepare_instance_filter(instance_ref, network_info)
ipv4 = self.fw.iptables.ipv4['filter'].rules
ipv6 = self.fw.iptables.ipv6['filter'].rules
ipv4_network_rules = len(ipv4) - len(inst_ipv4) - ipv4_len
ipv6_network_rules = len(ipv6) - len(inst_ipv6) - ipv6_len
self.assertEquals(ipv4_network_rules,
ipv4_rules_per_addr * ipv4_addr_per_network * networks_count)
self.assertEquals(ipv6_network_rules,
ipv6_rules_per_addr * ipv6_addr_per_network * networks_count)
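# Worked example of the arithmetic asserted above (illustrative note, not
# original code): with networks_count = 5, ipv4_addr_per_network = 2 and one
# rule per address, prepare_instance_filter is expected to add
# 1 * 2 * 5 = 10 IPv4 network rules, and 1 * 1 * 5 = 5 IPv6 rules.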
def test_do_refresh_security_group_rules(self):
admin_ctxt = context.get_admin_context()
instance_ref = self._create_instance_ref()
network_info = fake_network.fake_get_instance_nw_info(self.stubs, 1, 1)
secgroup = self._create_test_security_group()
db.instance_add_security_group(admin_ctxt, instance_ref['uuid'],
secgroup['id'])
self.fw.prepare_instance_filter(instance_ref, network_info)
self.fw.instances[instance_ref['id']] = instance_ref
self._validate_security_group()
# add a rule to the security group
db.security_group_rule_create(admin_ctxt,
{'parent_group_id': secgroup['id'],
'protocol': 'udp',
'from_port': 200,
'to_port': 299,
'cidr': '192.168.99.0/24'})
#validate the extra rule
self.fw.refresh_security_group_rules(secgroup)
regex = re.compile('-A .* -j ACCEPT -p udp --dport 200:299'
' -s 192.168.99.0/24')
self.assertTrue(len(filter(regex.match, self._out_rules)) > 0,
"Rules were not updated properly."
"The rule for UDP acceptance is missing")
def test_provider_firewall_rules(self):
# setup basic instance data
instance_ref = self._create_instance_ref()
# FRAGILE: as in libvirt tests
# peeks at how the firewall names chains
chain_name = 'inst-%s' % instance_ref['id']
network_info = fake_network.fake_get_instance_nw_info(self.stubs, 1, 1)
self.fw.prepare_instance_filter(instance_ref, network_info)
self.assertTrue('provider' in self.fw.iptables.ipv4['filter'].chains)
rules = [rule for rule in self.fw.iptables.ipv4['filter'].rules
if rule.chain == 'provider']
self.assertEqual(0, len(rules))
admin_ctxt = context.get_admin_context()
# add a rule and send the update message, check for 1 rule
provider_fw0 = db.provider_fw_rule_create(admin_ctxt,
{'protocol': 'tcp',
'cidr': '10.99.99.99/32',
'from_port': 1,
'to_port': 65535})
self.fw.refresh_provider_fw_rules()
rules = [rule for rule in self.fw.iptables.ipv4['filter'].rules
if rule.chain == 'provider']
self.assertEqual(1, len(rules))
# Add another, refresh, and make sure number of rules goes to two
provider_fw1 = db.provider_fw_rule_create(admin_ctxt,
{'protocol': 'udp',
'cidr': '10.99.99.99/32',
'from_port': 1,
'to_port': 65535})
self.fw.refresh_provider_fw_rules()
rules = [rule for rule in self.fw.iptables.ipv4['filter'].rules
if rule.chain == 'provider']
self.assertEqual(2, len(rules))
# create the instance filter and make sure it has a jump rule
self.fw.prepare_instance_filter(instance_ref, network_info)
self.fw.apply_instance_filter(instance_ref, network_info)
inst_rules = [rule for rule in self.fw.iptables.ipv4['filter'].rules
if rule.chain == chain_name]
jump_rules = [rule for rule in inst_rules if '-j' in rule.rule]
provjump_rules = []
# IptablesTable doesn't make rules unique internally
for rule in jump_rules:
if 'provider' in rule.rule and rule not in provjump_rules:
provjump_rules.append(rule)
self.assertEqual(1, len(provjump_rules))
# remove a rule from the db, cast to compute to refresh rule
db.provider_fw_rule_destroy(admin_ctxt, provider_fw1['id'])
self.fw.refresh_provider_fw_rules()
rules = [rule for rule in self.fw.iptables.ipv4['filter'].rules
if rule.chain == 'provider']
self.assertEqual(1, len(rules))
class XenAPISRSelectionTestCase(test.TestCase):
"""Unit tests for testing we find the right SR."""
def setUp(self):
super(XenAPISRSelectionTestCase, self).setUp()
xenapi_fake.reset()
def test_safe_find_sr_raise_exception(self):
"""Ensure StorageRepositoryNotFound is raise when wrong filter."""
self.flags(sr_matching_filter='yadayadayada')
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
session = xenapi_conn.XenAPISession('test_url', 'root', 'test_pass')
helper = vm_utils.VMHelper
helper.XenAPI = session.get_imported_xenapi()
self.assertRaises(exception.StorageRepositoryNotFound,
helper.safe_find_sr, session)
def test_safe_find_sr_local_storage(self):
"""Ensure the default local-storage is found."""
self.flags(sr_matching_filter='other-config:i18n-key=local-storage')
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
session = xenapi_conn.XenAPISession('test_url', 'root', 'test_pass')
helper = vm_utils.VMHelper
helper.XenAPI = session.get_imported_xenapi()
host_ref = xenapi_fake.get_all('host')[0]
local_sr = xenapi_fake.create_sr(
name_label='Fake Storage',
type='lvm',
other_config={'i18n-original-value-name_label':
'Local storage',
'i18n-key': 'local-storage'},
host_ref=host_ref)
expected = helper.safe_find_sr(session)
self.assertEqual(local_sr, expected)
def test_safe_find_sr_by_other_criteria(self):
"""Ensure the SR is found when using a different filter."""
self.flags(sr_matching_filter='other-config:my_fake_sr=true')
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
session = xenapi_conn.XenAPISession('test_url', 'root', 'test_pass')
helper = vm_utils.VMHelper
helper.XenAPI = session.get_imported_xenapi()
host_ref = xenapi_fake.get_all('host')[0]
local_sr = xenapi_fake.create_sr(name_label='Fake Storage',
type='lvm',
other_config={'my_fake_sr': 'true'},
host_ref=host_ref)
expected = helper.safe_find_sr(session)
self.assertEqual(local_sr, expected)
def test_safe_find_sr_default(self):
"""Ensure the default SR is found regardless of other-config."""
self.flags(sr_matching_filter='default-sr:true')
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
session = xenapi_conn.XenAPISession('test_url', 'root', 'test_pass')
helper = vm_utils.VMHelper
pool_ref = xenapi_fake.create_pool('')
helper.XenAPI = session.get_imported_xenapi()
expected = helper.safe_find_sr(session)
self.assertEqual(session.call_xenapi('pool.get_default_SR', pool_ref),
expected)
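# Illustrative sketch, not part of the original test module: the three tests
# above assume safe_find_sr interprets the sr_matching_filter flag roughly as
# follows (the real parsing lives in nova.virt.xenapi.vm_utils):
def _match_sr_sketch(filter_str, sr_other_config, is_default_sr):
    if filter_str == 'default-sr:true':
        return is_default_sr
    if filter_str.startswith('other-config:'):
        key, _, value = filter_str[len('other-config:'):].partition('=')
        return sr_other_config.get(key) == value
    # Unrecognised filters match nothing, which surfaces as
    # StorageRepositoryNotFound in the caller.
    return False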
class XenAPIAggregateTestCase(test.TestCase):
"""Unit tests for aggregate operations."""
def setUp(self):
super(XenAPIAggregateTestCase, self).setUp()
self.flags(xenapi_connection_url='http://test_url',
xenapi_connection_username='test_user',
xenapi_connection_password='test_pass',
instance_name_template='%d',
firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver',
host='host')
xenapi_fake.reset()
host_ref = xenapi_fake.get_all('host')[0]
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
self.context = context.get_admin_context()
self.conn = xenapi_conn.get_connection(False)
self.fake_metadata = {'main_compute': 'host',
'host': xenapi_fake.get_record('host',
host_ref)['uuid']}
def test_add_to_aggregate_called(self):
def fake_add_to_aggregate(context, aggregate, host):
fake_add_to_aggregate.called = True
self.stubs.Set(self.conn._pool,
"add_to_aggregate",
fake_add_to_aggregate)
self.conn.add_to_aggregate(None, None, None)
self.assertTrue(fake_add_to_aggregate.called)
def test_add_to_aggregate_for_first_host_sets_metadata(self):
def fake_init_pool(id, name):
fake_init_pool.called = True
self.stubs.Set(self.conn._pool, "_init_pool", fake_init_pool)
aggregate = self._aggregate_setup()
self.conn._pool.add_to_aggregate(self.context, aggregate, "host")
result = db.aggregate_get(self.context, aggregate.id)
self.assertTrue(fake_init_pool.called)
self.assertDictMatch(self.fake_metadata, result.metadetails)
self.assertEqual(aggregate_states.ACTIVE, result.operational_state)
def test_join_subordinate(self):
"""Ensure join_subordinate gets called when the request gets to main."""
def fake_join_subordinate(id, compute_uuid, host, url, user, password):
fake_join_subordinate.called = True
self.stubs.Set(self.conn._pool, "_join_subordinate", fake_join_subordinate)
aggregate = self._aggregate_setup(hosts=['host', 'host2'],
metadata=self.fake_metadata)
self.conn._pool.add_to_aggregate(self.context, aggregate, "host2",
compute_uuid='fake_uuid',
url='fake_url',
user='fake_user',
passwd='fake_pass',
xenhost_uuid='fake_uuid')
self.assertTrue(fake_join_subordinate.called)
def test_add_to_aggregate_first_host(self):
def fake_pool_set_name_label(self, session, pool_ref, name):
fake_pool_set_name_label.called = True
self.stubs.Set(xenapi_fake.SessionBase, "pool_set_name_label",
fake_pool_set_name_label)
self.conn._session.call_xenapi("pool.create", {"name": "asdf"})
values = {"name": 'fake_aggregate',
"availability_zone": 'fake_zone'}
result = db.aggregate_create(self.context, values)
db.aggregate_host_add(self.context, result.id, "host")
aggregate = db.aggregate_get(self.context, result.id)
self.assertEqual(["host"], aggregate.hosts)
self.assertEqual({}, aggregate.metadetails)
self.conn._pool.add_to_aggregate(self.context, aggregate, "host")
self.assertTrue(fake_pool_set_name_label.called)
def test_remove_from_aggregate_called(self):
def fake_remove_from_aggregate(context, aggregate, host):
fake_remove_from_aggregate.called = True
self.stubs.Set(self.conn._pool,
"remove_from_aggregate",
fake_remove_from_aggregate)
self.conn.remove_from_aggregate(None, None, None)
self.assertTrue(fake_remove_from_aggregate.called)
def test_remove_from_empty_aggregate(self):
values = {"name": 'fake_aggregate',
"availability_zone": 'fake_zone'}
result = db.aggregate_create(self.context, values)
self.assertRaises(exception.AggregateError,
self.conn._pool.remove_from_aggregate,
None, result, "test_host")
def test_remove_subordinate(self):
"""Ensure eject subordinate gets called."""
def fake_eject_subordinate(id, compute_uuid, host_uuid):
fake_eject_subordinate.called = True
self.stubs.Set(self.conn._pool, "_eject_subordinate", fake_eject_subordinate)
self.fake_metadata['host2'] = 'fake_host2_uuid'
aggregate = self._aggregate_setup(hosts=['host', 'host2'],
metadata=self.fake_metadata)
self.conn._pool.remove_from_aggregate(self.context, aggregate, "host2")
self.assertTrue(fake_eject_subordinate.called)
def test_remove_main_solo(self):
"""Ensure metadata are cleared after removal."""
def fake_clear_pool(id):
fake_clear_pool.called = True
self.stubs.Set(self.conn._pool, "_clear_pool", fake_clear_pool)
aggregate = self._aggregate_setup(aggr_state=aggregate_states.ACTIVE,
metadata=self.fake_metadata)
self.conn._pool.remove_from_aggregate(self.context, aggregate, "host")
result = db.aggregate_get(self.context, aggregate.id)
self.assertTrue(fake_clear_pool.called)
self.assertDictMatch({}, result.metadetails)
self.assertEqual(aggregate_states.ACTIVE, result.operational_state)
def test_remove_main_non_empty_pool(self):
"""Ensure AggregateError is raised if removing the main."""
aggregate = self._aggregate_setup(aggr_state=aggregate_states.ACTIVE,
hosts=['host', 'host2'],
metadata=self.fake_metadata)
self.assertRaises(exception.InvalidAggregateAction,
self.conn._pool.remove_from_aggregate,
self.context, aggregate, "host")
def _aggregate_setup(self, aggr_name='fake_aggregate',
aggr_zone='fake_zone',
aggr_state=aggregate_states.CREATED,
hosts=['host'], metadata=None):
values = {"name": aggr_name,
"availability_zone": aggr_zone,
"operational_state": aggr_state, }
result = db.aggregate_create(self.context, values)
for host in hosts:
db.aggregate_host_add(self.context, result.id, host)
if metadata:
db.aggregate_metadata_add(self.context, result.id, metadata)
return db.aggregate_get(self.context, result.id)<|fim▁end|> | |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>"""stockretriever"""
from setuptools import setup
setup(
name='portfolio-manager',
version='1.0',
description='a web app that keeps track of your investment portfolio',
url='https://github.com/gurch101/portfolio-manager',<|fim▁hole|> license='MIT',
classifiers=[
'Intended Audience :: Financial and Insurance Industry',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7'
],
keywords='investment portfolio',
dependency_links=['https://github.com/gurch101/StockScraper/tarball/master#egg=stockretriever-1.0'],
zip_safe=True,
install_requires=[
'stockretriever==1.0',
'Flask==0.10.1',
'passlib==1.6.2',
'schedule==0.3.2',
'requests==2.2.1'
]
)<|fim▁end|> | author='Gurchet Rai',
author_email='[email protected]', |
<|file_name|>download.py<|end_file_name|><|fim▁begin|># download.py
import urllib.request
print("Downloading")
url = 'https://www.python.org/ftp/python/3.4.1/python-3.4.1.msi'<|fim▁hole|>urllib.request.urlretrieve(url, 'python-3.4.1.msi')<|fim▁end|> | print('File Downloading') |
<|file_name|>tests_forms.py<|end_file_name|><|fim▁begin|>"""Tests for forms in eCommerce app."""
from django.test import TestCase
from ecommerce.forms import OrderForm
required_fields = {
'phone': '123456789',
'email': '[email protected]',
}
invalid_form_email = {<|fim▁hole|>no_phone = {'email': '[email protected]'}
class TestForm(TestCase):
"""Test suite for forms in eCommerce app."""
def test_empty_form(self):
"""Empty form shouldn't be valid."""
form = OrderForm()
self.assertFalse(form.is_valid())
def test_filled_form_without_required_field(self):
"""Form is still not valid, if there are some required fields left unfilled."""
form = OrderForm(data=no_phone)
self.assertFalse(form.is_valid())
def test_valid_form(self):
"""Form is valid, if there all required fields are filled."""
form = OrderForm(data=required_fields)
self.assertTrue(form.is_valid())
def test_form_validation_on_email_field(self):
"""Form should validate user's email if it is filled."""
form = OrderForm(data=invalid_form_email)
self.assertFalse(form.is_valid())<|fim▁end|> | 'email': 'clearly!not_@_email',
'phone': '123456789'
} |
<|file_name|>codec_mme_audio_spdifin.cpp<|end_file_name|><|fim▁begin|>/************************************************************************
COPYRIGHT (C) SGS-THOMSON Microelectronics 2007
Source file name : codec_mme_audio_spdifin.cpp
Author : Gael Lassure
Implementation of the spdif-input audio codec class for player 2.
Date Modification Name
---- ------------ --------
24-July-07 Created (from codec_mme_audio_eac3.cpp) Gael Lassure
************************************************************************/
////////////////////////////////////////////////////////////////////////////
/// \class Codec_MmeAudioSpdifIn_c
///
/// The SpdifIn audio codec proxy.
///
// /////////////////////////////////////////////////////////////////////
//
// Include any component headers
#define CODEC_TAG "SPDIFIN audio codec"
#include "codec_mme_audio_spdifin.h"
#include "codec_mme_audio_eac3.h"
#include "codec_mme_audio_dtshd.h"
#include "lpcm.h"
#include "spdifin_audio.h"
// /////////////////////////////////////////////////////////////////////////
//
// Locally defined constants
//
// /////////////////////////////////////////////////////////////////////////
//
// Locally defined structures
//
typedef struct SpdifinAudioCodecStreamParameterContext_s
{
CodecBaseStreamParameterContext_t BaseContext;
MME_LxAudioDecoderGlobalParams_t StreamParameters;
} SpdifinAudioCodecStreamParameterContext_t;
//#if __KERNEL__
#if 0
#define BUFFER_SPDIFIN_AUDIO_CODEC_STREAM_PARAMETER_CONTEXT "SpdifinAudioCodecStreamParameterContext"
#define BUFFER_SPDIFIN_AUDIO_CODEC_STREAM_PARAMETER_CONTEXT_TYPE {BUFFER_SPDIFIN_AUDIO_CODEC_STREAM_PARAMETER_CONTEXT, BufferDataTypeBase, AllocateFromDeviceMemory, 32, 0, true, true, sizeof(SpdifinAudioCodecStreamParameterContext_t)}
#else
#define BUFFER_SPDIFIN_AUDIO_CODEC_STREAM_PARAMETER_CONTEXT "SpdifinAudioCodecStreamParameterContext"
#define BUFFER_SPDIFIN_AUDIO_CODEC_STREAM_PARAMETER_CONTEXT_TYPE {BUFFER_SPDIFIN_AUDIO_CODEC_STREAM_PARAMETER_CONTEXT, BufferDataTypeBase, AllocateFromOSMemory, 32, 0, true, true, sizeof(SpdifinAudioCodecStreamParameterContext_t)}
#endif
static BufferDataDescriptor_t SpdifinAudioCodecStreamParameterContextDescriptor = BUFFER_SPDIFIN_AUDIO_CODEC_STREAM_PARAMETER_CONTEXT_TYPE;
// --------
typedef struct SpdifinAudioCodecDecodeContext_s
{
CodecBaseDecodeContext_t BaseContext;
MME_LxAudioDecoderFrameParams_t DecodeParameters;
MME_LxAudioDecoderFrameStatus_t DecodeStatus;
// Input Buffer is sent in a separate Command
MME_Command_t BufferCommand;
MME_SpdifinBufferParams_t BufferParameters;
MME_LxAudioDecoderFrameStatus_t BufferStatus;
} SpdifinAudioCodecDecodeContext_t;
//#if __KERNEL__
#if 0
#define BUFFER_SPDIFIN_AUDIO_CODEC_DECODE_CONTEXT "SpdifinAudioCodecDecodeContext"
#define BUFFER_SPDIFIN_AUDIO_CODEC_DECODE_CONTEXT_TYPE {BUFFER_SPDIFIN_AUDIO_CODEC_DECODE_CONTEXT, BufferDataTypeBase, AllocateFromDeviceMemory, 32, 0, true, true, sizeof(SpdifinAudioCodecDecodeContext_t)}
#else
#define BUFFER_SPDIFIN_AUDIO_CODEC_DECODE_CONTEXT "SpdifinAudioCodecDecodeContext"
#define BUFFER_SPDIFIN_AUDIO_CODEC_DECODE_CONTEXT_TYPE {BUFFER_SPDIFIN_AUDIO_CODEC_DECODE_CONTEXT, BufferDataTypeBase, AllocateFromOSMemory, 32, 0, true, true, sizeof(SpdifinAudioCodecDecodeContext_t)}
#endif
static BufferDataDescriptor_t SpdifinAudioCodecDecodeContextDescriptor = BUFFER_SPDIFIN_AUDIO_CODEC_DECODE_CONTEXT_TYPE;
// --------
////////////////////////////////////////////////////////////////////////////
///
/// Fill in the configuration parameters used by the super-class and reset everything.
///
/// \todo Correctly setup AudioDecoderTransformCapabilityMask
///
Codec_MmeAudioSpdifin_c::Codec_MmeAudioSpdifin_c( void )
{
Configuration.CodecName = "SPDIFIN audio";
// for SPDIFin we know that the incoming data is never longer than 1024 samples giving us a fairly
// small maximum frame size (reducing the maximum frame size allows us to make more efficient use of
// the coded frame buffer)
Configuration.MaximumCodedFrameSize = 0x8000;
// Large because if it changes each frame we won't be freed until the decodes have rippled through (causing deadlock)
Configuration.StreamParameterContextCount = 10;
Configuration.StreamParameterContextDescriptor = &SpdifinAudioCodecStreamParameterContextDescriptor;
// Send up to 10 frames for look-ahead.
Configuration.DecodeContextCount = 10;
Configuration.DecodeContextDescriptor = &SpdifinAudioCodecDecodeContextDescriptor;
DecodeErrors = 0;
NumberOfSamplesProcessed = 0;
//AudioDecoderTransformCapabilityMask.DecoderCapabilityFlags = (1 << ACC_SPDIFIN);
DecoderId = ACC_SPDIFIN_ID;
memset(&EOF, 0x00, sizeof(Codec_SpdifinEOF_t));
EOF.Command.CmdStatus.State = MME_COMMAND_FAILED; // always park the command in a not-running state
SpdifStatus.State = SPDIFIN_STATE_PCM_BYPASS;
SpdifStatus.StreamType = SPDIFIN_RESERVED;
SpdifStatus.PlayedSamples = 0;
Reset();
}
////////////////////////////////////////////////////////////////////////////
///
/// Destructor function, ensures a full halt and reset
/// are executed for all levels of the class.
///
Codec_MmeAudioSpdifin_c::~Codec_MmeAudioSpdifin_c( void )
{
Halt();
Reset();
}
CodecStatus_t Codec_MmeAudioSpdifin_c::Reset( void )
{
EOF.SentEOFCommand = false;
return Codec_MmeAudio_c::Reset();
}
const static enum eAccFsCode LpcmSpdifin2ACC[] =
{
// DVD Video Supported Frequencies
ACC_FS48k,
ACC_FS96k,
ACC_FS192k,
ACC_FS_reserved,
ACC_FS32k,
ACC_FS16k,
ACC_FS22k,
ACC_FS24k,
// DVD Audio Supported Frequencies
ACC_FS44k,
ACC_FS88k,
ACC_FS176k,
ACC_FS_reserved,
ACC_FS_reserved,
ACC_FS_reserved,
ACC_FS_reserved,
ACC_FS_reserved,
// SPDIFIN Supported frequencies
ACC_FS_reserved,
ACC_FS_reserved,
ACC_FS_reserved,
ACC_FS_reserved,
ACC_FS_reserved,
ACC_FS_reserved,
ACC_FS_reserved,
ACC_FS_reserved,
ACC_FS_reserved,
ACC_FS_reserved,
};
static const LpcmAudioStreamParameters_t DefaultStreamParameters =
{
TypeLpcmSPDIFIN,
ACC_MME_FALSE, // MuteFlag
ACC_MME_FALSE, // EmphasisFlag
LpcmWordSize32,
LpcmWordSizeNone,
LpcmSamplingFreq48,
LpcmSamplingFreqNone,
2, // NbChannels
0,
LPCM_DEFAULT_CHANNEL_ASSIGNMENT, // derived from NbChannels.
/*
0,
0,
0,
0,
0,
0,
0,
0
*/
};
////////////////////////////////////////////////////////////////////////////
///
/// Populate the supplied structure with parameters for SPDIFIN audio.
///
CodecStatus_t Codec_MmeAudioSpdifin_c::FillOutTransformerGlobalParameters( MME_LxAudioDecoderGlobalParams_t *GlobalParams_p )
{
CodecStatus_t Status;
//
LpcmAudioStreamParameters_t *Parsed;
MME_LxAudioDecoderGlobalParams_t &GlobalParams = *GlobalParams_p;
GlobalParams.StructSize = sizeof(MME_LxAudioDecoderGlobalParams_t);
//
if (ParsedFrameParameters == NULL)
{
// At transformer init, stream properties might be unknown...
Parsed = (LpcmAudioStreamParameters_t*) &DefaultStreamParameters;
}
else
{
Parsed = (LpcmAudioStreamParameters_t *)ParsedFrameParameters->StreamParameterStructure;
}
MME_SpdifinConfig_t &Config = *((MME_SpdifinConfig_t *) GlobalParams.DecConfig);
Config.DecoderId = ACC_SPDIFIN_ID;
Config.StructSize = sizeof(MME_SpdifinConfig_t);
// Setup default IEC config
Config.Config[IEC_SFREQ] = LpcmSpdifin2ACC[Parsed->SamplingFrequency1]; // should be 48 by default.
Config.Config[IEC_NBSAMPLES] = Parsed->NumberOfSamples; // should be 1024
Config.Config[IEC_DEEMPH ] = Parsed->EmphasisFlag;
// Setup default DD+ decoder config
memset(&Config.DecConfig[0], 0, sizeof(Config.DecConfig));
Config.DecConfig[DD_CRC_ENABLE] = ACC_MME_TRUE;
Config.DecConfig[DD_LFE_ENABLE] = ACC_MME_TRUE;
Config.DecConfig[DD_COMPRESS_MODE] = DD_LINE_OUT;
Config.DecConfig[DD_HDR] = 0xFF;
Config.DecConfig[DD_LDR] = 0xFF;
//
Status = Codec_MmeAudio_c::FillOutTransformerGlobalParameters( GlobalParams_p );
if( Status != CodecNoError )
{
return Status;
}
//
unsigned char *PcmParams_p = ((unsigned char *) &Config) + Config.StructSize;
MME_LxPcmProcessingGlobalParams_Subset_t &PcmParams =
*((MME_LxPcmProcessingGlobalParams_Subset_t *) PcmParams_p);
MME_Resamplex2GlobalParams_t &resamplex2 = PcmParams.Resamplex2;
// Id already set
// StructSize already set
resamplex2.Apply = ACC_MME_AUTO;
resamplex2.Range = ACC_FSRANGE_48k;
//
return CodecNoError;
}
////////////////////////////////////////////////////////////////////////////
///
/// Populate the AUDIO_DECODER's initialization parameters for SPDIFIN audio.
///
/// When this method completes Codec_MmeAudio_c::AudioDecoderInitializationParameters
/// will have been filled out with valid values sufficient to initialize an
/// SPDIFIN audio decoder.
///
CodecStatus_t Codec_MmeAudioSpdifin_c::FillOutTransformerInitializationParameters( void )
{
CodecStatus_t Status;
MME_LxAudioDecoderInitParams_t &Params = AudioDecoderInitializationParameters;
//
MMEInitializationParameters.TransformerInitParamsSize = sizeof(Params);
MMEInitializationParameters.TransformerInitParams_p = &Params;
//
Status = Codec_MmeAudio_c::FillOutTransformerInitializationParameters();
if (Status != CodecNoError)
return Status;
// Spdifin decoder must be handled as streambase.
AUDIODEC_SET_STREAMBASE((&Params), ACC_MME_TRUE);
//
return FillOutTransformerGlobalParameters( &Params.GlobalParams );
}
////////////////////////////////////////////////////////////////////////////
///
/// Populate the AUDIO_DECODER's MME_SET_GLOBAL_TRANSFORMER_PARAMS parameters for SPDIFIN audio.
///
CodecStatus_t Codec_MmeAudioSpdifin_c::FillOutSetStreamParametersCommand( void )
{
CodecStatus_t Status;
SpdifinAudioCodecStreamParameterContext_t *Context = (SpdifinAudioCodecStreamParameterContext_t *)StreamParameterContext;
//
// Examine the parsed stream parameters and determine what type of codec to instanciate
//
DecoderId = ACC_SPDIFIN_ID;
//
// Now fill out the actual structure
//
memset( &(Context->StreamParameters), 0, sizeof(Context->StreamParameters) );
Status = FillOutTransformerGlobalParameters( &(Context->StreamParameters) );
if( Status != CodecNoError )
return Status;
//
// Fillout the actual command
//
Context->BaseContext.MMECommand.CmdStatus.AdditionalInfoSize = 0;
Context->BaseContext.MMECommand.CmdStatus.AdditionalInfo_p = NULL;
Context->BaseContext.MMECommand.ParamSize = sizeof(Context->StreamParameters);
Context->BaseContext.MMECommand.Param_p = (MME_GenericParams_t)(&Context->StreamParameters);
//
return CodecNoError;
}
////////////////////////////////////////////////////////////////////////////
///
/// Populate the AUDIO_DECODER's MME_TRANSFORM parameters for SPDIFIN audio.
///
CodecStatus_t Codec_MmeAudioSpdifin_c::FillOutDecodeCommand( void )
{
SpdifinAudioCodecDecodeContext_t *Context = (SpdifinAudioCodecDecodeContext_t *)DecodeContext;
//
// Initialize the frame parameters (we don't actually have much to say here)
//
memset( &Context->DecodeParameters, 0, sizeof(Context->DecodeParameters) );
//
// Zero the reply structure
//
memset( &Context->DecodeStatus, 0, sizeof(Context->DecodeStatus) );
//
// Fillout the actual command
//
Context->BaseContext.MMECommand.CmdStatus.AdditionalInfoSize = sizeof(Context->DecodeStatus);
Context->BaseContext.MMECommand.CmdStatus.AdditionalInfo_p = (MME_GenericParams_t)(&Context->DecodeStatus);
Context->BaseContext.MMECommand.ParamSize = sizeof(Context->DecodeParameters);
Context->BaseContext.MMECommand.Param_p = (MME_GenericParams_t)(&Context->DecodeParameters);
return CodecNoError;
}
#ifdef __KERNEL__
extern "C"{void flush_cache_all();};
#endif
////////////////////////////////////////////////////////////////////////////
///
/// Populate the AUDIO_DECODER's MME_SEND_BUFFERS parameters for SPDIFIN audio.
/// Copy some code of codec_mme_base.cpp
/// Do not expect any Callback upon completion of this SEND_BUFFER as its
/// completion must be synchronous with the TRANSFORM command that contains the
/// corresponding decoded buffer.
CodecStatus_t Codec_MmeAudioSpdifin_c::FillOutSendBufferCommand( void )
{
SpdifinAudioCodecDecodeContext_t *Context = (SpdifinAudioCodecDecodeContext_t *)DecodeContext;
if (EOF.SentEOFCommand)
{
CODEC_TRACE("Already sent EOF command - refusing to queue more buffers\n");
return CodecNoError;
}
//
// Initialize the input buffer parameters (we don't actually have much to say here)
//
memset( &Context->BufferParameters, 0, sizeof(Context->BufferParameters) );
//
// Zero the reply structure
//
memset( &Context->BufferStatus, 0, sizeof(Context->BufferStatus) );
//
// Fillout the actual command
//
Context->BufferCommand.CmdStatus.AdditionalInfoSize = sizeof(Context->BufferStatus);
Context->BufferCommand.CmdStatus.AdditionalInfo_p = (MME_GenericParams_t)(&Context->BufferStatus);
Context->BufferCommand.ParamSize = sizeof(Context->BufferParameters);
Context->BufferCommand.Param_p = (MME_GenericParams_t)(&Context->BufferParameters);
// Feed back will be managed at same time as the return of the corresponding MME_TRANSFORM.
Context->BufferCommand.StructSize = sizeof(MME_Command_t);
Context->BufferCommand.CmdCode = MME_SEND_BUFFERS;
Context->BufferCommand.CmdEnd = MME_COMMAND_END_RETURN_NO_INFO;
#ifdef __KERNEL__
flush_cache_all();
#endif
MME_ERROR Status = MME_SendCommand( MMEHandle, &Context->BufferCommand );
if( Status != MME_SUCCESS )
{
report( severity_error, "Codec_MmeAudioSpdifin_c::FillOutSendBufferCommand(%s) - Unable to send buffer command (%08x).\n", Configuration.CodecName, Status );
return CodecError;
}
return CodecNoError;
}
////////////////////////////////////////////////////////////////////////////
///
/// Set SPDIFIN StreamBase style TRANSFORM command IOs
/// Set DecodeContext the same way as for FrameBase TRANSFORMS
/// but Send the Input buffer in a Specific SEND_BUFFER command.
/// TRANSFORM Command is preset to emit only the corresponding Output buffer
void Codec_MmeAudioSpdifin_c::SetCommandIO(void)
{
SpdifinAudioCodecDecodeContext_t *Context = (SpdifinAudioCodecDecodeContext_t *)DecodeContext;
// Attach both I/O buffers to DecodeContext.
PresetIOBuffers();
// StreamBase Transformer : 1 Input Buffer sent through SEND_BUFFER / 1 Output Buffer sent through MME_TRANSFORM
// Prepare SEND_BUFFER Command to transmit Input Buffer
Context->BufferCommand.NumberInputBuffers = 1;
Context->BufferCommand.NumberOutputBuffers = 0;
Context->BufferCommand.DataBuffers_p = &DecodeContext->MMEBufferList[0];
// Prepare MME_TRANSFORM Command to transmit Output Buffer
DecodeContext->MMECommand.NumberInputBuffers = 0;
DecodeContext->MMECommand.NumberOutputBuffers = 1;
DecodeContext->MMECommand.DataBuffers_p = &DecodeContext->MMEBufferList[1];
//
// Load the parameters into MME SendBuffer command
//
FillOutSendBufferCommand();
}
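// Illustrative recap (assumed sequencing, not extra code paths): each coded
// frame thus results in two commands on the same decode context -- the
// MME_SEND_BUFFERS issued just above via FillOutSendBufferCommand() carrying
// the input buffer, and the MME_TRANSFORM that the base class sends later
// carrying the output buffer. Only the TRANSFORM completion is used for
// feedback, which is why the SEND_BUFFERS was queued with
// MME_COMMAND_END_RETURN_NO_INFO.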
CodecStatus_t Codec_MmeAudioSpdifin_c::SendEofCommand()
{
MME_Command_t *eof = &EOF.Command;
if (EOF.SentEOFCommand) {
CODEC_TRACE("Already sent EOF command once, refusing to do it again.\n");
return CodecNoError;
}
EOF.SentEOFCommand = true;
// Setup EOF Command ::
eof->StructSize = sizeof(MME_Command_t);
eof->CmdCode = MME_SEND_BUFFERS;
eof->CmdEnd = MME_COMMAND_END_RETURN_NO_INFO;
eof->NumberInputBuffers = 1;
eof->NumberOutputBuffers = 0;
eof->DataBuffers_p = (MME_DataBuffer_t **) &EOF.DataBuffers;
eof->ParamSize = sizeof(MME_SpdifinBufferParams_t);
eof->Param_p = &EOF.Params;
//
// The following fields were reset during the Class Instantiation ::
//
//eof->DueTime = 0;
//eof->CmdStatus.AdditionalInfoSize = 0;
//eof->CmdStatus.AdditionalInfo_p = NULL;
// Setup EOF Params
EOF.Params.StructSize = sizeof(MME_SpdifinBufferParams_t);
STREAMING_SET_BUFFER_TYPE(EOF.Params.BufferParams, STREAMING_DEC_EOF);
// Setup DataBuffer ::
EOF.DataBuffers[0] = &EOF.DataBuffer;
EOF.DataBuffer.StructSize = sizeof(MME_DataBuffer_t);
EOF.DataBuffer.UserData_p = NULL;
EOF.DataBuffer.NumberOfScatterPages = 1;
EOF.DataBuffer.ScatterPages_p = &EOF.ScatterPage;
//
// The following fields were reset during the Class Instantiation ::
//
//eof->DueTime = 0; // immediate.
//EOF.DataBuffer.Flags = 0;
//EOF.DataBuffer.StreamNumber = 0;
//EOF.DataBuffer.TotalSize = 0;
//EOF.DataBuffer.StartOffset = 0;
// Setup EOF ScatterPage ::
//
// The following fields were reset during the Class Instantiation ::
//
//EOF.ScatterPage.Page_p = NULL;
//EOF.ScatterPage.Size = 0;
//EOF.ScatterPage.BytesUsed = 0;
//EOF.ScatterPage.FlagsIn = 0;
//EOF.ScatterPage.FlagsOut = 0;
MME_ERROR Result = MME_SendCommand( MMEHandle, eof);
if( Result != MME_SUCCESS )
{
CODEC_ERROR("Unable to send eof (%08x).\n", Result );
return CodecError;
}
return CodecNoError;
}
CodecStatus_t Codec_MmeAudioSpdifin_c::DiscardQueuedDecodes()
{
CodecStatus_t Status;
Status = Codec_MmeAudio_c::DiscardQueuedDecodes();
if (CodecNoError != Status)
return Status;
Status = SendEofCommand();
if (CodecNoError != Status)
return Status;
return CodecNoError;
}
////////////////////////////////////////////////////////////////////////////
///
/// Status Information display
#define SPDIFIN_TEXT(x) #x
const char * SpdifinStreamTypeText[]=
{
SPDIFIN_TEXT(SPDIFIN_NULL_DATA_BURST),
SPDIFIN_TEXT(SPDIFIN_AC3),
SPDIFIN_TEXT(SPDIFIN_PAUSE_BURST),
SPDIFIN_TEXT(SPDIFIN_MP1L1),
SPDIFIN_TEXT(SPDIFIN_MP1L2L3),
SPDIFIN_TEXT(SPDIFIN_MP2MC),
SPDIFIN_TEXT(SPDIFIN_MP2AAC),
SPDIFIN_TEXT(SPDIFIN_MP2L1LSF),
SPDIFIN_TEXT(SPDIFIN_MP2L2LSF),
SPDIFIN_TEXT(SPDIFIN_MP2L3LSF),
SPDIFIN_TEXT(SPDIFIN_DTS1),
SPDIFIN_TEXT(SPDIFIN_DTS2),
SPDIFIN_TEXT(SPDIFIN_DTS3),
SPDIFIN_TEXT(SPDIFIN_ATRAC),
SPDIFIN_TEXT(SPDIFIN_ATRAC2_3),
SPDIFIN_TEXT(SPDIFIN_IEC60937_RESERVED),
SPDIFIN_TEXT(SPDIFIN_RESERVED_16),
SPDIFIN_TEXT(SPDIFIN_RESERVED_17),
SPDIFIN_TEXT(SPDIFIN_RESERVED_18),
SPDIFIN_TEXT(SPDIFIN_RESERVED_19),
SPDIFIN_TEXT(SPDIFIN_RESERVED_20),
SPDIFIN_TEXT(SPDIFIN_RESERVED_21),
SPDIFIN_TEXT(SPDIFIN_RESERVED_22),
SPDIFIN_TEXT(SPDIFIN_RESERVED_23),
SPDIFIN_TEXT(SPDIFIN_RESERVED_24),
SPDIFIN_TEXT(SPDIFIN_RESERVED_25),
SPDIFIN_TEXT(SPDIFIN_RESERVED_26),
SPDIFIN_TEXT(SPDIFIN_RESERVED_27),
SPDIFIN_TEXT(SPDIFIN_RESERVED_28),
SPDIFIN_TEXT(SPDIFIN_RESERVED_29),
SPDIFIN_TEXT(SPDIFIN_RESERVED_30),
SPDIFIN_TEXT(SPDIFIN_RESERVED_31),
SPDIFIN_TEXT(SPDIFIN_IEC60958_PCM),
SPDIFIN_TEXT(SPDIFIN_IEC60958_DTS14),
SPDIFIN_TEXT(SPDIFIN_IEC60958_DTS16),
SPDIFIN_TEXT(SPDIFIN_RESERVED)
};
const char * SpdifinStateText[]=
{
SPDIFIN_TEXT(SPDIFIN_STATE_RESET),
SPDIFIN_TEXT(SPDIFIN_STATE_PCM_BYPASS),
SPDIFIN_TEXT(SPDIFIN_STATE_COMPRESSED_BYPASS),
SPDIFIN_TEXT(SPDIFIN_STATE_UNDERFLOW),
SPDIFIN_TEXT(SPDIFIN_STATE_COMPRESSED_MUTE),
SPDIFIN_TEXT(SPDIFIN_STATE_INVALID)
};
static inline const char * reportStreamType(enum eMulticomSpdifinPC type)
{
return (type < SPDIFIN_RESERVED) ? SpdifinStreamTypeText[type] : SpdifinStreamTypeText[SPDIFIN_RESERVED];
}
static inline const char * reportState(enum eMulticomSpdifinState state)
{
return (state < SPDIFIN_STATE_INVALID) ? SpdifinStateText[state] : SpdifinStateText[SPDIFIN_STATE_INVALID];
}
////////////////////////////////////////////////////////////////////////////
///
/// Validate the ACC status structure and squawk loudly if problems are found.
///
/// Despite the squawking this method normally returns success. This is
/// because the firmware will already have concealed the decode problems by
/// performing a soft mute.
///
/// \return CodecNoError
///
CodecStatus_t Codec_MmeAudioSpdifin_c::ValidateDecodeContext( CodecBaseDecodeContext_t *Context )
{
SpdifinAudioCodecDecodeContext_t * DecodeContext = (SpdifinAudioCodecDecodeContext_t *) Context;
MME_LxAudioDecoderFrameStatus_t & Status = DecodeContext->DecodeStatus;
ParsedAudioParameters_t * AudioParameters;
enum eMulticomSpdifinState NewState, OldState = SpdifStatus.State;
enum eMulticomSpdifinPC NewPC , OldPC = SpdifStatus.StreamType;
tMMESpdifinStatus *FrameStatus = (tMMESpdifinStatus *) &Status.FrameStatus[0];
NewState = (enum eMulticomSpdifinState) FrameStatus->CurrentState;
NewPC = (enum eMulticomSpdifinPC ) FrameStatus->PC;
bool StatusChange = (AudioDecoderStatus.SamplingFreq != Status.SamplingFreq) ||
(AudioDecoderStatus.DecAudioMode != Status.DecAudioMode);
// HACK: This should bloody well be in the super-class
AudioDecoderStatus = Status;
if ((OldState != NewState) || (OldPC != NewPC) || StatusChange)
{
SpdifStatus.State = NewState;
SpdifStatus.StreamType = NewPC;
report( severity_info, "Codec_MmeAudioSpdifin_c::ValidateDecodeContext() New State :: %s after %d samples\n", reportState(NewState) , SpdifStatus.PlayedSamples);
report( severity_info, "Codec_MmeAudioSpdifin_c::ValidateDecodeContext() New StreamType :: [%d] %s after %d samples\n", NewPC, reportStreamType(NewPC), SpdifStatus.PlayedSamples);
PlayerStatus_t PlayerStatus;
PlayerEventRecord_t Event;
void *EventUserData = NULL;
Event.Code = EventInputFormatChanged;
Event.Playback = Playback;
Event.Stream = Stream;
Event.PlaybackTime = TIME_NOT_APPLICABLE;
Event.UserData = EventUserData;
Event.Value[0].Pointer = this; // pointer to the component
PlayerStatus = Player->SignalEvent( &Event );
if( PlayerStatus != PlayerNoError )
{
report( severity_error, "Codec_MmeAudioSpdifin_c::ValidateDecodeContext - Failed to signal event.\n" );
return CodecError;
}
// END SYSFS
}
SpdifStatus.PlayedSamples += Status.NbOutSamples;
NumberOfSamplesProcessed += Status.NbOutSamples; // SYSFS
CODEC_DEBUG("Codec_MmeAudioSpdifin_c::ValidateDecodeContext() Transform Cmd returned \n");
if (ENABLE_CODEC_DEBUG)
{
//DumpCommand(bufferIndex);
}
if (Status.DecStatus)
{
CODEC_ERROR("SPDIFIN audio decode error (muted frame): %d\n", Status.DecStatus);
DecodeErrors++;
//DumpCommand(bufferIndex);
// don't report an error to the higher levels (because the frame is muted)
}
//
// Attach any codec derived metadata to the output buffer (or verify the
// frame analysis if the frame analyser already filled everything in for
// us).
//
AudioParameters = BufferState[DecodeContext->BaseContext.BufferIndex].ParsedAudioParameters;
// TODO: these values should be extracted from the codec's reply
if (AudioOutputSurface)
{
AudioParameters->Source.BitsPerSample = AudioOutputSurface->BitsPerSample;
AudioParameters->Source.ChannelCount = AudioOutputSurface->ChannelCount;
}
AudioParameters->Organisation = Status.AudioMode;
AudioParameters->SampleCount = Status.NbOutSamples;
enum eAccFsCode SamplingFreqCode = (enum eAccFsCode) Status.SamplingFreq;
if (SamplingFreqCode < ACC_FS_reserved)
{
AudioParameters->Source.SampleRateHz = Codec_MmeAudio_c::ConvertCodecSamplingFreq(SamplingFreqCode);
//AudioParameters->Source.SampleRateHz = 44100;
}
else
{
AudioParameters->Source.SampleRateHz = 0;
CODEC_ERROR("SPDIFIn audio decode bad sampling freq returned: 0x%x\n", SamplingFreqCode);
}
if (SpdifStatus.StreamType == SPDIFIN_AC3)
{
Codec_MmeAudioEAc3_c::FillStreamMetadata(AudioParameters, (MME_LxAudioDecoderFrameStatus_t*)&Status);
}
    else if (((SpdifStatus.StreamType >= SPDIFIN_DTS1) && (SpdifStatus.StreamType <= SPDIFIN_DTS3)) ||
(SpdifStatus.StreamType == SPDIFIN_IEC60958_DTS14) || (SpdifStatus.StreamType == SPDIFIN_IEC60958_DTS16))
{
Codec_MmeAudioDtshd_c::FillStreamMetadata(AudioParameters, (MME_LxAudioDecoderFrameStatus_t*)&Status);
}
else
{
// do nothing, the AudioParameters are zeroed by FrameParser_Audio_c::Input() which is
// appropriate (i.e. OriginalEncoding is AudioOriginalEncodingUnknown)
}
return CodecNoError;
}
/////////////////////////////////////////////////////////////////////////////////////////////////////
///
/// Terminate the SPDIFIN mme transformer
/// First send an empty BUFFER with the EOF tag so that it unlocks any pending TRANSFORM that is waiting
/// for more buffers, then wait for all SentBuffers and DecodeTransforms to have returned.
CodecStatus_t Codec_MmeAudioSpdifin_c::TerminateMMETransformer( void )
{
CodecStatus_t Status;
if( MMEInitialized )
{
Status = SendEofCommand();
if (CodecNoError != Status)
return Status;
// Call base class that waits enough time for all MME_TRANSFORMS to return
Status = Codec_MmeBase_c::TerminateMMETransformer();
return Status;<|fim▁hole|>}
// /////////////////////////////////////////////////////////////////////////
//
// Function to dump out the set stream
// parameters from an mme command.
//
CodecStatus_t Codec_MmeAudioSpdifin_c::DumpSetStreamParameters( void *Parameters )
{
CODEC_ERROR("Not implemented\n");
return CodecNoError;
}
// /////////////////////////////////////////////////////////////////////////
//
// Function to dump out the decode
// parameters from an mme command.
//
CodecStatus_t Codec_MmeAudioSpdifin_c::DumpDecodeParameters( void *Parameters )
{
CODEC_ERROR("Not implemented\n");
return CodecNoError;
}
CodecStatus_t Codec_MmeAudioSpdifin_c::CreateAttributeEvents (void)
{
PlayerStatus_t Status;
PlayerEventRecord_t Event;
void *EventUserData = NULL;
Event.Playback = Playback;
Event.Stream = Stream;
Event.PlaybackTime = TIME_NOT_APPLICABLE;
Event.UserData = EventUserData;
Event.Value[0].Pointer = this;
Status = Codec_MmeAudio_c::CreateAttributeEvents();
if (Status != PlayerNoError)
return Status;
Event.Code = EventInputFormatCreated;
Status = Player->SignalEvent( &Event );
if( Status != PlayerNoError )
{
CODEC_ERROR("Failed to signal event.\n");
return CodecError;
}
Event.Code = EventSupportedInputFormatCreated;
Status = Player->SignalEvent( &Event );
if( Status != PlayerNoError )
{
CODEC_ERROR("Failed to signal event.\n" );
return CodecError;
}
Event.Code = EventDecodeErrorsCreated;
Status = Player->SignalEvent( &Event );
if( Status != PlayerNoError )
{
CODEC_ERROR("Failed to signal event.\n" );
return CodecError;
}
Event.Code = EventNumberOfSamplesProcessedCreated;
Status = Player->SignalEvent( &Event );
if( Status != PlayerNoError )
{
CODEC_ERROR("Failed to signal event.\n" );
return CodecError;
}
return CodecNoError;
}
CodecStatus_t Codec_MmeAudioSpdifin_c::GetAttribute (const char *Attribute, PlayerAttributeDescriptor_t *Value)
{
//report( severity_error, "Codec_MmeAudioSpdifin_c::GetAttribute Enter\n");
if (0 == strcmp(Attribute, "input_format"))
{
Value->Id = SYSFS_ATTRIBUTE_ID_CONSTCHARPOINTER;
#define C(x) case SPDIFIN_ ## x: Value->u.ConstCharPointer = #x; return CodecNoError
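        // The helper above token-pastes the enum suffix and stringises it, so
        // for example C(AC3) expands to:
        //     case SPDIFIN_AC3: Value->u.ConstCharPointer = "AC3"; return CodecNoError;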
switch (SpdifStatus.StreamType)
{
C(NULL_DATA_BURST);
C(AC3);
C(PAUSE_BURST);
C(MP1L1);
C(MP1L2L3);
C(MP2MC);
C(MP2AAC);
C(MP2L1LSF);
C(MP2L2LSF);
C(MP2L3LSF);
C(DTS1);
C(DTS2);
C(DTS3);
C(ATRAC);
C(ATRAC2_3);
case SPDIFIN_IEC60958:
Value->u.ConstCharPointer = "PCM";
return CodecNoError;
C(IEC60958_DTS14);
C(IEC60958_DTS16);
default:
CODEC_ERROR("This input_format does not exist.\n" );
return CodecError;
}
#undef C
}
else if (0 == strcmp(Attribute, "decode_errors"))
{
Value->Id = SYSFS_ATTRIBUTE_ID_INTEGER;
Value->u.Int = DecodeErrors;
return CodecNoError;
}
else if (0 == strcmp(Attribute, "supported_input_format"))
{
//report( severity_error, "%s %d\n", __FUNCTION__, __LINE__);
MME_LxAudioDecoderInfo_t &Capability = AudioDecoderTransformCapability;
Value->Id = SYSFS_ATTRIBUTE_ID_BOOL;
switch (SpdifStatus.StreamType)
{
case SPDIFIN_AC3:
Value->u.Bool = Capability.DecoderCapabilityExtFlags[0] & 0x8; // ACC_SPDIFIN_DD
return CodecNoError;
case SPDIFIN_DTS1:
case SPDIFIN_DTS2:
case SPDIFIN_DTS3:
case SPDIFIN_IEC60958_DTS14:
case SPDIFIN_IEC60958_DTS16:
Value->u.Bool = Capability.DecoderCapabilityExtFlags[0] & 0x10; // ACC_SPDIFIN_DTS
return CodecNoError;
case SPDIFIN_MP2AAC:
            Value->u.Bool = Capability.DecoderCapabilityExtFlags[0] & 0x20; // ACC_SPDIFIN_MPG to be renamed ACC_SPDIFIN_AAC
            return CodecNoError;
case SPDIFIN_IEC60958:
case SPDIFIN_NULL_DATA_BURST:
case SPDIFIN_PAUSE_BURST:
Value->u.Bool = true;
return CodecNoError;
case SPDIFIN_MP1L1:
case SPDIFIN_MP1L2L3:
case SPDIFIN_MP2MC:
case SPDIFIN_MP2L1LSF:
case SPDIFIN_MP2L2LSF:
case SPDIFIN_MP2L3LSF:
case SPDIFIN_ATRAC:
case SPDIFIN_ATRAC2_3:
default:
Value->u.Bool = false;
return CodecNoError;
}
}
else if (0 == strcmp(Attribute, "number_of_samples_processed"))
{
Value->Id = SYSFS_ATTRIBUTE_ID_UNSIGNEDLONGLONGINT;
Value->u.UnsignedLongLongInt = NumberOfSamplesProcessed;
return CodecNoError;
}
else
{
CodecStatus_t Status;
Status = Codec_MmeAudio_c::GetAttribute (Attribute, Value);
if (Status != CodecNoError)
{
CODEC_ERROR("This attribute does not exist.\n" );
return CodecError;
}
}
return CodecNoError;
}
CodecStatus_t Codec_MmeAudioSpdifin_c::SetAttribute (const char *Attribute, PlayerAttributeDescriptor_t *Value)
{
if (0 == strcmp(Attribute, "decode_errors"))
{
DecodeErrors = Value->u.Int;
return CodecNoError;
}
else
{
CODEC_ERROR("This attribute cannot be set.\n" );
return CodecError;
}
return CodecNoError;
}<|fim▁end|> | }
return CodecNoError; |
<|file_name|>draggable-element-base.ts<|end_file_name|><|fim▁begin|>import { Input } from '@angular/core';
import { Config } from '../../../../../../../config/config';
import { ISpecmatePositionableModelObject } from '../../../../../../../model/ISpecmatePositionableModelObject';
import { Id } from '../../../../../../../util/id';
import { SpecmateDataService } from '../../../../../../data/modules/data-service/services/specmate-data.service';
import { ValidationService } from '../../../../../../forms/modules/validation/services/validation.service';
import { SelectedElementService } from '../../../../../side/modules/selected-element/services/selected-element.service';
import { MultiselectionService } from '../../tool-pallette/services/multiselection.service';
import { Coords } from '../util/coords';
import { GraphicalNodeBase } from './graphical-node-base';
export abstract class DraggableElementBase<T extends ISpecmatePositionableModelObject> extends GraphicalNodeBase<T> {
constructor(selectedElementService: SelectedElementService, validationService: ValidationService,
multiselectionService: MultiselectionService) {
super(selectedElementService, validationService, multiselectionService);
}
private isGrabbed = false;
private isGrabTrigger = false;
private prevX: number;
private prevY: number;
private _rawX: number;
private _rawY: number;
protected _zoom = 1;
@Input()
public set zoom(zoom: number) {
this._zoom = zoom;
}
private get rawX(): number {
if (this._rawX === undefined) {
return this.center.x;
}
return this._rawX;
}
private set rawX(x: number) {
this._rawX = x;
}
private get rawY(): number {
if (this._rawY === undefined) {
return this.center.y;
}
return this._rawY;
}
private set rawY(y: number) {
this._rawY = y;
}
private userIsDraggingElsewhere = true;
public get cursorClass() {
if (this.userIsDraggingElsewhere) {
return 'draggable-element-default';
}
if (this.isGrabbed) {
return 'draggable-element-grabbed';
}
return 'draggable-element-grab';
}
protected abstract get dataService(): SpecmateDataService;
private get x(): number {
if (this.isOffX && !this.isGrabbed) {
this.rawX = this.center.x;
}
return Coords.roundToGrid(this.rawX);
}
private get y(): number {
if (this.isOffY && !this.isGrabbed) {
this.rawY = this.center.y;
}
return Coords.roundToGrid(this.rawY);
}
<|fim▁hole|>
private get isOffY(): boolean {
return this.isCoordOff(this.rawY, this.center.y);
}
private isCoordOff(rawCoord: number, nodeCoord: number): boolean {
return rawCoord === undefined || Math.abs(rawCoord - nodeCoord) >= Config.GRAPHICAL_EDITOR_GRID_SPACE;
}
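    // Worked example (hypothetical values, assuming Config.GRAPHICAL_EDITOR_GRID_SPACE is 10):
    // isCoordOff(42, 40) is false, since |42 - 40| = 2 keeps the cached raw value within one
    // grid cell of the node; isCoordOff(53, 40) is true, since |53 - 40| = 13 means the raw
    // value is stale, and the x/y getters above re-sync it from the node's center.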
public get dragDummyPosition(): {x: number, y: number} {
if (this.isGrabbed && this.isGrabTrigger) {
return {
x: 0,
y: 0
};
}
return this.topLeft;
}
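    // While grabbed, the drag dummy below is enlarged well past the node (note
    // the 300px margin), presumably so the transparent capture rectangle keeps
    // receiving mouse events during fast drags; the margin looks like a
    // heuristic rather than a derived value.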
public get dragDummyDimensions(): {width: number, height: number} {
if (this.isGrabbed && this.isGrabTrigger) {
return {
width: this.topLeft.x + this.dimensions.width + 300,
height: this.topLeft.y + this.dimensions.height + 300
};
}
return this.dimensions;
}
public leave(e: MouseEvent): void {
e.preventDefault();
e.stopPropagation();
this.dragEnd();
this.userIsDraggingElsewhere = true;
}
public enter(e: MouseEvent): void {
// Check if the icon should change depending on whether the user enters
// the space with a button pressed.
this.userIsDraggingElsewhere = e.buttons !== 0;
}
public grab(e: MouseEvent): void {
e.preventDefault();
e.stopPropagation();
this.dragStart(e);
}
public drop(e: MouseEvent): void {
e.preventDefault();
e.stopPropagation();
if (this.userIsDraggingElsewhere) {
this.multiselectionService.mouseUp(e, this._zoom);
}
this.userIsDraggingElsewhere = false;
this.dragEnd();
}
private moveable: DraggableElementBase<ISpecmatePositionableModelObject>[];
private dragStart(e: MouseEvent): void {
// Since we only move selected elements we need to update the selection
// before starting the movement.
if (!this.selected ) {
if (e.shiftKey) {
this.selectedElementService.addToSelection(this.element);
} else {
this.selectedElementService.select(this.element);
}
} else {
if (e.shiftKey) {
                // A deselection of the current element aborts any drag & drop action.
this.selectedElementService.deselectElement(this.element);
return;
}
}
this.isGrabTrigger = true;
// Get all moveable, selected elements
this.moveable = this.multiselectionService.selection.map( elem => <DraggableElementBase<ISpecmatePositionableModelObject>>elem)
.filter(elem => (elem.moveNode !== undefined) && (elem.dropNode !== undefined));
this.moveable.forEach(elem => {
elem.isGrabbed = true;
// All elements should jump to the next position at the same time, so snap to the grid.
elem.snapToGrid();
});
}
private snapToGrid() {
this.rawX = Coords.roundToGrid(this.rawX);
this.rawY = Coords.roundToGrid(this.rawY);
}
public drag(e: MouseEvent): void {
e.preventDefault();
if (this.isGrabbed) {
e.stopPropagation();
// Compute movement delta
let movementX: number = (this.prevX ? e.offsetX - this.prevX : 0); // this._zoom;
let movementY: number = (this.prevY ? e.offsetY - this.prevY : 0); // this._zoom;
// Update prev Position
this.prevX = e.offsetX;
this.prevY = e.offsetY;
if (!this.isMove(movementX, movementY)) {
return;
}
// Check if all elements can move
            let allowedMove = this.moveable.every(elem => elem.isAllowedMove(movementX, movementY));
// Inform all movable elements
if (allowedMove) {
this.moveable.forEach(elem => elem.moveNode(movementX, movementY));
}
}
}
public isAllowedMove(movementX: number, movementY: number) {
let destX: number = this.rawX + movementX;
let destY: number = this.rawY + movementY;
return this.isWithinBounds(destX, destY);
}
public moveNode(movementX: number, movementY: number) {
let destX: number = this.rawX + movementX;
let destY: number = this.rawY + movementY;
this.rawX = destX;
this.rawY = destY;
// Works, because this.element.x === this.center.x && this.element.y === this.center.y
this.element.x = this.x;
this.element.y = this.y;
}
public dropNode(): void {
this.isGrabbed = false;
this.isGrabTrigger = false;
this.prevX = undefined;
this.prevY = undefined;
this.dataService.updateElement(this.element, true, Id.uuid);
}
private dragEnd(): void {
if (this.isGrabbed) {
this.moveable.forEach(elem => elem.dropNode());
}
}
private isMove(movementX: number, movementY: number): boolean {
        return movementX !== 0 || movementY !== 0;
}
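    // destX/destY are center coordinates, so this check keeps the element's
    // bounding box inside the positive quadrant: the top-left corner may not
    // cross x = 0 or y = 0.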
private isWithinBounds(destX: number, destY: number): boolean {
return destX - this.dimensions.width / 2 >= 0 && destY - this.dimensions.height / 2 >= 0;
}
}<|fim▁end|> |
private get isOffX(): boolean {
return this.isCoordOff(this.rawX, this.center.x);
}
|
<|file_name|>when_fn_regular.rs<|end_file_name|><|fim▁begin|>use super::*;
#[mockable]
fn function(arg: bool) -> String {
format!("{}", arg)
}
#[test]
fn and_not_mocked_then_runs_normally() {
assert_eq!("true", function(true));
}
#[test]
fn and_continue_mocked_then_runs_with_modified_args() {
unsafe {
function.mock_raw(|a| MockResult::Continue((!a,)));
}
assert_eq!("false", function(true));
}
#[test]
fn and_return_mocked_then_returns_mocking_result() {
unsafe {
function.mock_raw(|a| MockResult::Return(format!("mocked {}", a)));
}
<|fim▁hole|><|fim▁end|> | assert_eq!("mocked true", function(true));
} |
<|file_name|>TextBox.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react';
import './TextBox.css';
interface Props {
className: string;
onChange?: (e: React.ChangeEvent<HTMLInputElement>) => void;
placeholder: string;
}
class TextBox extends React.Component<Props, {}> {<|fim▁hole|> }
private handleChange(e: React.ChangeEvent<HTMLInputElement>) {
if (this.props.onChange) {
this.props.onChange(e);
}
return false;
}
public render() {
return (
<input
className={this.props.className}
placeholder={this.props.placeholder}
onChange={this.handleChange}
/>
);
}
}
export default TextBox;<|fim▁end|> | public constructor(props: Props) {
super(props);
this.handleChange = this.handleChange.bind(this); |
<|file_name|>address.js<|end_file_name|><|fim▁begin|>'use strict';
/* jshint maxstatements: 30 */
var chai = require('chai');
var should = chai.should();
var expect = chai.expect;
var bitcore = require('..');
var PublicKey = bitcore.PublicKey;
var Address = bitcore.Address;
var Script = bitcore.Script;
var Networks = bitcore.Networks;
var validbase58 = require('./data/bitcoind/base58_keys_valid.json');
var invalidbase58 = require('./data/bitcoind/base58_keys_invalid.json');
describe('Address', function() {
var pubkeyhash = Buffer.from('4a0529a63fcc33e4f51688311b89b97371b5c76c', 'hex');
var buf = Buffer.concat([Buffer.from([30]), pubkeyhash]);
var str = 'DBtUjAUFWHqS1y8CgetQtPW6YKDhffyFT4';
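  // For orientation: `buf` is the 21-byte wire form of `str`, a one-byte
  // version prefix (30, read off the Buffer.concat above) followed by the
  // 20-byte public-key hash; Base58Check-encoding that buffer yields `str`.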
it('can\'t build without data', function() {
(function() {
return new Address();
}).should.throw('First argument is required, please include address data.');
});
it('should throw an error because of bad network param', function() {
(function() {
return new Address(PKHLivenet[0], 'main', 'pubkeyhash');
}).should.throw('Second argument must be "livenet" or "testnet".');
});
<|fim▁hole|> it('should throw an error because of bad type param', function() {
(function() {
return new Address(PKHLivenet[0], 'livenet', 'pubkey');
}).should.throw('Third argument must be "pubkeyhash" or "scripthash"');
});
describe('bitcoind compliance', function() {
validbase58.map(function(d) {
if (!d[2].isPrivkey) {
it('should describe address ' + d[0] + ' as valid', function() {
var type;
if (d[2].addrType === 'script') {
type = 'scripthash';
} else if (d[2].addrType === 'pubkey') {
type = 'pubkeyhash';
}
var network = 'livenet';
if (d[2].isTestnet) {
network = 'testnet';
}
return new Address(d[0], network, type);
});
}
});
invalidbase58.map(function(d) {
it('should describe input ' + d[0].slice(0, 10) + '... as invalid', function() {
expect(function() {
return new Address(d[0]);
}).to.throw(Error);
});
});
});
// livenet valid
var PKHLivenet = [
'D77Z1nmgSZxJTmtN65n2MVF9yvLSB4MpiC',
'DGYdw7jC17b9SappjsrAsaghhDTS8sV5Mx',
'DExWunrSkF2P9NAzW4JpEeC38uC3z36cCk',
'DMYFDsuFamS4kogsiqxQRXK9diCfmJzNLh',
' DQJXChtyuN3aiU3W6pG4mgnykisHH5hqst \t\n'
];
// livenet p2sh
var P2SHLivenet = [
'A7HRQk3GFCW2QasvdZxXuYj8kkQK5QrYLs',
'A3RHoAQLPDSuBewbuvt5NMqEKasqs9ty3C',
'A3YUuiZjn3FzW2i6KqFVgHaN5r5fjvwEJ1',
'AFVkW8WgPP2rsTdAeyogFK9k8gXJ1LZwrb',
'A2CUbyEtqGUjngAjxpGmM5Nwhrz72gh23v',
'\t \AD21pKoCWcrqrUW4oRv5xUaGwCfXhiyx6y \r'
];
// testnet p2sh
var P2SHTestnet = [
'2MsvyG12kxxipe276Au4zKqvd2xdrBuHWb3',
'2MvY1BBau2C6jRC1hNX5LmviLrpvVG386TM',
'2N22vuiMGhfqD77vPqsXPiALBfn6oUTtFtH',
'2N1Gz97rN7P6VCj9xf64wWFqQZgpUng1mM3'
];
//livenet bad checksums
var badChecksums = [
'461QQ2sYWxU7H2PV4oBwJGNch8XVTYYbZxU',
'461QQ2sYWxASd2PV4oBwJGNch8XVTYYbZxU',
    'DRjyUS2uuieEPkhZNdQz8hE5YycxVEqSXA',
];
//livenet non-base58
var nonBase58 = [
'15vkcKf7g#23wLAnZLmb$uMiiVDc3nq4a2',
'1A601ttWnUq1SEQLMr4ttDh24wcbj4w2TT',
'1BpbpfLdY7oBS9gK7aIXgvMgr1DpvNH3B2',
'1Jz2yCRdOST1p2gUqFB5wsSQfdmEJaffg7'
];
//testnet valid
var PKHTestnet = [
'ngbSgr1dhCqsLg6Z5tpsaCspwrH72x2Zk3',
'ngao4q8auS6YuDKYPkzjt7zKYYT8LzkGZS',
'nbo7vyCuiMxiYW4thwwuaTHQdCoqinmzJz',
'nagGnGDKWYpvnphf9rgo3xtQhAxiEM1QGD'
];
describe('validation', function() {
it('getValidationError detects network mismatchs', function() {
var error = Address.getValidationError('D77Z1nmgSZxJTmtN65n2MVF9yvLSB4MpiC', 'testnet');
should.exist(error);
});
it('isValid returns true on a valid address', function() {
var valid = Address.isValid('D77Z1nmgSZxJTmtN65n2MVF9yvLSB4MpiC', 'livenet');
valid.should.equal(true);
});
it('isValid returns false on network mismatch', function() {
var valid = Address.isValid('D77Z1nmgSZxJTmtN65n2MVF9yvLSB4MpiC', 'testnet');
valid.should.equal(false);
});
it('validates correctly the P2PKH test vector', function() {
for (var i = 0; i < PKHLivenet.length; i++) {
var error = Address.getValidationError(PKHLivenet[i]);
should.not.exist(error);
}
});
it('validates correctly the P2SH test vector', function() {
for (var i = 0; i < P2SHLivenet.length; i++) {
var error = Address.getValidationError(P2SHLivenet[i]);
should.not.exist(error);
}
});
it('validates correctly the P2SH testnet test vector', function() {
for (var i = 0; i < P2SHTestnet.length; i++) {
var error = Address.getValidationError(P2SHTestnet[i], 'testnet');
should.not.exist(error);
}
});
it('rejects correctly the P2PKH livenet test vector with "testnet" parameter', function() {
for (var i = 0; i < PKHLivenet.length; i++) {
var error = Address.getValidationError(PKHLivenet[i], 'testnet');
should.exist(error);
}
});
it('validates correctly the P2PKH livenet test vector with "livenet" parameter', function() {
for (var i = 0; i < PKHLivenet.length; i++) {
var error = Address.getValidationError(PKHLivenet[i], 'livenet');
should.not.exist(error);
}
});
it('should not validate if checksum is invalid', function() {
for (var i = 0; i < badChecksums.length; i++) {
var error = Address.getValidationError(badChecksums[i], 'livenet', 'pubkeyhash');
should.exist(error);
error.message.should.equal('Checksum mismatch');
}
});
it('should not validate on a network mismatch', function() {
var error, i;
for (i = 0; i < PKHLivenet.length; i++) {
error = Address.getValidationError(PKHLivenet[i], 'testnet', 'pubkeyhash');
should.exist(error);
error.message.should.equal('Address has mismatched network type.');
}
for (i = 0; i < PKHTestnet.length; i++) {
error = Address.getValidationError(PKHTestnet[i], 'livenet', 'pubkeyhash');
should.exist(error);
error.message.should.equal('Address has mismatched network type.');
}
});
it('should not validate on a type mismatch', function() {
for (var i = 0; i < PKHLivenet.length; i++) {
var error = Address.getValidationError(PKHLivenet[i], 'livenet', 'scripthash');
should.exist(error);
error.message.should.equal('Address has mismatched type.');
}
});
it('should not validate on non-base58 characters', function() {
for (var i = 0; i < nonBase58.length; i++) {
var error = Address.getValidationError(nonBase58[i], 'livenet', 'pubkeyhash');
should.exist(error);
error.message.should.equal('Non-base58 character');
}
});
it('testnet addresses are validated correctly', function() {
for (var i = 0; i < PKHTestnet.length; i++) {
var error = Address.getValidationError(PKHTestnet[i], 'testnet');
should.not.exist(error);
}
});
it('addresses with whitespace are validated correctly', function() {
var ws = ' \r \t \n DD4KSSuBJqcjuTcvUg1CgUKeurPUFeEZkE \t \n \r';
var error = Address.getValidationError(ws);
should.not.exist(error);
Address.fromString(ws).toString().should.equal('DD4KSSuBJqcjuTcvUg1CgUKeurPUFeEZkE');
});
});
describe('instantiation', function() {
it('can be instantiated from another address', function() {
var address = Address.fromBuffer(buf);
var address2 = new Address({
hashBuffer: address.hashBuffer,
network: address.network,
type: address.type
});
address.toString().should.equal(address2.toString());
});
});
describe('encodings', function() {
it('should make an address from a buffer', function() {
Address.fromBuffer(buf).toString().should.equal(str);
new Address(buf).toString().should.equal(str);
new Address(buf).toString().should.equal(str);
});
it('should make an address from a string', function() {
Address.fromString(str).toString().should.equal(str);
new Address(str).toString().should.equal(str);
});
it('should make an address using a non-string network', function() {
Address.fromString(str, Networks.livenet).toString().should.equal(str);
});
it('should error because of unrecognized data format', function() {
(function() {
return new Address(new Error());
}).should.throw(bitcore.errors.InvalidArgument);
});
it('should error because of incorrect format for pubkey hash', function() {
(function() {
return new Address.fromPublicKeyHash('notahash');
}).should.throw('Address supplied is not a buffer.');
});
it('should error because of incorrect format for script hash', function() {
(function() {
return new Address.fromScriptHash('notascript');
}).should.throw('Address supplied is not a buffer.');
});
it('should error because of incorrect type for transform buffer', function() {
(function() {
return Address._transformBuffer('notabuffer');
}).should.throw('Address supplied is not a buffer.');
});
it('should error because of incorrect length buffer for transform buffer', function() {
(function() {
        return Address._transformBuffer(Buffer.alloc(20));
}).should.throw('Address buffers must be exactly 21 bytes.');
});
it('should error because of incorrect type for pubkey transform', function() {
(function() {
        return Address._transformPublicKey(Buffer.alloc(20));
}).should.throw('Address must be an instance of PublicKey.');
});
it('should error because of incorrect type for script transform', function() {
(function() {
        return Address._transformScript(Buffer.alloc(20));
}).should.throw('Invalid Argument: script must be a Script instance');
});
it('should error because of incorrect type for string transform', function() {
(function() {
        return Address._transformString(Buffer.alloc(20));
}).should.throw('data parameter supplied is not a string.');
});
it('should make an address from a pubkey hash buffer', function() {
var hash = pubkeyhash; //use the same hash
var a = Address.fromPublicKeyHash(hash, 'livenet');
a.network.should.equal(Networks.livenet);
a.toString().should.equal(str);
var b = Address.fromPublicKeyHash(hash, 'testnet');
b.network.should.equal(Networks.testnet);
b.type.should.equal('pubkeyhash');
new Address(hash, 'livenet').toString().should.equal(str);
});
it('should make an address using the default network', function() {
var hash = pubkeyhash; //use the same hash
var network = Networks.defaultNetwork;
Networks.defaultNetwork = Networks.livenet;
var a = Address.fromPublicKeyHash(hash);
a.network.should.equal(Networks.livenet);
// change the default
Networks.defaultNetwork = Networks.testnet;
var b = Address.fromPublicKeyHash(hash);
b.network.should.equal(Networks.testnet);
// restore the default
Networks.defaultNetwork = network;
});
it('should throw an error for invalid length hashBuffer', function() {
(function() {
return Address.fromPublicKeyHash(buf);
}).should.throw('Address hashbuffers must be exactly 20 bytes.');
});
it('should make this address from a compressed pubkey', function() {
var pubkey = new PublicKey('02789418bb8aee5a63471c09d3402b9d06a1c3d7975635a4a5258e89dff3547079');
var address = Address.fromPublicKey(pubkey, 'livenet');
address.toString().should.equal('DKLLdArnsyhkuPhxDbWYEBgLpQDLerRNrZ');
});
it('should use the default network for pubkey', function() {
var pubkey = new PublicKey('02789418bb8aee5a63471c09d3402b9d06a1c3d7975635a4a5258e89dff3547079');
var address = Address.fromPublicKey(pubkey);
address.network.should.equal(Networks.defaultNetwork);
});
it('should make this address from an uncompressed pubkey', function() {
var pubkey = new PublicKey('0450A38BD7F0AC212FEBA77354A9B036A32E0F7C81FC4E0C5ADCA7C549C4505D2522458C2D9AE3CEFD684E039194B72C8A10F9CB9D4764AB26FCC2718D421D3B84');
var a = Address.fromPublicKey(pubkey, 'livenet');
a.toString().should.equal('DFJ1z7FVd8CEodkQ41w54bmCDYhrNd24Dv');
var b = new Address(pubkey, 'livenet', 'pubkeyhash');
b.toString().should.equal('DFJ1z7FVd8CEodkQ41w54bmCDYhrNd24Dv');
});
it('should classify from a custom network', function() {
var custom = {
name: 'customnetwork',
pubkeyhash: 0x1c,
privatekey: 0x1e,
scripthash: 0x28,
xpubkey: 0x02e8de8f,
xprivkey: 0x02e8da54,
networkMagic: 0x0c110907,
port: 7333
};
var addressString = 'CX4WePxBwq1Y6u7VyMJfmmitE7GiTgC9aE';
Networks.add(custom);
var network = Networks.get('customnetwork');
var address = Address.fromString(addressString);
address.type.should.equal(Address.PayToPublicKeyHash);
address.network.should.equal(network);
Networks.remove(network);
});
describe('from a script', function() {
it('should fail to build address from a non p2sh,p2pkh script', function() {
var s = new Script('OP_CHECKMULTISIG');
(function() {
return new Address(s);
}).should.throw('needs to be p2pkh in, p2pkh out, p2sh in, or p2sh out');
});
it('should make this address from a p2pkh output script', function() {
var s = new Script('OP_DUP OP_HASH160 20 ' +
'0xc8e11b0eb0d2ad5362d894f048908341fa61b6e1 OP_EQUALVERIFY OP_CHECKSIG');
var buf = s.toBuffer();
var a = Address.fromScript(s, 'livenet');
a.toString().should.equal('DPTFMF1EYh2tYtGwWJgFqLy37QgLdSSDQW');
var b = new Address(s, 'livenet');
b.toString().should.equal('DPTFMF1EYh2tYtGwWJgFqLy37QgLdSSDQW');
});
it('should make this address from a p2sh input script', function() {
var s = Script.fromString('OP_HASH160 20 0xa6ed4af315271e657ee307828f54a4365fa5d20f OP_EQUAL');
var a = Address.fromScript(s, 'livenet');
a.toString().should.equal('A7eu6dAkyaNZJpjXwSqgUipxxvKTTjJKzY');
var b = new Address(s, 'livenet');
b.toString().should.equal('A7eu6dAkyaNZJpjXwSqgUipxxvKTTjJKzY');
});
it('returns the same address if the script is a pay to public key hash out', function() {
var address = 'D77Z1nmgSZxJTmtN65n2MVF9yvLSB4MpiC';
var script = Script.buildPublicKeyHashOut(new Address(address));
Address(script, Networks.livenet).toString().should.equal(address);
});
it('returns the same address if the script is a pay to script hash out', function() {
var address = 'A7eu6dAkyaNZJpjXwSqgUipxxvKTTjJKzY';
var script = Script.buildScriptHashOut(new Address(address));
Address(script, Networks.livenet).toString().should.equal(address);
});
});
it('should derive from this known address string livenet', function() {
var address = new Address(str);
var buffer = address.toBuffer();
var slice = buffer.slice(1);
var sliceString = slice.toString('hex');
sliceString.should.equal(pubkeyhash.toString('hex'));
});
it('should derive from this known address string testnet', function() {
var a = new Address(PKHTestnet[0], 'testnet');
var b = new Address(a.toString());
b.toString().should.equal(PKHTestnet[0]);
b.network.should.equal(Networks.testnet);
});
it('should derive from this known address string livenet scripthash', function() {
var a = new Address(P2SHLivenet[0], 'livenet', 'scripthash');
var b = new Address(a.toString());
b.toString().should.equal(P2SHLivenet[0]);
});
it('should derive from this known address string testnet scripthash', function() {
var address = new Address(P2SHTestnet[0], 'testnet', 'scripthash');
address = new Address(address.toString());
address.toString().should.equal(P2SHTestnet[0]);
});
});
describe('#toBuffer', function() {
    it('hash 4a0529a63fcc33e4f51688311b89b97371b5c76c corresponds to address DBtUjAUFWHqS1y8CgetQtPW6YKDhffyFT4', function() {
var address = new Address(str);
address.toBuffer().slice(1).toString('hex').should.equal(pubkeyhash.toString('hex'));
});
});
describe('#object', function() {
it('roundtrip to-from-to', function() {
var obj = new Address(str).toObject();
var address = Address.fromObject(obj);
address.toString().should.equal(str);
});
it('will fail with invalid state', function() {
expect(function() {
return Address.fromObject('¹');
}).to.throw(bitcore.errors.InvalidState);
});
});
describe('#toString', function() {
it('livenet pubkeyhash address', function() {
var address = new Address(str);
address.toString().should.equal(str);
});
it('scripthash address', function() {
var address = new Address(P2SHLivenet[0]);
address.toString().should.equal(P2SHLivenet[0]);
});
it('testnet scripthash address', function() {
var address = new Address(P2SHTestnet[0]);
address.toString().should.equal(P2SHTestnet[0]);
});
it('testnet pubkeyhash address', function() {
var address = new Address(PKHTestnet[0]);
address.toString().should.equal(PKHTestnet[0]);
});
});
describe('#inspect', function() {
it('should output formatted output correctly', function() {
var address = new Address(str);
var output = '<Address: DBtUjAUFWHqS1y8CgetQtPW6YKDhffyFT4, type: pubkeyhash, network: livenet>';
address.inspect().should.equal(output);
});
});
describe('questions about the address', function() {
it('should detect a P2SH address', function() {
new Address(P2SHLivenet[0]).isPayToScriptHash().should.equal(true);
new Address(P2SHLivenet[0]).isPayToPublicKeyHash().should.equal(false);
new Address(P2SHTestnet[0]).isPayToScriptHash().should.equal(true);
new Address(P2SHTestnet[0]).isPayToPublicKeyHash().should.equal(false);
});
it('should detect a Pay To PubkeyHash address', function() {
new Address(PKHLivenet[0]).isPayToPublicKeyHash().should.equal(true);
new Address(PKHLivenet[0]).isPayToScriptHash().should.equal(false);
new Address(PKHTestnet[0]).isPayToPublicKeyHash().should.equal(true);
new Address(PKHTestnet[0]).isPayToScriptHash().should.equal(false);
});
});
it('throws an error if it couldn\'t instantiate', function() {
expect(function() {
return new Address(1);
}).to.throw(TypeError);
});
it('can roundtrip from/to a object', function() {
var address = new Address(P2SHLivenet[0]);
expect(new Address(address.toObject()).toString()).to.equal(P2SHLivenet[0]);
});
it('will use the default network for an object', function() {
var obj = {
hash: '19a7d869032368fd1f1e26e5e73a4ad0e474960e',
type: 'scripthash'
};
var address = new Address(obj);
address.network.should.equal(Networks.defaultNetwork);
});
describe('creating a P2SH address from public keys', function() {
var public1 = '02da5798ed0c055e31339eb9b5cef0d3c0ccdec84a62e2e255eb5c006d4f3e7f5b';
var public2 = '0272073bf0287c4469a2a011567361d42529cd1a72ab0d86aa104ecc89342ffeb0';
var public3 = '02738a516a78355db138e8119e58934864ce222c553a5407cf92b9c1527e03c1a2';
var publics = [public1, public2, public3];
it('can create an address from a set of public keys', function() {
var address = Address.createMultisig(publics, 2, Networks.livenet);
address.toString().should.equal('A6e68GnkmTokZV8xtaRPw7BTBVBCeFFwuT');
address = new Address(publics, 2, Networks.livenet);
address.toString().should.equal('A6e68GnkmTokZV8xtaRPw7BTBVBCeFFwuT');
});
it('works on testnet also', function() {
var address = Address.createMultisig(publics, 2, Networks.testnet);
address.toString().should.equal('2N7T3TAetJrSCruQ39aNrJvYLhG1LJosujf');
});
it('can also be created by Address.createMultisig', function() {
var address = Address.createMultisig(publics, 2);
var address2 = Address.createMultisig(publics, 2);
address.toString().should.equal(address2.toString());
});
it('fails if invalid array is provided', function() {
expect(function() {
return Address.createMultisig([], 3, 'testnet');
}).to.throw('Number of required signatures must be less than or equal to the number of public keys');
});
});
});<|fim▁end|> | |
<|file_name|>ovirt_auth.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ovirt_auth
short_description: "Module to manage authentication to oVirt/RHV"
author: "Ondra Machacek (@machacekondra)"
version_added: "2.2"
description:
- "This module authenticates to oVirt/RHV engine and creates SSO token, which should be later used in
all other oVirt/RHV modules, so all modules don't need to perform login and logout.
This module returns an Ansible fact called I(ovirt_auth). Every module can use this
fact as C(auth) parameter, to perform authentication."
options:
state:
default: present
choices: ['present', 'absent']
description:
- "Specifies if a token should be created or revoked."
username:
required: False
description:
- "The name of the user. For example: I(admin@internal)
Default value is set by I(OVIRT_USERNAME) environment variable."
password:
required: False
description:
- "The password of the user. Default value is set by I(OVIRT_PASSWORD) environment variable."
token:
required: False
description:
- "SSO token to be used instead of login with username/password.
Default value is set by I(OVIRT_TOKEN) environment variable."
version_added: 2.5
url:
required: False
description:
- "A string containing the API URL of the server.
For example: I(https://server.example.com/ovirt-engine/api).
Default value is set by I(OVIRT_URL) environment variable."
- "Either C(url) or C(hostname) is required."
hostname:
required: False
description:
- "A string containing the hostname of the server.
For example: I(server.example.com).
Default value is set by I(OVIRT_HOSTNAME) environment variable."
- "Either C(url) or C(hostname) is required."
version_added: "2.6"
insecure:
required: False
description:
- "A boolean flag that indicates if the server TLS certificate and host name should be checked."
type: bool
ca_file:
required: False
description:
- "A PEM file containing the trusted CA certificates. The
certificate presented by the server will be verified using these CA
certificates. If C(ca_file) parameter is not set, system wide
CA certificate store is used.
Default value is set by I(OVIRT_CAFILE) environment variable."
timeout:
required: False
description:
- "The maximum total time to wait for the response, in
seconds. A value of zero (the default) means wait forever. If
the timeout expires before the response is received an exception
will be raised."
compress:
required: False
description:
- "A boolean flag indicating if the SDK should ask
the server to send compressed responses. The default is I(True).
Note that this is a hint for the server, and that it may return
uncompressed data even when this parameter is set to I(True)."
type: bool
kerberos:
required: False<|fim▁hole|> type: bool
headers:
required: False
description:
- "A dictionary of HTTP headers to be added to each API call."
version_added: "2.4"
requirements:
- python >= 2.7
- ovirt-engine-sdk-python >= 4.3.0
notes:
- "Everytime you use ovirt_auth module to obtain ticket, you need to also revoke the ticket,
when you no longer need it, otherwise the ticket would be revoked by engine when it expires.
For an example of how to achieve that, please take a look at I(examples) section."
- "In order to use this module you have to install oVirt/RHV Python SDK.
To ensure it's installed with correct version you can create the following task:
I(pip: name=ovirt-engine-sdk-python version=4.3.0)"
- "Note that in oVirt/RHV 4.1 if you want to use a user which is not administrator
you must enable the I(ENGINE_API_FILTER_BY_DEFAULT) variable in engine. In
oVirt/RHV 4.2 and later it's enabled by default."
'''
EXAMPLES = '''
- block:
# Create a vault with `ovirt_password` variable which store your
# oVirt/RHV user's password, and include that yaml file with variable:
- include_vars: ovirt_password.yml
- name: Obtain SSO token with using username/password credentials
ovirt_auth:
url: https://ovirt.example.com/ovirt-engine/api
username: admin@internal
ca_file: ca.pem
password: "{{ ovirt_password }}"
# Previous task generated I(ovirt_auth) fact, which you can later use
# in different modules as follows:
- ovirt_vm:
auth: "{{ ovirt_auth }}"
state: absent
name: myvm
always:
- name: Always revoke the SSO token
ovirt_auth:
state: absent
ovirt_auth: "{{ ovirt_auth }}"
# When user will set following environment variables:
# OVIRT_URL = https://fqdn/ovirt-engine/api
# OVIRT_USERNAME = admin@internal
# OVIRT_PASSWORD = the_password
# User can login the oVirt using environment variable instead of variables
# in yaml file.
# This is mainly useful when using Ansible Tower or AWX, as it will work
# for Red Hat Virtualization credentials type.
- name: Obtain SSO token
ovirt_auth:
state: present
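
# The notes above suggest pinning the SDK version with a pip task. A minimal
# sketch of that pre-task (module name and version are taken from the notes,
# not from a tested playbook):
- name: Ensure the oVirt Python SDK is installed
  pip:
    name: ovirt-engine-sdk-python
    version: 4.3.0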
'''
RETURN = '''
ovirt_auth:
description: Authentication facts, needed to perform authentication to oVirt/RHV.
returned: success
type: complex
contains:
token:
description: SSO token which is used for connection to oVirt/RHV engine.
returned: success
type: str
sample: "kdfVWp9ZgeewBXV-iq3Js1-xQJZPSEQ334FLb3eksoEPRaab07DhZ8ED8ghz9lJd-MQ2GqtRIeqhvhCkrUWQPw"
url:
description: URL of the oVirt/RHV engine API endpoint.
returned: success
type: str
sample: "https://ovirt.example.com/ovirt-engine/api"
ca_file:
description: CA file, which is used to verify SSL/TLS connection.
returned: success
type: str
sample: "ca.pem"
insecure:
description: Flag indicating if insecure connection is used.
returned: success
type: bool
sample: False
timeout:
description: Number of seconds to wait for response.
returned: success
type: int
sample: 0
compress:
description: Flag indicating if compression is used for connection.
returned: success
type: bool
sample: True
kerberos:
description: Flag indicating if kerberos is used for authentication.
returned: success
type: bool
sample: False
headers:
description: Dictionary of HTTP headers to be added to each API call.
returned: success
type: dict
'''
import os
import traceback
try:
import ovirtsdk4 as sdk
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import check_sdk
def main():
module = AnsibleModule(
argument_spec=dict(
url=dict(default=None),
hostname=dict(default=None),
username=dict(default=None),
password=dict(default=None, no_log=True),
ca_file=dict(default=None, type='path'),
insecure=dict(required=False, type='bool', default=None),
timeout=dict(required=False, type='int', default=0),
compress=dict(required=False, type='bool', default=True),
kerberos=dict(required=False, type='bool', default=False),
headers=dict(required=False, type='dict'),
state=dict(default='present', choices=['present', 'absent']),
token=dict(default=None, no_log=True),
ovirt_auth=dict(required=None, type='dict'),
),
required_if=[
('state', 'absent', ['ovirt_auth']),
],
supports_check_mode=True,
)
check_sdk(module)
state = module.params.get('state')
if state == 'present':
params = module.params
elif state == 'absent':
params = module.params['ovirt_auth']
def get_required_parameter(param, env_var, required=False):
var = params.get(param) or os.environ.get(env_var)
if not var and required and state == 'present':
module.fail_json(msg="'%s' is a required parameter." % param)
return var
url = get_required_parameter('url', 'OVIRT_URL', required=False)
hostname = get_required_parameter('hostname', 'OVIRT_HOSTNAME', required=False)
if url is None and hostname is None:
module.fail_json(msg="You must specify either 'url' or 'hostname'.")
if url is None and hostname is not None:
url = 'https://{0}/ovirt-engine/api'.format(hostname)
username = get_required_parameter('username', 'OVIRT_USERNAME')
password = get_required_parameter('password', 'OVIRT_PASSWORD')
token = get_required_parameter('token', 'OVIRT_TOKEN')
ca_file = get_required_parameter('ca_file', 'OVIRT_CAFILE')
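    # If 'insecure' was not given explicitly, default to an insecure connection
    # only when no CA file was supplied.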
insecure = params.get('insecure') if params.get('insecure') is not None else not bool(ca_file)
connection = sdk.Connection(
url=url,
username=username,
password=password,
ca_file=ca_file,
insecure=insecure,
timeout=params.get('timeout'),
compress=params.get('compress'),
kerberos=params.get('kerberos'),
headers=params.get('headers'),
token=token,
)
try:
token = connection.authenticate()
module.exit_json(
changed=False,
ansible_facts=dict(
ovirt_auth=dict(
token=token,
url=url,
ca_file=ca_file,
insecure=insecure,
timeout=params.get('timeout'),
compress=params.get('compress'),
kerberos=params.get('kerberos'),
headers=params.get('headers'),
) if state == 'present' else dict()
)
)
except Exception as e:
module.fail_json(msg=str(e), exception=traceback.format_exc())
finally:
# Close the connection, but don't revoke token
connection.close(logout=state == 'absent')
if __name__ == "__main__":
main()<|fim▁end|> | description:
- "A boolean flag indicating if Kerberos authentication
should be used instead of the default basic authentication." |
<|file_name|>splitToWords.H<|end_file_name|><|fim▁begin|>/******************************************************************************
*
* This file is part of canu, a software program that assembles whole-genome
* sequencing reads into contigs.
*
* This software is based on:
* 'Celera Assembler' (http://wgs-assembler.sourceforge.net)
* the 'kmer package' (http://kmer.sourceforge.net)
* both originally distributed by Applera Corporation under the GNU General
* Public License, version 2.
*
* Canu branched from Celera Assembler at its revision 4587.
* Canu branched from the kmer project at its revision 1994.
*
* This file is derived from:
*
* kmer/libutil/splitToWords.H
*
* Modifications by:
*
* Brian P. Walenz from 2005-JUL-12 to 2014-APR-11
* are Copyright 2005-2006,2012,2014 J. Craig Venter Institute, and
* are subject to the GNU General Public License version 2
*
* Brian P. Walenz from 2014-DEC-05 to 2015-AUG-11
* are Copyright 2014-2015 Battelle National Biodefense Institute, and
* are subject to the BSD 3-Clause License<|fim▁hole|> * are released in the public domain
*
* File 'README.licenses' in the root directory of this distribution contains
* full conditions and disclaimers for each license.
*/
#ifndef SPLITTOWORDS_H
#define SPLITTOWORDS_H
class splitToWords {
public:
splitToWords() {
_argWords = 0;
_maxWords = 0;
_arg = 0L;
_maxChars = 0;
_cmd = 0L;
};
splitToWords(char *cmd) {
_argWords = 0;
_maxWords = 0;
_arg = 0L;
_maxChars = 0;
_cmd = 0L;
split(cmd);
};
~splitToWords() {
delete [] _cmd;
delete [] _arg;
};
void split(char *cmd) {
// Step Zero:
//
// Count the length of the string, in words and in characters.
    // For simplicity, we overcount words by just counting white-space.
//
// Then, allocate space for a temporary copy of the string, and a
// set of pointers into the temporary copy (much like argv).
//
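    // (Worked example with a hypothetical input: for cmd = "ab  cd", cmdChars
    // ends up as 7, six characters plus the terminating 0, and cmdWords as 4,
    // 2 plus two white-space hits, which leaves room for both word pointers,
    // the terminating 0L, and one spare slot from the overcount.)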
uint32 cmdChars = 1; // 1 == Space for terminating 0
uint32 cmdWords = 2; // 2 == Space for first word and terminating 0L
for (char *tmp=cmd; *tmp; tmp++) {
cmdWords += (*tmp == ' ') ? 1 : 0;
cmdWords += (*tmp == '\t') ? 1 : 0;
cmdChars++;
}
if (cmdChars > _maxChars) {
delete [] _cmd;
_cmd = new char [cmdChars];
_maxChars = cmdChars;
}
if (cmdWords > _maxWords) {
delete [] _arg;
_arg = new char * [cmdWords];
_maxWords = cmdWords;
}
_argWords = 0;
// Step One:
//
// Determine where the words are in the command string, copying the
// string to _cmd and storing words in _arg.
//
bool isFirst = true;
char *cmdI = cmd;
char *cmdO = _cmd;
while (*cmdI) {
// If we are at a non-space character, we are in a word. If
// this is the first character in the word, save the word in
// the args list.
//
// Otherwise we are at a space and thus not in a word. Make
// all spaces be string terminators, and declare that we are
// at the start of a word.
//
if ((*cmdI != ' ') && (*cmdI != '\t') && (*cmdI != '\n') && (*cmdI != '\r')) {
*cmdO = *cmdI;
if (isFirst) {
_arg[_argWords++] = cmdO;
isFirst = false;
}
} else {
*cmdO = 0;
isFirst = true;
}
cmdI++;
cmdO++;
}
// Finish off the list by terminating the last arg, and
// terminating the list of args.
//
*cmdO = 0;
_arg[_argWords] = 0L;
};
uint32 numWords(void) { return(_argWords); };
char *getWord(uint32 i) { return(_arg[i]); };
char *operator[](uint32 i) { return(_arg[i]); };
int64 operator()(uint32 i) { return(strtoull(_arg[i], NULL, 10)); };
private:
uint32 _argWords;
uint32 _maxWords;
char **_arg;
uint32 _maxChars;
char *_cmd;
};
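
// A minimal usage sketch (hypothetical input, not part of the original header):
//
//   char line[] = "correct horse battery 42";
//   splitToWords W(line);
//
//   W.numWords();   // 4
//   W[0];           // "correct", same as W.getWord(0)
//   W(3);           // 42, parsed with strtoull
//
// split() tokenises an internal copy of the string, so the caller's buffer is
// left untouched and the returned word pointers stay valid only until the next
// split() call (or destruction).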
#endif // SPLITTOWORDS_H<|fim▁end|> | *
* Brian P. Walenz beginning on 2016-FEB-25
* are a 'United States Government Work', and |