| prompt (large_string, lengths 70 – 991k) | completion (large_string, lengths 0 – 1.02k) |
|---|---|
<|file_name|>refcounted.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A generic, safe mechanism by which DOM objects can be pinned and transferred
//! between tasks (or intra-task for asynchronous events). Akin to Gecko's
//! nsMainThreadPtrHandle, this uses thread-safe reference counting and ensures
//! that the actual SpiderMonkey GC integration occurs on the script task via
//! message passing. Ownership of a `Trusted<T>` object means the DOM object of
//! type T to which it points remains alive. Any other behaviour is undefined.
//! To guarantee the lifetime of a DOM object when performing asynchronous operations,
//! obtain a `Trusted<T>` from that object and pass it along with each operation.
//! A usable pointer to the original DOM object can be obtained on the script task
//! from a `Trusted<T>` via the `root` method.
//!
//! The implementation of Trusted<T> is as follows:
//! A hashtable resides in the script task, keyed on the pointer to the Rust DOM object.
//! The values in this hashtable are atomic reference counts. When a Trusted<T> object is
//! created or cloned, this count is increased. When a Trusted<T> is dropped, the count
//! decreases. If the count hits zero, a message is dispatched to the script task to remove
//! the entry from the hashmap if the count is still zero. The JS reflector for the DOM object
//! is rooted when a hashmap entry is first created, and unrooted when the hashmap entry
//! is removed.
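//!
//! A rough usage sketch (the `cx`, `node`, and `chan` values are assumed to
//! come from the surrounding script-task code; they are not part of this
//! module):
//!
//! ```ignore
//! // On the script task: pin the DOM object before handing work off.
//! let trusted_node = Trusted::new(cx, &*node, chan);
//! // ... move `trusted_node` to another task; later, back on the script
//! // task, recover a rooted pointer:
//! let node = trusted_node.root();
//! ```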
use dom::bindings::js::Root;
use dom::bindings::utils::{Reflector, Reflectable};
use dom::bindings::trace::trace_reflector;
use script_task::{ScriptMsg, ScriptChan};
use js::jsapi::{JSContext, JSTracer};
use libc;
use std::cell::RefCell;
use std::collections::hash_map::HashMap;
use std::collections::hash_map::Entry::{Vacant, Occupied};
use std::marker::PhantomData;
use std::sync::{Arc, Mutex};
use core::nonzero::NonZero;
#[allow(missing_docs)] // FIXME
mod dummy { // Attributes don’t apply through the macro.
use std::rc::Rc;
use std::cell::RefCell;
use super::LiveDOMReferences;
thread_local!(pub static LIVE_REFERENCES: Rc<RefCell<Option<LiveDOMReferences>>> =
Rc::new(RefCell::new(None)));
}
pub use self::dummy::LIVE_REFERENCES;
/// A pointer to a Rust DOM object that needs to be destroyed.
pub struct TrustedReference(*const libc::c_void);
unsafe impl Send for TrustedReference {}
/// A safe wrapper around a raw pointer to a DOM object that can be
/// shared among tasks for use in asynchronous operations. The underlying
/// DOM object is guaranteed to live at least as long as the last outstanding
/// `Trusted<T>` instance.
#[allow_unrooted_interior]
pub struct Trusted<T: Reflectable> {
/// A pointer to the Rust DOM object of type T, but void to allow
/// sending `Trusted<T>` between tasks, regardless of T's sendability.
ptr: *const libc::c_void,
refcount: Arc<Mutex<usize>>,
script_chan: Box<ScriptChan + Send>,
owner_thread: *const libc::c_void,
phantom: PhantomData<T>,
}
unsafe impl<T: Reflectable> Send for Trusted<T> {}
impl<T: Reflectable> Trusted<T> {
/// Create a new `Trusted<T>` instance from an existing DOM pointer. The DOM object will
/// be prevented from being GCed for the duration of the resulting `Trusted<T>` object's
/// lifetime.
pub fn new(_cx: *mut JSContext, ptr: &T, script_chan: Box<ScriptChan + Send>) -> Trusted<T> {
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
let refcount = live_references.addref(&*ptr as *const T);
Trusted {
ptr: &*ptr as *const T as *const libc::c_void,
refcount: refcount,
script_chan: script_chan.clone(),
owner_thread: (&*live_references) as *const _ as *const libc::c_void,
phantom: PhantomData,
}
})
}
/// Obtain a usable DOM pointer from a pinned `Trusted<T>` value. Fails if used on
/// a different thread than the original value from which this `Trusted<T>` was
/// obtained.
pub fn root(&self) -> Root<T> {
assert!(LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
self.owner_thread == (&*live_references) as *const _ as *const libc::c_void
}));
unsafe {
Root::new(NonZero::new(self.ptr as *const T))<|fim▁hole|> }
}
impl<T: Reflectable> Clone for Trusted<T> {
fn clone(&self) -> Trusted<T> {
{
let mut refcount = self.refcount.lock().unwrap();
*refcount += 1;
}
Trusted {
ptr: self.ptr,
refcount: self.refcount.clone(),
script_chan: self.script_chan.clone(),
owner_thread: self.owner_thread,
phantom: PhantomData,
}
}
}
impl<T: Reflectable> Drop for Trusted<T> {
fn drop(&mut self) {
let mut refcount = self.refcount.lock().unwrap();
assert!(*refcount > 0);
*refcount -= 1;
if *refcount == 0 {
// It's possible this send will fail if the script task
// has already exited. There's not much we can do at this
// point though.
let msg = ScriptMsg::RefcountCleanup(TrustedReference(self.ptr));
let _ = self.script_chan.send(msg);
}
}
}
/// The set of live, pinned DOM objects that are currently prevented
/// from being garbage collected due to outstanding references.
pub struct LiveDOMReferences {
// keyed on pointer to Rust DOM object
table: RefCell<HashMap<*const libc::c_void, Arc<Mutex<usize>>>>
}
impl LiveDOMReferences {
/// Set up the task-local data required for storing the outstanding DOM references.
pub fn initialize() {
LIVE_REFERENCES.with(|ref r| {
*r.borrow_mut() = Some(LiveDOMReferences {
table: RefCell::new(HashMap::new()),
})
});
}
fn addref<T: Reflectable>(&self, ptr: *const T) -> Arc<Mutex<usize>> {
let mut table = self.table.borrow_mut();
match table.entry(ptr as *const libc::c_void) {
Occupied(mut entry) => {
let refcount = entry.get_mut();
*refcount.lock().unwrap() += 1;
refcount.clone()
}
Vacant(entry) => {
let refcount = Arc::new(Mutex::new(1));
entry.insert(refcount.clone());
refcount
}
}
}
/// Unpin the given DOM object if its refcount is 0.
pub fn cleanup(raw_reflectable: TrustedReference) {
let TrustedReference(raw_reflectable) = raw_reflectable;
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
let mut table = live_references.table.borrow_mut();
match table.entry(raw_reflectable) {
Occupied(entry) => {
if *entry.get().lock().unwrap() != 0 {
// there could have been a new reference taken since
// this message was dispatched.
return;
}
let _ = entry.remove();
}
Vacant(_) => {
// there could be a cleanup message dispatched, then a new
// pinned reference obtained and released before the message
// is processed, at which point there would be no matching
// hashtable entry.
info!("attempt to cleanup an unrecognized reflector");
}
}
})
}
}
/// A JSTraceDataOp for tracing reflectors held in LIVE_REFERENCES
pub unsafe extern fn trace_refcounted_objects(tracer: *mut JSTracer, _data: *mut libc::c_void) {
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
let table = live_references.table.borrow();
for obj in table.keys() {
let reflectable = &*(*obj as *const Reflector);
trace_reflector(tracer, "LIVE_REFERENCES", reflectable);
}
});
}<|fim▁end|>
|
}
|
<|file_name|>views.py<|end_file_name|><|fim▁begin|>#-*-*- encoding: utf-8 -*-*-
from django.shortcuts import render
from django.template import RequestContext, loader, Context
from django.http import JsonResponse
from .models import Airburst
def index(request):
return render(request, 'NextGenThreat/index.html', {})
def radar(request):
latest_airburst_list = Airburst.objects.all()
context = {'latest_airburst_list': latest_airburst_list}
return render(request, 'NextGenThreat/radar.html', context)
def credits(request):
context = {'contributors': [
{'name': 'Aitor', 'lastname': 'Brazaola', 'description': 'Aitor is a student of Computer Engineering. Being a geek and software developer, he has a podcast with Iban Eguia named El Gato de Turing.'},
{'name': 'Eneko', 'lastname': 'Cruz', 'description': 'Eneko is studying Computer Science at the University of Deusto and Mathematics at the National University of Distance Education (UNED). His main interests are information security, mathematics and computer vision.'},
{'name': 'Elena', 'lastname': 'López de Dicastillo', 'description': 'Elena is a student at University of Deusto. She is studying Telecom Engineering and is very interested in fields such as Internet security, biomedicine and aeronautics. She is currently working on OpenStratos to send a Raspberry Pi to the stratosphere.'},
{'name': 'Iban', 'lastname': 'Eguia', 'description': 'Iban is a future IT engineer and a total space geek. Translator and contributor at CodeIgniter and core developer at OpenStratos and XG Project. He has a podcast with Aitor Brazaola called El Gato de Turing.'},
{'name': 'Alejandro', 'lastname': 'Pérez', 'description': 'Alejandro is a last year software engineering student, researcher in bioinformatics and net security and cofounder of aprenditeka.'},<|fim▁hole|>
def api(request):
airburst_list = Airburst.objects.all()
response = {}
for airburst in airburst_list:
response[airburst.id] = {'radiated_energy': airburst.radiated_energy,
'impact_energy': airburst.impact_energy,
'latitude': airburst.latitude,
'longitude': airburst.longitude,
'date': airburst.date.isoformat(),
'altitude': airburst.altitude,
}
return JsonResponse(response)<|fim▁end|>
|
],
}
return render(request, 'NextGenThreat/credits.html', context)
|
<|file_name|>rollforward.py<|end_file_name|><|fim▁begin|># This file is part of taxtastic.
#
# taxtastic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# taxtastic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with taxtastic. If not, see <http://www.gnu.org/licenses/>.
"""Restore a change to a refpkg immediately after being reverted
Restore the last ``N`` rolled back operations on ``refpkg``, or the
last operation if ``-n`` is omitted. If there are not at least ``N``
operations that can be rolled forward on this refpkg, then an error is
returned and no changes are made to the refpkg.
Note that operations can only be rolled forward immediately after
being rolled back. If any operation besides a rollback occurs, all
roll forward information is removed.
"""
import logging
from taxtastic import refpkg<|fim▁hole|>def build_parser(parser):
parser.add_argument('refpkg', action='store', metavar='refpkg',
help='the reference package to operate on')
parser.add_argument('-n', action='store', metavar='int',
default=1, type=int,
help='Number of operations to roll forward')
def action(args):
"""Roll forward previously rolled back commands on a refpkg.
*args* should be an argparse object with fields refpkg (giving the
path to the refpkg to operate on) and optionally n (giving the
number of operations to roll forward).
"""
log.info('loading reference package')
r = refpkg.Refpkg(args.refpkg, create=False)
# First check if we can do n rollforwards
q = r.contents
for i in range(args.n):
if q['rollforward'] is None:
log.error(
'Cannot rollforward {} changes; '
'refpkg only records {} rolled back changes.'.format(args.n, i))
return 1
else:
q = q['rollforward'][1]
for i in range(args.n):
r.rollforward()
return 0<|fim▁end|>
|
log = logging.getLogger(__name__)
|
<|file_name|>to_encrypt.py<|end_file_name|><|fim▁begin|>'''
This mission is part of the set. Another one is the Caesar cipher decryptor.
Your mission is to encrypt a secret message (text only, without special chars like "!", "&", "?" etc.) using a Caesar cipher, where each letter of the input text is replaced by another that stands at a fixed distance. For example ("a b c", 3) == "d e f"
example
Input: A secret message as a string (lowercase letters only and white spaces)
Output: The same string, but encrypted
Precondition:
0 < len(text) < 50
-26 < delta < 26 <|fim▁hole|> alpha = 'abcdefghijklmnopqrstuvwxyz'
result = ''
for letter in text:
index = alpha.find(letter)
if(index > -1):
print(index+delta)
result = result + result.join(alpha[(index + delta) % 26])
else:
result = result + result.join(' ')
return result
if __name__ == '__main__':
print("Example:")
print(to_encrypt('abc', 10))
#These "asserts" using only for self-checking and not necessary for auto-testing
assert to_encrypt("a b c", 3) == "d e f"
assert to_encrypt("a b c", -3) == "x y z"
assert to_encrypt("simple text", 16) == "iycfbu junj"
assert to_encrypt("important text", 10) == "swzybdkxd dohd"
assert to_encrypt("state secret", -13) == "fgngr frperg"
print("Coding complete? Click 'Check' to earn cool rewards!")<|fim▁end|>
|
'''
def to_encrypt(text, delta):
|
<|file_name|>serve.js<|end_file_name|><|fim▁begin|>var watchers = Object.create(null);
var wait = function (linkid, callback) {
watchers[linkid] = cross("options", "/:care-" + linkid).success(function (res) {
if (watchers[linkid]) wait(linkid, callback);
var a = JSAM.parse(res.responseText);
if (isFunction(callback)) a.forEach(b => callback(b));
}).error(function () {
if (watchers[linkid]) wait(linkid, callback);
});
};
var kill = function (linkid) {
var r = watchers[linkid];
delete watchers[linkid];
if (r) r.abort();<|fim▁hole|>var cast = function (linkid, data) {
data = encode(data);
var inc = 0, size = block_size;
return new Promise(function (ok, oh) {
var run = function () {
if (inc > data.length) return ok();
cross("options", "/:cast-" + linkid + "?" + data.slice(inc, inc + size)).success(run).error(oh);
inc += size;
};
run();
}).then(function () {
if (inc === data.length) {
return cast(linkid, '');
}
});
};
var encode = function (data) {
var str = encodeURIComponent(data.replace(/\-/g, '--')).replace(/%/g, '-');
return str;
};
var decode = function (params) {
params = params.replace(/\-\-|\-/g, a => a === '-' ? '%' : '-');
params = decodeURIComponent(params);
return params;
};
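// A worked round trip of the escaping scheme above (values computed by hand,
// purely illustrative):
//   encode('a-b c')    // 'a-b c' -> 'a--b c' -> 'a--b%20c' -> 'a--b-20c'
//   decode('a--b-20c') // 'a--b-20c' -> 'a-b%20c' -> 'a-b c'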
function serve(listener) {
return new Promise(function (ok, oh) {
cross("options", "/:link").success(function (res) {
var responseText = res.responseText;
wait(responseText, listener);
ok(responseText);
}).error(oh);
})
}
function servd(getdata) {
return serve(function (linkid) {
cast(linkid, JSAM.stringify(getdata()));
});
}
function servp(linkto) {
return new Promise(function (ok, oh) {
var blocks = [], _linkid;
serve(function (block) {
blocks.push(block);
if (block.length < block_size) {
var data = decode(blocks.join(''));
ok(JSAM.parse(data));
kill(_linkid);
}
}).then(function (linkid) {
cast(linkto, linkid);
_linkid = linkid;
}, oh);
});
}
serve.servd = servd;
serve.servp = servp;
serve.kill = kill;<|fim▁end|>
|
};
var block_size = 1024;
|
<|file_name|>find_2g.py<|end_file_name|><|fim▁begin|>import sys
import pickle
##########################################################
# usage
# pypy find_2g.py xid_train.p ../../data/train
# xid_train.p is a list like ['loIP1tiwELF9YNZQjSUO',''....] to specify
# the order of samples in traing data
# ../../data/train is the path of original train data
##########################################################<|fim▁hole|>
xid=pickle.load(open(xid_name)) #xid_train.p or xid_test.p
newc=pickle.load(open('newc.p'))
cmd2g={}
for i in newc:
for j in newc:
cmd2g[(i,j)]=0
print newc
for c,f in enumerate(xid):#(files[len(files)/10*a1:len(files)/10*a2]):
count={}
for i in cmd2g:
count[i]=0
fo=open(data_path+'/'+f+'.asm')
tot=0
a=-1
b=-1
for line in fo:
xx=line.split()
for x in xx:
if x in newc:
a=b
b=x
if (a,b) in cmd2g:
count[(a,b)]+=1
tot+=1
# print (b,a)
fo.close()
if c%10==0:
print c*1.0/len(xid),tot
for i in cmd2g:
cmd2g[i]=count[i]+cmd2g[i]
del count
import pickle
cmd2gx={}
for i in cmd2g:
if cmd2g[i]>10:
cmd2gx[i]=cmd2g[i]
print len(cmd2gx)
pickle.dump(cmd2gx,open('cmd2g.p','w'))<|fim▁end|>
|
xid_name=sys.argv[1]
data_path=sys.argv[2]
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! Models the moonlander problem using genetic programming.
//!
//! The crate contains the AST node, fitness calculations,
//! and reporting code (to interface with a web-based visualizer
//! in another crate).
//!
//! The code is exercised using the examples in the `examples/`
//! directory.
#[macro_use] extern crate moonlander_gp;
extern crate toml;
extern crate clap;
extern crate rand;
extern crate rustc_serialize;<|fim▁hole|>pub mod num;
pub mod fitness;<|fim▁end|>
|
#[macro_use] pub mod run;
pub mod grammar;
pub mod sim;
|
<|file_name|>generic-exterior-unique.rs<|end_file_name|><|fim▁begin|>// run-pass<|fim▁hole|>struct Recbox<T> {x: Box<T>}
fn reclift<T>(t: T) -> Recbox<T> { return Recbox { x: Box::new(t) }; }
pub fn main() {
let foo: isize = 17;
let rbfoo: Recbox<isize> = reclift::<isize>(foo);
assert_eq!(*rbfoo.x, foo);
}<|fim▁end|>
| |
<|file_name|>neg_binomial_log.hpp<|end_file_name|><|fim▁begin|><|fim▁hole|>#include <stan/math/prim/scal/meta/return_type.hpp>
#include <stan/math/prim/scal/prob/neg_binomial_lpmf.hpp>
namespace stan {
namespace math {
/**
* @deprecated use <code>neg_binomial_lpmf</code>
*/
template <bool propto,
typename T_n,
typename T_shape, typename T_inv_scale>
typename return_type<T_shape, T_inv_scale>::type
neg_binomial_log(const T_n& n,
const T_shape& alpha,
const T_inv_scale& beta) {
return neg_binomial_lpmf<propto, T_n,
T_shape, T_inv_scale>(n, alpha, beta);
}
/**
* @deprecated use <code>neg_binomial_lpmf</code>
*/
template <typename T_n,
typename T_shape, typename T_inv_scale>
inline
typename return_type<T_shape, T_inv_scale>::type
neg_binomial_log(const T_n& n,
const T_shape& alpha,
const T_inv_scale& beta) {
return neg_binomial_lpmf<T_n, T_shape, T_inv_scale>(n, alpha, beta);
}
}
}
#endif<|fim▁end|>
|
#ifndef STAN_MATH_PRIM_SCAL_PROB_NEG_BINOMIAL_LOG_HPP
#define STAN_MATH_PRIM_SCAL_PROB_NEG_BINOMIAL_LOG_HPP
|
<|file_name|>AboutHigherOrderFunctions.js<|end_file_name|><|fim▁begin|>var _; //globals
/* This section uses a functional extension known as Underscore.js - http://documentcloud.github.com/underscore/
"Underscore is a utility-belt library for JavaScript that provides a lot of the functional programming support
that you would expect in Prototype.js (or Ruby), but without extending any of the built-in JavaScript objects.
It's the tie to go along with jQuery's tux."
*/
describe("About Higher Order Functions", function () {
it("should use filter to return array items that meet a criteria", function () {
var numbers = [1,2,3];
var odd = _(numbers).filter(function (x) { return x % 2 !== 0 });
expect(odd).toEqual([1,3]);
expect(odd.length).toBe(2);
expect(numbers.length).toBe(3);
});
it("should use 'map' to transform each element", function () {
var numbers = [1, 2, 3];
var numbersPlus1 = _(numbers).map(function(x) { return x + 1 });
expect(numbersPlus1).toEqual([2,3,4]);
expect(numbers).toEqual([1,2,3]);
});
it("should use 'reduce' to update the same result on each iteration", function () {
var numbers = [1, 2, 3];
var reduction = _(numbers).reduce(
function(memo, x) {
//note: memo is the result from last call, and x is the current number
return memo + x;
},
/* initial */ 0
);
expect(reduction).toBe(6);
expect(numbers).toEqual([1,2,3]);
});
it("should use 'forEach' for simple iteration", function () {
var numbers = [1,2,3];
var msg = "";
var isEven = function (item) {
msg += (item % 2) === 0;<|fim▁hole|>
_(numbers).forEach(isEven);
expect(msg).toEqual('falsetruefalse');
expect(numbers).toEqual([1,2,3]);
});
it("should use 'all' to test whether all items pass condition", function () {
var onlyEven = [2,4,6];
var mixedBag = [2,4,5,6];
var isEven = function(x) { return x % 2 === 0 };
expect(_(onlyEven).all(isEven)).toBe(true);
expect(_(mixedBag).all(isEven)).toBe(false);
});
it("should use 'any' to test if any items passes condition" , function () {
var onlyEven = [2,4,6];
var mixedBag = [2,4,5,6];
var isEven = function(x) { return x % 2 === 0 };
expect(_(onlyEven).any(isEven)).toBe(true);
expect(_(mixedBag).any(isEven)).toBe(true);
});
it("should use range to generate an array", function() {
expect(_.range(3)).toEqual([0, 1, 2]);
expect(_.range(1, 4)).toEqual([1, 2, 3]);
expect(_.range(0, -4, -1)).toEqual([0, -1, -2, -3]);
});
it("should use flatten to make nested arrays easy to work with", function() {
expect(_([ [1, 2], [3, 4] ]).flatten()).toEqual([1,2,3,4]);
});
it("should use chain() ... .value() to use multiple higher order functions", function() {
var result = _([ [0, 1], 2 ]).chain()
.flatten()
.map(function(x) { return x+1 } )
.reduce(function (sum, x) { return sum + x })
.value();
expect(result).toEqual(6);
});
});<|fim▁end|>
|
};
|
<|file_name|>version.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
__version__ = "9.1.4"
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
from .item_status import * # noqa
|
<|file_name|>predicates_test.go<|end_file_name|><|fim▁begin|>package predicates
import (
"reflect"
"testing"
)
func TestPredicatesArgs(t *testing.T) {
methods := map[string][]string{
"onA5": {"a"},
"onA9": {"b"},
"onA13": {"d"},
"onB9": {"innermost"},
"onB10": {"inner"},
"onB11": {"out"},
"onC1": {"rest"},
}
typ := reflect.TypeOf(&current{})
for nm, args := range methods {
meth, ok := typ.MethodByName(nm)
if !ok {<|fim▁hole|> t.Errorf("want *current to have method %s", nm)
continue
}
if n := meth.Func.Type().NumIn(); n != len(args)+1 {
t.Errorf("%q: want %d arguments, got %d", nm, len(args)+1, n)
continue
}
}
}<|fim▁end|>
| |
<|file_name|>slugs_spec.js<|end_file_name|><|fim▁begin|>var should = require('should'),
supertest = require('supertest'),
testUtils = require('../../../utils/index'),
localUtils = require('./utils'),
config = require('../../../../server/config/index'),
ghost = testUtils.startGhost,
request;
describe('Slug API', function () {
var accesstoken = '', ghostServer;
before(function () {
return ghost()
.then(function (_ghostServer) {
ghostServer = _ghostServer;
request = supertest.agent(config.get('url'));
})
.then(function () {
return localUtils.doAuth(request);<|fim▁hole|> });
it('should be able to get a post slug', function (done) {
request.get(localUtils.API.getApiQuery('slugs/post/a post title/'))
.set('Authorization', 'Bearer ' + accesstoken)
.expect('Content-Type', /json/)
.expect('Cache-Control', testUtils.cacheRules.private)
.expect(200)
.end(function (err, res) {
if (err) {
return done(err);
}
should.not.exist(res.headers['x-cache-invalidate']);
var jsonResponse = res.body;
should.exist(jsonResponse);
should.exist(jsonResponse.slugs);
jsonResponse.slugs.should.have.length(1);
localUtils.API.checkResponse(jsonResponse.slugs[0], 'slug');
jsonResponse.slugs[0].slug.should.equal('a-post-title');
done();
});
});
it('should be able to get a tag slug', function (done) {
request.get(localUtils.API.getApiQuery('slugs/post/atag/'))
.set('Authorization', 'Bearer ' + accesstoken)
.expect('Content-Type', /json/)
.expect('Cache-Control', testUtils.cacheRules.private)
.expect(200)
.end(function (err, res) {
if (err) {
return done(err);
}
should.not.exist(res.headers['x-cache-invalidate']);
var jsonResponse = res.body;
should.exist(jsonResponse);
should.exist(jsonResponse.slugs);
jsonResponse.slugs.should.have.length(1);
localUtils.API.checkResponse(jsonResponse.slugs[0], 'slug');
jsonResponse.slugs[0].slug.should.equal('atag');
done();
});
});
it('should be able to get a user slug', function (done) {
request.get(localUtils.API.getApiQuery('slugs/user/user name/'))
.set('Authorization', 'Bearer ' + accesstoken)
.expect('Content-Type', /json/)
.expect('Cache-Control', testUtils.cacheRules.private)
.expect(200)
.end(function (err, res) {
if (err) {
return done(err);
}
should.not.exist(res.headers['x-cache-invalidate']);
var jsonResponse = res.body;
should.exist(jsonResponse);
should.exist(jsonResponse.slugs);
jsonResponse.slugs.should.have.length(1);
localUtils.API.checkResponse(jsonResponse.slugs[0], 'slug');
jsonResponse.slugs[0].slug.should.equal('user-name');
done();
});
});
it('should be able to get an app slug', function (done) {
request.get(localUtils.API.getApiQuery('slugs/app/cool app/'))
.set('Authorization', 'Bearer ' + accesstoken)
.expect('Content-Type', /json/)
.expect('Cache-Control', testUtils.cacheRules.private)
.expect(200)
.end(function (err, res) {
if (err) {
return done(err);
}
should.not.exist(res.headers['x-cache-invalidate']);
var jsonResponse = res.body;
should.exist(jsonResponse);
should.exist(jsonResponse.slugs);
jsonResponse.slugs.should.have.length(1);
localUtils.API.checkResponse(jsonResponse.slugs[0], 'slug');
jsonResponse.slugs[0].slug.should.equal('cool-app');
done();
});
});
it('should not be able to get a slug for an unknown type', function (done) {
request.get(localUtils.API.getApiQuery('slugs/unknown/who knows/'))
.set('Authorization', 'Bearer ' + accesstoken)
.set('Accept', 'application/json')
.expect('Content-Type', /json/)
.expect('Cache-Control', testUtils.cacheRules.private)
.expect(400)
.end(function (err, res) {
if (err) {
return done(err);
}
var jsonResponse = res.body;
should.exist(jsonResponse.errors);
done();
});
});
});<|fim▁end|>
|
})
.then(function (token) {
accesstoken = token;
});
|
<|file_name|>PluginBrowser.cpp<|end_file_name|><|fim▁begin|>/*
* PluginBrowser.cpp - implementation of the plugin-browser
*
* Copyright (c) 2005-2009 Tobias Doerffel <tobydox/at/users.sourceforge.net>
*
* This file is part of LMMS - https://lmms.io
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program (see COPYING); if not, write to the
* Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301 USA.
*
*/
#include "PluginBrowser.h"
#include <QHeaderView>
#include <QLabel>
#include <QLineEdit>
#include <QMouseEvent>
#include <QPainter>
#include <QStyleOption>
#include <QTreeWidget>
#include "embed.h"
#include "Engine.h"
#include "gui_templates.h"
#include "StringPairDrag.h"
#include "PluginFactory.h"
PluginBrowser::PluginBrowser( QWidget * _parent ) :
SideBarWidget( tr( "Instrument Plugins" ),
embed::getIconPixmap( "plugins" ).transformed( QTransform().rotate( 90 ) ), _parent )
{
setWindowTitle( tr( "Instrument browser" ) );
m_view = new QWidget( contentParent() );
//m_view->setFrameShape( QFrame::NoFrame );
addContentWidget( m_view );
QVBoxLayout * view_layout = new QVBoxLayout( m_view );
view_layout->setMargin( 5 );
view_layout->setSpacing( 5 );
auto hint = new QLabel( tr( "Drag an instrument "
"into either the Song-Editor, the "
"Beat+Bassline Editor or into an "
"existing instrument track." ),
m_view );
hint->setWordWrap( true );
QLineEdit * searchBar = new QLineEdit( m_view );
searchBar->setPlaceholderText( "Search" );
searchBar->setMaxLength( 64 );
searchBar->setClearButtonEnabled( true );
m_descTree = new QTreeWidget( m_view );
m_descTree->setColumnCount( 1 );
m_descTree->header()->setVisible( false );
m_descTree->setIndentation( 10 );
m_descTree->setSelectionMode( QAbstractItemView::NoSelection );
connect( searchBar, SIGNAL( textEdited( const QString & ) ),
this, SLOT( onFilterChanged( const QString & ) ) );
view_layout->addWidget( hint );
view_layout->addWidget( searchBar );
view_layout->addWidget( m_descTree );
// Add plugins to the tree
addPlugins();
// Resize
m_descTree->header()->setSectionResizeMode( QHeaderView::ResizeToContents );
// Hide empty roots
updateRootVisibilities();
}
void PluginBrowser::updateRootVisibility( int rootIndex )
{
QTreeWidgetItem * root = m_descTree->topLevelItem( rootIndex );
root->setHidden( !root->childCount() );
}
void PluginBrowser::updateRootVisibilities()
{
int rootCount = m_descTree->topLevelItemCount();
for (int rootIndex = 0; rootIndex < rootCount; ++rootIndex)
{
updateRootVisibility( rootIndex );
}
}
void PluginBrowser::onFilterChanged( const QString & filter )
{
int rootCount = m_descTree->topLevelItemCount();
for (int rootIndex = 0; rootIndex < rootCount; ++rootIndex)
{
QTreeWidgetItem * root = m_descTree->topLevelItem( rootIndex );
int itemCount = root->childCount();
for (int itemIndex = 0; itemIndex < itemCount; ++itemIndex)
{
QTreeWidgetItem * item = root->child( itemIndex );
PluginDescWidget * descWidget = static_cast<PluginDescWidget *>
(m_descTree->itemWidget( item, 0));
if (descWidget->name().contains(filter, Qt::CaseInsensitive))
{
item->setHidden( false );
}
else
{
item->setHidden( true );
}
}
}
}
void PluginBrowser::addPlugins()
{
// Add a root node to the plugin tree with the specified `label` and return it
const auto addRoot = [this](auto label)
{
const auto root = new QTreeWidgetItem();
root->setText(0, label);
m_descTree->addTopLevelItem(root);
return root;
};
// Add the plugin identified by `key` to the tree under the root node `root`
const auto addPlugin = [this](const auto& key, auto root)
{
const auto item = new QTreeWidgetItem();
root->addChild(item);
m_descTree->setItemWidget(item, 0, new PluginDescWidget(key, m_descTree));
};
// Remove any existing plugins from the tree
m_descTree->clear();
// Fetch and sort all instrument plugin descriptors
auto descs = pluginFactory->descriptors(Plugin::Instrument);
std::sort(descs.begin(), descs.end(),
[](auto d1, auto d2)
{
return qstricmp(d1->displayName, d2->displayName) < 0;
}
);
// Add a root node to the tree for native LMMS plugins
const auto lmmsRoot = addRoot("LMMS");
lmmsRoot->setExpanded(true);
// Add all of the descriptors to the tree
for (const auto desc : descs)
{
if (desc->subPluginFeatures)
{
// Fetch and sort all subplugins for this plugin descriptor
auto subPluginKeys = Plugin::Descriptor::SubPluginFeatures::KeyList{};<|fim▁hole|> {
return QString::compare(l.displayName(), r.displayName(), Qt::CaseInsensitive) < 0;
}
);
// Create a root node for this plugin and add the subplugins under it
const auto root = addRoot(desc->displayName);
for (const auto& key : subPluginKeys) { addPlugin(key, root); }
}
else
{
addPlugin(Plugin::Descriptor::SubPluginFeatures::Key(desc, desc->name), lmmsRoot);
}
}
}
PluginDescWidget::PluginDescWidget(const PluginKey &_pk,
QWidget * _parent ) :
QWidget( _parent ),
m_pluginKey( _pk ),
m_logo( _pk.logo()->pixmap() ),
m_mouseOver( false )
{
setFixedHeight( DEFAULT_HEIGHT );
setMouseTracking( true );
setCursor( Qt::PointingHandCursor );
setToolTip(_pk.desc->subPluginFeatures
? _pk.description()
: tr(_pk.desc->description));
}
QString PluginDescWidget::name() const
{
return m_pluginKey.displayName();
}
void PluginDescWidget::paintEvent( QPaintEvent * )
{
QPainter p( this );
// Paint everything according to the style sheet
QStyleOption o;
o.initFrom( this );
style()->drawPrimitive( QStyle::PE_Widget, &o, &p, this );
// Draw the rest
const int s = 16 + ( 32 * ( qBound( 24, height(), 60 ) - 24 ) ) /
( 60 - 24 );
const QSize logo_size( s, s );
QPixmap logo = m_logo.scaled( logo_size, Qt::KeepAspectRatio,
Qt::SmoothTransformation );
p.drawPixmap( 4, 4, logo );
QFont f = p.font();
if ( m_mouseOver )
{
f.setBold( true );
}
p.setFont( f );
p.drawText( 10 + logo_size.width(), 15, m_pluginKey.displayName());
}
void PluginDescWidget::enterEvent( QEvent * _e )
{
m_mouseOver = true;
QWidget::enterEvent( _e );
}
void PluginDescWidget::leaveEvent( QEvent * _e )
{
m_mouseOver = false;
QWidget::leaveEvent( _e );
}
void PluginDescWidget::mousePressEvent( QMouseEvent * _me )
{
if ( _me->button() == Qt::LeftButton )
{
Engine::setDndPluginKey(&m_pluginKey);
new StringPairDrag("instrument",
QString::fromUtf8(m_pluginKey.desc->name), m_logo, this);
leaveEvent( _me );
}
}<|fim▁end|>
|
desc->subPluginFeatures->listSubPluginKeys(desc, subPluginKeys);
std::sort(subPluginKeys.begin(), subPluginKeys.end(),
[](const auto& l, const auto& r)
|
<|file_name|>ElasticS3ObjectDAO.java<|end_file_name|><|fim▁begin|>/*
The MIT License (MIT)
Copyright (c) 2016 EMC Corporation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
package com.emc.ecs.metadata.dao.elasticsearch;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Map.Entry;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.Settings.Builder;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.transport.ReceiveTimeoutTransportException;
import org.elasticsearch.transport.client.PreBuiltTransportClient;
import org.elasticsearch.xpack.client.PreBuiltXPackTransportClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.emc.ecs.metadata.dao.EcsCollectionType;
import com.emc.ecs.metadata.dao.ObjectDAO;
import com.emc.ecs.metadata.utils.Constants;
import com.emc.object.s3.bean.AbstractVersion;
import com.emc.object.s3.bean.DeleteMarker;
import com.emc.object.s3.bean.ListObjectsResult;
import com.emc.object.s3.bean.ListVersionsResult;
import com.emc.object.s3.bean.QueryMetadata;
import com.emc.object.s3.bean.QueryObject;
import com.emc.object.s3.bean.QueryObjectsResult;
import com.emc.object.s3.bean.S3Object;
import com.emc.object.s3.bean.Version;
public class ElasticS3ObjectDAO implements ObjectDAO {
private final static String CLIENT_SNIFFING_CONFIG = "client.transport.sniff";
private final static String CLIENT_TRANSPORT_PING_TIMEOUT = "client.transport.ping_timeout";
private final static String CLIENT_CLUSTER_NAME_CONFIG = "cluster.name";
public final static String S3_OBJECT_INDEX_NAME = "ecs-s3-object";
public final static String S3_OBJECT_VERSION_INDEX_NAME = "ecs-object-version";
public final static String S3_OBJECT_INDEX_TYPE = "object-info";
public final static String S3_OBJECT_VERSION_INDEX_TYPE = "object-version-info";
public final static String COLLECTION_TIME = "collection_time";
public final static String ANALYZED_TAG = "_analyzed";
public final static String NOT_ANALYZED_INDEX = "not_analyzed";
public final static String ANALYZED_INDEX = "analyzed";
public final static String LAST_MODIFIED_TAG = "last_modified";
public final static String SIZE_TAG = "size";
public final static String KEY_TAG = "key";
public final static String OWNER_ID_TAG = "owner_id";
public final static String OWNER_NAME_TAG = "owner_name";
public final static String NAMESPACE_TAG = "namespace";
public final static String BUCKET_TAG = "bucket";
public final static String ETAG_TAG = "e_tag";
public final static String VERSION_ID_TAG = "version_id";
public final static String IS_LATEST_TAG = "is_latest";
public final static String CUSTOM_GID_TAG = "x-amz-meta-x-emc-posix-group-owner-name";
public final static String CUSTOM_UID_TAG = "x-amz-meta-x-emc-posix-owner-name";
public final static String CUSTOM_MODIFIED_TIME_TAG = "mtime";
//=========================
// Private members
//=========================
private TransportClient elasticClient;
private static Logger LOGGER = LoggerFactory.getLogger(ElasticS3ObjectDAO.class);
private static final String DATA_DATE_PATTERN = "yyyy-MM-dd";
private static final String DATA_DATE_PATTERN_SEC = "yyyy-MM-dd HH:mm:ss";
private static final SimpleDateFormat DATA_DATE_FORMAT = new SimpleDateFormat(DATA_DATE_PATTERN);
private static final SimpleDateFormat DATA_DATE_FORMAT_SEC = new SimpleDateFormat(DATA_DATE_PATTERN_SEC);
private static String s3ObjectVersionIndexDayName;
private static String s3ObjectIndexDayName;
private ElasticDAOConfig config;
//=========================
// Public methods
//=========================
public ElasticS3ObjectDAO( ElasticDAOConfig config ) {
try {
this.config = config;
Builder builder = Settings.builder();
// Check for new hosts within the cluster
builder.put(CLIENT_SNIFFING_CONFIG, true);
builder.put(CLIENT_TRANSPORT_PING_TIMEOUT, "15s");
if (config.getXpackUser() != null) {
builder.put(Constants.XPACK_SECURITY_USER, config.getXpackUser() + ":" + config.getXpackPassword());
builder.put(Constants.XPACK_SSL_KEY, config.getXpackSslKey());
builder.put(Constants.XPACK_SSL_CERTIFICATE, config.getXpackSslCertificate());
builder.put(Constants.XPACK_SSL_CERTIFICATE_AUTH, config.getXpackSslCertificateAuthorities());
builder.put(Constants.XPACK_SECURITY_TRANPORT_ENABLED, "true");
}
// specify cluster name
if( config.getClusterName() != null ) {
builder.put(CLIENT_CLUSTER_NAME_CONFIG, config.getClusterName());
}
Settings settings = builder.build();
// create client
if (config.getXpackUser() != null) {
elasticClient = new PreBuiltXPackTransportClient(settings);
} else {
elasticClient = new PreBuiltTransportClient(settings);
}
// add hosts
for( String elasticHost : config.getHosts()) {
elasticClient.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName(elasticHost), config.getPort()));
}
} catch (UnknownHostException e) {
throw new RuntimeException(e.getLocalizedMessage());
} catch (ReceiveTimeoutTransportException re) {
LOGGER.error("An error occured while connecting to ElasticSearch Cluster ", re);
System.exit(1);
}
}
/**
* Init indexes
* @param collectionTime - collection time
*/
@Override
public void initIndexes(Date collectionTime) {
// init S3 Object Index
if( config.getCollectionType().equals(EcsCollectionType.object) ) {
initS3ObjectIndex( collectionTime );
}
// init S3 Object Version Index
if( config.getCollectionType().equals(EcsCollectionType.object_version) ) {
initS3ObjectVersionIndex( collectionTime );
}
}
/**
* {@inheritDoc}
*/
@Override
public void insert(ListObjectsResult listObjectsResult, String namespace, String bucket, Date collectionTime) {
if( listObjectsResult == null ||
listObjectsResult.getObjects() == null ||
listObjectsResult.getObjects().isEmpty() ) {
// nothing to insert
return;
}
BulkRequestBuilder requestBuilder = elasticClient.prepareBulk();
// Generate JSON for object buckets info
for( S3Object s3Object : listObjectsResult.getObjects() ) {
XContentBuilder s3ObjectBuilder = toJsonFormat(s3Object, namespace, bucket, collectionTime);
IndexRequestBuilder request = elasticClient.prepareIndex()
.setIndex(s3ObjectIndexDayName)
.setType(S3_OBJECT_INDEX_TYPE)
.setSource(s3ObjectBuilder);
requestBuilder.add(request);
}
BulkResponse bulkResponse = requestBuilder.execute().actionGet();
int items = bulkResponse.getItems().length;
LOGGER.info( "Took " + bulkResponse.getTookInMillis() + " ms to index [" + items + "] items in Elasticsearch " + "index: " +
s3ObjectIndexDayName + " index type: " + S3_OBJECT_INDEX_TYPE );
if( bulkResponse.hasFailures() ) {
LOGGER.error( "Failure(s) occured while items in Elasticsearch " + "index: " +
s3ObjectIndexDayName + " index type: " + S3_OBJECT_INDEX_TYPE );
}
}
/**
* {@inheritDoc}
*/
@Override
public void insert( QueryObjectsResult queryObjectsResult, String namespace,
String bucketName, Date collectionTime ) {
if( queryObjectsResult == null ||
queryObjectsResult.getObjects() == null ||
queryObjectsResult.getObjects().isEmpty() ) {
// nothing to insert
return;
}
BulkRequestBuilder requestBuilder = elasticClient.prepareBulk();
// Generate JSON for object buckets info
for( QueryObject queryObject : queryObjectsResult.getObjects() ) {
XContentBuilder s3ObjectBuilder = toJsonFormat(queryObject, namespace, bucketName, collectionTime);
IndexRequestBuilder request = elasticClient.prepareIndex()
.setIndex(s3ObjectIndexDayName)
.setType(S3_OBJECT_INDEX_TYPE)
.setSource(s3ObjectBuilder);
requestBuilder.add(request);
}
BulkResponse bulkResponse = requestBuilder.execute().actionGet();
int items = bulkResponse.getItems().length;
LOGGER.info( "Took " + bulkResponse.getTookInMillis() + " ms to index [" + items + "] items in Elasticsearch " + "index: " +
s3ObjectIndexDayName + " index type: " + S3_OBJECT_INDEX_TYPE );
if( bulkResponse.hasFailures() ) {
LOGGER.error( "Failure(s) occured while items in Elasticsearch " + "index: " +
s3ObjectIndexDayName + " index type: " + S3_OBJECT_INDEX_TYPE );
}
}
/**
* {@inheritDoc}
*/
@Override
public void insert(ListVersionsResult listVersionsResult, String namespace,
String bucketName, Date collectionTime) {
if( listVersionsResult == null ||
listVersionsResult.getVersions() == null ||
listVersionsResult.getVersions().isEmpty() ) {
// nothing to insert
return;
}
BulkRequestBuilder requestBuilder = elasticClient.prepareBulk();
// Generate JSON for object version info
for( AbstractVersion abstractVersion : listVersionsResult.getVersions() ) {
if(abstractVersion instanceof Version) {
XContentBuilder s3ObjectVersionBuilder = toJsonFormat((Version)abstractVersion, namespace, bucketName, collectionTime);
IndexRequestBuilder request = elasticClient.prepareIndex()
.setIndex(s3ObjectVersionIndexDayName)
.setType(S3_OBJECT_VERSION_INDEX_TYPE)
.setSource(s3ObjectVersionBuilder);
requestBuilder.add(request);
} else if(abstractVersion instanceof DeleteMarker) {
XContentBuilder s3ObjectVersionBuilder = toJsonFormat((DeleteMarker)abstractVersion, namespace, bucketName, collectionTime);
IndexRequestBuilder request = elasticClient.prepareIndex()
.setIndex(s3ObjectVersionIndexDayName)
.setType(S3_OBJECT_VERSION_INDEX_TYPE)
.setSource(s3ObjectVersionBuilder);
requestBuilder.add(request);
}
}
BulkResponse bulkResponse = requestBuilder.execute().actionGet();
int items = bulkResponse.getItems().length;
LOGGER.info( "Took " + bulkResponse.getTookInMillis() + " ms to index [" + items + "] items in Elasticsearch " + "index: " +
s3ObjectVersionIndexDayName + " index type: " + S3_OBJECT_VERSION_INDEX_TYPE );
if( bulkResponse.hasFailures() ) {
LOGGER.error( "Failure(s) occured while items in Elasticsearch " + "index: " +
s3ObjectVersionIndexDayName + " index type: " + S3_OBJECT_VERSION_INDEX_TYPE );
}
}
/**
* {@inheritDoc}
*/
@Override
public Long purgeOldData(ObjectDataType type, Date thresholdDate) {
switch(type) {
case object:
// Purge old S3 Objects
ElasticIndexCleaner.truncateOldIndexes( elasticClient, thresholdDate,
S3_OBJECT_INDEX_NAME, S3_OBJECT_INDEX_TYPE);
return 0L;
case object_versions:
// Purge old S3 Object Versions
ElasticIndexCleaner.truncateOldIndexes( elasticClient, thresholdDate,
S3_OBJECT_VERSION_INDEX_NAME,
S3_OBJECT_VERSION_INDEX_TYPE );
return 0L;
default:
return 0L;
}
}
/**
* Converts Object data in JSON format for Elasticsearch
*
* @param s3Object - S3 Object
* @param namespace - namespace
* @param bucket - bucket name
* @param collectionTime - collection time
* @return XContentBuilder
*/
public static XContentBuilder toJsonFormat( S3Object s3Object, String namespace, String bucket, Date collectionTime ) {
return toJsonFormat(s3Object, namespace, bucket,collectionTime, null);
}
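// For reference, a document produced by this conversion has roughly the
// following shape (all field values below are illustrative, not taken from a
// real cluster):
//
//   {
//     "last_modified": "2016-05-12T10:15:30.000Z",
//     "size": 1048576,
//     "key": "photos/2016/beach.jpg",
//     "key_analyzed": "photos/2016/beach.jpg",
//     "e_tag": "9b2cf535f27731c974343645a3985328",
//     "namespace": "ns1",
//     "bucket": "my-bucket",
//     "owner_id": "user1",
//     "owner_name": "user1",
//     "collection_time": "2016-05-12T11:00:00.000Z"
//   }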
/**
* Converts Object data in JSON format for Elasticsearch
*
* @param version - version
* @param namespace - namespace
* @param bucketName - bucket name
* @param collectionTime - collectionTime
* @return XContentBuilder
*/
public XContentBuilder toJsonFormat(Version version,
String namespace, String bucketName, Date collectionTime) {
return toJsonFormat( version, namespace, bucketName, collectionTime, null);
}
/**
* Converts Object data in JSON format for Elasticsearch
*
* @param deleteMarker - deleteMarker
* @param namespace - namespace
* @param bucketName - bucket name
* @param collectionTime - collection time
* @return XContentBuilders
*/
public XContentBuilder toJsonFormat(DeleteMarker deleteMarker,
String namespace, String bucketName, Date collectionTime) {
return toJsonFormat( deleteMarker, namespace, bucketName, collectionTime, null);
}
/**
* Converts Object data in JSON format for Elasticsearch
*
* @param s3Object - S3 Object
* @param namespace - namespace
* @param bucket - bucket
* @param collectionTime - collection time
* @return XContentBuilder
*/
public static XContentBuilder toJsonFormat( QueryObject s3Object, String namespace, String bucket, Date collectionTime ) {
return toJsonFormat(s3Object, namespace, bucket,collectionTime, null);
}
//=======================
// Private methods
//=======================
/**
* Init Object index
*/
private void initS3ObjectIndex( Date collectionTime ) {
String collectionDayString = DATA_DATE_FORMAT_SEC.format(collectionTime);
s3ObjectIndexDayName = S3_OBJECT_INDEX_NAME + "-" + collectionDayString.replaceAll(" ", "-");
if (elasticClient
.admin()
.indices()
.exists(new IndicesExistsRequest(s3ObjectIndexDayName))
.actionGet()
.isExists()) {
// Index already exists need to truncate it and recreate it
DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(s3ObjectIndexDayName);
ActionFuture<DeleteIndexResponse> futureResult = elasticClient.admin().indices().delete(deleteIndexRequest);
// Wait until deletion is done
while( !futureResult.isDone() ) {
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
elasticClient.admin().indices().create(new CreateIndexRequest(s3ObjectIndexDayName)).actionGet();
try {
PutMappingResponse putMappingResponse = elasticClient.admin().indices()
.preparePutMapping(s3ObjectIndexDayName)
.setType(S3_OBJECT_INDEX_TYPE)
.setSource(XContentFactory.jsonBuilder().prettyPrint()
.startObject()
.startObject(S3_OBJECT_INDEX_TYPE)
// ========================================
// Define how the basic fields are defined
// ========================================
.startObject("properties")
// LAST_MODIFIED_TAG
.startObject( LAST_MODIFIED_TAG ).field("type", "date")
.field("format", "strict_date_optional_time||epoch_millis").endObject()
// SIZE_TAG
.startObject( SIZE_TAG ).field("type", "string").field("type", "long").endObject()
// KEY_TAG
.startObject( KEY_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
// KEY_TAG Analyzed
.startObject( KEY_TAG + ANALYZED_TAG).field("type", "string")
.field("index", ANALYZED_INDEX).endObject()
.startObject( ETAG_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
// NAMESPACE_TAG
.startObject( NAMESPACE_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
// BUCKET_TAG
.startObject( BUCKET_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
// OWNER_ID_TAG
.startObject( OWNER_ID_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
// OWNER_NAME_TAG
.startObject( OWNER_NAME_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
// COLLECTION_TIME
.startObject( COLLECTION_TIME ).field("type", "date")
.field("format", "strict_date_optional_time||epoch_millis||date_time_no_millis").endObject()
// CUSTOM_GID_TAG
.startObject( CUSTOM_GID_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
// CUSTOM_UID_TAG
.startObject( CUSTOM_UID_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
// CUSTOM_MODIFIED_TIME_TAG
.startObject( CUSTOM_MODIFIED_TIME_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
.endObject()
// =================================
// Dynamic fields won't be analyzed
// =================================
.startArray("dynamic_templates")
.startObject()
.startObject("notanalyzed")
.field("match", "*")
.field("match_mapping_type", "string")
.startObject( "mapping" ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
.endObject()
.endObject()
.endArray()
.endObject()
.endObject() )
.execute().actionGet();
if (putMappingResponse.isAcknowledged()) {
LOGGER.info("Index Created: " + s3ObjectIndexDayName);
} else {
LOGGER.error("Index {} did not exist. " +
"While attempting to create the index from stored ElasticSearch " +
"Templates we were unable to get an acknowledgement.", s3ObjectIndexDayName);
LOGGER.error("Error Message: {}", putMappingResponse.toString());
throw new RuntimeException("Unable to create index " + s3ObjectIndexDayName);
}
} catch (IOException e) {
throw new RuntimeException( "Unable to create index " +
s3ObjectIndexDayName + " " + e.getMessage() );
}
}
/**
* Converts Object data into JSON format
*
* @param s3Object - S3 Object
* @param namespace - namespace
* @param bucket - bucket
* @param collectionTime - collection time
* @param builder - builder
* @return XContentBuilder
*/
private static XContentBuilder toJsonFormat( S3Object s3Object,
String namespace,
String bucket,
Date collectionTime,
XContentBuilder builder) {
try {
if(builder == null) {
builder = XContentFactory.jsonBuilder();
}
// add relevant fields
builder = builder.startObject()
.field( LAST_MODIFIED_TAG, s3Object.getLastModified() )
.field( SIZE_TAG, s3Object.getSize() )
.field( KEY_TAG, s3Object.getKey() )
.field( KEY_TAG + ANALYZED_TAG, s3Object.getKey() )
.field( ETAG_TAG , s3Object.getETag())
.field( NAMESPACE_TAG, namespace )
.field( BUCKET_TAG, bucket )
.field( OWNER_ID_TAG, (s3Object.getOwner() != null && s3Object.getOwner().getId() != null)
? s3Object.getOwner().getId() : null )
.field( OWNER_NAME_TAG, (s3Object.getOwner() != null && s3Object.getOwner().getDisplayName() != null)
? s3Object.getOwner().getDisplayName() : null )
.field( COLLECTION_TIME, collectionTime )
.endObject();
} catch (IOException e) {
throw new RuntimeException(e.getLocalizedMessage());
}
return builder;
}
/**
* Init Object version index
*/
private void initS3ObjectVersionIndex( Date collectionTime ) {
String collectionDayString = DATA_DATE_FORMAT.format(collectionTime);
s3ObjectVersionIndexDayName = S3_OBJECT_VERSION_INDEX_NAME + "-" + collectionDayString;
if (elasticClient
.admin()
.indices()
.exists(new IndicesExistsRequest(s3ObjectVersionIndexDayName))
.actionGet()
.isExists()) {
// Index already exists need to truncate it and recreate it
DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(s3ObjectVersionIndexDayName);
ActionFuture<DeleteIndexResponse> futureResult = elasticClient.admin().indices().delete(deleteIndexRequest);
// Wait until deletion is done
while( !futureResult.isDone() ) {
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
elasticClient.admin().indices().create(new CreateIndexRequest(s3ObjectVersionIndexDayName)).actionGet();
try {
PutMappingResponse putMappingResponse = elasticClient.admin().indices()
.preparePutMapping(s3ObjectVersionIndexDayName)
.setType(S3_OBJECT_VERSION_INDEX_TYPE)
.setSource(XContentFactory.jsonBuilder().prettyPrint()
.startObject()
.startObject(S3_OBJECT_VERSION_INDEX_TYPE)
// ========================================
// Define how the basic fields are defined
// ========================================
.startObject("properties")
// LAST_MODIFIED_TAG
.startObject( LAST_MODIFIED_TAG ).field("type", "date")
.field("format", "strict_date_optional_time||epoch_millis").endObject()
// SIZE_TAG
.startObject( SIZE_TAG ).field("type", "string").field("type", "long").endObject()
// KEY_TAG
.startObject( KEY_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
// KEY_TAG Analyzed<|fim▁hole|> .startObject( ETAG_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
// NAMESPACE_TAG
.startObject( NAMESPACE_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
// BUCKET_TAG
.startObject( BUCKET_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
// VERSION_ID_TAG
.startObject( VERSION_ID_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
// IS_LATEST_TAG
.startObject( IS_LATEST_TAG ).field("type", "boolean")
.field("index", NOT_ANALYZED_INDEX).endObject()
// OWNER_ID_TAG
.startObject( OWNER_ID_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
// OWNER_NAME_TAG
.startObject( OWNER_NAME_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
// COLLECTION_TIME
.startObject( COLLECTION_TIME ).field("type", "date")
.field("format", "strict_date_optional_time||epoch_millis").endObject()
.endObject()
// =================================
// Dynamic fields won't be analyzed
// =================================
.startArray("dynamic_templates")
.startObject()
.startObject("notanalyzed")
.field("match", "*")
.field("match_mapping_type", "string")
.startObject( "mapping" ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
.endObject()
.endObject()
.endArray()
.endObject()
.endObject() )
.execute().actionGet();
if (putMappingResponse.isAcknowledged()) {
LOGGER.info("Index Created: " + s3ObjectVersionIndexDayName);
} else {
LOGGER.error("Index {} did not exist. " +
"While attempting to create the index from stored ElasticSearch " +
"Templates we were unable to get an acknowledgement.", s3ObjectVersionIndexDayName);
LOGGER.error("Error Message: {}", putMappingResponse.toString());
throw new RuntimeException("Unable to create index " + s3ObjectVersionIndexDayName);
}
} catch (IOException e) {
throw new RuntimeException( "Unable to create index " +
s3ObjectVersionIndexDayName +
" " + e.getMessage() );
}
}
/**
* Converts object version data to json
*
* @param version - version
* @param namespace - namespace
* @param bucket - bucket
* @param collectionTime - collection time
* @param builder - builder
* @return XContentBuilder
*/
private static XContentBuilder toJsonFormat( Version version,
String namespace,
String bucket,
Date collectionTime,
XContentBuilder builder) {
try {
if(builder == null) {
builder = XContentFactory.jsonBuilder();
}
// add relevant fields
builder = builder.startObject()
.field( LAST_MODIFIED_TAG, version.getLastModified() )
.field( SIZE_TAG, version.getSize() )
.field( KEY_TAG, version.getKey() )
.field( KEY_TAG + ANALYZED_TAG, version.getKey() )
.field( ETAG_TAG , version.getETag())
.field( NAMESPACE_TAG, namespace )
.field( BUCKET_TAG, bucket )
.field( VERSION_ID_TAG, version.getVersionId() )
.field( IS_LATEST_TAG, version.isLatest())
.field( OWNER_ID_TAG, (version.getOwner() != null && version.getOwner().getId() != null)
? version.getOwner().getId() : null )
.field( OWNER_NAME_TAG, (version.getOwner() != null && version.getOwner().getDisplayName() != null)
? version.getOwner().getDisplayName() : null )
.field( COLLECTION_TIME, collectionTime )
.endObject();
} catch (IOException e) {
throw new RuntimeException(e.getLocalizedMessage());
}
return builder;
}
/**
* Converts delete marker data to json
*
* @param deleteMarker - delete marker
* @param namespace - namespace
* @param bucket - bucket
* @param collectionTime - collection time
* @param builder - builder
* @return XContentBuilder
*/
private static XContentBuilder toJsonFormat( DeleteMarker deleteMarker,
String namespace,
String bucket,
Date collectionTime,
XContentBuilder builder) {
try {
if(builder == null) {
builder = XContentFactory.jsonBuilder();
}
// add relevant fields
builder = builder.startObject()
.field( LAST_MODIFIED_TAG, deleteMarker.getLastModified() )
.field( KEY_TAG, deleteMarker.getKey() )
.field( KEY_TAG + ANALYZED_TAG, deleteMarker.getKey() )
.field( NAMESPACE_TAG, namespace )
.field( BUCKET_TAG, bucket )
.field( VERSION_ID_TAG, deleteMarker.getVersionId() )
.field( IS_LATEST_TAG, deleteMarker.isLatest())
.field( OWNER_ID_TAG, (deleteMarker.getOwner() != null && deleteMarker.getOwner().getId() != null)
? deleteMarker.getOwner().getId() : null )
.field( OWNER_NAME_TAG, (deleteMarker.getOwner() != null && deleteMarker.getOwner().getDisplayName() != null)
? deleteMarker.getOwner().getDisplayName() : null )
.field( COLLECTION_TIME, collectionTime )
.endObject();
} catch (IOException e) {
throw new RuntimeException(e.getLocalizedMessage(), e);
}
return builder;
}
/**
* Converts Query Object data into JSON
*
* @param queryObject - Query Object
* @param namespace - Namespace
* @param bucket - Bucket
* @param collectionTime - Collection Time
* @param builder - Builder
* @return XContentBuilder
*/
private static XContentBuilder toJsonFormat( QueryObject queryObject,
String namespace,
String bucket,
Date collectionTime,
XContentBuilder builder) {
try {
if(builder == null) {
builder = XContentFactory.jsonBuilder();
}
// add known basic fields
builder = builder.startObject()
.field( KEY_TAG, queryObject.getObjectName() )
.field( KEY_TAG + ANALYZED_TAG, queryObject.getObjectName() )
.field( ETAG_TAG , queryObject.getObjectId())
.field( NAMESPACE_TAG, namespace )
.field( BUCKET_TAG, bucket )
.field( COLLECTION_TIME, collectionTime );
// Add custom metadata (MD) key/value pairs as dynamic fields
for( QueryMetadata metadata : queryObject.getQueryMds() ) {
for( Entry<String, String> entry : metadata.getMdMap().entrySet() ) {
builder.field(entry.getKey(), entry.getValue());
}
}
builder.endObject();
} catch (IOException e) {
throw new RuntimeException(e.getLocalizedMessage(), e);
}
return builder;
}
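/*
 * Illustrative sketch (added for clarity; not part of the original source): the
 * builders produced by the toJsonFormat(...) helpers above are typically handed
 * straight to an index request against the daily index, for example:
 *
 *   XContentBuilder doc = toJsonFormat(version, namespace, bucket, collectionTime, null);
 *   elasticClient.prepareIndex(s3ObjectVersionIndexDayName, S3_OBJECT_VERSION_INDEX_TYPE)
 *                .setSource(doc)
 *                .execute().actionGet();
 *
 * The client handle and the index/type identifiers shown here are assumptions
 * based on the surrounding code, not definitions from this file.
 */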
}<|fim▁end|>
|
.startObject( KEY_TAG + ANALYZED_TAG).field("type", "string")
.field("index", ANALYZED_INDEX).endObject()
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#![feature(plugin)]
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate serde_json;
mod primary_index;
mod secondary_index;
mod table;
mod find;
use std::fs::File;
use std::io::Read;
use serde_json::{Value, Error};
fn main() {
let db_path = "/data/testdb";
let mut t = table::Table::new("test_table", db_path);
t.create_secondary_index("Type", secondary_index::key_types::Str(20));
t.create_secondary_index("Name", secondary_index::key_types::Str(128));
t.create_secondary_index("X", secondary_index::key_types::F32);
t.create_secondary_index("Y", secondary_index::key_types::F32);
// load_dummy_data(&mut t);
println!("Get by id 3 {:?}", t.get(3));
println!("Get by id 4 {:?}", t.get(4));
println!("Get by id 33 {:?}", t.get(33));
println!("Get by id 333 {:?}, has an index of {:?}", t.get(333), t.get(333).get_id());
println!("Search index {:?}", t.secondary_indexes[0].get("TEST", "TEST2"));
println!("{}", t.table_name);
}
fn load_dummy_data(t:&mut table::Table) {
// load json from thing
let mut s = String::new();
File::open("data.json").unwrap().read_to_string(&mut s).unwrap();
let n:Vec<serde_json::Value> = serde_json::from_str(&s).unwrap();
for item in n.into_iter() {<|fim▁hole|> t.insert(&item.to_string());
//println!("{:?}", item.to_string());
}
}<|fim▁end|>
| |
<|file_name|>idirwatcher_test.py<|end_file_name|><|fim▁begin|>"""Unit test for directory watcher (inotify).
"""
import errno
import os
import shutil
import tempfile
import select
import unittest
# Disable W0611: Unused import
import tests.treadmill_test_deps # pylint: disable=W0611
import mock<|fim▁hole|>class DirWatcherTest(unittest.TestCase):
"""Tests for teadmill.idirwatch."""
def setUp(self):
self.root = tempfile.mkdtemp()
def tearDown(self):
if self.root and os.path.isdir(self.root):
shutil.rmtree(self.root)
def test_watcher(self):
"""Tests created/deleted callbackes."""
created = []
modified = []
deleted = []
test_file = os.path.join(self.root, 'a')
watcher = idirwatch.DirWatcher(self.root)
watcher.on_created = lambda x: created.append(x) or 'one'
watcher.on_modified = lambda x: modified.append(x) or 'two'
watcher.on_deleted = lambda x: deleted.append(x) or 'three'
with open(test_file, 'w') as f:
f.write('hello')
with open(test_file, 'a') as f:
f.write(' world!')
os.unlink(test_file)
with open(test_file, 'w') as f:
f.write('hello again')
res = watcher.process_events(max_events=3)
self.assertEqual([test_file], created)
self.assertEqual([test_file], modified)
self.assertEqual([test_file], deleted)
self.assertEqual(
[
(idirwatch.DirWatcherEvent.CREATED, test_file, 'one'),
(idirwatch.DirWatcherEvent.MODIFIED, test_file, 'two'),
(idirwatch.DirWatcherEvent.DELETED, test_file, 'three'),
(idirwatch.DirWatcherEvent.MORE_PENDING, None, None),
],
res,
)
@mock.patch('select.poll', mock.Mock())
def test_signal(self):
"""Tests behavior when signalled during wait."""
watcher = idirwatch.DirWatcher(self.root)
mocked_pollobj = select.poll.return_value
mocked_pollobj.poll.side_effect = select.error(errno.EINTR, '')
self.assertFalse(watcher.wait_for_events())
if __name__ == '__main__':
unittest.main()<|fim▁end|>
|
from treadmill import idirwatch
|
<|file_name|>runauto.py<|end_file_name|><|fim▁begin|>import requests
import pytest
import subprocess
# ============================================================================
class TestAuto(object):
PREFIX = 'http://localhost:8089'
USER = 'testauto'
LIST_ID = ''
AUTO_ID = ''
NUM_BROWSERS = 2
@classmethod
def setup_class(cls):
cls.session = requests.session()
@classmethod
def teardown_class(cls):
pass
def get(self, url, **kwargs):
full_url = self.PREFIX + url
return self.session.get(full_url, **kwargs)
def post(self, url, **kwargs):
full_url = self.PREFIX + url
return self.session.post(full_url, **kwargs)
def delete(self, url, **kwargs):
full_url = self.PREFIX + url
return self.session.delete(full_url, **kwargs)<|fim▁hole|> "-c", "[email protected]", "testauto", "TestTest123", "archivist", "Auto Test"],
stdout=subprocess.PIPE)
assert b'Created user testauto' in res.stdout or b'A user already exists' in res.stdout
assert res.returncode == 0
@pytest.mark.always
def test_login(self):
params = {'username': self.USER,
'password': 'TestTest123',
}
res = self.post('/api/v1/auth/login', json=params)
assert res.json()['user']['username'] == self.USER
def test_create_coll(self):
res = self.post('/api/v1/collections?user=testauto',
json={'title': 'Auto Test'})
assert res.json()['collection']['id'] == 'auto-test'
assert res.json()['collection']['title'] == 'Auto Test'
def test_create_auto(self):
params = {'scope_type': 'single-page',
'num_browsers': self.NUM_BROWSERS,
}
res = self.post('/api/v1/auto?user=testauto&coll=auto-test', json=params)
assert res.json()['auto']
TestAuto.AUTO_ID = res.json()['auto']
def test_add_urls(self):
params = {'urls': [
'https://twitter.com/webrecorder_io',
'https://rhizome.org/'
]}
res = self.post('/api/v1/auto/{0}/queue_urls?user=testauto&coll=auto-test'.format(self.AUTO_ID), json=params)
assert res.json()['success']
def test_start(self):
res = self.post('/api/v1/auto/{0}/start?user=testauto&coll=auto-test'.format(self.AUTO_ID))
print(res.json())
assert res.json()['success']
@pytest.mark.append
def _test_append_only(self, append, auto_id):
params = {'title': 'Add Url'}
res = self.post('/api/v1/lists?user=testauto&coll=auto-test', json=params)
list_id = res.json()['list']['id']
bookmarks = [{'url': append, 'title': append}]
res = self.post('/api/v1/list/%s/bulk_bookmarks?user=testauto&coll=auto-test' % list_id,
json=bookmarks)
assert res.json()['list']
params = {'list': list_id}
res = self.post('/api/v1/auto/{0}/queue_list?user=testauto&coll=auto-test'.format(auto_id), json=params)
assert res.json()['status']
def test_get_auto(self):
res = self.get('/api/v1/auto/{0}?user=testauto&coll=auto-test'.format(self.AUTO_ID))
auto = res.json()['auto']
assert auto['queue'] is not None
assert auto['seen'] is not None
assert auto['pending'] is not None
assert len(auto['browsers']) == self.NUM_BROWSERS
assert auto['scope_type'] == 'single-page'
@pytest.mark.delete
def _test_delete_auto(self):
res = self.delete('/api/v1/auto/{0}?user=testauto&coll=auto-test'.format(self.AUTO_ID))
assert res.json() == {'deleted_id': str(self.AUTO_ID)}
@pytest.mark.delete
def test_delete_coll(self):
res = self.delete('/api/v1/collection/auto-test?user=testauto')
assert res.json() == {'deleted_id': 'auto-test'} or res.json() == {'error': 'no_such_collection'}<|fim▁end|>
|
@pytest.mark.always
def test_create_user(self):
res = subprocess.run(['docker', 'exec', 'webrecorder_app_1', "python", "-m", "webrecorder.admin",
|
<|file_name|>test_artificial_32_Logit_MovingMedian_0__100.py<|end_file_name|><|fim▁begin|>import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
<|fim▁hole|><|fim▁end|>
|
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "MovingMedian", cycle_length = 0, transform = "Logit", sigma = 0.0, exog_count = 100, ar_order = 0);
|
<|file_name|>register_map.cpp<|end_file_name|><|fim▁begin|>//---------------------------------------------------------------------------
#include "register_map.h"
//---------------------------------------------------------------------------
//Register map for Si5356A
Reg_Data Reg_Store[NUM_REGS_MAX] = {
{ 0,0x00,0x00},
{ 1,0x00,0x00},
{ 2,0x00,0x00},
{ 3,0x00,0x00},
{ 4,0x00,0x00},
{ 5,0x00,0x00},
{ 6,0x04,0x1D},
{ 7,0x00,0x00},
{ 8,0x70,0x00},
{ 9,0x0F,0x00},
{ 10,0x00,0x00},
{ 11,0x00,0x00},
{ 12,0x00,0x00},
{ 13,0x00,0x00},
{ 14,0x00,0x00},
{ 15,0x00,0x00},
{ 16,0x00,0x00},
{ 17,0x00,0x00},
{ 18,0x00,0x00},
{ 19,0x00,0x00},
{ 20,0x00,0x00},
{ 21,0x00,0x00},
{ 22,0x00,0x00},
{ 23,0x00,0x00},
{ 24,0x00,0x00},
{ 25,0x00,0x00},
{ 26,0x00,0x00},
{ 27,0x70,0x80},
{ 28,0x37,0xFF},
{ 29,0x20,0xFF},
{ 30,0xA8,0xFF},
{ 31,0xE3,0xFF},
{ 32,0xC0,0xFF},
{ 33,0xC0,0xFF},
{ 34,0xE3,0xFF},
{ 35,0x00,0xFF},
{ 36,0x00,0x1F},
{ 37,0x0B,0x1F},
{ 38,0x03,0x1F},
{ 39,0x00,0x1F},
{ 40,0xF7,0xFF},
{ 41,0x5E,0x7F},
{ 42,0x37,0x3F},
{ 43,0x00,0x00},
{ 44,0x00,0x00},
{ 45,0x00,0x00},
{ 46,0x00,0x00},
{ 47,0x14,0x3C},
{ 48,0x2E,0x7F},
{ 49,0x90,0x7F},
{ 50,0xDE,0xC0},
{ 51,0x07,0x00},
{ 52,0x10,0x0C},
{ 53,0x00,0xFF},
{ 54,0x00,0xFF},
{ 55,0x00,0xFF},
{ 56,0x00,0xFF},
{ 57,0x00,0xFF},
{ 58,0x00,0xFF},
{ 59,0x00,0xFF},
{ 60,0x00,0xFF},
{ 61,0x00,0xFF},
{ 62,0x00,0x3F},
{ 63,0x10,0x0C},
{ 64,0x00,0xFF},
{ 65,0x35,0xFF},
{ 66,0x00,0xFF},
{ 67,0x00,0xFF},
{ 68,0x00,0xFF},
{ 69,0x00,0xFF},
{ 70,0x01,0xFF},
{ 71,0x00,0xFF},
{ 72,0x00,0xFF},
{ 73,0x00,0x3F},
{ 74,0x10,0x0C},
{ 75,0x00,0xFF},
{ 76,0x35,0xFF},
{ 77,0x00,0xFF},
{ 78,0x00,0xFF},
{ 79,0x00,0xFF},
{ 80,0x00,0xFF},
{ 81,0x01,0xFF},
{ 82,0x00,0xFF},
{ 83,0x00,0xFF},
{ 84,0x00,0x3F},
{ 85,0x10,0x0C},
{ 86,0x00,0xFF},
{ 87,0x00,0xFF},
{ 88,0x00,0xFF},
{ 89,0x00,0xFF},
{ 90,0x00,0xFF},
{ 91,0x00,0xFF},
{ 92,0x00,0xFF},
{ 93,0x00,0xFF},
{ 94,0x00,0xFF},
{ 95,0x00,0x3F},
{ 96,0x10,0x00},
{ 97,0xCE,0xFF},
{ 98,0x21,0xFF},
{ 99,0x00,0xFF},
{100,0x01,0xFF},
{101,0x00,0xFF},
{102,0x00,0xFF},
{103,0x60,0xFF},
{104,0x00,0xFF},
{105,0x00,0xFF},
{106,0x80,0x3F},
{107,0x00,0xFF},
{108,0x00,0x7F},
{109,0x00,0x00},
{110,0x40,0xC0},
{111,0x00,0xFF},
{112,0x00,0x7F},
{113,0x00,0x00},
{114,0x40,0xC0},
{115,0x00,0xFF},
{116,0x80,0x7F},
{117,0x00,0x00},
{118,0x40,0xC0},
{119,0x00,0xFF},
{120,0x00,0xFF},
{121,0x00,0x00},
{122,0x40,0xC0},
{123,0x00,0x00},
{124,0x00,0x00},
{125,0x00,0x00},
{126,0x00,0x00},
{127,0x00,0x00},
{128,0x00,0x00},
{129,0x00,0x0F},
{130,0x00,0x0F},
{131,0x00,0x00},
{132,0x00,0x00},
{133,0x00,0x00},
{134,0x00,0x00},
{135,0x00,0x00},
{136,0x00,0x00},
{137,0x00,0x00},
{138,0x00,0x00},
{139,0x00,0x00},
{140,0x00,0x00},
{141,0x00,0x00},
{142,0x00,0x00},
{143,0x00,0x00},
{144,0x00,0x80},
{145,0x00,0x00},
{146,0xFF,0x00},
{147,0x00,0x00},
{148,0x00,0x00},
{149,0x00,0x00},
{150,0x00,0x00},
{151,0x00,0x00},
{152,0x00,0x00},
{153,0x00,0x00},
{154,0x00,0x00},
{155,0x00,0x00},
{156,0x00,0x00},
{157,0x00,0x00},
{158,0x00,0x0F},
{159,0x00,0x0F},
{160,0x00,0x00},
{161,0x00,0x00},
<|fim▁hole|>{163,0x00,0x00},
{164,0x00,0x00},
{165,0x00,0x00},
{166,0x00,0x00},
{167,0x00,0x00},
{168,0x00,0x00},
{169,0x00,0x00},
{170,0x00,0x00},
{171,0x00,0x00},
{172,0x00,0x00},
{173,0x00,0x00},
{174,0x00,0x00},
{175,0x00,0x00},
{176,0x00,0x00},
{177,0x00,0x00},
{178,0x00,0x00},
{179,0x00,0x00},
{180,0x00,0x00},
{181,0x00,0x0F},
{182,0x00,0x00},
{183,0x00,0x00},
{184,0x00,0x00},
{185,0x00,0x00},
{186,0x00,0x00},
{187,0x00,0x00},
{188,0x00,0x00},
{189,0x00,0x00},
{190,0x00,0x00},
{191,0x00,0x00},
{192,0x00,0x00},
{193,0x00,0x00},
{194,0x00,0x00},
{195,0x00,0x00},
{196,0x00,0x00},
{197,0x00,0x00},
{198,0x00,0x00},
{199,0x00,0x00},
{200,0x00,0x00},
{201,0x00,0x00},
{202,0x00,0x00},
{203,0x00,0x0F},
{204,0x00,0x00},
{205,0x00,0x00},
{206,0x00,0x00},
{207,0x00,0x00},
{208,0x00,0x00},
{209,0x00,0x00},
{210,0x00,0x00},
{211,0x00,0x00},
{212,0x00,0x00},
{213,0x00,0x00},
{214,0x00,0x00},
{215,0x00,0x00},
{216,0x00,0x00},
{217,0x00,0x00},
{218,0x00,0x00},
{219,0x00,0x00},
{220,0x00,0x00},
{221,0x0D,0x00},
{222,0x00,0x00},
{223,0x00,0x00},
{224,0xF4,0x00},
{225,0xF0,0x00},
{226,0x00,0x00},
{227,0x00,0x00},
{228,0x00,0x00},
{229,0x00,0x00},
{231,0x00,0x00},
{232,0x00,0x00},
{233,0x00,0x00},
{234,0x00,0x00},
{235,0x00,0x00},
{236,0x00,0x00},
{237,0x00,0x00},
{238,0x14,0x00},
{239,0x00,0x00},
{240,0x00,0x00},
{242,0x00,0x00},
{243,0xF0,0x00},
{244,0x00,0x00},
{245,0x00,0x00},
{247,0x00,0x00},
{248,0x00,0x00},
{249,0xA8,0x00},
{250,0x00,0x00},
{251,0x84,0x00},
{252,0x00,0x00},
{253,0x00,0x00},
{254,0x00,0x00},
{255, 1, 0xFF}, // set page bit to 1
{ 0,0x00,0x00},
{ 1,0x00,0x00},
{ 2,0x00,0x00},
{ 3,0x00,0x00},
{ 4,0x00,0x00},
{ 5,0x00,0x00},
{ 6,0x00,0x00},
{ 7,0x00,0x00},
{ 8,0x00,0x00},
{ 9,0x00,0x00},
{ 10,0x00,0x00},
{ 11,0x00,0x00},
{ 12,0x00,0x00},
{ 13,0x00,0x00},
{ 14,0x00,0x00},
{ 15,0x00,0x00},
{ 16,0x00,0x00},
{ 17,0x01,0x00},
{ 18,0x00,0x00},
{ 19,0x00,0x00},
{ 20,0x90,0x00},
{ 21,0x31,0x00},
{ 22,0x00,0x00},
{ 23,0x00,0x00},
{ 24,0x01,0x00},
{ 25,0x00,0x00},
{ 26,0x00,0x00},
{ 27,0x00,0x00},
{ 28,0x00,0x00},
{ 29,0x00,0x00},
{ 30,0x00,0x00},
{ 31,0x00,0xFF},
{ 32,0x00,0xFF},
{ 33,0x01,0xFF},
{ 34,0x00,0xFF},
{ 35,0x00,0xFF},
{ 36,0x90,0xFF},
{ 37,0x31,0xFF},
{ 38,0x00,0xFF},
{ 39,0x00,0xFF},
{ 40,0x01,0xFF},
{ 41,0x00,0xFF},
{ 42,0x00,0xFF},
{ 43,0x00,0x0F},
{ 44,0x00,0x00},
{ 45,0x00,0x00},
{ 46,0x00,0x00},
{ 47,0x00,0xFF},
{ 48,0x00,0xFF},
{ 49,0x01,0xFF},
{ 50,0x00,0xFF},
{ 51,0x00,0xFF},
{ 52,0x90,0xFF},
{ 53,0x31,0xFF},
{ 54,0x00,0xFF},
{ 55,0x00,0xFF},
{ 56,0x01,0xFF},
{ 57,0x00,0xFF},
{ 58,0x00,0xFF},
{ 59,0x00,0x0F},
{ 60,0x00,0x00},
{ 61,0x00,0x00},
{ 62,0x00,0x00},
{ 63,0x00,0xFF},
{ 64,0x00,0xFF},
{ 65,0x01,0xFF},
{ 66,0x00,0xFF},
{ 67,0x00,0xFF},
{ 68,0x90,0xFF},
{ 69,0x31,0xFF},
{ 70,0x00,0xFF},
{ 71,0x00,0xFF},
{ 72,0x01,0xFF},
{ 73,0x00,0xFF},
{ 74,0x00,0xFF},
{ 75,0x00,0x0F},
{ 76,0x00,0x00},
{ 77,0x00,0x00},
{ 78,0x00,0x00},
{ 79,0x00,0xFF},
{ 80,0x00,0xFF},
{ 81,0x00,0xFF},
{ 82,0x00,0xFF},
{ 83,0x00,0xFF},
{ 84,0x90,0xFF},
{ 85,0x31,0xFF},
{ 86,0x00,0xFF},
{ 87,0x00,0xFF},
{ 88,0x01,0xFF},
{ 89,0x00,0xFF},
{ 90,0x00,0xFF},
{ 91,0x00,0x0F},
{ 92,0x00,0x00},
{ 93,0x00,0x00},
{ 94,0x00,0x00},
{255, 0, 0xFF} }; // set page bit to 0
//End of file
//CHECKSUM = 14CFBC138966364B666BF9BF3AA0FACA34D009AF<|fim▁end|>
|
{162,0x00,0x00},
|
<|file_name|>skills.en.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
export declare const skills_en: string;
|
<|file_name|>tabmanager.ts<|end_file_name|><|fim▁begin|><!DOCTYPE TS><TS>
<context>
<name>AppEdit</name>
<message>
<source>Application</source>
<translation>Aplikācija</translation>
</message>
<message>
<source>Name:</source>
<translation>Nosaukums:</translation>
</message>
<message>
<source>Icon:</source>
<translation>Ikona:</translation>
</message>
<message>
<source>Exec:</source>
<translation>Palaist:</translation>
</message>
<message>
<source>Comment:</source>
<translation>Komentārs:</translation>
</message>
</context>
<context>
<name>TabManager</name>
<message>
<source>Message</source>
<translation>Ziņa</translation>
</message>
<message>
<source>Can't remove with applications
still in the group.</source>
<translation>Nevaru izņemt aplikācijas
kas ir grupā.</translation>
</message>
<message>
<source>Ok</source>
<translation>Labi</translation>
</message>
<message>
<source>Are you sure you want to delete?</source>
<translation>Tiešām vēlaties dzēst?</translation>
</message>
<message>
<source>Yes</source>
<translation>Labi</translation>
</message>
<message>
<source>Cancel</source>
<translation>Atlikt</translation>
</message>
<message>
<source>Can't remove.</source><|fim▁hole|> <message>
<source>Gathering icons...</source>
<translation>Savācu ikonas...</translation>
</message>
<message>
<source>Tab</source>
<translation>Tabs</translation>
</message>
<message>
<source>Application</source>
<translation>Aplikācija</translation>
</message>
</context>
<context>
<name>TabManagerBase</name>
<message>
<source>Tab Manager</source>
<translation>Tabu Menedžeris</translation>
</message>
<message>
<source>Hierarchy</source>
<translation>Hierarhija</translation>
</message>
</context>
<context>
<name>Wait</name>
<message>
<source>Please Wait...</source>
<translation>Lūdzu Uzgaidiet...</translation>
</message>
</context>
</TS><|fim▁end|>
|
<translation>Nevaru izņemt.</translation>
</message>
|
<|file_name|>nlp.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python
# -*- coding: utf8 -*-
import tensorflow as tf
import os
from sys import platform as _platform
import collections
import random
import numpy as np
import warnings
from six.moves import xrange
from tensorflow.python.platform import gfile
import re
## Iteration functions
def generate_skip_gram_batch(data, batch_size, num_skips, skip_window, data_index=0):
"""Generate a training batch for the Skip-Gram model.
Parameters
----------
data : a list
To present context.
batch_size : an int
Batch size to return.
num_skips : an int
How many times to reuse an input to generate a label.
skip_window : an int
How many words to consider left and right.
data_index : an int
Index of the context location.
without using yield, this code use data_index to instead.
Returns
--------
batch : a list
Inputs
labels : a list
Labels
data_index : an int
Index of the context location.
Examples
--------
>>> Setting num_skips=2, skip_window=1, use the right and left words.
>>> In the same way, num_skips=4, skip_window=2 means use the nearby 4 words.
>>> data = [1,2,3,4,5,6,7,8,9,10,11]
>>> batch, labels, data_index = tl.nlp.generate_skip_gram_batch(data=data, batch_size=8, num_skips=2, skip_window=1, data_index=0)
>>> print(batch)
... [2 2 3 3 4 4 5 5]
>>> print(labels)
... [[3]
... [1]
... [4]
... [2]
... [5]
... [3]
... [4]
... [6]]
References
-----------
- `TensorFlow word2vec tutorial <https://www.tensorflow.org/versions/r0.9/tutorials/word2vec/index.html#vector-representations-of-words>`_
"""
# global data_index # you can put data_index outside the function, then
# modify the global data_index in the function without return it.
# note: without using yield, this code use data_index to instead.
assert batch_size % num_skips == 0
assert num_skips <= 2 * skip_window
batch = np.ndarray(shape=(batch_size), dtype=np.int32)
labels = np.ndarray(shape=(batch_size, 1), dtype=np.int32)
span = 2 * skip_window + 1 # [ skip_window target skip_window ]
buffer = collections.deque(maxlen=span)
for _ in range(span):
buffer.append(data[data_index])
data_index = (data_index + 1) % len(data)
for i in range(batch_size // num_skips):
target = skip_window # target label at the center of the buffer
targets_to_avoid = [ skip_window ]
for j in range(num_skips):
while target in targets_to_avoid:
target = random.randint(0, span - 1)
targets_to_avoid.append(target)
batch[i * num_skips + j] = buffer[skip_window]
labels[i * num_skips + j, 0] = buffer[target]
buffer.append(data[data_index])
data_index = (data_index + 1) % len(data)
return batch, labels, data_index
## Sampling functions
def sample(a=[], temperature=1.0):
"""Sample an index from a probability array.
Parameters
----------
a : a list
List of probabilities.
temperature : float or None
The higher the more uniform.\n
When a = [0.1, 0.2, 0.7],\n
temperature = 0.7, the distribution will be sharpened [ 0.05048273 0.13588945 0.81362782]\n
temperature = 1.0, the distribution will be the same [0.1 0.2 0.7]\n
temperature = 1.5, the distribution will be smoothed (more uniform) [ 0.16008435 0.25411807 0.58579758]\n
If None, it will be ``np.argmax(a)``
Notes
------
No matter what is the temperature and input list, the sum of all probabilities will be one.
Even if input list = [1, 100, 200], the sum of all probabilities will still be one.
For large vocabulary_size, choose a higher temperature to avoid log errors.
"""
b = np.copy(a)
try:
if temperature == 1:
return np.argmax(np.random.multinomial(1, a, 1))
if temperature is None:
return np.argmax(a)
else:
a = np.log(a) / temperature
a = np.exp(a) / np.sum(np.exp(a))
return np.argmax(np.random.multinomial(1, a, 1))
except:
# np.set_printoptions(threshold=np.nan)
# print(a)
# print(np.sum(a))
# print(np.max(a))
# print(np.min(a))
# exit()
message = "For large vocabulary_size, choice a higher temperature\
to avoid log error. Hint : use ``sample_top``. "
warnings.warn(message, Warning)
# print(a)
# print(b)
return np.argmax(np.random.multinomial(1, b, 1))
def sample_top(a=[], top_k=10):
"""Sample from ``top_k`` probabilities.
Parameters
----------
a : a list
List of probabilities.
top_k : int
Number of candidates to be considered.
"""
idx = np.argpartition(a, -top_k)[-top_k:]
probs = a[idx]
# print("new", probs)
probs = probs / np.sum(probs)
choice = np.random.choice(idx, p=probs)
return choice
## old implementation
# a = np.array(a)
# idx = np.argsort(a)[::-1]
# idx = idx[:top_k]
# # a = a[idx]
# probs = a[idx]
# print("prev", probs)
# # probs = probs / np.sum(probs)
# # choice = np.random.choice(idx, p=probs)
# # return choice
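# Illustrative usage sketch (added for clarity; not part of the original module):
# both samplers pick the next word id from a probability vector, e.g. the softmax
# output of a language model. The probabilities below are made up for the example.
#
#     probs = np.array([0.05, 0.15, 0.60, 0.20])
#     next_id = sample(probs, temperature=0.7)   # temperature-scaled multinomial draw
#     next_id = sample_top(probs, top_k=2)       # restrict the draw to the 2 best ids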
## Vector representations of words (Advanced) UNDOCUMENT
class SimpleVocabulary(object):
"""Simple vocabulary wrapper, see create_vocab().
Parameters
------------
vocab : A dictionary of word to word_id.
unk_id : Id of the special 'unknown' word.
"""
def __init__(self, vocab, unk_id):
"""Initializes the vocabulary."""
self._vocab = vocab
self._unk_id = unk_id
def word_to_id(self, word):
"""Returns the integer id of a word string."""
if word in self._vocab:
return self._vocab[word]
else:
return self._unk_id
class Vocabulary(object):
"""Create Vocabulary class from a given vocabulary and its id-word, word-id convert,
see create_vocab() and ``tutorial_tfrecord3.py``.
Parameters
-----------
vocab_file : File containing the vocabulary, where the words are the first
whitespace-separated token on each line (other tokens are ignored) and
the word ids are the corresponding line numbers.
start_word : Special word denoting sentence start.
end_word : Special word denoting sentence end.
unk_word : Special word denoting unknown words.
Properties
------------
vocab : a dictionary from word to id.
reverse_vocab : a list from id to word.
start_id : int of start id
end_id : int of end id
unk_id : int of unk id
pad_id : int of padding id
Vocab_files
-------------
>>> Look as follow, includes `start_word` , `end_word` but no `unk_word` .
>>> a 969108
>>> <S> 586368
>>> </S> 586368
>>> . 440479
>>> on 213612
>>> of 202290
>>> the 196219
>>> in 182598
>>> with 152984
>>> and 139109
>>> is 97322
"""
def __init__(self,
vocab_file,
start_word="<S>",
end_word="</S>",
unk_word="<UNK>",
pad_word="<PAD>"):
if not tf.gfile.Exists(vocab_file):
tf.logging.fatal("Vocab file %s not found.", vocab_file)
tf.logging.info("Initializing vocabulary from file: %s", vocab_file)
with tf.gfile.GFile(vocab_file, mode="r") as f:
reverse_vocab = list(f.readlines())
reverse_vocab = [line.split()[0] for line in reverse_vocab]
assert start_word in reverse_vocab
assert end_word in reverse_vocab
if unk_word not in reverse_vocab:
reverse_vocab.append(unk_word)
vocab = dict([(x, y) for (y, x) in enumerate(reverse_vocab)])
print(" [TL] Vocabulary from %s : %s %s %s" % (vocab_file, start_word, end_word, unk_word))
print(" vocabulary with %d words (includes start_word, end_word, unk_word)" % len(vocab))
# tf.logging.info(" vocabulary with %d words" % len(vocab))
self.vocab = vocab # vocab[word] = id
self.reverse_vocab = reverse_vocab # reverse_vocab[id] = word
# Save special word ids.
self.start_id = vocab[start_word]
self.end_id = vocab[end_word]
self.unk_id = vocab[unk_word]
self.pad_id = vocab[pad_word]<|fim▁hole|> print(" end_id: %d" % self.end_id)
print(" unk_id: %d" % self.unk_id)
print(" pad_id: %d" % self.pad_id)
def word_to_id(self, word):
"""Returns the integer word id of a word string."""
if word in self.vocab:
return self.vocab[word]
else:
return self.unk_id
def id_to_word(self, word_id):
"""Returns the word string of an integer word id."""
if word_id >= len(self.reverse_vocab):
return self.reverse_vocab[self.unk_id]
else:
return self.reverse_vocab[word_id]
def process_sentence(sentence, start_word="<S>", end_word="</S>"):
"""Converts a sentence string into a list of string words, add start_word and end_word,
see ``create_vocab()`` and ``tutorial_tfrecord3.py``.
Parameter
---------
sentence : a sentence in string.
start_word : a string or None, if None, non start word will be appended.
end_word : a string or None, if None, non end word will be appended.
Returns
---------
A list of strings; the processed caption.
Examples
-----------
>>> c = "how are you?"
>>> c = tl.nlp.process_sentence(c)
>>> print(c)
... ['<S>', 'how', 'are', 'you', '?', '</S>']
"""
try:
import nltk
except:
raise Exception("Hint : NLTK is required.")
if start_word is not None:
process_sentence = [start_word]
else:
process_sentence = []
process_sentence.extend(nltk.tokenize.word_tokenize(sentence.lower()))
if end_word is not None:
process_sentence.append(end_word)
return process_sentence
def create_vocab(sentences, word_counts_output_file, min_word_count=1):
"""Creates the vocabulary of word to word_id, see create_vocab() and ``tutorial_tfrecord3.py``.
The vocabulary is saved to disk in a text file of word counts. The id of each
word in the file is its corresponding 0-based line number.
Parameters
------------
sentences : a list of lists of strings.
word_counts_output_file : A string
The file name.
min_word_count : a int
Minimum number of occurrences for a word.
Returns
--------
- tl.nlp.SimpleVocabulary object.
Mores
-----
- ``tl.nlp.build_vocab()``
Examples
--------
>>> captions = ["one two , three", "four five five"]
>>> processed_capts = []
>>> for c in captions:
>>> c = tl.nlp.process_sentence(c, start_word="<S>", end_word="</S>")
>>> processed_capts.append(c)
>>> print(processed_capts)
...[['<S>', 'one', 'two', ',', 'three', '</S>'], ['<S>', 'four', 'five', 'five', '</S>']]
>>> tl.nlp.create_vocab(processed_capts, word_counts_output_file='vocab.txt', min_word_count=1)
... [TL] Creating vocabulary.
... Total words: 8
... Words in vocabulary: 8
... Wrote vocabulary file: vocab.txt
>>> vocab = tl.nlp.Vocabulary('vocab.txt', start_word="<S>", end_word="</S>", unk_word="<UNK>")
... INFO:tensorflow:Initializing vocabulary from file: vocab.txt
... [TL] Vocabulary from vocab.txt : <S> </S> <UNK>
... vocabulary with 10 words (includes start_word, end_word, unk_word)
... start_id: 2
... end_id: 3
... unk_id: 9
... pad_id: 0
"""
from collections import Counter
print(" [TL] Creating vocabulary.")
counter = Counter()
for c in sentences:
counter.update(c)
# print('c',c)
print(" Total words: %d" % len(counter))
# Filter uncommon words and sort by descending count.
word_counts = [x for x in counter.items() if x[1] >= min_word_count]
word_counts.sort(key=lambda x: x[1], reverse=True)
word_counts = [("<PAD>", 0)] + word_counts # 1st id should be reserved for padding
# print(word_counts)
print(" Words in vocabulary: %d" % len(word_counts))
# Write out the word counts file.
with tf.gfile.FastGFile(word_counts_output_file, "w") as f:
f.write("\n".join(["%s %d" % (w, c) for w, c in word_counts]))
print(" Wrote vocabulary file: %s" % word_counts_output_file)
# Create the vocabulary dictionary.
reverse_vocab = [x[0] for x in word_counts]
unk_id = len(reverse_vocab)
vocab_dict = dict([(x, y) for (y, x) in enumerate(reverse_vocab)])
vocab = SimpleVocabulary(vocab_dict, unk_id)
return vocab
## Vector representations of words
def simple_read_words(filename="nietzsche.txt"):
"""Read context from file without any preprocessing.
Parameters
----------
filename : a string
A file path (like .txt file)
Returns
--------
The context in a string
"""
with open("nietzsche.txt", "r") as f:
words = f.read()
return words
def read_words(filename="nietzsche.txt", replace = ['\n', '<eos>']):
"""File to list format context. Note that, this script can not handle punctuations.
For customized read_words method, see ``tutorial_generate_text.py``.
Parameters
----------
filename : a string
A file path (like .txt file),
replace : a list
[original string, target string], to disable replace use ['', '']
Returns
--------
The context in a list, split by space by default, and use ``'<eos>'`` to represent ``'\n'``,
e.g. ``[... 'how', 'useful', 'it', "'s" ... ]``.
Code References
---------------
- `tensorflow.models.rnn.ptb.reader <https://github.com/tensorflow/tensorflow/tree/master/tensorflow/models/rnn/ptb>`_
"""
with tf.gfile.GFile(filename, "r") as f:
try: # python 3.4 or older
context_list = f.read().replace(*replace).split()
except: # python 3.5
f.seek(0)
replace = [x.encode('utf-8') for x in replace]
context_list = f.read().replace(*replace).split()
return context_list
def read_analogies_file(eval_file='questions-words.txt', word2id={}):
"""Reads through an analogy question file, return its id format.
Parameters
----------
eval_data : a string
The file name.
word2id : a dictionary
Mapping words to unique IDs.
Returns
--------
analogy_questions : a [n, 4] numpy array containing the analogy question's
word ids.
questions_skipped: questions skipped due to unknown words.
Examples
---------
>>> eval_file should be in this format :
>>> : capital-common-countries
>>> Athens Greece Baghdad Iraq
>>> Athens Greece Bangkok Thailand
>>> Athens Greece Beijing China
>>> Athens Greece Berlin Germany
>>> Athens Greece Bern Switzerland
>>> Athens Greece Cairo Egypt
>>> Athens Greece Canberra Australia
>>> Athens Greece Hanoi Vietnam
>>> Athens Greece Havana Cuba
...
>>> words = tl.files.load_matt_mahoney_text8_dataset()
>>> data, count, dictionary, reverse_dictionary = \
tl.nlp.build_words_dataset(words, vocabulary_size, True)
>>> analogy_questions = tl.nlp.read_analogies_file( \
eval_file='questions-words.txt', word2id=dictionary)
>>> print(analogy_questions)
... [[ 3068 1248 7161 1581]
... [ 3068 1248 28683 5642]
... [ 3068 1248 3878 486]
... ...,
... [ 1216 4309 19982 25506]
... [ 1216 4309 3194 8650]
... [ 1216 4309 140 312]]
"""
questions = []
questions_skipped = 0
with open(eval_file, "rb") as analogy_f:
for line in analogy_f:
if line.startswith(b":"): # Skip comments.
continue
words = line.strip().lower().split(b" ") # lowercase
ids = [word2id.get(w.strip()) for w in words]
if None in ids or len(ids) != 4:
questions_skipped += 1
else:
questions.append(np.array(ids))
print("Eval analogy file: ", eval_file)
print("Questions: ", len(questions))
print("Skipped: ", questions_skipped)
analogy_questions = np.array(questions, dtype=np.int32)
return analogy_questions
def build_vocab(data):
"""Build vocabulary.
Given the context in list format.
Return the vocabulary, which is a dictionary for word to id.
e.g. {'campbell': 2587, 'atlantic': 2247, 'aoun': 6746 .... }
Parameters
----------
data : a list of string
the context in list format
Returns
--------
word_to_id : a dictionary
mapping words to unique IDs. e.g. {'campbell': 2587, 'atlantic': 2247, 'aoun': 6746 .... }
Code References
---------------
- `tensorflow.models.rnn.ptb.reader <https://github.com/tensorflow/tensorflow/tree/master/tensorflow/models/rnn/ptb>`_
Examples
--------
>>> data_path = os.getcwd() + '/simple-examples/data'
>>> train_path = os.path.join(data_path, "ptb.train.txt")
>>> word_to_id = build_vocab(read_words(train_path))
"""
# data = _read_words(filename)
counter = collections.Counter(data)
# print('counter', counter) # dictionary for the occurrence number of each word, e.g. 'banknote': 1, 'photography': 1, 'kia': 1
count_pairs = sorted(counter.items(), key=lambda x: (-x[1], x[0]))
# print('count_pairs',count_pairs) # convert dictionary to list of tuple, e.g. ('ssangyong', 1), ('swapo', 1), ('wachter', 1)
words, _ = list(zip(*count_pairs))
word_to_id = dict(zip(words, range(len(words))))
# print(words) # list of words
# print(word_to_id) # dictionary for word to id, e.g. 'campbell': 2587, 'atlantic': 2247, 'aoun': 6746
return word_to_id
def build_reverse_dictionary(word_to_id):
"""Given a dictionary for converting word to integer id.
Returns a reverse dictionary for converting a id to word.
Parameters
----------
word_to_id : dictionary
mapping words to unique ids
Returns
--------
reverse_dictionary : a dictionary
mapping ids to words
"""
reverse_dictionary = dict(zip(word_to_id.values(), word_to_id.keys()))
return reverse_dictionary
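# Illustrative usage sketch (added for clarity; not part of the original module):
#
#     word_to_id = build_vocab(read_words("ptb.train.txt"))   # assumes the PTB text file exists
#     id_to_word = build_reverse_dictionary(word_to_id)
#     id_to_word[word_to_id['the']]    # -> 'the'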
def build_words_dataset(words=[], vocabulary_size=50000, printable=True, unk_key = 'UNK'):
"""Build the words dictionary and replace rare words with 'UNK' token.
The most common word has the smallest integer id.
Parameters
----------
words : a list of string or byte
The context in list format. You may need to do preprocessing on the words,
such as lower case, remove marks etc.
vocabulary_size : an int
The maximum vocabulary size, limiting the vocabulary size.
Then the script replaces rare words with 'UNK' token.
printable : boolean
Whether to print the read vocabulary size of the given words.
unk_key : a string
Unknown words = unk_key
Returns
--------
data : a list of integer
The context in a list of ids
count : a list of tuple and list
count[0] is a list : the number of rare words\n
count[1:] are tuples : the number of occurrence of each word\n
e.g. [['UNK', 418391], (b'the', 1061396), (b'of', 593677), (b'and', 416629), (b'one', 411764)]
dictionary : a dictionary
word_to_id, mapping words to unique IDs.
reverse_dictionary : a dictionary
id_to_word, mapping id to unique word.
Examples
--------
>>> words = tl.files.load_matt_mahoney_text8_dataset()
>>> vocabulary_size = 50000
>>> data, count, dictionary, reverse_dictionary = tl.nlp.build_words_dataset(words, vocabulary_size)
Code References
-----------------
- `tensorflow/examples/tutorials/word2vec/word2vec_basic.py <https://github.com/tensorflow/tensorflow/blob/r0.7/tensorflow/examples/tutorials/word2vec/word2vec_basic.py>`_
"""
import collections
count = [[unk_key, -1]]
count.extend(collections.Counter(words).most_common(vocabulary_size - 1))
dictionary = dict()
for word, _ in count:
dictionary[word] = len(dictionary)
data = list()
unk_count = 0
for word in words:
if word in dictionary:
index = dictionary[word]
else:
index = 0 # dictionary['UNK']
unk_count += 1
data.append(index)
count[0][1] = unk_count
reverse_dictionary = dict(zip(dictionary.values(), dictionary.keys()))
if printable:
print('Real vocabulary size %d' % len(collections.Counter(words).keys()))
print('Limited vocabulary size {}'.format(vocabulary_size))
assert len(collections.Counter(words).keys()) >= vocabulary_size , \
"the limited vocabulary_size must be less than or equal to the read vocabulary_size"
return data, count, dictionary, reverse_dictionary
def words_to_word_ids(data=[], word_to_id={}, unk_key = 'UNK'):
"""Given a context (words) in list format and the vocabulary,
Returns a list of IDs to represent the context.
Parameters
----------
data : a list of string or byte
the context in list format
word_to_id : a dictionary
mapping words to unique IDs.
unk_key : a string
Unknown words = unk_key
Returns
--------
A list of IDs to represent the context.
Examples
--------
>>> words = tl.files.load_matt_mahoney_text8_dataset()
>>> vocabulary_size = 50000
>>> data, count, dictionary, reverse_dictionary = \
... tl.nlp.build_words_dataset(words, vocabulary_size, True)
>>> context = [b'hello', b'how', b'are', b'you']
>>> ids = tl.nlp.words_to_word_ids(words, dictionary)
>>> context = tl.nlp.word_ids_to_words(ids, reverse_dictionary)
>>> print(ids)
... [6434, 311, 26, 207]
>>> print(context)
... [b'hello', b'how', b'are', b'you']
Code References
---------------
- `tensorflow.models.rnn.ptb.reader <https://github.com/tensorflow/tensorflow/tree/master/tensorflow/models/rnn/ptb>`_
"""
# if isinstance(data[0], six.string_types):
# print(type(data[0]))
# # exit()
# print(data[0])
# print(word_to_id)
# return [word_to_id[str(word)] for word in data]
# else:
word_ids = []
for word in data:
if word_to_id.get(word) is not None:
word_ids.append(word_to_id[word])
else:
word_ids.append(word_to_id[unk_key])
return word_ids
# return [word_to_id[word] for word in data] # this one
# if isinstance(data[0], str):
# # print('is a string object')
# return [word_to_id[word] for word in data]
# else:#if isinstance(s, bytes):
# # print('is a unicode object')
# # print(data[0])
# return [word_to_id[str(word)] f
def word_ids_to_words(data, id_to_word):
"""Given a context (ids) in list format and the vocabulary,
Returns a list of words to represent the context.
Parameters
----------
data : a list of integer
the context in list format
id_to_word : a dictionary
mapping id to unique word.
Returns
--------
A list of string or byte to represent the context.
Examples
---------
>>> see words_to_word_ids
"""
return [id_to_word[i] for i in data]
def save_vocab(count=[], name='vocab.txt'):
"""Save the vocabulary to a file so the model can be reloaded.
Parameters
----------
count : a list of tuple and list
count[0] is a list : the number of rare words\n
count[1:] are tuples : the number of occurrence of each word\n
e.g. [['UNK', 418391], (b'the', 1061396), (b'of', 593677), (b'and', 416629), (b'one', 411764)]
Examples
---------
>>> words = tl.files.load_matt_mahoney_text8_dataset()
>>> vocabulary_size = 50000
>>> data, count, dictionary, reverse_dictionary = \
... tl.nlp.build_words_dataset(words, vocabulary_size, True)
>>> tl.nlp.save_vocab(count, name='vocab_text8.txt')
>>> vocab_text8.txt
... UNK 418391
... the 1061396
... of 593677
... and 416629
... one 411764
... in 372201
... a 325873
... to 316376
"""
pwd = os.getcwd()
vocabulary_size = len(count)
with open(os.path.join(pwd, name), "w") as f:
for i in xrange(vocabulary_size):
f.write("%s %d\n" % (tf.compat.as_text(count[i][0]), count[i][1]))
print("%d vocab saved to %s in %s" % (vocabulary_size, name, pwd))
## Functions for translation
def basic_tokenizer(sentence, _WORD_SPLIT=re.compile(b"([.,!?\"':;)(])")):
"""Very basic tokenizer: split the sentence into a list of tokens.
Parameters
-----------
sentence : tensorflow.python.platform.gfile.GFile Object
_WORD_SPLIT : regular expression for word spliting.
Examples
--------
>>> see create_vocabulary
>>> from tensorflow.python.platform import gfile
>>> train_path = "wmt/giga-fren.release2"
>>> with gfile.GFile(train_path + ".en", mode="rb") as f:
>>> for line in f:
>>> tokens = tl.nlp.basic_tokenizer(line)
>>> print(tokens)
>>> exit()
... [b'Changing', b'Lives', b'|', b'Changing', b'Society', b'|', b'How',
... b'It', b'Works', b'|', b'Technology', b'Drives', b'Change', b'Home',
... b'|', b'Concepts', b'|', b'Teachers', b'|', b'Search', b'|', b'Overview',
... b'|', b'Credits', b'|', b'HHCC', b'Web', b'|', b'Reference', b'|',
... b'Feedback', b'Virtual', b'Museum', b'of', b'Canada', b'Home', b'Page']
References
----------
- Code from ``/tensorflow/models/rnn/translation/data_utils.py``
"""
words = []
sentence = tf.compat.as_bytes(sentence)
for space_separated_fragment in sentence.strip().split():
words.extend(re.split(_WORD_SPLIT, space_separated_fragment))
return [w for w in words if w]
def create_vocabulary(vocabulary_path, data_path, max_vocabulary_size,
tokenizer=None, normalize_digits=True,
_DIGIT_RE=re.compile(br"\d"),
_START_VOCAB=[b"_PAD", b"_GO", b"_EOS", b"_UNK"]):
"""Create vocabulary file (if it does not exist yet) from data file.
Data file is assumed to contain one sentence per line. Each sentence is
tokenized and digits are normalized (if normalize_digits is set).
Vocabulary contains the most-frequent tokens up to max_vocabulary_size.
We write it to vocabulary_path in a one-token-per-line format, so that later
token in the first line gets id=0, second line gets id=1, and so on.
Parameters
-----------
vocabulary_path : path where the vocabulary will be created.
data_path : data file that will be used to create vocabulary.
max_vocabulary_size : limit on the size of the created vocabulary.
tokenizer : a function to use to tokenize each data sentence.
if None, basic_tokenizer will be used.
normalize_digits : Boolean
if true, all digits are replaced by 0s.
References
----------
- Code from ``/tensorflow/models/rnn/translation/data_utils.py``
"""
if not gfile.Exists(vocabulary_path):
print("Creating vocabulary %s from data %s" % (vocabulary_path, data_path))
vocab = {}
with gfile.GFile(data_path, mode="rb") as f:
counter = 0
for line in f:
counter += 1
if counter % 100000 == 0:
print(" processing line %d" % counter)
tokens = tokenizer(line) if tokenizer else basic_tokenizer(line)
for w in tokens:
word = re.sub(_DIGIT_RE, b"0", w) if normalize_digits else w
if word in vocab:
vocab[word] += 1
else:
vocab[word] = 1
vocab_list = _START_VOCAB + sorted(vocab, key=vocab.get, reverse=True)
if len(vocab_list) > max_vocabulary_size:
vocab_list = vocab_list[:max_vocabulary_size]
with gfile.GFile(vocabulary_path, mode="wb") as vocab_file:
for w in vocab_list:
vocab_file.write(w + b"\n")
else:
print("Vocabulary %s from data %s exists" % (vocabulary_path, data_path))
def initialize_vocabulary(vocabulary_path):
"""Initialize vocabulary from file, return the word_to_id (dictionary)
and id_to_word (list).
We assume the vocabulary is stored one-item-per-line, so a file:\n
dog\n
cat\n
will result in a vocabulary {"dog": 0, "cat": 1}, and this function will
also return the reversed-vocabulary ["dog", "cat"].
Parameters
-----------
vocabulary_path : path to the file containing the vocabulary.
Returns
--------
vocab : a dictionary
Word to id. A dictionary mapping string to integers.
rev_vocab : a list
Id to word. The reversed vocabulary (a list, which reverses the vocabulary mapping).
Examples
---------
>>> Assume 'test' contains
... dog
... cat
... bird
>>> vocab, rev_vocab = tl.nlp.initialize_vocabulary("test")
>>> print(vocab)
>>> {b'cat': 1, b'dog': 0, b'bird': 2}
>>> print(rev_vocab)
>>> [b'dog', b'cat', b'bird']
Raises
-------
ValueError : if the provided vocabulary_path does not exist.
"""
if gfile.Exists(vocabulary_path):
rev_vocab = []
with gfile.GFile(vocabulary_path, mode="rb") as f:
rev_vocab.extend(f.readlines())
rev_vocab = [tf.compat.as_bytes(line.strip()) for line in rev_vocab]
vocab = dict([(x, y) for (y, x) in enumerate(rev_vocab)])
return vocab, rev_vocab
else:
raise ValueError("Vocabulary file %s not found.", vocabulary_path)
def sentence_to_token_ids(sentence, vocabulary,
tokenizer=None, normalize_digits=True,
UNK_ID=3, _DIGIT_RE=re.compile(br"\d")):
"""Convert a string to list of integers representing token-ids.
For example, a sentence "I have a dog" may become tokenized into
["I", "have", "a", "dog"] and with vocabulary {"I": 1, "have": 2,
"a": 4, "dog": 7"} this function will return [1, 2, 4, 7].
Parameters
-----------
sentence : tensorflow.python.platform.gfile.GFile Object
The sentence in bytes format to convert to token-ids.\n
see basic_tokenizer(), data_to_token_ids()
vocabulary : a dictionary mapping tokens to integers.
tokenizer : a function to use to tokenize each sentence;
If None, basic_tokenizer will be used.
normalize_digits : Boolean
If true, all digits are replaced by 0s.
Returns
--------
A list of integers, the token-ids for the sentence.
"""
if tokenizer:
words = tokenizer(sentence)
else:
words = basic_tokenizer(sentence)
if not normalize_digits:
return [vocabulary.get(w, UNK_ID) for w in words]
# Normalize digits by 0 before looking words up in the vocabulary.
return [vocabulary.get(re.sub(_DIGIT_RE, b"0", w), UNK_ID) for w in words]
def data_to_token_ids(data_path, target_path, vocabulary_path,
tokenizer=None, normalize_digits=True,
UNK_ID=3, _DIGIT_RE=re.compile(br"\d")):
"""Tokenize data file and turn into token-ids using given vocabulary file.
This function loads data line-by-line from data_path, calls the above
sentence_to_token_ids, and saves the result to target_path. See comment
for sentence_to_token_ids on the details of token-ids format.
Parameters
-----------
data_path : path to the data file in one-sentence-per-line format.
target_path : path where the file with token-ids will be created.
vocabulary_path : path to the vocabulary file.
tokenizer : a function to use to tokenize each sentence;
if None, basic_tokenizer will be used.
normalize_digits : Boolean; if true, all digits are replaced by 0s.
References
----------
- Code from ``/tensorflow/models/rnn/translation/data_utils.py``
"""
if not gfile.Exists(target_path):
print("Tokenizing data in %s" % data_path)
vocab, _ = initialize_vocabulary(vocabulary_path)
with gfile.GFile(data_path, mode="rb") as data_file:
with gfile.GFile(target_path, mode="w") as tokens_file:
counter = 0
for line in data_file:
counter += 1
if counter % 100000 == 0:
print(" tokenizing line %d" % counter)
token_ids = sentence_to_token_ids(line, vocab, tokenizer,
normalize_digits, UNK_ID=UNK_ID,
_DIGIT_RE=_DIGIT_RE)
tokens_file.write(" ".join([str(tok) for tok in token_ids]) + "\n")
else:
print("Target path %s exists" % target_path)<|fim▁end|>
|
print(" start_id: %d" % self.start_id)
|
<|file_name|>object_list.js<|end_file_name|><|fim▁begin|>//Here is all the code that builds the list of objects on the right-hand
//side of the Labelme tool.
//The styles for this tools are defined inside:
//annotationTools/css/object_list.css
var IsHidingAllPolygons = false;
var ProgressChecking = false;
//var IsHidingAllFilled = true;
var ListOffSet = 0;
//This function creates and populates the list
function RenderObjectList() {
// If object list has been rendered, then remove it:
var scrollPos = $("#anno_list").scrollTop();
if($('#anno_list').length) {
$('#anno_list').remove();
}
var html_str = '<div class="object_list" id="anno_list" style="border:0px solid black;z-index:0;" ondrop="drop(event, -1)" ondragenter="return dragEnter(event)" ondragover="return dragOver(event)">';
var Npolygons = LMnumberOfObjects(LM_xml);
// name of image
html_str += '<p style="font-size:14px;line-height:100%"><h>Image: <a href="javascript:GetThisURL();">'+main_media.file_info.im_name+'</a></h></p>';
if (!ProgressChecking){
//Checks progress by filling all polygons
html_str += '<p style="font-size:12px;line-height:50%" id="check_progress" ><a href="javascript:CheckProgress();"><b>Check progress</b></a> (s)</p>';
} else {//Clear Progress check
html_str += '<p style="font-size:12px;line-height:50%" id="end_check_progress"><a href="javascript:EndCheckProgress();" style="color:red"><b>Clear progress check</b></a> (h)</p>';
}
if (IsHidingAllPolygons){ //Polygons hidden, press to show outlines permanently
html_str += '<p style="font-size:12px;line-height:50%"><a id="hold_poly" href="javascript:ShowAllPolygons();"><b>Press to hold outlines</b></a></p>';
} else
{ //Outlines held status msg
html_str += '<p style="font-size:12px;line-height:50%"><a id="poly_held" style="text-decoration:none; font-style:italic; color:#708090;">Outlines held (\'Hide all\' to release)</a></p>';
}
//Hide all polygons
html_str += '<p style="font-size:12px;line-height:50%"><a id="hide_all" href="javascript:HideAllPolygons();"><b>Hide all </b></a></p>';
// Create DIV
html_str += '<u><i>'+ Npolygons +'</i> classes labeled in all:</u>';
html_str += '<ol>';
for(var i=0; i < Npolygons; i++) {
html_str += '<div class="objectListLink" id="LinkAnchor' + i + '" style="z-index:1; margin-left:0em"> ';
html_str += '<li>';
// show object name:
html_str += '<a class="objectListLink" id="Link' + i + '" '+
'href="javascript:main_handler.AnnotationLinkClick('+ i +');" '+
'onmouseover="main_handler.AnnotationLinkMouseOver('+ i +');" ' +
'onmouseout="main_handler.AnnotationLinkMouseOut();" ';
html_str += '>';
var obj_name = LMgetObjectField(LM_xml,i,'name');
html_str += obj_name;
html_str += '</a>';
html_str += '</li></div>';
}
html_str += '</ol><p><br/></p></div>';
// Attach annotation list to 'anno_anchor' DIV element:
$('#anno_anchor').append(html_str);
$('#Link'+add_parts_to).css('font-weight',700); //
$('#anno_list').scrollTop(scrollPos);
}
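// Illustrative usage sketch (added for clarity; not part of the original tool):
// the list is typically rebuilt after any annotation change, and the link-color
// helpers are wired to hover events, e.g.
//
//   RenderObjectList();        // rebuild the right-hand object list from LM_xml
//   ChangeLinkColorFG(idx);    // highlight entry idx while its polygon is hovered
//   ChangeLinkColorBG(idx);    // restore entry idx when the hover ends
//
// All three assume LM_xml and main_canvas have already been initialized elsewhere.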
function RemoveObjectList() {
$('#anno_list').remove();
}
function ChangeLinkColorBG(idx) {
if(document.getElementById('Link'+idx)) {
var isDeleted = parseInt($(LM_xml).children("annotation").children("object").eq(idx).children("deleted").text());
if(isDeleted) document.getElementById('Link'+idx).style.color = '#888888';
else document.getElementById('Link'+idx).style.color = '#0000FF';
var anid = main_canvas.GetAnnoIndex(idx);
// If we're hiding all polygons, then remove rendered polygon from canvas:
if(IsHidingAllPolygons && main_canvas.annotations[anid].hidden) {
main_canvas.annotations[anid].DeletePolygon();
}
}
}
function ChangeLinkColorFG(idx) {
document.getElementById('Link'+idx).style.color = '#FF0000';
var anid = main_canvas.GetAnnoIndex(idx);
// If we're hiding all polygons, then render polygon on canvas:
if(IsHidingAllPolygons && main_canvas.annotations[anid].hidden) {
main_canvas.annotations[anid].DrawPolygon(main_media.GetImRatio(), main_canvas.annotations[anid].GetPtsX(), main_canvas.annotations[anid].GetPtsY());
}
}
function HideAllPolygons() {
if(!edit_popup_open) {
IsHidingAllPolygons = true;
ProgressChecking = false;
// Delete all polygons from the canvas:
for(var i = 0; i < main_canvas.annotations.length; i++) {
main_canvas.annotations[i].DeletePolygon();
main_canvas.annotations[i].hidden = true;
}
<|fim▁hole|> else {
alert('Close edit popup bubble first');
}
}
function ShowAllPolygons() {
if (ProgressChecking) return; //holding outlines is not allowed to be triggered while a progress check is ongoing
// Set global variable:
IsHidingAllPolygons = false;
ProgressChecking = false;
// Render the annotations:
main_canvas.UnhideAllAnnotations();
main_canvas.RenderAnnotations();
// swap hold poly with poly held
$('#hold_poly').replaceWith('<a id="poly_held" style="text-decoration:none; font-style:italic; color:#708090;">Outlines held (\'Hide all\' to release)</a>');
}
function CheckProgress() {
ProgressChecking = true;
//clear all polygons first
if(!edit_popup_open) {
// Delete all polygons from the canvas:
for(var i = 0; i < main_canvas.annotations.length; i++) {
main_canvas.annotations[i].DeletePolygon();
main_canvas.annotations[i].hidden = true;
}
}
else {
alert('Close edit popup bubble first');
}
//reset annotations to take into account user editing labels while checking progress.
main_canvas.annotations.length = 0;
// Attach valid annotations to the main_canvas:
for(var pp = 0; pp < LMnumberOfObjects(LM_xml); pp++) {
// Attach to main_canvas:
main_canvas.AttachAnnotation(new annotation(pp));
if (!video_mode && LMgetObjectField(LM_xml, pp, 'x') == null){
main_canvas.annotations[main_canvas.annotations.length -1].SetType(1);
main_canvas.annotations[main_canvas.annotations.length -1].scribble = new scribble(pp);
}
}
// Render the annotations:
main_canvas.UnhideAllAnnotations();
main_canvas.RenderAnnotations();
//Fill all
for (var i= 0; i < LMnumberOfObjects(LM_xml); i++){
main_canvas.annotations[i].FillPolygon();
}
console.log("check progress");
// Create "hide all" button:
$('#show_all_filled_button').replaceWith('<a id="hide_all_filled_button" href="javascript:HideAllFilled();">Hide back</a>');
$('#check_progress').replaceWith('<p style="font-size:12px;line-height:50%" id="end_check_progress"><a href="javascript:EndCheckProgress();" style="color:red"><b>Clear progress check</b></a> (h)</p>');
}
function EndCheckProgress() {
if(!edit_popup_open) {
ProgressChecking = false;
if(IsHidingAllPolygons){
// Delete all polygons from the canvas:
for(var i = 0; i < main_canvas.annotations.length; i++) {
main_canvas.annotations[i].DeletePolygon();
main_canvas.annotations[i].hidden = true;
}
}
else {//if we are holding all polygons
for(var i = 0; i < main_canvas.annotations.length; i++) {
main_canvas.annotations[i].UnfillPolygon();
}
}
console.log("end check progress");
// Create "show all" button:
$('#end_check_progress').replaceWith('<p style="font-size:12px;line-height:50%" id="check_progress" ><a href="javascript:CheckProgress();"><b>Check progress</b></a> (s)</p>');
}
else {
alert('Close edit popup bubble first');
}
}
//*******************************************
//Private functions:
//*******************************************
//DRAG FUNCTIONS
function drag(event, part_id) {
// stores the object id in the data that is being dragged.
event.dataTransfer.setData("Text", part_id);
}
function dragend(event, object_id) {
event.preventDefault();
// Write XML to server:
WriteXML(SubmitXmlUrl,LM_xml,function(){return;});
}
function dragEnter(event) {
event.preventDefault();
return true;
}
function dragOver(event) {
event.preventDefault();
}
function drop(event, object_id) {
event.preventDefault();
var part_id=event.dataTransfer.getData("Text");
event.stopPropagation();
// modify part structure
if(object_id!=part_id) {
addPart(object_id, part_id);
// redraw object list
RenderObjectList();
}
}<|fim▁end|>
|
// Create "show all" button:
$('#poly_held').replaceWith('<a id="hold_poly" href="javascript:ShowAllPolygons();"><b>Press to hold outlines</b></a>');
$('#end_check_progress').replaceWith('<p style="font-size:12px;line-height:50%" id="check_progress" ><a href="javascript:CheckProgress();"><b>Check progress</b></a> (s)</p>');
}
|
<|file_name|>snagfilms_scraper.py<|end_file_name|><|fim▁begin|>"""
SALTS XBMC Addon
Copyright (C) 2014 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import urllib
import urlparse
import kodi
import log_utils # @UnusedImport
import dom_parser
import json
from salts_lib import scraper_utils
from salts_lib.constants import FORCE_NO_MATCH
from salts_lib.constants import VIDEO_TYPES
from salts_lib.constants import XHR
from salts_lib.utils2 import i18n
import scraper
BASE_URL = 'http://www.snagfilms.com'
SOURCE_BASE_URL = 'http://mp4.snagfilms.com'
SEARCH_URL = '/apis/search.json'
SEARCH_TYPES = {VIDEO_TYPES.MOVIE: 'film', VIDEO_TYPES.TVSHOW: 'show'}
class Scraper(scraper.Scraper):
base_url = BASE_URL
def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
self.timeout = timeout
self.base_url = kodi.get_setting('%s-base_url' % (self.get_name()))
self.username = kodi.get_setting('%s-username' % (self.get_name()))
self.password = kodi.get_setting('%s-password' % (self.get_name()))
@classmethod
def provides(cls):
return frozenset([VIDEO_TYPES.MOVIE, VIDEO_TYPES.TVSHOW, VIDEO_TYPES.EPISODE])
@classmethod
def get_name(cls):
return 'SnagFilms'
def get_sources(self, video):
source_url = self.get_url(video)
hosters = []
if source_url and source_url != FORCE_NO_MATCH:
page_url = urlparse.urljoin(self.base_url, source_url)
html = self._http_get(page_url, cache_limit=.5)
fragment = dom_parser.parse_dom(html, 'div', {'class': 'film-container'})
if fragment:
iframe_url = dom_parser.parse_dom(fragment[0], 'iframe', ret='src')
if iframe_url:
iframe_url = urlparse.urljoin(self.base_url, iframe_url[0])
headers = {'Referer': page_url}
html = self._http_get(iframe_url, headers=headers, cache_limit=.5)
sources = self._parse_sources_list(html)
for source in sources:
quality = sources[source]['quality']
host = self._get_direct_hostname(source)<|fim▁hole|> match = re.search('(\d+[a-z]bps)', source)
if match:
hoster['extra'] = match.group(1)
hosters.append(hoster)
hosters.sort(key=lambda x: x.get('extra', ''), reverse=True)
return hosters
def _get_episode_url(self, season_url, video):
episode_pattern = 'data-title\s*=\s*"Season\s+0*%s\s+Episode\s+0*%s[^>]*data-permalink\s*=\s*"([^"]+)' % (video.season, video.episode)
title_pattern = 'data-title\s*=\s*"Season\s+\d+\s+Episode\s+\d+\s*(?P<title>[^"]+)[^>]+data-permalink\s*=\s*"(?P<url>[^"]+)'
return self._default_get_episode_url(season_url, video, episode_pattern, title_pattern)
def search(self, video_type, title, year, season=''): # @UnusedVariable
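        # Query the site's JSON search API (as an XHR request) and keep results
        # whose year matches the requested one, or every result when no year is given.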
results = []
search_url = urlparse.urljoin(self.base_url, SEARCH_URL)
referer = urlparse.urljoin(self.base_url, '/search/?q=%s')
referer = referer % (urllib.quote_plus(title))
headers = {'Referer': referer}
headers.update(XHR)
params = {'searchTerm': title, 'type': SEARCH_TYPES[video_type], 'limit': 500}
html = self._http_get(search_url, params=params, headers=headers, auth=False, cache_limit=2)
js_data = scraper_utils.parse_json(html, search_url)
if 'results' in js_data:
for result in js_data['results']:
match_year = str(result.get('year', ''))
match_url = result.get('permalink', '')
match_title = result.get('title', '')
if not year or not match_year or year == match_year:
result = {'title': scraper_utils.cleanse_title(match_title), 'year': match_year, 'url': scraper_utils.pathify_url(match_url)}
results.append(result)
return results
@classmethod
def get_settings(cls):
settings = super(cls, cls).get_settings()
name = cls.get_name()
settings.append(' <setting id="%s-username" type="text" label=" %s" default="" visible="eq(-4,true)"/>' % (name, i18n('username')))
settings.append(' <setting id="%s-password" type="text" label=" %s" option="hidden" default="" visible="eq(-5,true)"/>' % (name, i18n('password')))
return settings
def _http_get(self, url, params=None, data=None, headers=None, auth=True, method=None, cache_limit=8):
        # Return a blank, uncached page when no username or password is configured.
if not self.username or not self.password:
return ''
html = super(self.__class__, self)._http_get(url, params=params, data=data, headers=headers, method=method, cache_limit=cache_limit)
if auth and not dom_parser.parse_dom(html, 'span', {'class': 'user-name'}):
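            # Not logged in yet: authenticate and then re-fetch the page, bypassing the cache.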
log_utils.log('Logging in for url (%s)' % (url), log_utils.LOGDEBUG)
self.__login()
html = super(self.__class__, self)._http_get(url, params=params, data=data, headers=headers, method=method, cache_limit=0)
return html
def __login(self):
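        # POST the credentials to the JSON login endpoint and return True when the
        # response reports a 'success' status.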
url = urlparse.urljoin(self.base_url, '/apis/v2/user/login.json')
data = {'email': self.username, 'password': self.password, 'rememberMe': True}
referer = urlparse.urljoin(self.base_url, '/login')
headers = {'Content-Type': 'application/json', 'Referer': referer}
headers.update(XHR)
html = super(self.__class__, self)._http_get(url, data=json.dumps(data), headers=headers, cache_limit=0)
js_data = scraper_utils.parse_json(html, url)
return js_data.get('status') == 'success'<|fim▁end|>
|
stream_url = source + scraper_utils.append_headers({'User-Agent': scraper_utils.get_ua(), 'Referer': iframe_url})
hoster = {'multi-part': False, 'host': host, 'class': self, 'quality': quality, 'views': None, 'rating': None, 'url': stream_url, 'direct': True}
|
<|file_name|>FContDef.java<|end_file_name|><|fim▁begin|>package org.opendaylight.opflex.genie.content.format.meta.mdl;
import java.util.TreeMap;
import org.opendaylight.opflex.genie.content.model.mclass.MClass;
import org.opendaylight.opflex.genie.content.model.mprop.MProp;
import org.opendaylight.opflex.genie.engine.file.WriteStats;
import org.opendaylight.opflex.genie.engine.format.*;
import org.opendaylight.opflex.genie.engine.model.Ident;
/**
* Created by midvorki on 8/4/14.
*/
public class FContDef
extends GenericFormatterTask
{
public FContDef(
FormatterCtx aInFormatterCtx,
FileNameRule aInFileNameRule,
Indenter aInIndenter,
BlockFormatDirective aInHeaderFormatDirective,
BlockFormatDirective aInCommentFormatDirective,
boolean aInIsUserFile,
WriteStats aInStats)
{
super(aInFormatterCtx,
aInFileNameRule,
aInIndenter,
aInHeaderFormatDirective,
aInCommentFormatDirective,
aInIsUserFile,
aInStats);<|fim▁hole|> out.println();
MClass lRoot = MClass.getContainmentRoot();
genClass(0, lRoot);
}
private void genClass(int aInIndent, MClass aInClass)
{
out.print(aInIndent, aInClass.getFullConcatenatedName());
genProps(aInIndent + 1, aInClass);
genContained(aInIndent, aInClass);
}
private void genProps(int aInIndent, MClass aInClass)
{
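        // Print the class's properties as a bracketed [...] list, or simply end the
        // line when there are none.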
TreeMap<String,MProp> lProps = new TreeMap<String, MProp>();
aInClass.findProp(lProps, false);
if (!lProps.isEmpty())
{
out.print('[');
for (MProp lProp : lProps.values())
{
genProp(aInIndent,lProp);
}
out.println(']');
}
else
{
out.println();
}
}
private void genProp(int aInIndent, MProp aInProp)
{
out.println();
out.print(aInIndent,
aInProp.getLID().getName() +
"=<" +
aInProp.getType(false).getFullConcatenatedName() + "/" + aInProp.getType(true).getFullConcatenatedName() + "|group:" + aInProp.getGroup() + ">;");
}
private void genContained(int aInIndent, MClass aInParent)
{
TreeMap<Ident,MClass> lContained = new TreeMap<Ident,MClass>();
aInParent.getContainsClasses(lContained, true, true);
if (!lContained.isEmpty())
{
out.println(aInIndent, '{');
for (MClass lClass : lContained.values())
{
genClass(aInIndent+1, lClass);
}
out.println(aInIndent, '}');
}
}
}<|fim▁end|>
|
}
public void generate()
{
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from typing import Optional
from thinc.api import Model
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from .stop_words import STOP_WORDS
from .lex_attrs import LEX_ATTRS
from .lemmatizer import UkrainianLemmatizer
from ...language import Language
class UkrainianDefaults(Language.Defaults):<|fim▁hole|> tokenizer_exceptions = TOKENIZER_EXCEPTIONS
lex_attr_getters = LEX_ATTRS
stop_words = STOP_WORDS
class Ukrainian(Language):
lang = "uk"
Defaults = UkrainianDefaults
@Ukrainian.factory(
"lemmatizer",
assigns=["token.lemma"],
default_config={"model": None, "mode": "pymorphy2", "overwrite": False},
default_score_weights={"lemma_acc": 1.0},
)
def make_lemmatizer(
nlp: Language, model: Optional[Model], name: str, mode: str, overwrite: bool
):
return UkrainianLemmatizer(nlp.vocab, model, name, mode=mode, overwrite=overwrite)
__all__ = ["Ukrainian"]<|fim▁end|>
| |
<|file_name|>ProductPropertyInput.js<|end_file_name|><|fim▁begin|>/**
* @flow
* @module ProductPropertyInput
* @extends React.PureComponent
*
* @author Oleg Nosov <[email protected]>
* @license MIT
*
* @description
 * React form for a product property (options select only).
*
*/
import React, { PureComponent } from "react";
import { isObject } from "../../../helpers";
import type {
GetLocalization,
InputEvent,
ProductPropertyOption,
Prices,
} from "../../../types";
/**
* @typedef {Object.<string, number>} OptionIndex
*/
export type OptionIndex = {
[propertyName: string]: number,
};
/**
* @typedef {Object} OptionObject
*/
export type OptionObject = {|
onSelect?: (option: OptionObject) => void,
additionalCost?: Prices,
value: ProductPropertyOption,
|};
/** @ */
export type PropertyOption = ProductPropertyOption | OptionObject;
/** @ */
export type PropertyOptions = Array<PropertyOption>;
/** @ */
export type OnChange = (obj: { value: OptionIndex }) => void;
export type Props = {|
name: string,
options: PropertyOptions,
selectedOptionIndex: number,
currency: string,
onChange: OnChange,
getLocalization: GetLocalization,
|};
const defaultProps = {
selectedOptionIndex: 0,
};
export default class ProductPropertyInput extends PureComponent<Props, void> {
props: Props;
static defaultProps = defaultProps;
static displayName = "ProductPropertyInput";
/*
* If option value is an object, we need to extract primitive value
*/
static getOptionValue = (value: PropertyOption): ProductPropertyOption =>
isObject(value) ? ProductPropertyInput.getOptionValue(value.value) : value;
/*
* Generate select input options based on options values
*/
static generateOptionsSelectionList = (
options: PropertyOptions,
getLocalization: GetLocalization,
currency: string,
localizationScope: Object = {},
): Array<React$Element<*>> =>
options
.map(ProductPropertyInput.getOptionValue)
.map((optionValue, index) => (
<option key={optionValue} value={optionValue}>
{typeof optionValue === "string"
? getLocalization(optionValue, {<|fim▁hole|> cost:
(isObject(options[index].additionalCost) &&
options[index].additionalCost[currency]) ||
0,
}
: {}),
})
: optionValue}
</option>
));
handleSelectInputValueChange = ({ currentTarget }: InputEvent) => {
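    // Map the selected <option> value back to its index in props.options, call the
    // option's own onSelect callback when it defines one, then report the new index
    // to the parent through onChange.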
const { value: optionValue } = currentTarget;
const { name, options, onChange } = this.props;
const { getOptionValue } = ProductPropertyInput;
const selectedOptionIndex = options
.map(getOptionValue)
.indexOf(optionValue);
const selectedOption = options[selectedOptionIndex];
if (
isObject(selectedOption) &&
typeof selectedOption.onSelect === "function"
)
selectedOption.onSelect(selectedOption);
onChange({
value: { [name]: selectedOptionIndex },
});
};
render() {
const {
name,
options,
selectedOptionIndex,
currency,
getLocalization,
} = this.props;
const { handleSelectInputValueChange } = this;
const {
generateOptionsSelectionList,
getOptionValue,
} = ProductPropertyInput;
const localizationScope = {
name,
currency,
get localizedName() {
return getLocalization(name, localizationScope);
},
get localizedCurrency() {
return getLocalization(currency, localizationScope);
},
};
return (
<div className="form-group row">
<label
htmlFor={name}
className="col-xs-3 col-sm-3 col-md-3 col-lg-3 col-form-label"
>
{getLocalization("propertyLabel", localizationScope)}
</label>
<div className="col-xs-9 col-sm-9 col-md-9 col-lg-9">
<select
onChange={handleSelectInputValueChange}
className="form-control"
value={getOptionValue(options[selectedOptionIndex | 0])}
>
{generateOptionsSelectionList(
options,
getLocalization,
currency,
localizationScope,
)}
</select>
</div>
</div>
);
}
}<|fim▁end|>
|
...localizationScope,
...(isObject(options[index])
? {
|
<|file_name|>stream.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/// Stream channels
///
/// This is the flavor of channels which are optimized for one sender and one
/// receiver. The sender will be upgraded to a shared channel if the channel is
/// cloned.
///
/// High level implementation details can be found in the comment of the parent
/// module.
pub use self::Failure::*;
pub use self::UpgradeResult::*;
pub use self::SelectionResult::*;
use self::Message::*;
use core::prelude::*;
use core::cmp;
use core::isize;
use thread;
use sync::atomic::{AtomicIsize, AtomicUsize, Ordering, AtomicBool};
use sync::mpsc::Receiver;
use sync::mpsc::blocking::{self, SignalToken};
use sync::mpsc::spsc_queue as spsc;
const DISCONNECTED: isize = isize::MIN;
#[cfg(test)]
const MAX_STEALS: isize = 5;
#[cfg(not(test))]
const MAX_STEALS: isize = 1 << 20;
pub struct Packet<T> {
    queue: spsc::Queue<Message<T>>, // internal queue for all messages
cnt: AtomicIsize, // How many items are on this channel
steals: isize, // How many times has a port received without blocking?
to_wake: AtomicUsize, // SignalToken for the blocked thread to wake up
port_dropped: AtomicBool, // flag if the channel has been destroyed.
}
pub enum Failure<T> {
Empty,
Disconnected,
Upgraded(Receiver<T>),
}
pub enum UpgradeResult {
UpSuccess,
UpDisconnected,
UpWoke(SignalToken),
}
pub enum SelectionResult<T> {
SelSuccess,
SelCanceled,
SelUpgraded(SignalToken, Receiver<T>),
}
// Any message could contain an "upgrade request" to a new shared port, so the
// internal queue isn't a queue of T, but rather one of Message<T>
enum Message<T> {
Data(T),
GoUp(Receiver<T>),
}
impl<T> Packet<T> {
pub fn new() -> Packet<T> {
Packet {
queue: unsafe { spsc::Queue::new(128) },
cnt: AtomicIsize::new(0),
steals: 0,
to_wake: AtomicUsize::new(0),
port_dropped: AtomicBool::new(false),
}
}
pub fn send(&mut self, t: T) -> Result<(), T> {
        // If the other port has deterministically gone away, then we definitely
// must return the data back up the stack. Otherwise, the data is
// considered as being sent.
if self.port_dropped.load(Ordering::SeqCst) { return Err(t) }
match self.do_send(Data(t)) {
UpSuccess | UpDisconnected => {},
UpWoke(token) => { token.signal(); }
}
Ok(())
}
pub fn upgrade(&mut self, up: Receiver<T>) -> UpgradeResult {
// If the port has gone away, then there's no need to proceed any
// further.
if self.port_dropped.load(Ordering::SeqCst) { return UpDisconnected }
self.do_send(GoUp(up))
}
fn do_send(&mut self, t: Message<T>) -> UpgradeResult {
self.queue.push(t);
match self.cnt.fetch_add(1, Ordering::SeqCst) {
// As described in the mod's doc comment, -1 == wakeup
-1 => UpWoke(self.take_to_wake()),
            // As described before, SPSC queues must be >= -2
-2 => UpSuccess,
// Be sure to preserve the disconnected state, and the return value
// in this case is going to be whether our data was received or not.
// This manifests itself on whether we have an empty queue or not.
//
            // Primarily, we are required to drain the queue here because the port
// will never remove this data. We can only have at most one item to
// drain (the port drains the rest).
DISCONNECTED => {
self.cnt.store(DISCONNECTED, Ordering::SeqCst);
let first = self.queue.pop();
let second = self.queue.pop();
assert!(second.is_none());
match first {
Some(..) => UpSuccess, // we failed to send the data
None => UpDisconnected, // we successfully sent data
}
}
// Otherwise we just sent some data on a non-waiting queue, so just
// make sure the world is sane and carry on!
n => { assert!(n >= 0); UpSuccess }
}
}
// Consumes ownership of the 'to_wake' field.
fn take_to_wake(&mut self) -> SignalToken {
let ptr = self.to_wake.load(Ordering::SeqCst);
self.to_wake.store(0, Ordering::SeqCst);
assert!(ptr != 0);
unsafe { SignalToken::cast_from_usize(ptr) }
}
// Decrements the count on the channel for a sleeper, returning the sleeper
// back if it shouldn't sleep. Note that this is the location where we take
// steals into account.
fn decrement(&mut self, token: SignalToken) -> Result<(), SignalToken> {<|fim▁hole|> self.to_wake.store(ptr, Ordering::SeqCst);
let steals = self.steals;
self.steals = 0;
match self.cnt.fetch_sub(1 + steals, Ordering::SeqCst) {
DISCONNECTED => { self.cnt.store(DISCONNECTED, Ordering::SeqCst); }
// If we factor in our steals and notice that the channel has no
// data, we successfully sleep
n => {
assert!(n >= 0);
if n - steals <= 0 { return Ok(()) }
}
}
self.to_wake.store(0, Ordering::SeqCst);
Err(unsafe { SignalToken::cast_from_usize(ptr) })
}
pub fn recv(&mut self) -> Result<T, Failure<T>> {
// Optimistic preflight check (scheduling is expensive).
match self.try_recv() {
Err(Empty) => {}
data => return data,
}
// Welp, our channel has no data. Deschedule the current task and
// initiate the blocking protocol.
let (wait_token, signal_token) = blocking::tokens();
if self.decrement(signal_token).is_ok() {
wait_token.wait()
}
match self.try_recv() {
// Messages which actually popped from the queue shouldn't count as
// a steal, so offset the decrement here (we already have our
// "steal" factored into the channel count above).
data @ Ok(..) |
data @ Err(Upgraded(..)) => {
self.steals -= 1;
data
}
data => data,
}
}
pub fn try_recv(&mut self) -> Result<T, Failure<T>> {
match self.queue.pop() {
// If we stole some data, record to that effect (this will be
// factored into cnt later on).
//
// Note that we don't allow steals to grow without bound in order to
// prevent eventual overflow of either steals or cnt as an overflow
// would have catastrophic results. Sometimes, steals > cnt, but
// other times cnt > steals, so we don't know the relation between
// steals and cnt. This code path is executed only rarely, so we do
// a pretty slow operation, of swapping 0 into cnt, taking steals
// down as much as possible (without going negative), and then
// adding back in whatever we couldn't factor into steals.
Some(data) => {
if self.steals > MAX_STEALS {
match self.cnt.swap(0, Ordering::SeqCst) {
DISCONNECTED => {
self.cnt.store(DISCONNECTED, Ordering::SeqCst);
}
n => {
let m = cmp::min(n, self.steals);
self.steals -= m;
self.bump(n - m);
}
}
assert!(self.steals >= 0);
}
self.steals += 1;
match data {
Data(t) => Ok(t),
GoUp(up) => Err(Upgraded(up)),
}
}
None => {
match self.cnt.load(Ordering::SeqCst) {
n if n != DISCONNECTED => Err(Empty),
// This is a little bit of a tricky case. We failed to pop
// data above, and then we have viewed that the channel is
// disconnected. In this window more data could have been
// sent on the channel. It doesn't really make sense to
// return that the channel is disconnected when there's
// actually data on it, so be extra sure there's no data by
// popping one more time.
//
// We can ignore steals because the other end is
// disconnected and we'll never need to really factor in our
// steals again.
_ => {
match self.queue.pop() {
Some(Data(t)) => Ok(t),
Some(GoUp(up)) => Err(Upgraded(up)),
None => Err(Disconnected),
}
}
}
}
}
}
pub fn drop_chan(&mut self) {
// Dropping a channel is pretty simple, we just flag it as disconnected
// and then wakeup a blocker if there is one.
match self.cnt.swap(DISCONNECTED, Ordering::SeqCst) {
-1 => { self.take_to_wake().signal(); }
DISCONNECTED => {}
n => { assert!(n >= 0); }
}
}
pub fn drop_port(&mut self) {
// Dropping a port seems like a fairly trivial thing. In theory all we
// need to do is flag that we're disconnected and then everything else
// can take over (we don't have anyone to wake up).
//
// The catch for Ports is that we want to drop the entire contents of
// the queue. There are multiple reasons for having this property, the
// largest of which is that if another chan is waiting in this channel
// (but not received yet), then waiting on that port will cause a
// deadlock.
//
// So if we accept that we must now destroy the entire contents of the
// queue, this code may make a bit more sense. The tricky part is that
// we can't let any in-flight sends go un-dropped, we have to make sure
// *everything* is dropped and nothing new will come onto the channel.
// The first thing we do is set a flag saying that we're done for. All
// sends are gated on this flag, so we're immediately guaranteed that
// there are a bounded number of active sends that we'll have to deal
// with.
self.port_dropped.store(true, Ordering::SeqCst);
// Now that we're guaranteed to deal with a bounded number of senders,
// we need to drain the queue. This draining process happens atomically
// with respect to the "count" of the channel. If the count is nonzero
// (with steals taken into account), then there must be data on the
// channel. In this case we drain everything and then try again. We will
// continue to fail while active senders send data while we're dropping
// data, but eventually we're guaranteed to break out of this loop
// (because there is a bounded number of senders).
let mut steals = self.steals;
while {
let cnt = self.cnt.compare_and_swap(
steals, DISCONNECTED, Ordering::SeqCst);
cnt != DISCONNECTED && cnt != steals
} {
loop {
match self.queue.pop() {
Some(..) => { steals += 1; }
None => break
}
}
}
// At this point in time, we have gated all future senders from sending,
// and we have flagged the channel as being disconnected. The senders
// still have some responsibility, however, because some sends may not
// complete until after we flag the disconnection. There are more
// details in the sending methods that see DISCONNECTED
}
////////////////////////////////////////////////////////////////////////////
// select implementation
////////////////////////////////////////////////////////////////////////////
// Tests to see whether this port can receive without blocking. If Ok is
// returned, then that's the answer. If Err is returned, then the returned
// port needs to be queried instead (an upgrade happened)
pub fn can_recv(&mut self) -> Result<bool, Receiver<T>> {
// We peek at the queue to see if there's anything on it, and we use
// this return value to determine if we should pop from the queue and
// upgrade this channel immediately. If it looks like we've got an
// upgrade pending, then go through the whole recv rigamarole to update
// the internal state.
match self.queue.peek() {
Some(&mut GoUp(..)) => {
match self.recv() {
Err(Upgraded(port)) => Err(port),
_ => unreachable!(),
}
}
Some(..) => Ok(true),
None => Ok(false)
}
}
// increment the count on the channel (used for selection)
fn bump(&mut self, amt: isize) -> isize {
match self.cnt.fetch_add(amt, Ordering::SeqCst) {
DISCONNECTED => {
self.cnt.store(DISCONNECTED, Ordering::SeqCst);
DISCONNECTED
}
n => n
}
}
// Attempts to start selecting on this port. Like a oneshot, this can fail
// immediately because of an upgrade.
pub fn start_selection(&mut self, token: SignalToken) -> SelectionResult<T> {
match self.decrement(token) {
Ok(()) => SelSuccess,
Err(token) => {
let ret = match self.queue.peek() {
Some(&mut GoUp(..)) => {
match self.queue.pop() {
Some(GoUp(port)) => SelUpgraded(token, port),
_ => unreachable!(),
}
}
Some(..) => SelCanceled,
None => SelCanceled,
};
// Undo our decrement above, and we should be guaranteed that the
// previous value is positive because we're not going to sleep
let prev = self.bump(1);
assert!(prev == DISCONNECTED || prev >= 0);
return ret;
}
}
}
// Removes a previous task from being blocked in this port
pub fn abort_selection(&mut self,
was_upgrade: bool) -> Result<bool, Receiver<T>> {
// If we're aborting selection after upgrading from a oneshot, then
        // we're guaranteed that no one is waiting. The only way that we could
// have seen the upgrade is if data was actually sent on the channel
// half again. For us, this means that there is guaranteed to be data on
// this channel. Furthermore, we're guaranteed that there was no
// start_selection previously, so there's no need to modify `self.cnt`
// at all.
//
// Hence, because of these invariants, we immediately return `Ok(true)`.
// Note that the data may not actually be sent on the channel just yet.
// The other end could have flagged the upgrade but not sent data to
        // this end. This is fine because we know it's a small bounded window
// of time until the data is actually sent.
if was_upgrade {
assert_eq!(self.steals, 0);
assert_eq!(self.to_wake.load(Ordering::SeqCst), 0);
return Ok(true)
}
// We want to make sure that the count on the channel goes non-negative,
// and in the stream case we can have at most one steal, so just assume
// that we had one steal.
let steals = 1;
let prev = self.bump(steals + 1);
// If we were previously disconnected, then we know for sure that there
// is no task in to_wake, so just keep going
let has_data = if prev == DISCONNECTED {
assert_eq!(self.to_wake.load(Ordering::SeqCst), 0);
true // there is data, that data is that we're disconnected
} else {
let cur = prev + steals + 1;
assert!(cur >= 0);
// If the previous count was negative, then we just made things go
// positive, hence we passed the -1 boundary and we're responsible
// for removing the to_wake() field and trashing it.
//
// If the previous count was positive then we're in a tougher
// situation. A possible race is that a sender just incremented
// through -1 (meaning it's going to try to wake a task up), but it
// hasn't yet read the to_wake. In order to prevent a future recv()
// from waking up too early (this sender picking up the plastered
// over to_wake), we spin loop here waiting for to_wake to be 0.
// Note that this entire select() implementation needs an overhaul,
// and this is *not* the worst part of it, so this is not done as a
// final solution but rather out of necessity for now to get
// something working.
if prev < 0 {
drop(self.take_to_wake());
} else {
while self.to_wake.load(Ordering::SeqCst) != 0 {
thread::yield_now();
}
}
assert_eq!(self.steals, 0);
self.steals = steals;
// if we were previously positive, then there's surely data to
// receive
prev >= 0
};
// Now that we've determined that this queue "has data", we peek at the
// queue to see if the data is an upgrade or not. If it's an upgrade,
// then we need to destroy this port and abort selection on the
// upgraded port.
if has_data {
match self.queue.peek() {
Some(&mut GoUp(..)) => {
match self.queue.pop() {
Some(GoUp(port)) => Err(port),
_ => unreachable!(),
}
}
_ => Ok(true),
}
} else {
Ok(false)
}
}
}
#[unsafe_destructor]
impl<T> Drop for Packet<T> {
fn drop(&mut self) {
// Note that this load is not only an assert for correctness about
// disconnection, but also a proper fence before the read of
// `to_wake`, so this assert cannot be removed with also removing
// the `to_wake` assert.
assert_eq!(self.cnt.load(Ordering::SeqCst), DISCONNECTED);
assert_eq!(self.to_wake.load(Ordering::SeqCst), 0);
}
}<|fim▁end|>
|
assert_eq!(self.to_wake.load(Ordering::SeqCst), 0);
let ptr = unsafe { token.cast_to_usize() };
|
<|file_name|>ContextModuleFactory.js<|end_file_name|><|fim▁begin|>/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
var async = require("async");
var path = require("path");
var Tapable = require("tapable");
var ContextModule = require("./ContextModule");
var ContextElementDependency = require("./dependencies/ContextElementDependency");
function ContextModuleFactory(resolvers) {
Tapable.call(this);
this.resolvers = resolvers;
}
module.exports = ContextModuleFactory;
ContextModuleFactory.prototype = Object.create(Tapable.prototype);
ContextModuleFactory.prototype.create = function(context, dependency, callback) {
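	// Split the request into its loader part and context directory, resolve both in
	// parallel, and hand the result to a new ContextModule (plugins may veto or
	// rewrite the request along the way).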
this.applyPluginsAsyncWaterfall("before-resolve", {
context: context,
request: dependency.request,
recursive: dependency.recursive,
regExp: dependency.regExp
}, function(err, result) {
if(err) return callback(err);
// Ignored
if(!result) return callback();
var context = result.context;
var request = result.request;
var recursive = result.recursive;
var regExp = result.regExp;
var loaders, resource, loadersPrefix = "";
var idx = request.lastIndexOf("!");
if(idx >= 0) {
loaders = request.substr(0, idx+1);
for(var i = 0; i < loaders.length && loaders[i] === "!"; i++) {
loadersPrefix += "!";<|fim▁hole|> if(loaders == "") loaders = [];
else loaders = loaders.split("!");
resource = request.substr(idx+1);
} else {
loaders = [];
resource = request;
}
async.parallel([
this.resolvers.context.resolve.bind(this.resolvers.context, context, resource),
async.map.bind(async, loaders, this.resolvers.loader.resolve.bind(this.resolvers.loader, context))
], function(err, result) {
if(err) return callback(err);
this.applyPluginsAsyncWaterfall("after-resolve", {
loaders: loadersPrefix + result[1].join("!") + (result[1].length > 0 ? "!" : ""),
resource: result[0],
recursive: recursive,
regExp: regExp
}, function(err, result) {
if(err) return callback(err);
// Ignored
if(!result) return callback();
return callback(null, new ContextModule(this.resolveDependencies.bind(this), result.resource, result.recursive, result.regExp, result.loaders));
}.bind(this));
}.bind(this));
}.bind(this));
};
ContextModuleFactory.prototype.resolveDependencies = function resolveDependencies(fs, resource, recursive, regExp, callback) {
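	// Walk the resource directory (recursively when requested) and create a
	// ContextElementDependency for every file whose relative request matches regExp;
	// the "alternatives" hook may rewrite or expand each candidate first.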
(function addDirectory(directory, callback) {
fs.readdir(directory, function(err, files) {
if(!files || files.length == 0) return callback();
async.map(files, function(seqment, callback) {
				var subResource = path.join(directory, seqment);
fs.stat(subResource, function(err, stat) {
if(err) return callback(err);
if(stat.isDirectory()) {
if(!recursive) return callback();
addDirectory.call(this, subResource, callback);
} else if(stat.isFile()) {
var obj = {
context: resource,
request: "." + subResource.substr(resource.length).replace(/\\/g, "/")
};
this.applyPluginsAsyncWaterfall("alternatives", [obj], function(err, alternatives) {
alternatives = alternatives.filter(function(obj) {
return regExp.test(obj.request);
}).map(function(obj) {
var dep = new ContextElementDependency(obj.request);
dep.optional = true;
return dep;
});
callback(null, alternatives);
});
} else callback();
}.bind(this));
}.bind(this), function(err, result) {
if(err) return callback(err);
if(!result) return callback(null, []);
callback(null, result.filter(function(i) { return !!i; }).reduce(function(a, i) {
return a.concat(i);
}, []));
});
}.bind(this));
}.call(this, resource, callback));
};<|fim▁end|>
|
}
loaders = loaders.substr(i).replace(/!+$/, "").replace(/!!+/g, "!");
|
<|file_name|>slice.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Slice management and manipulation
//!
//! For more details, see `std::slice`.
#![stable]
#![doc(primitive = "slice")]
// How this module is organized.
//
// The library infrastructure for slices is fairly messy. There's
// a lot of stuff defined here. Let's keep it clean.
//
// Since slices don't support inherent methods, all operations
// on them are defined on traits, which are then reexported from
// the prelude for convenience. So there are a lot of traits here.
//
// The layout of this file is thus:
//
// * Slice-specific 'extension' traits and their implementations. This
// is where most of the slice API resides.
// * Implementations of a few common traits with important slice ops.
// * Definitions of a bunch of iterators.
// * Free functions.
// * The `raw` and `bytes` submodules.
// * Boilerplate trait implementations.
use mem::transmute;
use clone::Clone;
use cmp::{Ordering, PartialEq, PartialOrd, Eq, Ord};
use cmp::Ordering::{Less, Equal, Greater};
use cmp;
use default::Default;
use iter::*;
use marker::Copy;
use num::Int;
use ops::{FnMut, self, Index};
use option::Option;
use option::Option::{None, Some};
use result::Result;
use result::Result::{Ok, Err};
use ptr;
use ptr::PtrExt;
use mem;
use mem::size_of;
use marker::{Sized, self};
use raw::Repr;
// Avoid conflicts with *both* the Slice trait (buggy) and the `slice::raw` module.
use raw::Slice as RawSlice;
//
// Extension traits
//
/// Extension methods for slices.
#[allow(missing_docs)] // docs in libcollections
pub trait SliceExt {
type Item;
fn slice<'a>(&'a self, start: uint, end: uint) -> &'a [Self::Item];
fn slice_from<'a>(&'a self, start: uint) -> &'a [Self::Item];
fn slice_to<'a>(&'a self, end: uint) -> &'a [Self::Item];
fn split_at<'a>(&'a self, mid: uint) -> (&'a [Self::Item], &'a [Self::Item]);
fn iter<'a>(&'a self) -> Iter<'a, Self::Item>;
fn split<'a, P>(&'a self, pred: P) -> Split<'a, Self::Item, P>
where P: FnMut(&Self::Item) -> bool;
fn splitn<'a, P>(&'a self, n: uint, pred: P) -> SplitN<'a, Self::Item, P>
where P: FnMut(&Self::Item) -> bool;
fn rsplitn<'a, P>(&'a self, n: uint, pred: P) -> RSplitN<'a, Self::Item, P>
where P: FnMut(&Self::Item) -> bool;
fn windows<'a>(&'a self, size: uint) -> Windows<'a, Self::Item>;
fn chunks<'a>(&'a self, size: uint) -> Chunks<'a, Self::Item>;
fn get<'a>(&'a self, index: uint) -> Option<&'a Self::Item>;
fn first<'a>(&'a self) -> Option<&'a Self::Item>;
fn tail<'a>(&'a self) -> &'a [Self::Item];
fn init<'a>(&'a self) -> &'a [Self::Item];
fn last<'a>(&'a self) -> Option<&'a Self::Item>;
unsafe fn get_unchecked<'a>(&'a self, index: uint) -> &'a Self::Item;
fn as_ptr(&self) -> *const Self::Item;
fn binary_search_by<F>(&self, f: F) -> Result<uint, uint> where
F: FnMut(&Self::Item) -> Ordering;
fn len(&self) -> uint;
fn is_empty(&self) -> bool { self.len() == 0 }
fn get_mut<'a>(&'a mut self, index: uint) -> Option<&'a mut Self::Item>;
fn as_mut_slice<'a>(&'a mut self) -> &'a mut [Self::Item];
fn slice_mut<'a>(&'a mut self, start: uint, end: uint) -> &'a mut [Self::Item];
fn slice_from_mut<'a>(&'a mut self, start: uint) -> &'a mut [Self::Item];
fn slice_to_mut<'a>(&'a mut self, end: uint) -> &'a mut [Self::Item];
fn iter_mut<'a>(&'a mut self) -> IterMut<'a, Self::Item>;
fn first_mut<'a>(&'a mut self) -> Option<&'a mut Self::Item>;
fn tail_mut<'a>(&'a mut self) -> &'a mut [Self::Item];
fn init_mut<'a>(&'a mut self) -> &'a mut [Self::Item];
fn last_mut<'a>(&'a mut self) -> Option<&'a mut Self::Item>;
fn split_mut<'a, P>(&'a mut self, pred: P) -> SplitMut<'a, Self::Item, P>
where P: FnMut(&Self::Item) -> bool;
fn splitn_mut<P>(&mut self, n: uint, pred: P) -> SplitNMut<Self::Item, P>
where P: FnMut(&Self::Item) -> bool;
fn rsplitn_mut<P>(&mut self, n: uint, pred: P) -> RSplitNMut<Self::Item, P>
where P: FnMut(&Self::Item) -> bool;
fn chunks_mut<'a>(&'a mut self, chunk_size: uint) -> ChunksMut<'a, Self::Item>;
fn swap(&mut self, a: uint, b: uint);
fn split_at_mut<'a>(&'a mut self, mid: uint) -> (&'a mut [Self::Item], &'a mut [Self::Item]);
fn reverse(&mut self);
unsafe fn get_unchecked_mut<'a>(&'a mut self, index: uint) -> &'a mut Self::Item;
fn as_mut_ptr(&mut self) -> *mut Self::Item;
fn position_elem(&self, t: &Self::Item) -> Option<uint> where Self::Item: PartialEq;
fn rposition_elem(&self, t: &Self::Item) -> Option<uint> where Self::Item: PartialEq;
fn contains(&self, x: &Self::Item) -> bool where Self::Item: PartialEq;
fn starts_with(&self, needle: &[Self::Item]) -> bool where Self::Item: PartialEq;
fn ends_with(&self, needle: &[Self::Item]) -> bool where Self::Item: PartialEq;
fn binary_search(&self, x: &Self::Item) -> Result<uint, uint> where Self::Item: Ord;
fn next_permutation(&mut self) -> bool where Self::Item: Ord;
fn prev_permutation(&mut self) -> bool where Self::Item: Ord;
fn clone_from_slice(&mut self, &[Self::Item]) -> uint where Self::Item: Clone;
}
#[unstable]
impl<T> SliceExt for [T] {
type Item = T;
#[inline]
fn slice(&self, start: uint, end: uint) -> &[T] {
assert!(start <= end);
assert!(end <= self.len());
unsafe {
transmute(RawSlice {
data: self.as_ptr().offset(start as int),
len: (end - start)
})
}
}
#[inline]
fn slice_from(&self, start: uint) -> &[T] {
self.slice(start, self.len())
}
#[inline]
fn slice_to(&self, end: uint) -> &[T] {
self.slice(0, end)
}
#[inline]
fn split_at(&self, mid: uint) -> (&[T], &[T]) {
(&self[..mid], &self[mid..])
}
#[inline]
fn iter<'a>(&'a self) -> Iter<'a, T> {
unsafe {
let p = self.as_ptr();
if mem::size_of::<T>() == 0 {
Iter {ptr: p,
end: (p as uint + self.len()) as *const T,
marker: marker::ContravariantLifetime::<'a>}
} else {
Iter {ptr: p,
end: p.offset(self.len() as int),
marker: marker::ContravariantLifetime::<'a>}
}
}
}
#[inline]
fn split<'a, P>(&'a self, pred: P) -> Split<'a, T, P> where P: FnMut(&T) -> bool {
Split {
v: self,
pred: pred,
finished: false
}
}
#[inline]
fn splitn<'a, P>(&'a self, n: uint, pred: P) -> SplitN<'a, T, P> where
P: FnMut(&T) -> bool,
{
SplitN {
inner: GenericSplitN {
iter: self.split(pred),
count: n,
invert: false
}
}
}
#[inline]
fn rsplitn<'a, P>(&'a self, n: uint, pred: P) -> RSplitN<'a, T, P> where
P: FnMut(&T) -> bool,
{
RSplitN {
inner: GenericSplitN {
iter: self.split(pred),
count: n,
invert: true
}
}
}
#[inline]
fn windows(&self, size: uint) -> Windows<T> {
assert!(size != 0);
Windows { v: self, size: size }
}
#[inline]
fn chunks(&self, size: uint) -> Chunks<T> {
assert!(size != 0);
Chunks { v: self, size: size }
}
#[inline]
fn get(&self, index: uint) -> Option<&T> {
if index < self.len() { Some(&self[index]) } else { None }
}
#[inline]
fn first(&self) -> Option<&T> {
if self.len() == 0 { None } else { Some(&self[0]) }
}
#[inline]
fn tail(&self) -> &[T] { &self[1..] }
#[inline]
fn init(&self) -> &[T] {
&self[..(self.len() - 1)]
}
#[inline]
fn last(&self) -> Option<&T> {
if self.len() == 0 { None } else { Some(&self[self.len() - 1]) }
}
#[inline]
unsafe fn get_unchecked(&self, index: uint) -> &T {
transmute(self.repr().data.offset(index as int))
}
#[inline]
fn as_ptr(&self) -> *const T {
self.repr().data
}
#[unstable]
fn binary_search_by<F>(&self, mut f: F) -> Result<uint, uint> where
F: FnMut(&T) -> Ordering
{
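        // Classic binary search: `base` marks the start of the live range and `lim`
        // its length; every probe either returns a match or halves `lim`.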
let mut base : uint = 0;
let mut lim : uint = self.len();
while lim != 0 {
let ix = base + (lim >> 1);
match f(&self[ix]) {
Equal => return Ok(ix),
Less => {
base = ix + 1;
lim -= 1;
}
Greater => ()
}
lim >>= 1;
}
Err(base)
}
#[inline]
fn len(&self) -> uint { self.repr().len }
#[inline]
fn get_mut(&mut self, index: uint) -> Option<&mut T> {
if index < self.len() { Some(&mut self[index]) } else { None }
}
#[inline]
fn as_mut_slice(&mut self) -> &mut [T] { self }
fn slice_mut(&mut self, start: uint, end: uint) -> &mut [T] {
ops::IndexMut::index_mut(self, &ops::Range { start: start, end: end } )
}
#[inline]
fn slice_from_mut(&mut self, start: uint) -> &mut [T] {
ops::IndexMut::index_mut(self, &ops::RangeFrom { start: start } )
}
#[inline]
fn slice_to_mut(&mut self, end: uint) -> &mut [T] {
ops::IndexMut::index_mut(self, &ops::RangeTo { end: end } )
}
#[inline]
fn split_at_mut(&mut self, mid: uint) -> (&mut [T], &mut [T]) {
unsafe {
let self2: &mut [T] = mem::transmute_copy(&self);
(ops::IndexMut::index_mut(self, &ops::RangeTo { end: mid } ),
ops::IndexMut::index_mut(self2, &ops::RangeFrom { start: mid } ))
}
}
#[inline]
fn iter_mut<'a>(&'a mut self) -> IterMut<'a, T> {
unsafe {
let p = self.as_mut_ptr();
if mem::size_of::<T>() == 0 {
IterMut {ptr: p,
end: (p as uint + self.len()) as *mut T,
marker: marker::ContravariantLifetime::<'a>}
} else {
IterMut {ptr: p,
end: p.offset(self.len() as int),
marker: marker::ContravariantLifetime::<'a>}
}
}
}
#[inline]
fn last_mut(&mut self) -> Option<&mut T> {
let len = self.len();
if len == 0 { return None; }
Some(&mut self[len - 1])
}
#[inline]
fn first_mut(&mut self) -> Option<&mut T> {
if self.len() == 0 { None } else { Some(&mut self[0]) }
}
#[inline]
fn tail_mut(&mut self) -> &mut [T] {
self.slice_from_mut(1)
}
#[inline]
fn init_mut(&mut self) -> &mut [T] {
let len = self.len();
self.slice_to_mut(len-1)
}
#[inline]
fn split_mut<'a, P>(&'a mut self, pred: P) -> SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
SplitMut { v: self, pred: pred, finished: false }
}
#[inline]
fn splitn_mut<'a, P>(&'a mut self, n: uint, pred: P) -> SplitNMut<'a, T, P> where
P: FnMut(&T) -> bool
{
SplitNMut {
inner: GenericSplitN {
iter: self.split_mut(pred),
count: n,
invert: false
}
}
}
#[inline]
fn rsplitn_mut<'a, P>(&'a mut self, n: uint, pred: P) -> RSplitNMut<'a, T, P> where
P: FnMut(&T) -> bool,
{
RSplitNMut {
inner: GenericSplitN {
iter: self.split_mut(pred),
count: n,
invert: true
}
}
}
#[inline]
fn chunks_mut(&mut self, chunk_size: uint) -> ChunksMut<T> {
assert!(chunk_size > 0);
ChunksMut { v: self, chunk_size: chunk_size }
}
fn swap(&mut self, a: uint, b: uint) {
unsafe {
// Can't take two mutable loans from one vector, so instead just cast
// them to their raw pointers to do the swap
let pa: *mut T = &mut self[a];
let pb: *mut T = &mut self[b];
ptr::swap(pa, pb);
}
}
fn reverse(&mut self) {
let mut i: uint = 0;
let ln = self.len();
while i < ln / 2 {
// Unsafe swap to avoid the bounds check in safe swap.
unsafe {
let pa: *mut T = self.get_unchecked_mut(i);
let pb: *mut T = self.get_unchecked_mut(ln - i - 1);
ptr::swap(pa, pb);
}
i += 1;
}
}
#[inline]
unsafe fn get_unchecked_mut(&mut self, index: uint) -> &mut T {
transmute((self.repr().data as *mut T).offset(index as int))
}
#[inline]
fn as_mut_ptr(&mut self) -> *mut T {
self.repr().data as *mut T
}
#[inline]
fn position_elem(&self, x: &T) -> Option<uint> where T: PartialEq {
self.iter().position(|y| *x == *y)
}
#[inline]
fn rposition_elem(&self, t: &T) -> Option<uint> where T: PartialEq {
self.iter().rposition(|x| *x == *t)
}
#[inline]
fn contains(&self, x: &T) -> bool where T: PartialEq {
self.iter().any(|elt| *x == *elt)
}
#[inline]
fn starts_with(&self, needle: &[T]) -> bool where T: PartialEq {
let n = needle.len();
self.len() >= n && needle == &self[..n]
}
#[inline]
fn ends_with(&self, needle: &[T]) -> bool where T: PartialEq {
let (m, n) = (self.len(), needle.len());
m >= n && needle == &self[(m-n)..]
}
#[unstable]
fn binary_search(&self, x: &T) -> Result<uint, uint> where T: Ord {
self.binary_search_by(|p| p.cmp(x))
}
#[unstable]
fn next_permutation(&mut self) -> bool where T: Ord {
// These cases only have 1 permutation each, so we can't do anything.
if self.len() < 2 { return false; }
// Step 1: Identify the longest, rightmost weakly decreasing part of the vector
let mut i = self.len() - 1;
while i > 0 && self[i-1] >= self[i] {
i -= 1;
}
// If that is the entire vector, this is the last-ordered permutation.
if i == 0 {
return false;
}
// Step 2: Find the rightmost element larger than the pivot (i-1)
let mut j = self.len() - 1;
while j >= i && self[j] <= self[i-1] {
j -= 1;
}
// Step 3: Swap that element with the pivot
self.swap(j, i-1);
// Step 4: Reverse the (previously) weakly decreasing part
self.slice_from_mut(i).reverse();
true
}
#[unstable]
fn prev_permutation(&mut self) -> bool where T: Ord {
// These cases only have 1 permutation each, so we can't do anything.
if self.len() < 2 { return false; }
// Step 1: Identify the longest, rightmost weakly increasing part of the vector
let mut i = self.len() - 1;
while i > 0 && self[i-1] <= self[i] {
i -= 1;
}
// If that is the entire vector, this is the first-ordered permutation.
if i == 0 {
return false;
}
// Step 2: Reverse the weakly increasing part
self.slice_from_mut(i).reverse();
// Step 3: Find the rightmost element equal to or bigger than the pivot (i-1)
let mut j = self.len() - 1;
while j >= i && self[j-1] < self[i-1] {
j -= 1;
}
// Step 4: Swap that element with the pivot
self.swap(i-1, j);
true
}
#[inline]
fn clone_from_slice(&mut self, src: &[T]) -> uint where T: Clone {
let min = cmp::min(self.len(), src.len());
let dst = self.slice_to_mut(min);
let src = src.slice_to(min);
for i in range(0, min) {
dst[i].clone_from(&src[i]);
}
min
}
}
impl<T> ops::Index<uint> for [T] {
type Output = T;
fn index(&self, &index: &uint) -> &T {
assert!(index < self.len());
unsafe { mem::transmute(self.repr().data.offset(index as int)) }
}
}
impl<T> ops::IndexMut<uint> for [T] {
type Output = T;
fn index_mut(&mut self, &index: &uint) -> &mut T {
assert!(index < self.len());
unsafe { mem::transmute(self.repr().data.offset(index as int)) }
}
}
impl<T> ops::Index<ops::Range<uint>> for [T] {
type Output = [T];
#[inline]
fn index(&self, index: &ops::Range<uint>) -> &[T] {
assert!(index.start <= index.end);
assert!(index.end <= self.len());
unsafe {
transmute(RawSlice {
data: self.as_ptr().offset(index.start as int),
len: index.end - index.start
})
}
}
}
impl<T> ops::Index<ops::RangeTo<uint>> for [T] {
type Output = [T];
#[inline]
fn index(&self, index: &ops::RangeTo<uint>) -> &[T] {
self.index(&ops::Range{ start: 0, end: index.end })
}
}
impl<T> ops::Index<ops::RangeFrom<uint>> for [T] {
type Output = [T];
#[inline]
fn index(&self, index: &ops::RangeFrom<uint>) -> &[T] {
self.index(&ops::Range{ start: index.start, end: self.len() })
}
}
impl<T> ops::Index<ops::FullRange> for [T] {
type Output = [T];
#[inline]
fn index(&self, _index: &ops::FullRange) -> &[T] {
self
}
}
impl<T> ops::IndexMut<ops::Range<uint>> for [T] {
type Output = [T];
#[inline]
fn index_mut(&mut self, index: &ops::Range<uint>) -> &mut [T] {
assert!(index.start <= index.end);
assert!(index.end <= self.len());
unsafe {
transmute(RawSlice {
data: self.as_ptr().offset(index.start as int),
len: index.end - index.start
})
}
}
}
impl<T> ops::IndexMut<ops::RangeTo<uint>> for [T] {
type Output = [T];
#[inline]
fn index_mut(&mut self, index: &ops::RangeTo<uint>) -> &mut [T] {
self.index_mut(&ops::Range{ start: 0, end: index.end })
}
}
impl<T> ops::IndexMut<ops::RangeFrom<uint>> for [T] {
type Output = [T];
#[inline]
fn index_mut(&mut self, index: &ops::RangeFrom<uint>) -> &mut [T] {
let len = self.len();
self.index_mut(&ops::Range{ start: index.start, end: len })
}
}
impl<T> ops::IndexMut<ops::FullRange> for [T] {
type Output = [T];
#[inline]
fn index_mut(&mut self, _index: &ops::FullRange) -> &mut [T] {
self
}
}
////////////////////////////////////////////////////////////////////////////////
// Common traits
////////////////////////////////////////////////////////////////////////////////
/// Data that is viewable as a slice.
#[unstable = "will be replaced by slice syntax"]
pub trait AsSlice<T> {
/// Work with `self` as a slice.
fn as_slice<'a>(&'a self) -> &'a [T];
}
#[unstable = "trait is experimental"]
impl<T> AsSlice<T> for [T] {
#[inline(always)]
fn as_slice<'a>(&'a self) -> &'a [T] { self }
}
#[unstable = "trait is experimental"]
impl<'a, T, U: ?Sized + AsSlice<T>> AsSlice<T> for &'a U {
#[inline(always)]
fn as_slice(&self) -> &[T] { AsSlice::as_slice(*self) }
}<|fim▁hole|> fn as_slice(&self) -> &[T] { AsSlice::as_slice(*self) }
}
#[stable]
impl<'a, T> Default for &'a [T] {
#[stable]
fn default() -> &'a [T] { &[] }
}
//
// Iterators
//
// The shared definition of the `Iter` and `IterMut` iterators
macro_rules! iterator {
(struct $name:ident -> $ptr:ty, $elem:ty) => {
#[stable]
impl<'a, T> Iterator for $name<'a, T> {
type Item = $elem;
#[inline]
fn next(&mut self) -> Option<$elem> {
// could be implemented with slices, but this avoids bounds checks
unsafe {
if self.ptr == self.end {
None
} else {
if mem::size_of::<T>() == 0 {
// purposefully don't use 'ptr.offset' because for
// vectors with 0-size elements this would return the
// same pointer.
self.ptr = transmute(self.ptr as uint + 1);
// Use a non-null pointer value
Some(transmute(1u))
} else {
let old = self.ptr;
self.ptr = self.ptr.offset(1);
Some(transmute(old))
}
}
}
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
let diff = (self.end as uint) - (self.ptr as uint);
let size = mem::size_of::<T>();
let exact = diff / (if size == 0 {1} else {size});
(exact, Some(exact))
}
}
#[stable]
impl<'a, T> DoubleEndedIterator for $name<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<$elem> {
// could be implemented with slices, but this avoids bounds checks
unsafe {
if self.end == self.ptr {
None
} else {
if mem::size_of::<T>() == 0 {
// See above for why 'ptr.offset' isn't used
self.end = transmute(self.end as uint - 1);
// Use a non-null pointer value
Some(transmute(1u))
} else {
self.end = self.end.offset(-1);
Some(transmute(self.end))
}
}
}
}
}
}
}
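// Build a slice value of the requested type from a [start, end) pointer pair; for
// zero-sized element types the raw pointer difference itself is used as the length.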
macro_rules! make_slice {
($t: ty => $result: ty: $start: expr, $end: expr) => {{
let diff = $end as uint - $start as uint;
let len = if mem::size_of::<T>() == 0 {
diff
} else {
diff / mem::size_of::<$t>()
};
unsafe {
transmute::<_, $result>(RawSlice { data: $start, len: len })
}
}}
}
/// Immutable slice iterator
#[stable]
pub struct Iter<'a, T: 'a> {
ptr: *const T,
end: *const T,
marker: marker::ContravariantLifetime<'a>
}
#[unstable]
impl<'a, T> ops::Index<ops::Range<uint>> for Iter<'a, T> {
type Output = [T];
#[inline]
fn index(&self, index: &ops::Range<uint>) -> &[T] {
self.as_slice().index(index)
}
}
#[unstable]
impl<'a, T> ops::Index<ops::RangeTo<uint>> for Iter<'a, T> {
type Output = [T];
#[inline]
fn index(&self, index: &ops::RangeTo<uint>) -> &[T] {
self.as_slice().index(index)
}
}
#[unstable]
impl<'a, T> ops::Index<ops::RangeFrom<uint>> for Iter<'a, T> {
type Output = [T];
#[inline]
fn index(&self, index: &ops::RangeFrom<uint>) -> &[T] {
self.as_slice().index(index)
}
}
#[unstable]
impl<'a, T> ops::Index<ops::FullRange> for Iter<'a, T> {
type Output = [T];
#[inline]
fn index(&self, _index: &ops::FullRange) -> &[T] {
self.as_slice()
}
}
impl<'a, T> Iter<'a, T> {
/// View the underlying data as a subslice of the original data.
///
/// This has the same lifetime as the original slice, and so the
/// iterator can continue to be used while this exists.
#[unstable]
pub fn as_slice(&self) -> &'a [T] {
make_slice!(T => &'a [T]: self.ptr, self.end)
}
}
impl<'a,T> Copy for Iter<'a,T> {}
iterator!{struct Iter -> *const T, &'a T}
#[stable]
impl<'a, T> ExactSizeIterator for Iter<'a, T> {}
#[stable]
impl<'a, T> Clone for Iter<'a, T> {
fn clone(&self) -> Iter<'a, T> { *self }
}
#[unstable = "trait is experimental"]
impl<'a, T> RandomAccessIterator for Iter<'a, T> {
#[inline]
fn indexable(&self) -> uint {
let (exact, _) = self.size_hint();
exact
}
#[inline]
fn idx(&mut self, index: uint) -> Option<&'a T> {
unsafe {
if index < self.indexable() {
if mem::size_of::<T>() == 0 {
// Use a non-null pointer value
Some(transmute(1u))
} else {
Some(transmute(self.ptr.offset(index as int)))
}
} else {
None
}
}
}
}
/// Mutable slice iterator.
#[stable]
pub struct IterMut<'a, T: 'a> {
ptr: *mut T,
end: *mut T,
marker: marker::ContravariantLifetime<'a>,
}
#[unstable]
impl<'a, T> ops::Index<ops::Range<uint>> for IterMut<'a, T> {
type Output = [T];
#[inline]
fn index(&self, index: &ops::Range<uint>) -> &[T] {
self.index(&ops::FullRange).index(index)
}
}
#[unstable]
impl<'a, T> ops::Index<ops::RangeTo<uint>> for IterMut<'a, T> {
type Output = [T];
#[inline]
fn index(&self, index: &ops::RangeTo<uint>) -> &[T] {
self.index(&ops::FullRange).index(index)
}
}
#[unstable]
impl<'a, T> ops::Index<ops::RangeFrom<uint>> for IterMut<'a, T> {
type Output = [T];
#[inline]
fn index(&self, index: &ops::RangeFrom<uint>) -> &[T] {
self.index(&ops::FullRange).index(index)
}
}
#[unstable]
impl<'a, T> ops::Index<ops::FullRange> for IterMut<'a, T> {
type Output = [T];
#[inline]
fn index(&self, _index: &ops::FullRange) -> &[T] {
make_slice!(T => &[T]: self.ptr, self.end)
}
}
#[unstable]
impl<'a, T> ops::IndexMut<ops::Range<uint>> for IterMut<'a, T> {
type Output = [T];
#[inline]
fn index_mut(&mut self, index: &ops::Range<uint>) -> &mut [T] {
self.index_mut(&ops::FullRange).index_mut(index)
}
}
#[unstable]
impl<'a, T> ops::IndexMut<ops::RangeTo<uint>> for IterMut<'a, T> {
type Output = [T];
#[inline]
fn index_mut(&mut self, index: &ops::RangeTo<uint>) -> &mut [T] {
self.index_mut(&ops::FullRange).index_mut(index)
}
}
#[unstable]
impl<'a, T> ops::IndexMut<ops::RangeFrom<uint>> for IterMut<'a, T> {
type Output = [T];
#[inline]
fn index_mut(&mut self, index: &ops::RangeFrom<uint>) -> &mut [T] {
self.index_mut(&ops::FullRange).index_mut(index)
}
}
#[unstable]
impl<'a, T> ops::IndexMut<ops::FullRange> for IterMut<'a, T> {
type Output = [T];
#[inline]
fn index_mut(&mut self, _index: &ops::FullRange) -> &mut [T] {
make_slice!(T => &mut [T]: self.ptr, self.end)
}
}
impl<'a, T> IterMut<'a, T> {
/// View the underlying data as a subslice of the original data.
///
/// To avoid creating `&mut` references that alias, this is forced
/// to consume the iterator. Consider using the `Slice` and
/// `SliceMut` implementations for obtaining slices with more
/// restricted lifetimes that do not consume the iterator.
#[unstable]
pub fn into_slice(self) -> &'a mut [T] {
make_slice!(T => &'a mut [T]: self.ptr, self.end)
}
}
iterator!{struct IterMut -> *mut T, &'a mut T}
#[stable]
impl<'a, T> ExactSizeIterator for IterMut<'a, T> {}
/// An internal abstraction over the splitting iterators, so that
/// splitn, splitn_mut etc can be implemented once.
trait SplitIter: DoubleEndedIterator {
/// Mark the underlying iterator as complete, extracting the remaining
/// portion of the slice.
fn finish(&mut self) -> Option<Self::Item>;
}
/// An iterator over subslices separated by elements that match a predicate
/// function.
#[stable]
pub struct Split<'a, T:'a, P> where P: FnMut(&T) -> bool {
v: &'a [T],
pred: P,
finished: bool
}
// FIXME(#19839) Remove in favor of `#[derive(Clone)]`
#[stable]
impl<'a, T, P> Clone for Split<'a, T, P> where P: Clone + FnMut(&T) -> bool {
fn clone(&self) -> Split<'a, T, P> {
Split {
v: self.v,
pred: self.pred.clone(),
finished: self.finished,
}
}
}
#[stable]
impl<'a, T, P> Iterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
type Item = &'a [T];
#[inline]
fn next(&mut self) -> Option<&'a [T]> {
if self.finished { return None; }
match self.v.iter().position(|x| (self.pred)(x)) {
None => self.finish(),
Some(idx) => {
let ret = Some(&self.v[..idx]);
self.v = &self.v[(idx + 1)..];
ret
}
}
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
if self.finished {
(0, Some(0))
} else {
(1, Some(self.v.len() + 1))
}
}
}
#[stable]
impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
#[inline]
fn next_back(&mut self) -> Option<&'a [T]> {
if self.finished { return None; }
match self.v.iter().rposition(|x| (self.pred)(x)) {
None => self.finish(),
Some(idx) => {
let ret = Some(&self.v[(idx + 1)..]);
self.v = &self.v[..idx];
ret
}
}
}
}
impl<'a, T, P> SplitIter for Split<'a, T, P> where P: FnMut(&T) -> bool {
#[inline]
fn finish(&mut self) -> Option<&'a [T]> {
if self.finished { None } else { self.finished = true; Some(self.v) }
}
}
/// An iterator over the subslices of the vector which are separated
/// by elements that match `pred`.
#[stable]
pub struct SplitMut<'a, T:'a, P> where P: FnMut(&T) -> bool {
v: &'a mut [T],
pred: P,
finished: bool
}
impl<'a, T, P> SplitIter for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
#[inline]
fn finish(&mut self) -> Option<&'a mut [T]> {
if self.finished {
None
} else {
self.finished = true;
Some(mem::replace(&mut self.v, &mut []))
}
}
}
#[stable]
impl<'a, T, P> Iterator for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
type Item = &'a mut [T];
#[inline]
fn next(&mut self) -> Option<&'a mut [T]> {
if self.finished { return None; }
let idx_opt = { // work around borrowck limitations
let pred = &mut self.pred;
self.v.iter().position(|x| (*pred)(x))
};
match idx_opt {
None => self.finish(),
Some(idx) => {
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(idx);
self.v = tail.slice_from_mut(1);
Some(head)
}
}
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
if self.finished {
(0, Some(0))
} else {
// if the predicate doesn't match anything, we yield one slice
// if it matches every element, we yield len+1 empty slices.
(1, Some(self.v.len() + 1))
}
}
}
#[stable]
impl<'a, T, P> DoubleEndedIterator for SplitMut<'a, T, P> where
P: FnMut(&T) -> bool,
{
#[inline]
fn next_back(&mut self) -> Option<&'a mut [T]> {
if self.finished { return None; }
let idx_opt = { // work around borrowck limitations
let pred = &mut self.pred;
self.v.iter().rposition(|x| (*pred)(x))
};
match idx_opt {
None => self.finish(),
Some(idx) => {
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(idx);
self.v = head;
Some(tail.slice_from_mut(1))
}
}
}
}
/// A private iterator over subslices separated by elements that
/// match a predicate function, splitting at most a fixed number of
/// times.
struct GenericSplitN<I> {
iter: I,
count: uint,
invert: bool
}
impl<T, I: SplitIter<Item=T>> Iterator for GenericSplitN<I> {
type Item = T;
#[inline]
fn next(&mut self) -> Option<T> {
if self.count == 0 {
self.iter.finish()
} else {
self.count -= 1;
if self.invert { self.iter.next_back() } else { self.iter.next() }
}
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
let (lower, upper_opt) = self.iter.size_hint();
(lower, upper_opt.map(|upper| cmp::min(self.count + 1, upper)))
}
}
/// An iterator over subslices separated by elements that match a predicate
/// function, limited to a given number of splits.
#[stable]
pub struct SplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
inner: GenericSplitN<Split<'a, T, P>>
}
/// An iterator over subslices separated by elements that match a
/// predicate function, limited to a given number of splits, starting
/// from the end of the slice.
#[stable]
pub struct RSplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
inner: GenericSplitN<Split<'a, T, P>>
}
/// An iterator over subslices separated by elements that match a predicate
/// function, limited to a given number of splits.
#[stable]
pub struct SplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
inner: GenericSplitN<SplitMut<'a, T, P>>
}
/// An iterator over subslices separated by elements that match a
/// predicate function, limited to a given number of splits, starting
/// from the end of the slice.
#[stable]
pub struct RSplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
inner: GenericSplitN<SplitMut<'a, T, P>>
}
macro_rules! forward_iterator {
($name:ident: $elem:ident, $iter_of:ty) => {
#[stable]
impl<'a, $elem, P> Iterator for $name<'a, $elem, P> where
P: FnMut(&T) -> bool
{
type Item = $iter_of;
#[inline]
fn next(&mut self) -> Option<$iter_of> {
self.inner.next()
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
self.inner.size_hint()
}
}
}
}
forward_iterator! { SplitN: T, &'a [T] }
forward_iterator! { RSplitN: T, &'a [T] }
forward_iterator! { SplitNMut: T, &'a mut [T] }
forward_iterator! { RSplitNMut: T, &'a mut [T] }
/// An iterator over overlapping subslices of length `size`.
#[derive(Clone)]
#[stable]
pub struct Windows<'a, T:'a> {
v: &'a [T],
size: uint
}
#[stable]
impl<'a, T> Iterator for Windows<'a, T> {
type Item = &'a [T];
#[inline]
fn next(&mut self) -> Option<&'a [T]> {
if self.size > self.v.len() {
None
} else {
let ret = Some(&self.v[..self.size]);
self.v = &self.v[1..];
ret
}
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
if self.size > self.v.len() {
(0, Some(0))
} else {
let x = self.v.len() - self.size;
(x.saturating_add(1), x.checked_add(1u))
}
}
}
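// Illustrative usage sketch (editor's addition, not part of the original
// source): `Windows` yields overlapping subslices, assuming the `windows`
// method defined elsewhere in this module. The iterator is empty when
// `size` exceeds the slice length.
//
//     let xs = [1i, 2, 3, 4];
//     // yields [1, 2], [2, 3], [3, 4]
//     assert_eq!(xs.windows(2).count(), 3);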
/// An iterator over a slice in (non-overlapping) chunks (`size` elements at a
/// time).
///
/// When the slice len is not evenly divided by the chunk size, the last slice
/// of the iteration will be the remainder.
#[derive(Clone)]
#[stable]
pub struct Chunks<'a, T:'a> {
v: &'a [T],
size: uint
}
#[stable]
impl<'a, T> Iterator for Chunks<'a, T> {
type Item = &'a [T];
#[inline]
fn next(&mut self) -> Option<&'a [T]> {
if self.v.len() == 0 {
None
} else {
let chunksz = cmp::min(self.v.len(), self.size);
let (fst, snd) = self.v.split_at(chunksz);
self.v = snd;
Some(fst)
}
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
if self.v.len() == 0 {
(0, Some(0))
} else {
let n = self.v.len() / self.size;
let rem = self.v.len() % self.size;
let n = if rem > 0 { n+1 } else { n };
(n, Some(n))
}
}
}
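// Illustrative usage sketch (editor's addition, not part of the original
// source): `Chunks` yields non-overlapping subslices, with the final chunk
// holding the remainder, assuming the `chunks` method defined elsewhere in
// this module.
//
//     let xs = [1i, 2, 3, 4, 5];
//     // yields [1, 2], [3, 4], [5]
//     assert_eq!(xs.chunks(2).count(), 3);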
#[stable]
impl<'a, T> DoubleEndedIterator for Chunks<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<&'a [T]> {
if self.v.len() == 0 {
None
} else {
let remainder = self.v.len() % self.size;
let chunksz = if remainder != 0 { remainder } else { self.size };
let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
self.v = fst;
Some(snd)
}
}
}
#[unstable = "trait is experimental"]
impl<'a, T> RandomAccessIterator for Chunks<'a, T> {
#[inline]
fn indexable(&self) -> uint {
self.v.len()/self.size + if self.v.len() % self.size != 0 { 1 } else { 0 }
}
#[inline]
fn idx(&mut self, index: uint) -> Option<&'a [T]> {
if index < self.indexable() {
let lo = index * self.size;
let mut hi = lo + self.size;
if hi < lo || hi > self.v.len() { hi = self.v.len(); }
Some(&self.v[lo..hi])
} else {
None
}
}
}
/// An iterator over a slice in (non-overlapping) mutable chunks (`size`
/// elements at a time). When the slice len is not evenly divided by the chunk
/// size, the last slice of the iteration will be the remainder.
#[stable]
pub struct ChunksMut<'a, T:'a> {
v: &'a mut [T],
chunk_size: uint
}
#[stable]
impl<'a, T> Iterator for ChunksMut<'a, T> {
type Item = &'a mut [T];
#[inline]
fn next(&mut self) -> Option<&'a mut [T]> {
if self.v.len() == 0 {
None
} else {
let sz = cmp::min(self.v.len(), self.chunk_size);
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(sz);
self.v = tail;
Some(head)
}
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
if self.v.len() == 0 {
(0, Some(0))
} else {
let n = self.v.len() / self.chunk_size;
let rem = self.v.len() % self.chunk_size;
let n = if rem > 0 { n + 1 } else { n };
(n, Some(n))
}
}
}
#[stable]
impl<'a, T> DoubleEndedIterator for ChunksMut<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<&'a mut [T]> {
if self.v.len() == 0 {
None
} else {
let remainder = self.v.len() % self.chunk_size;
let sz = if remainder != 0 { remainder } else { self.chunk_size };
let tmp = mem::replace(&mut self.v, &mut []);
let tmp_len = tmp.len();
let (head, tail) = tmp.split_at_mut(tmp_len - sz);
self.v = head;
Some(tail)
}
}
}
//
// Free functions
//
/// Converts a pointer to A into a slice of length 1 (without copying).
#[unstable]
pub fn ref_slice<'a, A>(s: &'a A) -> &'a [A] {
unsafe {
transmute(RawSlice { data: s, len: 1 })
}
}
/// Converts a pointer to A into a mutable slice of length 1 (without copying).
#[unstable]
pub fn mut_ref_slice<'a, A>(s: &'a mut A) -> &'a mut [A] {
unsafe {
let ptr: *const A = transmute(s);
transmute(RawSlice { data: ptr, len: 1 })
}
}
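// Illustrative usage sketch (editor's addition, not part of the original
// source): both helpers view a single value as a one-element slice without
// copying it.
//
//     let x = 7i;
//     assert_eq!(ref_slice(&x)[0], 7);
//
//     let mut y = 0i;
//     mut_ref_slice(&mut y)[0] = 3;
//     assert_eq!(y, 3);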
/// Forms a slice from a pointer and a length.
///
/// The pointer given is actually a reference to the base of the slice. This
/// reference is used to give a concrete lifetime to tie the returned slice to.
/// Typically this should indicate that the slice is valid for as long as the
/// pointer itself is valid.
///
/// The `len` argument is the number of **elements**, not the number of bytes.
///
/// This function is unsafe as there is no guarantee that the given pointer is
/// valid for `len` elements, nor whether the lifetime provided is a suitable
/// lifetime for the returned slice.
///
/// # Example
///
/// ```rust
/// use std::slice;
///
/// // manifest a slice out of thin air!
/// let ptr = 0x1234 as *const uint;
/// let amt = 10;
/// unsafe {
/// let slice = slice::from_raw_buf(&ptr, amt);
/// }
/// ```
#[inline]
#[unstable = "should be renamed to from_raw_parts"]
pub unsafe fn from_raw_buf<'a, T>(p: &'a *const T, len: uint) -> &'a [T] {
transmute(RawSlice { data: *p, len: len })
}
/// Performs the same functionality as `from_raw_buf`, except that a mutable
/// slice is returned.
///
/// This function is unsafe for the same reasons as `from_raw_buf`, as well as
/// not being able to provide a non-aliasing guarantee of the returned mutable
/// slice.
#[inline]
#[unstable = "should be renamed to from_raw_parts_mut"]
pub unsafe fn from_raw_mut_buf<'a, T>(p: &'a *mut T, len: uint) -> &'a mut [T] {
transmute(RawSlice { data: *p, len: len })
}
//
// Submodules
//
/// Operations on `[u8]`.
#[unstable = "needs review"]
pub mod bytes {
use ptr;
use slice::SliceExt;
/// A trait for operations on mutable `[u8]`s.
pub trait MutableByteVector {
/// Sets all bytes of the receiver to the given value.
fn set_memory(&mut self, value: u8);
}
impl MutableByteVector for [u8] {
#[inline]
#[allow(unstable)]
fn set_memory(&mut self, value: u8) {
unsafe { ptr::set_memory(self.as_mut_ptr(), value, self.len()) };
}
}
/// Copies data from `src` to `dst`
///
/// Panics if the length of `dst` is less than the length of `src`.
#[inline]
pub fn copy_memory(dst: &mut [u8], src: &[u8]) {
let len_src = src.len();
assert!(dst.len() >= len_src);
// `dst` is unaliasable, so we know statically it doesn't overlap
// with `src`.
unsafe {
ptr::copy_nonoverlapping_memory(dst.as_mut_ptr(),
src.as_ptr(),
len_src);
}
}
}
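// Illustrative usage sketch (editor's addition, not part of the original
// source), assuming `bytes::MutableByteVector` is in scope so that
// `set_memory` is callable:
//
//     let mut dst = [0u8; 4];
//     dst.set_memory(0xff);                    // dst == [0xff, 0xff, 0xff, 0xff]
//     bytes::copy_memory(&mut dst, &[1, 2]);   // overwrites the first two bytes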
//
// Boilerplate traits
//
#[stable]
impl<A, B> PartialEq<[B]> for [A] where A: PartialEq<B> {
fn eq(&self, other: &[B]) -> bool {
self.len() == other.len() &&
order::eq(self.iter(), other.iter())
}
fn ne(&self, other: &[B]) -> bool {
self.len() != other.len() ||
order::ne(self.iter(), other.iter())
}
}
#[stable]
impl<T: Eq> Eq for [T] {}
#[stable]
impl<T: Ord> Ord for [T] {
fn cmp(&self, other: &[T]) -> Ordering {
order::cmp(self.iter(), other.iter())
}
}
#[stable]
impl<T: PartialOrd> PartialOrd for [T] {
#[inline]
fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
order::partial_cmp(self.iter(), other.iter())
}
#[inline]
fn lt(&self, other: &[T]) -> bool {
order::lt(self.iter(), other.iter())
}
#[inline]
fn le(&self, other: &[T]) -> bool {
order::le(self.iter(), other.iter())
}
#[inline]
fn ge(&self, other: &[T]) -> bool {
order::ge(self.iter(), other.iter())
}
#[inline]
fn gt(&self, other: &[T]) -> bool {
order::gt(self.iter(), other.iter())
}
}
/// Extension methods for slices containing integers.
#[unstable]
pub trait IntSliceExt<U, S> {
/// Converts the slice to an immutable slice of unsigned integers with the same width.
fn as_unsigned<'a>(&'a self) -> &'a [U];
/// Converts the slice to an immutable slice of signed integers with the same width.
fn as_signed<'a>(&'a self) -> &'a [S];
/// Converts the slice to a mutable slice of unsigned integers with the same width.
fn as_unsigned_mut<'a>(&'a mut self) -> &'a mut [U];
/// Converts the slice to a mutable slice of signed integers with the same width.
fn as_signed_mut<'a>(&'a mut self) -> &'a mut [S];
}
macro_rules! impl_int_slice {
($u:ty, $s:ty, $t:ty) => {
#[unstable]
impl IntSliceExt<$u, $s> for [$t] {
#[inline]
fn as_unsigned(&self) -> &[$u] { unsafe { transmute(self) } }
#[inline]
fn as_signed(&self) -> &[$s] { unsafe { transmute(self) } }
#[inline]
fn as_unsigned_mut(&mut self) -> &mut [$u] { unsafe { transmute(self) } }
#[inline]
fn as_signed_mut(&mut self) -> &mut [$s] { unsafe { transmute(self) } }
}
}
}
macro_rules! impl_int_slices {
($u:ty, $s:ty) => {
impl_int_slice! { $u, $s, $u }
impl_int_slice! { $u, $s, $s }
}
}
impl_int_slices! { u8, i8 }
impl_int_slices! { u16, i16 }
impl_int_slices! { u32, i32 }
impl_int_slices! { u64, i64 }
impl_int_slices! { uint, int }<|fim▁end|>
|
#[unstable = "trait is experimental"]
impl<'a, T, U: ?Sized + AsSlice<T>> AsSlice<T> for &'a mut U {
#[inline(always)]
|
<|file_name|>read.go<|end_file_name|><|fim▁begin|>package msgp
import (
"io"
"math"
"sync"
"time"
"github.com/philhofer/fwd"
)
// where we keep old *Readers
var readerPool = sync.Pool{New: func() interface{} { return &Reader{} }}
// Type is a MessagePack wire type,
// including this package's built-in
// extension types.
type Type byte
// MessagePack Types
//
// The zero value of Type
// is InvalidType.
const (
InvalidType Type = iota
// MessagePack built-in types
StrType
BinType
MapType
ArrayType
Float64Type
Float32Type
BoolType
IntType
UintType
NilType
ExtensionType
// pseudo-types provided
// by extensions
Complex64Type
Complex128Type
TimeType
_maxtype
)
// String implements fmt.Stringer
func (t Type) String() string {
switch t {
case StrType:
return "str"
case BinType:
return "bin"
case MapType:
return "map"
case ArrayType:
return "array"
case Float64Type:
return "float64"
case Float32Type:
return "float32"
case BoolType:
return "bool"
case UintType:
return "uint"
case IntType:
return "int"
case ExtensionType:
return "ext"
case NilType:
return "nil"
default:
return "<invalid>"
}
}
func freeR(m *Reader) {
readerPool.Put(m)
}
// Unmarshaler is the interface fulfilled
// by objects that know how to unmarshal
// themselves from MessagePack.
// UnmarshalMsg unmarshals the object
// from binary, returning any leftover
// bytes and any errors encountered.
type Unmarshaler interface {
UnmarshalMsg([]byte) ([]byte, error)
}
// Decodable is the interface fulfilled
// by objects that know how to read
// themselves from a *Reader.
type Decodable interface {
DecodeMsg(*Reader) error
}
// Decode decodes 'd' from 'r'.
func Decode(r io.Reader, d Decodable) error {
rd := NewReader(r)
err := d.DecodeMsg(rd)
freeR(rd)
return err
}
// NewReader returns a *Reader that
// reads from the provided reader. The
// reader will be buffered.
func NewReader(r io.Reader) *Reader {
p := readerPool.Get().(*Reader)
if p.R == nil {
p.R = fwd.NewReader(r)
} else {
p.R.Reset(r)
}
return p
}
// NewReaderSize returns a *Reader with a buffer of the given size.
// (This is vastly preferable to passing the decoder a reader that is already buffered.)
func NewReaderSize(r io.Reader, sz int) *Reader {
return &Reader{R: fwd.NewReaderSize(r, sz)}
}
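// Illustrative usage sketch (editor's addition, not part of the original
// file): constructing a Reader over any io.Reader and decoding one value.
// Assumes data is a []byte holding MessagePack-encoded bytes and that the
// standard bytes package is imported by the caller.
//
//	rd := NewReader(bytes.NewReader(data))
//	s, err := rd.ReadString()
//	if err != nil {
//		// handle the decode error
//	}
//	_ = s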
// Reader wraps an io.Reader and provides
// methods to read MessagePack-encoded values
// from it. Readers are buffered.
type Reader struct {
// R is the buffered reader
// that the Reader uses
// to decode MessagePack.
// The Reader itself
// is stateless; all the
// buffering is done
// within R.
R *fwd.Reader
scratch []byte
}
// Read implements `io.Reader`
func (m *Reader) Read(p []byte) (int, error) {
return m.R.Read(p)
}
// CopyNext reads the next object from m without decoding it and writes it to w.
// It avoids unnecessary copies internally.
func (m *Reader) CopyNext(w io.Writer) (int64, error) {
sz, o, err := getNextSize(m.R)
if err != nil {
return 0, err
}
var n int64
// Opportunistic optimization: if we can fit the whole thing in the m.R
// buffer, then just get a pointer to that, and pass it to w.Write,
// avoiding an allocation.
if int(sz) <= m.R.BufferSize() {
var nn int
var buf []byte
buf, err = m.R.Next(int(sz))
if err != nil {
if err == io.ErrUnexpectedEOF {
err = ErrShortBytes
}
return 0, err
}
nn, err = w.Write(buf)
n += int64(nn)
} else {
// Fall back to io.CopyN.
// May avoid allocating if w is a ReaderFrom (e.g. bytes.Buffer)
n, err = io.CopyN(w, m.R, int64(sz))
if err == io.ErrUnexpectedEOF {
err = ErrShortBytes
}
}
if err != nil {
return n, err
} else if n < int64(sz) {
return n, io.ErrShortWrite
}
// for maps and slices, read elements
for x := uintptr(0); x < o; x++ {
var n2 int64
n2, err = m.CopyNext(w)
if err != nil {
return n, err
}
n += n2
}
return n, nil
}
// ReadFull implements `io.ReadFull`
func (m *Reader) ReadFull(p []byte) (int, error) {
return m.R.ReadFull(p)
}
// Reset resets the underlying reader.
func (m *Reader) Reset(r io.Reader) { m.R.Reset(r) }
// Buffered returns the number of bytes currently in the read buffer.
func (m *Reader) Buffered() int { return m.R.Buffered() }
// BufferSize returns the capacity of the read buffer.
func (m *Reader) BufferSize() int { return m.R.BufferSize() }
// NextType returns the next object type to be decoded.
func (m *Reader) NextType() (Type, error) {
p, err := m.R.Peek(1)
if err != nil {
return InvalidType, err
}
t := getType(p[0])
if t == InvalidType {
return t, InvalidPrefixError(p[0])
}
if t == ExtensionType {
v, err := m.peekExtensionType()
if err != nil {
return InvalidType, err
}
switch v {
case Complex64Extension:
return Complex64Type, nil
case Complex128Extension:
return Complex128Type, nil
case TimeExtension:
return TimeType, nil
}
}
return t, nil
}
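// Illustrative usage sketch (editor's addition, not part of the original
// file): NextType only peeks at the prefix byte, so the value still has to
// be read (or skipped) afterwards.
//
//	t, err := rd.NextType()
//	if err == nil && t == StrType {
//		s, _ := rd.ReadString()
//		_ = s
//	}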
// IsNil returns whether or not
// the next byte is a null messagepack byte
func (m *Reader) IsNil() bool {
p, err := m.R.Peek(1)
return err == nil && p[0] == mnil
}
// getNextSize returns the size of the next object on the wire.
// returns (obj size, obj elements, error)
// only maps and arrays have non-zero obj elements
// for maps and arrays, obj size does not include elements
//
// use uintptr b/c it's guaranteed to be large enough
// to hold whatever we can fit in memory.
func getNextSize(r *fwd.Reader) (uintptr, uintptr, error) {
b, err := r.Peek(1)
if err != nil {
return 0, 0, err
}
lead := b[0]
spec := &sizes[lead]
size, mode := spec.size, spec.extra
if size == 0 {
return 0, 0, InvalidPrefixError(lead)
}
if mode >= 0 {
return uintptr(size), uintptr(mode), nil
}
b, err = r.Peek(int(size))
if err != nil {
return 0, 0, err
}
switch mode {
case extra8:
return uintptr(size) + uintptr(b[1]), 0, nil
case extra16:
return uintptr(size) + uintptr(big.Uint16(b[1:])), 0, nil
case extra32:
return uintptr(size) + uintptr(big.Uint32(b[1:])), 0, nil
case map16v:
return uintptr(size), 2 * uintptr(big.Uint16(b[1:])), nil
case map32v:
return uintptr(size), 2 * uintptr(big.Uint32(b[1:])), nil
case array16v:
return uintptr(size), uintptr(big.Uint16(b[1:])), nil
case array32v:
return uintptr(size), uintptr(big.Uint32(b[1:])), nil
default:
return 0, 0, fatal
}
}
// Skip skips over the next object, regardless of
// its type. If it is an array or map, the whole array
// or map will be skipped.
func (m *Reader) Skip() error {
var (
v uintptr // bytes
o uintptr // objects
err error
p []byte
)
// we can use the faster
// method if we have enough
// buffered data
if m.R.Buffered() >= 5 {
p, err = m.R.Peek(5)
if err != nil {
return err
}
v, o, err = getSize(p)
if err != nil {
return err
}
} else {
v, o, err = getNextSize(m.R)
if err != nil {
return err
}
}
// 'v' is always non-zero
// if err == nil
_, err = m.R.Skip(int(v))
if err != nil {
return err
}
// for maps and slices, skip elements
for x := uintptr(0); x < o; x++ {
err = m.Skip()
if err != nil {
return err
}
}
return nil
}
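// Illustrative usage sketch (editor's addition, not part of the original
// file): skipping a value the caller does not care about, such as the value
// under an unrecognized map key, before continuing to decode.
//
//	if err := rd.Skip(); err != nil {
//		// handle the error
//	}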
// ReadMapHeader reads the next object
// as a map header and returns the size
// of the map (the number of key-value pairs).
// It will return a TypeError{} if the next
// object is not a map.
func (m *Reader) ReadMapHeader() (sz uint32, err error) {
var p []byte
var lead byte
p, err = m.R.Peek(1)
if err != nil {
return
}
lead = p[0]
if isfixmap(lead) {
sz = uint32(rfixmap(lead))
_, err = m.R.Skip(1)
return
}
switch lead {
case mmap16:
p, err = m.R.Next(3)
if err != nil {
return
}
sz = uint32(big.Uint16(p[1:]))
return
case mmap32:
p, err = m.R.Next(5)
if err != nil {
return
}
sz = big.Uint32(p[1:])
return
default:
err = badPrefix(MapType, lead)
return
}
}
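// Illustrative usage sketch (editor's addition, not part of the original
// file): a map is decoded by reading its header and then sz key/value pairs.
//
//	sz, err := rd.ReadMapHeader()
//	for i := uint32(0); err == nil && i < sz; i++ {
//		key, _ := rd.ReadMapKey(nil)
//		val, _ := rd.ReadIntf()
//		_, _ = key, val
//	}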
// ReadMapKey reads either a 'str' or 'bin' field from
// the reader and returns the value as a []byte. It uses
// scratch for storage if it is large enough.
func (m *Reader) ReadMapKey(scratch []byte) ([]byte, error) {
out, err := m.ReadStringAsBytes(scratch)
if err != nil {
if tperr, ok := err.(TypeError); ok && tperr.Encoded == BinType {
return m.ReadBytes(scratch)
}
return nil, err
}
return out, nil
}
// ReadMapKeyPtr returns a []byte pointing to the contents
// of a valid map key. The key cannot be empty, and it
// must be shorter than the total buffer size of the
// *Reader. Additionally, the returned slice is only
// valid until the next *Reader method call. Users
// should exercise extreme care when using this
// method; writing into the returned slice may
// corrupt future reads.
func (m *Reader) ReadMapKeyPtr() ([]byte, error) {
p, err := m.R.Peek(1)
if err != nil {
return nil, err
}
lead := p[0]
var read int
if isfixstr(lead) {
read = int(rfixstr(lead))
m.R.Skip(1)
goto fill
}
switch lead {
case mstr8, mbin8:
p, err = m.R.Next(2)
if err != nil {
return nil, err
}
read = int(p[1])
case mstr16, mbin16:
p, err = m.R.Next(3)
if err != nil {
return nil, err
}
read = int(big.Uint16(p[1:]))
case mstr32, mbin32:
p, err = m.R.Next(5)
if err != nil {
return nil, err
}
read = int(big.Uint32(p[1:]))
default:
return nil, badPrefix(StrType, lead)
}
fill:
if read == 0 {
return nil, ErrShortBytes
}
return m.R.Next(read)
}
// ReadArrayHeader reads the next object as an
// array header and returns the size of the array
// (the number of elements).
func (m *Reader) ReadArrayHeader() (sz uint32, err error) {
var lead byte
var p []byte
p, err = m.R.Peek(1)
if err != nil {
return
}
lead = p[0]
if isfixarray(lead) {
sz = uint32(rfixarray(lead))
_, err = m.R.Skip(1)
return
}
switch lead {
case marray16:
p, err = m.R.Next(3)
if err != nil {
return
}
sz = uint32(big.Uint16(p[1:]))
return
case marray32:
p, err = m.R.Next(5)
if err != nil {
return
}
sz = big.Uint32(p[1:])
return
default:
err = badPrefix(ArrayType, lead)
return
}
}
// ReadNil reads a 'nil' MessagePack byte from the reader
func (m *Reader) ReadNil() error {
p, err := m.R.Peek(1)
if err != nil {
return err
}
if p[0] != mnil {
return badPrefix(NilType, p[0])
}
_, err = m.R.Skip(1)
return err
}
// ReadFloat64 reads a float64 from the reader.
// (If the value on the wire is encoded as a float32,
// it will be up-cast to a float64.)
func (m *Reader) ReadFloat64() (f float64, err error) {
var p []byte
p, err = m.R.Peek(9)
if err != nil {
// we'll allow a conversion from float32 to float64,
// since we don't lose any precision
if err == io.EOF && len(p) > 0 && p[0] == mfloat32 {
ef, err := m.ReadFloat32()
return float64(ef), err
}
return
}
if p[0] != mfloat64 {
// see above
if p[0] == mfloat32 {
ef, err := m.ReadFloat32()
return float64(ef), err
}
err = badPrefix(Float64Type, p[0])
return
}
f = math.Float64frombits(getMuint64(p))
_, err = m.R.Skip(9)
return
}
// ReadFloat32 reads a float32 from the reader
func (m *Reader) ReadFloat32() (f float32, err error) {
var p []byte
p, err = m.R.Peek(5)
if err != nil {
return
}
if p[0] != mfloat32 {
err = badPrefix(Float32Type, p[0])
return
}<|fim▁hole|>
// ReadBool reads a bool from the reader
func (m *Reader) ReadBool() (b bool, err error) {
var p []byte
p, err = m.R.Peek(1)
if err != nil {
return
}
switch p[0] {
case mtrue:
b = true
case mfalse:
default:
err = badPrefix(BoolType, p[0])
return
}
_, err = m.R.Skip(1)
return
}
// ReadInt64 reads an int64 from the reader
func (m *Reader) ReadInt64() (i int64, err error) {
var p []byte
var lead byte
p, err = m.R.Peek(1)
if err != nil {
return
}
lead = p[0]
if isfixint(lead) {
i = int64(rfixint(lead))
_, err = m.R.Skip(1)
return
} else if isnfixint(lead) {
i = int64(rnfixint(lead))
_, err = m.R.Skip(1)
return
}
switch lead {
case mint8:
p, err = m.R.Next(2)
if err != nil {
return
}
i = int64(getMint8(p))
return
case mint16:
p, err = m.R.Next(3)
if err != nil {
return
}
i = int64(getMint16(p))
return
case mint32:
p, err = m.R.Next(5)
if err != nil {
return
}
i = int64(getMint32(p))
return
case mint64:
p, err = m.R.Next(9)
if err != nil {
return
}
i = getMint64(p)
return
default:
err = badPrefix(IntType, lead)
return
}
}
// ReadInt32 reads an int32 from the reader
func (m *Reader) ReadInt32() (i int32, err error) {
var in int64
in, err = m.ReadInt64()
if in > math.MaxInt32 || in < math.MinInt32 {
err = IntOverflow{Value: in, FailedBitsize: 32}
return
}
i = int32(in)
return
}
// ReadInt16 reads an int16 from the reader
func (m *Reader) ReadInt16() (i int16, err error) {
var in int64
in, err = m.ReadInt64()
if in > math.MaxInt16 || in < math.MinInt16 {
err = IntOverflow{Value: in, FailedBitsize: 16}
return
}
i = int16(in)
return
}
// ReadInt8 reads an int8 from the reader
func (m *Reader) ReadInt8() (i int8, err error) {
var in int64
in, err = m.ReadInt64()
if in > math.MaxInt8 || in < math.MinInt8 {
err = IntOverflow{Value: in, FailedBitsize: 8}
return
}
i = int8(in)
return
}
// ReadInt reads an int from the reader
func (m *Reader) ReadInt() (i int, err error) {
if smallint {
var in int32
in, err = m.ReadInt32()
i = int(in)
return
}
var in int64
in, err = m.ReadInt64()
i = int(in)
return
}
// ReadUint64 reads a uint64 from the reader
func (m *Reader) ReadUint64() (u uint64, err error) {
var p []byte
var lead byte
p, err = m.R.Peek(1)
if err != nil {
return
}
lead = p[0]
if isfixint(lead) {
u = uint64(rfixint(lead))
_, err = m.R.Skip(1)
return
}
switch lead {
case muint8:
p, err = m.R.Next(2)
if err != nil {
return
}
u = uint64(getMuint8(p))
return
case muint16:
p, err = m.R.Next(3)
if err != nil {
return
}
u = uint64(getMuint16(p))
return
case muint32:
p, err = m.R.Next(5)
if err != nil {
return
}
u = uint64(getMuint32(p))
return
case muint64:
p, err = m.R.Next(9)
if err != nil {
return
}
u = getMuint64(p)
return
default:
err = badPrefix(UintType, lead)
return
}
}
// ReadUint32 reads a uint32 from the reader
func (m *Reader) ReadUint32() (u uint32, err error) {
var in uint64
in, err = m.ReadUint64()
if in > math.MaxUint32 {
err = UintOverflow{Value: in, FailedBitsize: 32}
return
}
u = uint32(in)
return
}
// ReadUint16 reads a uint16 from the reader
func (m *Reader) ReadUint16() (u uint16, err error) {
var in uint64
in, err = m.ReadUint64()
if in > math.MaxUint16 {
err = UintOverflow{Value: in, FailedBitsize: 16}
return
}
u = uint16(in)
return
}
// ReadUint8 reads a uint8 from the reader
func (m *Reader) ReadUint8() (u uint8, err error) {
var in uint64
in, err = m.ReadUint64()
if in > math.MaxUint8 {
err = UintOverflow{Value: in, FailedBitsize: 8}
return
}
u = uint8(in)
return
}
// ReadUint reads a uint from the reader
func (m *Reader) ReadUint() (u uint, err error) {
if smallint {
var un uint32
un, err = m.ReadUint32()
u = uint(un)
return
}
var un uint64
un, err = m.ReadUint64()
u = uint(un)
return
}
// ReadByte is analogous to ReadUint8.
//
// NOTE: this is *not* an implementation
// of io.ByteReader.
func (m *Reader) ReadByte() (b byte, err error) {
var in uint64
in, err = m.ReadUint64()
if in > math.MaxUint8 {
err = UintOverflow{Value: in, FailedBitsize: 8}
return
}
b = byte(in)
return
}
// ReadBytes reads a MessagePack 'bin' object
// from the reader and returns its value. It may
// use 'scratch' for storage if it is non-nil.
func (m *Reader) ReadBytes(scratch []byte) (b []byte, err error) {
var p []byte
var lead byte
p, err = m.R.Peek(2)
if err != nil {
return
}
lead = p[0]
var read int64
switch lead {
case mbin8:
read = int64(p[1])
m.R.Skip(2)
case mbin16:
p, err = m.R.Next(3)
if err != nil {
return
}
read = int64(big.Uint16(p[1:]))
case mbin32:
p, err = m.R.Next(5)
if err != nil {
return
}
read = int64(big.Uint32(p[1:]))
default:
err = badPrefix(BinType, lead)
return
}
if int64(cap(scratch)) < read {
b = make([]byte, read)
} else {
b = scratch[0:read]
}
_, err = m.R.ReadFull(b)
return
}
// ReadBytesHeader reads the size header
// of a MessagePack 'bin' object. The user
// is responsible for dealing with the next
// 'sz' bytes from the reader in an application-specific
// way.
func (m *Reader) ReadBytesHeader() (sz uint32, err error) {
var p []byte
p, err = m.R.Peek(1)
if err != nil {
return
}
switch p[0] {
case mbin8:
p, err = m.R.Next(2)
if err != nil {
return
}
sz = uint32(p[1])
return
case mbin16:
p, err = m.R.Next(3)
if err != nil {
return
}
sz = uint32(big.Uint16(p[1:]))
return
case mbin32:
p, err = m.R.Next(5)
if err != nil {
return
}
sz = uint32(big.Uint32(p[1:]))
return
default:
err = badPrefix(BinType, p[0])
return
}
}
// ReadExactBytes reads a MessagePack 'bin'-encoded
// object off of the wire into the provided slice. An
// ArrayError will be returned if the object is not
// exactly the length of the input slice.
func (m *Reader) ReadExactBytes(into []byte) error {
p, err := m.R.Peek(2)
if err != nil {
return err
}
lead := p[0]
var read int64 // bytes to read
var skip int // prefix size to skip
switch lead {
case mbin8:
read = int64(p[1])
skip = 2
case mbin16:
p, err = m.R.Peek(3)
if err != nil {
return err
}
read = int64(big.Uint16(p[1:]))
skip = 3
case mbin32:
p, err = m.R.Peek(5)
if err != nil {
return err
}
read = int64(big.Uint32(p[1:]))
skip = 5
default:
return badPrefix(BinType, lead)
}
if read != int64(len(into)) {
return ArrayError{Wanted: uint32(len(into)), Got: uint32(read)}
}
m.R.Skip(skip)
_, err = m.R.ReadFull(into)
return err
}
// ReadStringAsBytes reads a MessagePack 'str' (utf-8) string
// and returns its value as bytes. It may use 'scratch' for storage
// if it is non-nil.
func (m *Reader) ReadStringAsBytes(scratch []byte) (b []byte, err error) {
var p []byte
var lead byte
p, err = m.R.Peek(1)
if err != nil {
return
}
lead = p[0]
var read int64
if isfixstr(lead) {
read = int64(rfixstr(lead))
m.R.Skip(1)
goto fill
}
switch lead {
case mstr8:
p, err = m.R.Next(2)
if err != nil {
return
}
read = int64(uint8(p[1]))
case mstr16:
p, err = m.R.Next(3)
if err != nil {
return
}
read = int64(big.Uint16(p[1:]))
case mstr32:
p, err = m.R.Next(5)
if err != nil {
return
}
read = int64(big.Uint32(p[1:]))
default:
err = badPrefix(StrType, lead)
return
}
fill:
if int64(cap(scratch)) < read {
b = make([]byte, read)
} else {
b = scratch[0:read]
}
_, err = m.R.ReadFull(b)
return
}
// ReadStringHeader reads a string header
// off of the wire. The user is then responsible
// for dealing with the next 'sz' bytes from
// the reader in an application-specific manner.
func (m *Reader) ReadStringHeader() (sz uint32, err error) {
var p []byte
p, err = m.R.Peek(1)
if err != nil {
return
}
lead := p[0]
if isfixstr(lead) {
sz = uint32(rfixstr(lead))
m.R.Skip(1)
return
}
switch lead {
case mstr8:
p, err = m.R.Next(2)
if err != nil {
return
}
sz = uint32(p[1])
return
case mstr16:
p, err = m.R.Next(3)
if err != nil {
return
}
sz = uint32(big.Uint16(p[1:]))
return
case mstr32:
p, err = m.R.Next(5)
if err != nil {
return
}
sz = big.Uint32(p[1:])
return
default:
err = badPrefix(StrType, lead)
return
}
}
// ReadString reads a utf-8 string from the reader
func (m *Reader) ReadString() (s string, err error) {
var p []byte
var lead byte
var read int64
p, err = m.R.Peek(1)
if err != nil {
return
}
lead = p[0]
if isfixstr(lead) {
read = int64(rfixstr(lead))
m.R.Skip(1)
goto fill
}
switch lead {
case mstr8:
p, err = m.R.Next(2)
if err != nil {
return
}
read = int64(uint8(p[1]))
case mstr16:
p, err = m.R.Next(3)
if err != nil {
return
}
read = int64(big.Uint16(p[1:]))
case mstr32:
p, err = m.R.Next(5)
if err != nil {
return
}
read = int64(big.Uint32(p[1:]))
default:
err = badPrefix(StrType, lead)
return
}
fill:
if read == 0 {
s, err = "", nil
return
}
// reading into the memory
// that will become the string
// itself has vastly superior
// worst-case performance, because
// the reader buffer doesn't have
// to be large enough to hold the string.
// the idea here is to make it more
// difficult for someone malicious
// to cause the system to run out of
// memory by sending very large strings.
//
// NOTE: this works because the argument
// passed to (*fwd.Reader).ReadFull escapes
// to the heap; its argument may, in turn,
// be passed to the underlying reader, and
// thus escape analysis *must* conclude that
// 'out' escapes.
out := make([]byte, read)
_, err = m.R.ReadFull(out)
if err != nil {
return
}
s = UnsafeString(out)
return
}
// ReadComplex64 reads a complex64 from the reader
func (m *Reader) ReadComplex64() (f complex64, err error) {
var p []byte
p, err = m.R.Peek(10)
if err != nil {
return
}
if p[0] != mfixext8 {
err = badPrefix(Complex64Type, p[0])
return
}
if int8(p[1]) != Complex64Extension {
err = errExt(int8(p[1]), Complex64Extension)
return
}
f = complex(math.Float32frombits(big.Uint32(p[2:])),
math.Float32frombits(big.Uint32(p[6:])))
_, err = m.R.Skip(10)
return
}
// ReadComplex128 reads a complex128 from the reader
func (m *Reader) ReadComplex128() (f complex128, err error) {
var p []byte
p, err = m.R.Peek(18)
if err != nil {
return
}
if p[0] != mfixext16 {
err = badPrefix(Complex128Type, p[0])
return
}
if int8(p[1]) != Complex128Extension {
err = errExt(int8(p[1]), Complex128Extension)
return
}
f = complex(math.Float64frombits(big.Uint64(p[2:])),
math.Float64frombits(big.Uint64(p[10:])))
_, err = m.R.Skip(18)
return
}
// ReadMapStrIntf reads a MessagePack map into a map[string]interface{}.
// (You must pass a non-nil map into the function.)
func (m *Reader) ReadMapStrIntf(mp map[string]interface{}) (err error) {
var sz uint32
sz, err = m.ReadMapHeader()
if err != nil {
return
}
for key := range mp {
delete(mp, key)
}
for i := uint32(0); i < sz; i++ {
var key string
var val interface{}
key, err = m.ReadString()
if err != nil {
return
}
val, err = m.ReadIntf()
if err != nil {
return
}
mp[key] = val
}
return
}
// ReadTime reads a time.Time object from the reader.
// The returned time's location will be set to time.Local.
func (m *Reader) ReadTime() (t time.Time, err error) {
var p []byte
p, err = m.R.Peek(15)
if err != nil {
return
}
if p[0] != mext8 || p[1] != 12 {
err = badPrefix(TimeType, p[0])
return
}
if int8(p[2]) != TimeExtension {
err = errExt(int8(p[2]), TimeExtension)
return
}
sec, nsec := getUnix(p[3:])
t = time.Unix(sec, int64(nsec)).Local()
_, err = m.R.Skip(15)
return
}
// ReadIntf reads out the next object as a raw interface{}.
// Arrays are decoded as []interface{}, and maps are decoded
// as map[string]interface{}. Integers are decoded as int64
// and unsigned integers are decoded as uint64.
func (m *Reader) ReadIntf() (i interface{}, err error) {
var t Type
t, err = m.NextType()
if err != nil {
return
}
switch t {
case BoolType:
i, err = m.ReadBool()
return
case IntType:
i, err = m.ReadInt64()
return
case UintType:
i, err = m.ReadUint64()
return
case BinType:
i, err = m.ReadBytes(nil)
return
case StrType:
i, err = m.ReadString()
return
case Complex64Type:
i, err = m.ReadComplex64()
return
case Complex128Type:
i, err = m.ReadComplex128()
return
case TimeType:
i, err = m.ReadTime()
return
case ExtensionType:
var t int8
t, err = m.peekExtensionType()
if err != nil {
return
}
f, ok := extensionReg[t]
if ok {
e := f()
err = m.ReadExtension(e)
i = e
return
}
var e RawExtension
e.Type = t
err = m.ReadExtension(&e)
i = &e
return
case MapType:
mp := make(map[string]interface{})
err = m.ReadMapStrIntf(mp)
i = mp
return
case NilType:
err = m.ReadNil()
i = nil
return
case Float32Type:
i, err = m.ReadFloat32()
return
case Float64Type:
i, err = m.ReadFloat64()
return
case ArrayType:
var sz uint32
sz, err = m.ReadArrayHeader()
if err != nil {
return
}
out := make([]interface{}, int(sz))
for j := range out {
out[j], err = m.ReadIntf()
if err != nil {
return
}
}
i = out
return
default:
return nil, fatal // unreachable
}
}<|fim▁end|>
|
f = math.Float32frombits(getMuint32(p))
_, err = m.R.Skip(5)
return
}
|
<|file_name|>frequencyAnalysis.py<|end_file_name|><|fim▁begin|><|fim▁hole|>The encryption process in the story you read involves frequency analysis: it is known that letter 'e' is the
most frequent one in the English language, so it's pretty safe to assume that the most common character in the
encryptedText stands for 'e'. To begin with, implement a function that will find the most frequent character
in the given encryptedText.
Example
For encryptedText = "$~NmiNmim$/NVeirp@dlzrCCCCfFfQQQ", the output should be
frequencyAnalysis(encryptedText) = 'C'.
Letter 'C' appears in the text more than any other character (4 times), which is why it is the answer.
""
from collections import Counter # "Counter" is what CodeFights asks for
def frequencyAnalysis(encryptedText):
return max(Counter(encryptedText), key=Counter(encryptedText).get) # CodeFights asks to change this line only<|fim▁end|>
|
""" You've recently read "The Gold-Bug" by Edgar Allan Poe, and was so impressed by the cryptogram in it that
decided to try and decipher an encrypted text yourself. You asked your friend to encode a piece of text using
a substitution cipher, and now have an encryptedText that you'd like to decipher.
|
<|file_name|>test_meanstd.py<|end_file_name|><|fim▁begin|>import unittest2
from zounds.util import simple_in_memory_settings
from .preprocess import MeanStdNormalization, PreprocessingPipeline
import featureflow as ff
import numpy as np
class MeanStdTests(unittest2.TestCase):
def _forward_backward(self, shape):
@simple_in_memory_settings
class Model(ff.BaseModel):
meanstd = ff.PickleFeature(
MeanStdNormalization,
store=False)
pipeline = ff.PickleFeature(
PreprocessingPipeline,
needs=(meanstd,),
store=True)
training = np.random.random_sample((100,) + shape)
_id = Model.process(meanstd=training)
model = Model(_id)
data_shape = (10,) + shape
data = np.random.random_sample(data_shape)
result = model.pipeline.transform(data)
self.assertEqual(data_shape, result.data.shape)
inverted = result.inverse_transform()
self.assertEqual(inverted.shape, data.shape)
np.testing.assert_allclose(inverted, data)
def test_can_process_1d(self):
self._forward_backward((9,))<|fim▁hole|> def test_can_process_2d(self):
self._forward_backward((3, 4))
def test_can_process_3d(self):
self._forward_backward((5, 4, 7))<|fim▁end|>
| |
<|file_name|>pprof_profiler_test.py<|end_file_name|><|fim▁begin|># Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for pprof_profiler."""
import gzip
from proto import profile_pb2
from tensorflow.core.framework import step_stats_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import test_util
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
from tensorflow.python.profiler import pprof_profiler
class PprofProfilerTest(test.TestCase):
def testDataEmpty(self):
output_dir = test.get_temp_dir()
run_metadata = config_pb2.RunMetadata()
graph = test.mock.MagicMock()
graph.get_operations.return_value = []
profiles = pprof_profiler.get_profiles(graph, run_metadata)
self.assertEqual(0, len(profiles))
profile_files = pprof_profiler.profile(
graph, run_metadata, output_dir)
self.assertEqual(0, len(profile_files))
def testRunMetadataEmpty(self):
output_dir = test.get_temp_dir()
run_metadata = config_pb2.RunMetadata()
graph = test.mock.MagicMock()
op1 = test.mock.MagicMock()
op1.name = 'Add/123'
op1.traceback = [('a/b/file1', 10, 'some_var')]
op1.type = 'add'
graph.get_operations.return_value = [op1]
profiles = pprof_profiler.get_profiles(graph, run_metadata)
self.assertEqual(0, len(profiles))
profile_files = pprof_profiler.profile(
graph, run_metadata, output_dir)
self.assertEqual(0, len(profile_files))
def testValidProfile(self):
output_dir = test.get_temp_dir()
run_metadata = config_pb2.RunMetadata()
node1 = step_stats_pb2.NodeExecStats(
node_name='Add/123',
op_start_rel_micros=3,
op_end_rel_micros=5,
all_end_rel_micros=4)
run_metadata = config_pb2.RunMetadata()
device1 = run_metadata.step_stats.dev_stats.add()
device1.device = 'deviceA'
device1.node_stats.extend([node1])
graph = test.mock.MagicMock()
op1 = test.mock.MagicMock()
op1.name = 'Add/123'
op1.traceback = [
('a/b/file1', 10, 'apply_op', 'abc'), ('a/c/file2', 12, 'my_op', 'def')]
op1.type = 'add'
graph.get_operations.return_value = [op1]
expected_proto = """sample_type {
type: 5
unit: 5
}
sample_type {
type: 6
unit: 7
}
sample_type {
type: 8
unit: 7
}
sample {
value: 1
value: 4
value: 2
label {
key: 1
str: 2
}
label {
key: 3
str: 4
}
}
string_table: ""
string_table: "node_name"
string_table: "Add/123"
string_table: "op_type"
string_table: "add"
string_table: "count"
string_table: "all_time"
string_table: "nanoseconds"
string_table: "op_time"
string_table: "Device 1 of 1: deviceA"
comment: 9
"""
# Test with protos
profiles = pprof_profiler.get_profiles(graph, run_metadata)
self.assertEqual(1, len(profiles))
self.assertTrue('deviceA' in profiles)
self.assertEqual(expected_proto, str(profiles['deviceA']))
# Test with files
profile_files = pprof_profiler.profile(
graph, run_metadata, output_dir)
self.assertEqual(1, len(profile_files))
with gzip.open(profile_files[0]) as profile_file:
profile_contents = profile_file.read()
profile = profile_pb2.Profile()
profile.ParseFromString(profile_contents)
self.assertEqual(expected_proto, str(profile))
@test_util.run_v1_only('b/120545219')
def testProfileWithWhileLoop(self):
options = config_pb2.RunOptions()
options.trace_level = config_pb2.RunOptions.FULL_TRACE
run_metadata = config_pb2.RunMetadata()
num_iters = 5
with self.cached_session() as sess:
i = constant_op.constant(0)
c = lambda i: math_ops.less(i, num_iters)
b = lambda i: math_ops.add(i, 1)
r = control_flow_ops.while_loop(c, b, [i])
sess.run(r, options=options, run_metadata=run_metadata)
profiles = pprof_profiler.get_profiles(sess.graph, run_metadata)
self.assertEqual(1, len(profiles))
profile = next(iter(profiles.values()))
add_samples = [] # Samples for the while/Add node
for sample in profile.sample:
if profile.string_table[sample.label[0].str] == 'while/Add':
add_samples.append(sample)
# Values for same nodes are aggregated.
self.assertEqual(1, len(add_samples))
# Value of "count" should be equal to number of iterations.
self.assertEqual(num_iters, add_samples[0].value[0])
if __name__ == '__main__':
test.main()<|fim▁end|>
|
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>"use strict";
let datafire = require('datafire');<|fim▁hole|><|fim▁end|>
|
let openapi = require('./openapi.json');
module.exports = datafire.Integration.fromOpenAPI(openapi, "azure_network_virtualrouter");
|
<|file_name|>PositionDefinition.java<|end_file_name|><|fim▁begin|>package fr.npellegrin.xebia.mower.parser.model;
/**
* Parsed position.
*/
public class PositionDefinition {
private int x;
private int y;<|fim▁hole|> return x;
}
public void setX(final int x) {
this.x = x;
}
public int getY() {
return y;
}
public void setY(final int y) {
this.y = y;
}
public OrientationDefinition getOrientation() {
return orientation;
}
public void setOrientation(final OrientationDefinition orientation) {
this.orientation = orientation;
}
}<|fim▁end|>
|
private OrientationDefinition orientation;
public int getX() {
|
<|file_name|>driver.py<|end_file_name|><|fim▁begin|># Copyright 2012 NetApp
# Copyright 2015 Mirantis inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Drivers for shares.
"""
import re
import time
from oslo_config import cfg
from oslo_log import log
import six
from manila import exception
from manila.i18n import _, _LE
from manila import network
from manila.share import utils as share_utils
from manila import utils
LOG = log.getLogger(__name__)
share_opts = [
# NOTE(rushiagr): Reasonable to define this option at only one place.
cfg.IntOpt(
'num_shell_tries',
default=3,
help='Number of times to attempt to run flakey shell commands.'),
cfg.IntOpt(
'reserved_share_percentage',
default=0,
help='The percentage of backend capacity reserved.'),
cfg.StrOpt(
'share_backend_name',
default=None,
help='The backend name for a given driver implementation.'),
cfg.StrOpt(
'network_config_group',
default=None,
help="Name of the configuration group in the Manila conf file "
"to look for network config options."
"If not set, the share backend's config group will be used."
"If an option is not found within provided group, then"
"'DEFAULT' group will be used for search of option."),
cfg.BoolOpt(
'driver_handles_share_servers',
help="There are two possible approaches for share drivers in Manila. "
"First is when share driver is able to handle share-servers and "
"second when not. Drivers can support either both or only one "
"of these approaches. So, set this opt to True if share driver "
"is able to handle share servers and it is desired mode else set "
"False. It is set to None by default to make this choice "
"intentional."),
cfg.FloatOpt(
'max_over_subscription_ratio',
default=20.0,
help='Float representation of the over subscription ratio '
'when thin provisioning is involved. Default ratio is '
'20.0, meaning provisioned capacity can be 20 times '
'the total physical capacity. If the ratio is 10.5, it '
'means provisioned capacity can be 10.5 times the '
'total physical capacity. A ratio of 1.0 means '
'provisioned capacity cannot exceed the total physical '
'capacity. A ratio lower than 1.0 is invalid.'),
cfg.StrOpt(
'migration_tmp_location',
default='/tmp/',
help="Temporary path to create and mount shares during migration."),
cfg.ListOpt(
'migration_ignore_files',
default=['lost+found'],
help="List of files and folders to be ignored when migrating shares. "
"Items should be names (not including any path)."),
cfg.IntOpt(
'migration_wait_access_rules_timeout',
default=90,
help="Time to wait for access rules to be allowed/denied on backends "
"when migrating shares using generic approach (seconds)."),
cfg.IntOpt(
'migration_create_delete_share_timeout',
default=300,
help='Timeout for creating and deleting share instances '
'when performing share migration (seconds).'),
cfg.StrOpt(
'migration_mounting_backend_ip',
default=None,
help="Backend IP in admin network to use for mounting "
"shares during migration."),
cfg.StrOpt(
'migration_data_copy_node_ip',
default=None,
help="The IP of the node responsible for copying data during "
"migration, such as the data copy service node, reachable by "
"the backend."),
cfg.StrOpt(
'migration_protocol_mount_command',
default=None,
help="The command for mounting shares for this backend. Must specify"
"the executable and all necessary parameters for the protocol "
"supported. It is advisable to separate protocols per backend."),
cfg.BoolOpt(
'migration_readonly_support',
default=True,
help="Specify whether read only access mode is supported in this"
"backend."),
]
ssh_opts = [
cfg.IntOpt(
'ssh_conn_timeout',
default=60,
help='Backend server SSH connection timeout.'),
cfg.IntOpt(
'ssh_min_pool_conn',
default=1,
help='Minimum number of connections in the SSH pool.'),
cfg.IntOpt(
'ssh_max_pool_conn',
default=10,
help='Maximum number of connections in the SSH pool.'),
]
ganesha_opts = [
cfg.StrOpt('ganesha_config_dir',
default='/etc/ganesha',
help='Directory where Ganesha config files are stored.'),
cfg.StrOpt('ganesha_config_path',
default='$ganesha_config_dir/ganesha.conf',
help='Path to main Ganesha config file.'),
cfg.StrOpt('ganesha_nfs_export_options',
default='maxread = 65536, prefread = 65536',
help='Options to use when exporting a share using ganesha '
'NFS server. Note that these defaults can be overridden '
'when a share is created by passing metadata with key '
'name export_options. Also note the complete set of '
'default ganesha export options is specified in '
'ganesha_utils. (GPFS only.)'),
cfg.StrOpt('ganesha_service_name',
default='ganesha.nfsd',
help='Name of the ganesha nfs service.'),
cfg.StrOpt('ganesha_db_path',
default='$state_path/manila-ganesha.db',
help='Location of Ganesha database file. '
'(Ganesha module only.)'),
cfg.StrOpt('ganesha_export_dir',
default='$ganesha_config_dir/export.d',
help='Path to directory containing Ganesha export '
'configuration. (Ganesha module only.)'),
cfg.StrOpt('ganesha_export_template_dir',
default='/etc/manila/ganesha-export-templ.d',
help='Path to directory containing Ganesha export '
'block templates. (Ganesha module only.)'),
]
CONF = cfg.CONF
CONF.register_opts(share_opts)
CONF.register_opts(ssh_opts)
CONF.register_opts(ganesha_opts)
class ExecuteMixin(object):
"""Provides an executable functionality to a driver class."""
def init_execute_mixin(self, *args, **kwargs):
if self.configuration:
self.configuration.append_config_values(ssh_opts)
self.set_execute(kwargs.pop('execute', utils.execute))
def set_execute(self, execute):
self._execute = execute
def _try_execute(self, *command, **kwargs):
# NOTE(vish): Volume commands can partially fail due to timing, but
# running them a second time on failure will usually
# recover nicely.
tries = 0
while True:
try:
self._execute(*command, **kwargs)
return True
except exception.ProcessExecutionError:
tries += 1
if tries >= self.configuration.num_shell_tries:
raise
LOG.exception(_LE("Recovering from a failed execute. "
"Try number %s"), tries)
time.sleep(tries ** 2)
class GaneshaMixin(object):
"""Augment derived classes with Ganesha configuration."""
def init_ganesha_mixin(self, *args, **kwargs):
if self.configuration:
self.configuration.append_config_values(ganesha_opts)
class ShareDriver(object):
"""Class defines interface of NAS driver."""
def __init__(self, driver_handles_share_servers, *args, **kwargs):
"""Implements base functionality for share drivers.
:param driver_handles_share_servers: expected boolean value or
tuple/list/set of boolean values.
There are two possible approaches for share drivers in Manila.
First is when share driver is able to handle share-servers and
second when not.
Drivers can support either both or only one of these approaches.
So, it is allowed to be 'True' when share driver does support
handling of share servers and allowed to be 'False' when does
support usage of unhandled share-servers that are not tracked by
Manila.
Share drivers are allowed to work only in one of two possible
driver modes, that is why only one should be chosen.
"""
super(ShareDriver, self).__init__()
self.configuration = kwargs.get('configuration', None)
self._stats = {}
self.pools = []
if self.configuration:
self.configuration.append_config_values(share_opts)
network_config_group = (self.configuration.network_config_group or
self.configuration.config_group)
else:
network_config_group = None
self._verify_share_server_handling(driver_handles_share_servers)
if self.driver_handles_share_servers:
self.network_api = network.API(
config_group_name=network_config_group)
if hasattr(self, 'init_execute_mixin'):
# Instance with 'ExecuteMixin'
self.init_execute_mixin(*args, **kwargs) # pylint: disable=E1101
if hasattr(self, 'init_ganesha_mixin'):
# Instance with 'GaneshaMixin'
self.init_ganesha_mixin(*args, **kwargs) # pylint: disable=E1101
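# Illustrative sketch (editor's addition, not part of the original module):
# a minimal, hypothetical driver subclass declaring that it does not handle
# share servers, which is the choice the constructor above validates.
#
#   class DummyDriver(ShareDriver):
#       def __init__(self, *args, **kwargs):
#           super(DummyDriver, self).__init__(False, *args, **kwargs)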
@property
def driver_handles_share_servers(self):
if self.configuration:
return self.configuration.safe_get('driver_handles_share_servers')
return CONF.driver_handles_share_servers
def _verify_share_server_handling(self, driver_handles_share_servers):
if not isinstance(self.driver_handles_share_servers, bool):
raise exception.ManilaException(
"Config opt 'driver_handles_share_servers' has improper "
"value - '%s'. Please define it as boolean." %
self.driver_handles_share_servers)
elif isinstance(driver_handles_share_servers, bool):
driver_handles_share_servers = [driver_handles_share_servers]
elif not isinstance(driver_handles_share_servers, (tuple, list, set)):
raise exception.ManilaException(
"Improper data provided for 'driver_handles_share_servers' - "
"%s" % driver_handles_share_servers)
if any(not isinstance(v, bool) for v in driver_handles_share_servers):
raise exception.ManilaException(
"Provided wrong data: %s" % driver_handles_share_servers)
if (self.driver_handles_share_servers not in
driver_handles_share_servers):
raise exception.ManilaException(
"Driver does not support mode 'driver_handles_share_servers="
"%(actual)s'. It can be used only with value '%(allowed)s'." %
{'actual': self.driver_handles_share_servers,
'allowed': driver_handles_share_servers})
def migrate_share(self, context, share_ref, host,
dest_driver_migration_info):
"""Is called to perform driver migration.
Driver should implement this method if willing to perform migration
in an optimized way, useful for when driver understands destination
backend.
:param context: The 'context.RequestContext' object for the request.
:param share_ref: Reference to the share being migrated.
:param host: Destination host and its capabilities.
:param dest_driver_migration_info: Migration information provided by
destination host.
:returns: Boolean value indicating if driver migration succeeded.
:returns: Dictionary containing a model update.
"""
return None, None
def get_driver_migration_info(self, context, share_instance, share_server):
"""Is called to provide necessary driver migration logic."""
return None
def get_migration_info(self, context, share_instance, share_server):
"""Is called to provide necessary generic migration logic."""
mount_cmd = self._get_mount_command(context, share_instance,
share_server)
umount_cmd = self._get_unmount_command(context, share_instance,
share_server)
access = self._get_access_rule_for_data_copy(
context, share_instance, share_server)
return {'mount': mount_cmd,
'umount': umount_cmd,
'access': access}
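# Illustrative sketch (editor's addition, not part of the original module):
# shape of the dictionary returned above for an NFS share instance, with the
# default 'migration_tmp_location' of '/tmp/' (all concrete values are
# hypothetical).
#
#   {'mount': ['mount', '-t', 'nfs', '10.0.0.5:/path/to/share',
#              '/tmp/<share_instance_id>'],
#    'umount': ['umount', '/tmp/<share_instance_id>'],
#    'access': {'access_type': 'ip', 'access_level': 'rw',
#               'access_to': '<migration_data_copy_node_ip>'}}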
def _get_mount_command(self, context, share_instance, share_server):
"""Is called to delegate mounting share logic."""
mount_cmd = self._get_mount_command_protocol(share_instance,
share_server)
mount_ip = self._get_mount_ip(share_instance, share_server)
mount_cmd.append(mount_ip)
mount_path = self.configuration.safe_get(
'migration_tmp_location') + share_instance['id']
mount_cmd.append(mount_path)
return mount_cmd
def _get_mount_command_protocol(self, share_instance, share_server):
mount_cmd = self.configuration.safe_get(
'migration_protocol_mount_command')
if mount_cmd:
return mount_cmd.split()
else:
return ['mount', '-t', share_instance['share_proto'].lower()]
def _get_mount_ip(self, share_instance, share_server):
# Note(ganso): DHSS = true drivers may need to override this method
# and use information saved in share_server structure.
mount_ip = self.configuration.safe_get('migration_mounting_backend_ip')
old_ip = share_instance['export_locations'][0]['path']
if mount_ip:
# NOTE(ganso): Does not currently work with hostnames and ipv6.
p = re.compile("\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}")
new_ip = p.sub(mount_ip, old_ip)
return new_ip
else:
return old_ip
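# Illustrative sketch (editor's addition, not part of the original module):
# the regex substitution above replaces only the IPv4 address embedded in
# the export location (values are hypothetical).
#
#   old_ip   = '192.168.1.10:/shares/share-xyz'
#   mount_ip = '10.0.0.20'          # migration_mounting_backend_ip
#   new_ip  == '10.0.0.20:/shares/share-xyz'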
def _get_unmount_command(self, context, share_instance, share_server):
return ['umount',
self.configuration.safe_get('migration_tmp_location')
+ share_instance['id']]
def _get_access_rule_for_data_copy(
self, context, share_instance, share_server):
"""Is called to obtain access rule so data copy node can mount."""
# Note(ganso): The current method implementation is intended to work
# with Data Copy Service approach. If Manila Node is used for copying,
# then DHSS = true drivers may need to override this method.
service_ip = self.configuration.safe_get('migration_data_copy_node_ip')
return {'access_type': 'ip',
'access_level': 'rw',
'access_to': service_ip}
def copy_share_data(self, context, helper, share, share_instance,
share_server, new_share_instance, new_share_server,
migration_info_src, migration_info_dest):
# NOTE(ganso): This method is here because it is debatable if it can
# be overridden by a driver or not. Personally I think it should not,
# else it would be possible to lose compatibility with generic
# migration between backends, but allows the driver to use it on its
# own implementation if it wants to.
migrated = False
mount_path = self.configuration.safe_get('migration_tmp_location')
src_access = migration_info_src['access']
dest_access = migration_info_dest['access']
if None in (src_access['access_to'], dest_access['access_to']):
msg = _("Access rules not appropriate for mounting share instances"
" for migration of share %(share_id)s,"
" source share access: %(src_ip)s, destination share"
" access: %(dest_ip)s. Aborting.") % {
'src_ip': src_access['access_to'],
'dest_ip': dest_access['access_to'],
'share_id': share['id']}
raise exception.ShareMigrationFailed(reason=msg)
# NOTE(ganso): Removing any previously conflicting access rules, which
# would cause the following access_allow to fail for one instance.
helper.deny_migration_access(None, src_access, False)
helper.deny_migration_access(None, dest_access, False)
# NOTE(ganso): I would rather allow access to instances separately,
# but I require an access_id since it is a new access rule and
# destination manager must receive an access_id. I can either move
# this code to manager code so I can create the rule in DB manually,
# or ignore duplicate access rule errors for some specific scenarios.
try:
src_access_ref = helper.allow_migration_access(src_access)
except Exception as e:
LOG.error(_LE("Share migration failed attempting to allow "
"access of %(access_to)s to share "
"instance %(instance_id)s.") % {
'access_to': src_access['access_to'],
'instance_id': share_instance['id']})
msg = six.text_type(e)
LOG.exception(msg)
raise exception.ShareMigrationFailed(reason=msg)
try:
dest_access_ref = helper.allow_migration_access(dest_access)
except Exception as e:
LOG.error(_LE("Share migration failed attempting to allow "
"access of %(access_to)s to share "
"instance %(instance_id)s.") % {
'access_to': dest_access['access_to'],
'instance_id': new_share_instance['id']})
msg = six.text_type(e)
LOG.exception(msg)
helper.cleanup_migration_access(src_access_ref, src_access)
raise exception.ShareMigrationFailed(reason=msg)
# NOTE(ganso): From here we have the possibility of not cleaning
# anything when facing an error. At this moment, we have the
# destination instance in "inactive" state, while we are performing
# operations on the source instance. I think it is best to not clean
# the instance, leave it in "inactive" state, but try to clean
# temporary access rules, mounts, folders, etc, since no additional
# harm is done.
def _mount_for_migration(migration_info):
try:
utils.execute(*migration_info['mount'], run_as_root=True)
except Exception:<|fim▁hole|> "%(share_instance_id)s "
"to %(new_share_instance_id)s") % {
'share_instance_id': share_instance['id'],
'new_share_instance_id': new_share_instance['id']})
helper.cleanup_migration_access(
src_access_ref, src_access)
helper.cleanup_migration_access(
dest_access_ref, dest_access)
raise
utils.execute('mkdir', '-p',
''.join((mount_path, share_instance['id'])))
utils.execute('mkdir', '-p',
''.join((mount_path, new_share_instance['id'])))
# NOTE(ganso): mkdir command sometimes returns faster than it
# actually runs, so we better sleep for 1 second.
time.sleep(1)
try:
_mount_for_migration(migration_info_src)
except Exception as e:
LOG.error(_LE("Share migration failed attempting to mount "
"share instance %s.") % share_instance['id'])
msg = six.text_type(e)
LOG.exception(msg)
helper.cleanup_temp_folder(share_instance, mount_path)
helper.cleanup_temp_folder(new_share_instance, mount_path)
raise exception.ShareMigrationFailed(reason=msg)
try:
_mount_for_migration(migration_info_dest)
except Exception as e:
LOG.error(_LE("Share migration failed attempting to mount "
"share instance %s.") % new_share_instance['id'])
msg = six.text_type(e)
LOG.exception(msg)
helper.cleanup_unmount_temp_folder(share_instance,
migration_info_src)
helper.cleanup_temp_folder(share_instance, mount_path)
helper.cleanup_temp_folder(new_share_instance, mount_path)
raise exception.ShareMigrationFailed(reason=msg)
try:
ignore_list = self.configuration.safe_get('migration_ignore_files')
copy = share_utils.Copy(mount_path + share_instance['id'],
mount_path + new_share_instance['id'],
ignore_list)
copy.run()
if copy.get_progress()['total_progress'] == 100:
migrated = True
except Exception as e:
LOG.exception(six.text_type(e))
LOG.error(_LE("Failed to copy files for "
"migration of share instance %(share_instance_id)s "
"to %(new_share_instance_id)s") % {
'share_instance_id': share_instance['id'],
'new_share_instance_id': new_share_instance['id']})
# NOTE(ganso): For some reason I frequently get AMQP errors after
# copying finishes, which seems to be the service taking too long to
# copy while not replying to heartbeat messages, so AMQP closes the
# socket. There is no impact, it just shows a big trace and AMQP
# reconnects after, although I would like to prevent this situation
# without the use of additional threads. Suggestions welcome.
utils.execute(*migration_info_src['umount'], run_as_root=True)
utils.execute(*migration_info_dest['umount'], run_as_root=True)
utils.execute('rmdir', ''.join((mount_path, share_instance['id'])),
check_exit_code=False)
utils.execute('rmdir', ''.join((mount_path, new_share_instance['id'])),
check_exit_code=False)
helper.deny_migration_access(src_access_ref, src_access)
helper.deny_migration_access(dest_access_ref, dest_access)
if not migrated:
msg = ("Copying from share instance %(instance_id)s "
"to %(new_instance_id)s did not succeed." % {
'instance_id': share_instance['id'],
'new_instance_id': new_share_instance['id']})
raise exception.ShareMigrationFailed(reason=msg)
LOG.debug("Copying completed in migration for share %s." % share['id'])
def create_share(self, context, share, share_server=None):
"""Is called to create share."""
raise NotImplementedError()
def create_share_from_snapshot(self, context, share, snapshot,
share_server=None):
"""Is called to create share from snapshot."""
raise NotImplementedError()
def create_snapshot(self, context, snapshot, share_server=None):
"""Is called to create snapshot.
:param context: Current context
:param snapshot: Snapshot model. Share model could be
retrieved through snapshot['share'].
:param share_server: Share server model or None.
"""
raise NotImplementedError()
def delete_share(self, context, share, share_server=None):
"""Is called to remove share."""
raise NotImplementedError()
def delete_snapshot(self, context, snapshot, share_server=None):
"""Is called to remove snapshot.
:param context: Current context
:param snapshot: Snapshot model. Share model could be
retrieved through snapshot['share'].
:param share_server: Share server model or None.
"""
raise NotImplementedError()
def get_pool(self, share):
"""Return pool name where the share resides on.
:param share: The share hosted by the driver.
"""
def ensure_share(self, context, share, share_server=None):
"""Invoked to ensure that share is exported.
Driver can use this method to update the list of export locations of
the share if it changes. To do that, you should return list with
export locations.
:return None or list with export locations
"""
raise NotImplementedError()
def allow_access(self, context, share, access, share_server=None):
"""Allow access to the share."""
raise NotImplementedError()
def deny_access(self, context, share, access, share_server=None):
"""Deny access to the share."""
raise NotImplementedError()
def check_for_setup_error(self):
"""Check for setup error."""
max_ratio = self.configuration.safe_get('max_over_subscription_ratio')
if not max_ratio or float(max_ratio) < 1.0:
msg = (_("Invalid max_over_subscription_ratio '%s'. "
"Valid value should be >= 1.0.") % max_ratio)
raise exception.InvalidParameterValue(err=msg)
def do_setup(self, context):
"""Any initialization the share driver does while starting."""
def get_share_stats(self, refresh=False):
"""Get share status.
If 'refresh' is True, update the stats first.
"""
if refresh:
self._update_share_stats()
return self._stats
def get_network_allocations_number(self):
"""Returns number of network allocations for creating VIFs.
Drivers that use Nova for share servers should return zero (0) here,
same as the Generic driver does, because Nova will handle network
resource allocation.
Drivers that handle networking themselves should calculate it according
to their own requirements. It can have 1+ network interfaces.
"""
raise NotImplementedError()
def allocate_network(self, context, share_server, share_network,
count=None, **kwargs):
"""Allocate network resources using given network information."""
if count is None:
count = self.get_network_allocations_number()
if count:
kwargs.update(count=count)
self.network_api.allocate_network(
context, share_server, share_network, **kwargs)
def deallocate_network(self, context, share_server_id):
"""Deallocate network resources for the given share server."""
if self.get_network_allocations_number():
self.network_api.deallocate_network(context, share_server_id)
def choose_share_server_compatible_with_share(self, context, share_servers,
share, snapshot=None,
consistency_group=None):
"""Method that allows driver to choose share server for provided share.
If compatible share-server is not found, method should return None.
:param context: Current context
:param share_servers: list with share-server models
:param share: share model
:param snapshot: snapshot model
:param consistency_group: ConsistencyGroup model with shares
:returns: share-server or None
"""
# If creating in a consistency group, use its share server
if consistency_group:
for share_server in share_servers:
if (consistency_group.get('share_server_id') ==
share_server['id']):
return share_server
return None
return share_servers[0] if share_servers else None
def choose_share_server_compatible_with_cg(self, context, share_servers,
cg_ref, cgsnapshot=None):
return share_servers[0] if share_servers else None
def setup_server(self, *args, **kwargs):
if self.driver_handles_share_servers:
return self._setup_server(*args, **kwargs)
else:
LOG.debug(
"Skipping step 'setup share server', because driver is "
"enabled with mode when Manila does not handle share servers.")
def _setup_server(self, network_info, metadata=None):
"""Sets up and configures share server with given network parameters.
Redefine it within share driver when it is going to handle share
servers.
"""
raise NotImplementedError()
def manage_existing(self, share, driver_options):
"""Brings an existing share under Manila management.
If provided share is not valid, then raise a
ManageInvalidShare exception, specifying a reason for the failure.
The share has a share_type, and the driver can inspect that and
compare against the properties of the referenced backend share.
If they are incompatible, raise a
ManageExistingShareTypeMismatch, specifying a reason for the failure.
:param share: Share model
:param driver_options: Driver-specific options provided by admin.
:return: share_update dictionary with required key 'size',
which should contain size of the share.
"""
raise NotImplementedError()
def unmanage(self, share):
"""Removes the specified share from Manila management.
Does not delete the underlying backend share.
For most drivers, this will not need to do anything. However, some
drivers might use this call as an opportunity to clean up any
Manila-specific configuration that they have associated with the
backend share.
If provided share cannot be unmanaged, then raise an
UnmanageInvalidShare exception, specifying a reason for the failure.
"""
def extend_share(self, share, new_size, share_server=None):
"""Extends size of existing share.
:param share: Share model
:param new_size: New size of share (new_size > share['size'])
:param share_server: Optional -- Share server model
"""
raise NotImplementedError()
def shrink_share(self, share, new_size, share_server=None):
"""Shrinks size of existing share.
If the consumed space on the share is larger than new_size, the driver should raise
ShareShrinkingPossibleDataLoss exception:
raise ShareShrinkingPossibleDataLoss(share_id=share['id'])
:param share: Share model
:param new_size: New size of share (new_size < share['size'])
:param share_server: Optional -- Share server model
:raises ShareShrinkingPossibleDataLoss, NotImplementedError
"""
raise NotImplementedError()
def teardown_server(self, *args, **kwargs):
if self.driver_handles_share_servers:
return self._teardown_server(*args, **kwargs)
else:
LOG.debug(
"Skipping step 'teardown share server', because driver is "
"enabled with mode when Manila does not handle share servers.")
def _teardown_server(self, server_details, security_services=None):
"""Tears down share server.
Redefine it within share driver when it is going to handle share
servers.
"""
raise NotImplementedError()
def _has_redefined_driver_methods(self, methods):
"""Returns whether all of the given methods are present and redefined."""
if not isinstance(methods, (set, list, tuple)):
methods = (methods, )
for method_name in methods:
method = getattr(type(self), method_name, None)
if (not method or method == getattr(ShareDriver, method_name)):
return False
return True
@property
def snapshots_are_supported(self):
if not hasattr(self, '_snapshots_are_supported'):
methods = (
"create_snapshot",
"delete_snapshot",
"create_share_from_snapshot")
# NOTE(vponomaryov): calculate default value for
# stat 'snapshot_support' based on implementation of
# appropriate methods of this base driver class.
self._snapshots_are_supported = self._has_redefined_driver_methods(
methods)
return self._snapshots_are_supported
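# Example (hypothetical driver): a subclass that redefines create_snapshot,
# delete_snapshot and create_share_from_snapshot gets snapshot_support=True
# reported through _update_share_stats() without setting it by hand.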
def _update_share_stats(self, data=None):
"""Retrieve stats info from share group.
:param data: dict -- dict with key-value pairs to redefine common ones.
"""
LOG.debug("Updating share stats.")
backend_name = (self.configuration.safe_get('share_backend_name') or
CONF.share_backend_name)
# Note(zhiteng): This information is driver/backend specific,
# each driver may define these values in its own config options
# or fetch them from a driver-specific configuration file.
common = dict(
share_backend_name=backend_name or 'Generic_NFS',
driver_handles_share_servers=self.driver_handles_share_servers,
vendor_name='Open Source',
driver_version='1.0',
storage_protocol=None,
total_capacity_gb='unknown',
free_capacity_gb='unknown',
reserved_percentage=0,
QoS_support=False,
pools=self.pools or None,
snapshot_support=self.snapshots_are_supported,
)
if isinstance(data, dict):
common.update(data)
self._stats = common
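# Sketch (hypothetical subclass): drivers usually extend the common stats
# through the ``data`` argument instead of rebuilding the dict, e.g.
#
#     def _update_share_stats(self):
#         data = dict(storage_protocol='NFS_CIFS',
#                     total_capacity_gb=100,
#                     free_capacity_gb=40)
#         super(MyShareDriver, self)._update_share_stats(data)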
def get_share_server_pools(self, share_server):
"""Return list of pools related to a particular share server.
:param share_server: ShareServer class instance.
"""
return []
def create_consistency_group(self, context, cg_dict, share_server=None):
"""Create a consistency group.
:param context:
:param cg_dict: The consistency group details
EXAMPLE:
{
'status': 'creating',
'project_id': '13c0be6290934bd98596cfa004650049',
'user_id': 'a0314a441ca842019b0952224aa39192',
'description': None,
'deleted': 'False',
'created_at': datetime.datetime(2015, 8, 10, 15, 14, 6),
'updated_at': None,
'source_cgsnapshot_id': 'f6aa3b59-57eb-421e-965c-4e182538e36a',
'host': 'openstack2@cmodeSSVMNFS',
'deleted_at': None,
'share_types': [<models.ConsistencyGroupShareTypeMapping>],
'id': 'eda52174-0442-476d-9694-a58327466c14',
'name': None
}
:returns: (cg_model_update, share_update_list)
cg_model_update - a dict containing any values to be updated
for the CG in the database. This value may be None.
"""
raise NotImplementedError()
def create_consistency_group_from_cgsnapshot(self, context, cg_dict,
cgsnapshot_dict,
share_server=None):
"""Create a consistency group from a cgsnapshot.
:param context:
:param cg_dict: The consistency group details
EXAMPLE:
.. code::
{
'status': 'creating',
'project_id': '13c0be6290934bd98596cfa004650049',
'user_id': 'a0314a441ca842019b0952224aa39192',
'description': None,
'deleted': 'False',
'created_at': datetime.datetime(2015, 8, 10, 15, 14, 6),
'updated_at': None,
'source_cgsnapshot_id': 'f6aa3b59-57eb-421e-965c-4e182538e36a',
'host': 'openstack2@cmodeSSVMNFS',
'deleted_at': None,
'shares': [<models.Share>], # The new shares being created
'share_types': [<models.ConsistencyGroupShareTypeMapping>],
'id': 'eda52174-0442-476d-9694-a58327466c14',
'name': None
}
:param cgsnapshot_dict: The cgsnapshot details
EXAMPLE:
.. code::
{
'status': 'available',
'project_id': '13c0be6290934bd98596cfa004650049',
'user_id': 'a0314a441ca842019b0952224aa39192',
'description': None,
'deleted': '0',
'created_at': datetime.datetime(2015, 8, 10, 0, 5, 58),
'updated_at': datetime.datetime(2015, 8, 10, 0, 5, 58),
'consistency_group_id': '4b04fdc3-00b9-4909-ba1a-06e9b3f88b67',
'cgsnapshot_members': [
{
'status': 'available',
'share_type_id': '1a9ed31e-ee70-483d-93ba-89690e028d7f',
'user_id': 'a0314a441ca842019b0952224aa39192',
'deleted': 'False',
'created_at': datetime.datetime(2015, 8, 10, 0, 5, 58),
'share': <models.Share>,
'updated_at': datetime.datetime(2015, 8, 10, 0, 5, 58),
'share_proto': 'NFS',
'project_id': '13c0be6290934bd98596cfa004650049',
'cgsnapshot_id': 'f6aa3b59-57eb-421e-965c-4e182538e36a',
'deleted_at': None,
'id': '6813e06b-a8f5-4784-b17d-f3e91afa370e',
'size': 1
}
],
'deleted_at': None,
'id': 'f6aa3b59-57eb-421e-965c-4e182538e36a',
'name': None
}
:return: (cg_model_update, share_update_list)
cg_model_update - a dict containing any values to be updated
for the CG in the database. This value may be None.
share_update_list - a list of dictionaries containing dicts for
every share created in the CG. Any share dicts should at a minimum
contain the 'id' key and 'export_locations'. Export locations
should be in the same format as returned by a share_create. This
list may be empty or None.
EXAMPLE:
.. code::
[{'id': 'uuid', 'export_locations': ['export_path']}]
"""
raise NotImplementedError()
def delete_consistency_group(self, context, cg_dict, share_server=None):
"""Delete a consistency group
:param context: The request context
:param cg_dict: The consistency group details
EXAMPLE:
.. code::
{
'status': 'creating',
'project_id': '13c0be6290934bd98596cfa004650049',
'user_id': 'a0314a441ca842019b0952224aa39192',
'description': None,
'deleted': 'False',
'created_at': datetime.datetime(2015, 8, 10, 15, 14, 6),
'updated_at': None,
'source_cgsnapshot_id': 'f6aa3b59-57eb-421e-965c-4e182538e36a',
'host': 'openstack2@cmodeSSVMNFS',
'deleted_at': None,
'shares': [<models.Share>], # The new shares being created
'share_types': [<models.ConsistencyGroupShareTypeMapping>],
'id': 'eda52174-0442-476d-9694-a58327466c14',
'name': None
}
:return: cg_model_update
cg_model_update - a dict containing any values to be updated
for the CG in the database. This value may be None.
"""
raise NotImplementedError()
def create_cgsnapshot(self, context, snap_dict, share_server=None):
"""Create a consistency group snapshot.
:param context:
:param snap_dict: The cgsnapshot details
EXAMPLE:
.. code::
{
'status': 'available',
'project_id': '13c0be6290934bd98596cfa004650049',
'user_id': 'a0314a441ca842019b0952224aa39192',
'description': None,
'deleted': '0',
'created_at': datetime.datetime(2015, 8, 10, 0, 5, 58),
'updated_at': datetime.datetime(2015, 8, 10, 0, 5, 58),
'consistency_group_id': '4b04fdc3-00b9-4909-ba1a-06e9b3f88b67',
'cgsnapshot_members': [
{
'status': 'available',
'share_type_id': '1a9ed31e-ee70-483d-93ba-89690e028d7f',
'user_id': 'a0314a441ca842019b0952224aa39192',
'deleted': 'False',
'created_at': datetime.datetime(2015, 8, 10, 0, 5, 58),
'share': <models.Share>,
'updated_at': datetime.datetime(2015, 8, 10, 0, 5, 58),
'share_proto': 'NFS',
'project_id': '13c0be6290934bd98596cfa004650049',
'cgsnapshot_id': 'f6aa3b59-57eb-421e-965c-4e182538e36a',
'deleted_at': None,
'id': '6813e06b-a8f5-4784-b17d-f3e91afa370e',
'size': 1
}
],
'deleted_at': None,
'id': 'f6aa3b59-57eb-421e-965c-4e182538e36a',
'name': None
}
:return: (cgsnapshot_update, member_update_list)
cgsnapshot_update - a dict containing any values to be updated
for the CGSnapshot in the database. This value may be None.
member_update_list - a list of dictionaries, one for every
member of the cgsnapshot. Each dict should contain values to be
updated for the CGSnapshotMember in the database. This list may be
empty or None.
"""
raise NotImplementedError()
def delete_cgsnapshot(self, context, snap_dict, share_server=None):
"""Delete a consistency group snapshot
:param context:
:param snap_dict: The cgsnapshot details
EXAMPLE:
.. code::
{
'status': 'available',
'project_id': '13c0be6290934bd98596cfa004650049',
'user_id': 'a0314a441ca842019b0952224aa39192',
'description': None,
'deleted': '0',
'created_at': datetime.datetime(2015, 8, 10, 0, 5, 58),
'updated_at': datetime.datetime(2015, 8, 10, 0, 5, 58),
'consistency_group_id': '4b04fdc3-00b9-4909-ba1a-06e9b3f88b67',
'cgsnapshot_members': [
{
'status': 'available',
'share_type_id': '1a9ed31e-ee70-483d-93ba-89690e028d7f',
'share_id': 'e14b5174-e534-4f35-bc4f-fe81c1575d6f',
'user_id': 'a0314a441ca842019b0952224aa39192',
'deleted': 'False',
'created_at': datetime.datetime(2015, 8, 10, 0, 5, 58),
'share': <models.Share>,
'updated_at': datetime.datetime(2015, 8, 10, 0, 5, 58),
'share_proto': 'NFS',
'project_id': '13c0be6290934bd98596cfa004650049',
'cgsnapshot_id': 'f6aa3b59-57eb-421e-965c-4e182538e36a',
'deleted_at': None,
'id': '6813e06b-a8f5-4784-b17d-f3e91afa370e',
'size': 1
}
],
'deleted_at': None,
'id': 'f6aa3b59-57eb-421e-965c-4e182538e36a',
'name': None
}
:return: (cgsnapshot_update, member_update_list)
cgsnapshot_update - a dict containing any values to be updated
for the CGSnapshot in the database. This value may be None.
"""
raise NotImplementedError()
def get_periodic_hook_data(self, context, share_instances):
"""Dedicated for update/extend of data for existing share instances.
Redefine this method in the share driver to update/change/extend the
share instance data that will be used by the periodic hook action.
One possible update is adding "automount" CLI commands for each
share instance, for the case where notification is enabled using the
'hook' approach.
:param context: Current context
:param share_instances: share instances list provided by share manager
:return: list of share instances.
"""
return share_instances
<|file_name|>account_invoice.py<|end_file_name|>
# -*- coding: utf-8 -*-
from openerp import api, fields, models, _
from . import exceptions
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
enable_datev_checks = fields.Boolean('Perform Datev Checks', default=True)
@api.multi
def is_datev_validation_active(self):
self.ensure_one()
return self.enable_datev_checks and self.env['res.users'].browse(self._uid).company_id.enable_datev_checks
@api.multi
def perform_datev_validation(self, silent=False):
is_valid = True
errors = list()
for rec in self:
if rec.is_datev_validation_active():
if silent: # Shorter, more performant version w/o string and exception handling
for line in rec.invoice_line:
if not line.perform_datev_validation(silent=True):
return False
else:
for line_no, line in enumerate(rec.invoice_line, start=1):
try:
line.perform_datev_validation(line_no=line_no)
except exceptions.DatevWarning as dw:
is_valid = False
errors.append(dw.message)
if not (silent or is_valid):
raise exceptions.DatevWarning(u'\n'.join(errors))
return is_valid
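# Sketch (hypothetical caller): as noted in the line-level method below, the
# main use is as a workflow transition condition, e.g.
#
#     invoice.perform_datev_validation()             # raises DatevWarning listing all bad lines
#     invoice.perform_datev_validation(silent=True)  # only returns True/False, no messages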
class AccountInvoiceLine(models.Model):
_inherit = 'account.invoice.line'
@api.multi
def perform_datev_validation(self, silent=False, line_no=None):
"""
Performs tests on an invoice line for whether the taxes are correctly set or not.
The major use of this method is in the condition of a workflow transition.
:param line_no: int Line number to be displayed in an error message.
:param silent: bool Specifies whether an exception in case of a failed test should be thrown
or if the checks should be performed silently.
:return: True if all checks were performed w/o errors or no datev checks are applicable. False otherwise.
:rtype: bool
"""
self.ensure_one()
if not self.is_datev_validation_applicable():
return True
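# Datev expects exactly one tax per invoice line, and it must match the tax
# configured on the selected account (datev_steuer).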
is_valid = len(self.invoice_line_tax_id) == 1 and self.account_id.datev_steuer == self.invoice_line_tax_id
if not (silent or is_valid):
raise exceptions.DatevWarning(
_(u'Line {line}: The taxes specified in the invoice line ({tax_line}) and the corresponding account ({tax_account}) mismatch!').format(
line=line_no, tax_line=self.invoice_line_tax_id.description, tax_account=self.account_id.datev_steuer.description
)
)
return is_valid
@api.multi
def is_datev_validation_applicable(self):
"""
Tests if an invoice line is applicable to datev checks or not.
:return: True if it is applicable. Otherwise False.
:rtype: bool
"""
self.ensure_one()
return self.account_id.automatic
<|file_name|>header-test.cc<|end_file_name|>
// Copyright (c) 2012, Susumu Yata
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
#include <iostream>
#include <madoka/exception.h>
#include <madoka/header.h>
int main() try {
madoka::Header header;
header.set_width(1ULL << 30);
header.set_depth(3);
header.set_max_value((1ULL << 28) - 1);
header.set_value_size(28);
header.set_seed(123456789);
header.set_table_size(1ULL << 32);
header.set_file_size((1ULL << 32) + sizeof(madoka::Header));
MADOKA_THROW_IF(header.width() != (1ULL << 30));
MADOKA_THROW_IF(header.width_mask() != ((1ULL << 30) - 1));
MADOKA_THROW_IF(header.depth() != (3));
MADOKA_THROW_IF(header.max_value() != ((1ULL << 28) - 1));
MADOKA_THROW_IF(header.value_size() != 28);
MADOKA_THROW_IF(header.seed() != 123456789);
MADOKA_THROW_IF(header.table_size() != (1ULL << 32));
MADOKA_THROW_IF(header.file_size() !=
((1ULL << 32) + sizeof(madoka::Header)));
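// A width that is not a power of two should leave width_mask() at 0 (the mask
// is only meaningful for power-of-two widths), while width() keeps the value.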
header.set_width(123456789);
MADOKA_THROW_IF(header.width() != 123456789);
MADOKA_THROW_IF(header.width_mask() != 0);
return 0;
} catch (const madoka::Exception &ex) {
std::cerr << "error: " << ex.what() << std::endl;
return 1;
}
<|file_name|>ffi.rs<|end_file_name|>
#![allow(raw_pointer_derive)]
#![allow(non_camel_case_types)]
use std::mem;
use libc::{c_void, c_char, c_uchar, c_short, c_ushort, c_int, c_uint, c_long, c_ulong};
// APACHE PORTABLE RUNTIME
pub const APR_RFC822_DATE_LEN: apr_size_t = 30;
// run this hook first, before ANYTHING
pub const APR_HOOK_REALLY_FIRST: c_int = -10;
// run this hook first
pub const APR_HOOK_FIRST: c_int = 0;
// run this hook somewhere
pub const APR_HOOK_MIDDLE: c_int = 10;
// run this hook after every other hook which is defined
pub const APR_HOOK_LAST: c_int = 20;
// run this hook last, after EVERYTHING
pub const APR_HOOK_REALLY_LAST: c_int = 30;
pub type sockaddr_in = c_void;
pub type sockaddr_in6 = c_void;
pub type sockaddr_storage = c_void;
pub type conn_state_t = c_void;
pub type apr_byte_t = c_uchar;
pub type apr_int16_t = c_short;
pub type apr_uint16_t = c_ushort;
pub type apr_int32_t = c_int;
pub type apr_uint32_t = c_uint;
pub type apr_int64_t = c_long;
pub type apr_uint64_t = c_ulong;
pub type apr_size_t = c_ulong;
pub type apr_ssize_t = c_long;
pub type apr_off_t = c_long;
pub type apr_socklen_t = c_uint;
pub type apr_ino_t = c_ulong;
pub type apr_uintptr_t = apr_uint64_t;
pub type apr_status_t = c_int;
pub type apr_signum_t = c_int;
pub type apr_read_type_e = c_uint;
pub type apr_bucket_is_metadata_t = c_uint;
pub type apr_filetype_e = c_uint;
pub type apr_uid_t = c_uint;
pub type apr_gid_t = c_uint;
pub type apr_dev_t = c_ulong;
pub type apr_fileperms_t = apr_int32_t;
pub type apr_time_t = apr_int64_t;
pub type apr_interval_time_t = apr_int64_t;
pub type apr_port_t = apr_uint16_t;
#[repr(C)]
#[derive(Copy, Clone)]
pub struct apr_array_header_t {
pub pool: *mut apr_pool_t,
pub elt_size: c_int,
pub nelts: c_int,
pub nalloc: c_int,
pub elts: *mut c_char,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct apr_table_entry_t {
pub key: *mut c_char,
pub val: *mut c_char,
pub key_checksum: apr_uint32_t,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct apr_bucket_brigade {
pub p: *mut apr_pool_t,
pub list: apr_bucket_list,
pub bucket_alloc: *mut apr_bucket_alloc_t,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct apr_bucket_list {
pub next: *mut apr_bucket,
pub prev: *mut apr_bucket,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct apr_bucket {
pub link: apr_bucket_list,
pub _type: *const apr_bucket_type_t,
pub length: apr_size_t,
pub start: apr_off_t,
pub data: *mut c_void,
pub free: Option<extern "C" fn(e: *mut c_void) -> ()>,
pub list: *mut apr_bucket_alloc_t,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct apr_bucket_type_t {
pub name: *const c_char,
pub num_func: c_int,
pub is_metadata: apr_bucket_is_metadata_t,
pub destroy: Option<extern "C" fn(
data: *mut c_void
) -> ()>,
pub read: Option<extern "C" fn(
b: *mut apr_bucket,
str: *mut *const c_char,
len: *mut apr_size_t,
block: apr_read_type_e
) -> apr_status_t>,
pub setaside: Option<extern "C" fn(
e: *mut apr_bucket,
pool: *mut apr_pool_t
) -> apr_status_t>,
pub split: Option<extern "C" fn(
e: *mut apr_bucket,
point: apr_size_t
) -> apr_status_t>,
pub copy: Option<extern "C" fn(
e: *mut apr_bucket,
c: *mut *mut apr_bucket
) -> apr_status_t>,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct apr_uri_t {
pub scheme: *mut c_char,
pub hostinfo: *mut c_char,
pub user: *mut c_char,
pub password: *mut c_char,
pub hostname: *mut c_char,
pub port_str: *mut c_char,
pub path: *mut c_char,
pub query: *mut c_char,
pub fragment: *mut c_char,
pub hostent: *mut hostent,
pub port: apr_port_t,
pub _bindgen_bitfield_1_: c_uint,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct apr_sockaddr_t {
pub pool: *mut apr_pool_t,
pub hostname: *mut c_char,
pub servname: *mut c_char,
pub port: apr_port_t,
pub family: apr_int32_t,
pub salen: apr_socklen_t,
pub ipaddr_len: c_int,
pub addr_str_len: c_int,
pub ipaddr_ptr: *mut c_void,
pub next: *mut apr_sockaddr_t,
pub sa: apr_sockaddr_sa_t,
}
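// apr_sockaddr_sa_t stands in for a C union; the accessors below reinterpret
// the raw storage as a particular sockaddr variant (bindgen-style), so the
// caller must already know which address family the value actually holds.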
#[repr(C)]
#[derive(Copy, Clone)]
pub struct apr_sockaddr_sa_t {
pub _bindgen_data_: [u64; 16usize],
}
impl apr_sockaddr_sa_t {
pub unsafe fn sin(&mut self) -> *mut sockaddr_in {
let raw: *mut u8 = mem::transmute(&self._bindgen_data_);
mem::transmute(raw.offset(0))
}
pub unsafe fn sin6(&mut self) -> *mut sockaddr_in6 {
let raw: *mut u8 = mem::transmute(&self._bindgen_data_);
mem::transmute(raw.offset(0))
}
pub unsafe fn sas(&mut self) -> *mut sockaddr_storage {
let raw: *mut u8 = mem::transmute(&self._bindgen_data_);
mem::transmute(raw.offset(0))
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct apr_finfo_t {
pub pool: *mut apr_pool_t,
pub valid: apr_int32_t,
pub protection: apr_fileperms_t,
pub filetype: apr_filetype_e,
pub user: apr_uid_t,
pub group: apr_gid_t,
pub inode: apr_ino_t,
pub device: apr_dev_t,
pub nlink: apr_int32_t,
pub size: apr_off_t,
pub csize: apr_off_t,
pub atime: apr_time_t,
pub mtime: apr_time_t,
pub ctime: apr_time_t,
pub fname: *const c_char,
pub name: *const c_char,
pub filehand: *mut apr_file_t,
}
#[derive(Copy, Clone)]
pub enum hostent { }
#[derive(Copy, Clone)]
pub enum apr_bucket_alloc_t { }
#[derive(Copy, Clone)]
pub enum apr_pool_t { }
#[derive(Copy, Clone)]
pub enum apr_table_t { }
#[derive(Copy, Clone)]
pub enum apr_thread_mutex_t { }
#[derive(Copy, Clone)]
pub enum apr_thread_t { }
#[derive(Copy, Clone)]
pub enum apr_file_t { }
extern "C" {
pub fn apr_version_string() -> *const c_char;
pub fn apu_version_string() -> *const c_char;
pub fn apr_table_get(t: *const apr_table_t, key: *const c_char) -> *const c_char;
pub fn apr_table_set(t: *mut apr_table_t, key: *const c_char, val: *const c_char) -> ();
pub fn apr_table_add(t: *mut apr_table_t, key: *const c_char, val: *const c_char) -> ();
pub fn apr_table_elts(t: *const apr_table_t) -> *const apr_array_header_t;
pub fn apr_pstrmemdup(p: *mut apr_pool_t, s: *const c_char, n: apr_size_t) -> *mut c_char;
pub fn apr_palloc(p: *mut apr_pool_t, size: apr_size_t) -> *mut c_void;
pub fn apr_pcalloc(p: *mut apr_pool_t, size: apr_size_t) -> *mut c_void;
pub fn apr_base64_encode_len(len: c_int) -> c_int;
pub fn apr_base64_encode(coded_dst: *mut c_char, plain_src: *const c_char, len_plain_src: c_int) -> c_int;
pub fn apr_base64_decode_len(coded_src: *const c_char) -> c_int;
pub fn apr_base64_decode(plain_dst: *mut c_char, coded_src: *const c_char) -> c_int;
pub fn apr_time_now() -> apr_time_t;
pub fn apr_rfc822_date(date_str: *mut c_char, t: apr_time_t) -> apr_status_t;
}
pub fn strdup<T: Into<Vec<u8>>>(pool: *mut apr_pool_t, data: T) -> *mut c_char {
let bytes = data.into();
unsafe {
apr_pstrmemdup(
pool,
bytes.as_ptr() as *const c_char,
bytes.len() as apr_size_t
)
}
}
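// Sketch of intended use (assuming a valid request_rec pointer `r`): copy an
// owned Rust string into pool-managed memory before handing it to an APR table.
//
//     let key = strdup((*r).pool, "Content-Language");
//     let val = strdup((*r).pool, "en");
//     apr_table_set((*r).headers_out, key, val);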
// APACHE HTTPD
pub const MODULE_MAGIC_COOKIE: c_ulong = 0x41503234u64; /* "AP24" */
pub const MODULE_MAGIC_NUMBER_MAJOR: c_int = 20120211;
pub const MODULE_MAGIC_NUMBER_MINOR: c_int = 36;
pub const OK: c_int = 0;
pub const DECLINED: c_int = -1;
pub const DONE: c_int = -2;
pub const SUSPENDED: c_int = -3;
pub const HTTP_CONTINUE: c_int = 100;
pub const HTTP_SWITCHING_PROTOCOLS: c_int = 101;
pub const HTTP_PROCESSING: c_int = 102;
pub const HTTP_OK: c_int = 200;
pub const HTTP_CREATED: c_int = 201;
pub const HTTP_ACCEPTED: c_int = 202;
pub const HTTP_NON_AUTHORITATIVE: c_int = 203;
pub const HTTP_NO_CONTENT: c_int = 204;
pub const HTTP_RESET_CONTENT: c_int = 205;
pub const HTTP_PARTIAL_CONTENT: c_int = 206;
pub const HTTP_MULTI_STATUS: c_int = 207;
pub const HTTP_ALREADY_REPORTED: c_int = 208;
pub const HTTP_IM_USED: c_int = 226;
pub const HTTP_MULTIPLE_CHOICES: c_int = 300;
pub const HTTP_MOVED_PERMANENTLY: c_int = 301;
pub const HTTP_MOVED_TEMPORARILY: c_int = 302;
pub const HTTP_SEE_OTHER: c_int = 303;
pub const HTTP_NOT_MODIFIED: c_int = 304;
pub const HTTP_USE_PROXY: c_int = 305;
pub const HTTP_TEMPORARY_REDIRECT: c_int = 307;
pub const HTTP_PERMANENT_REDIRECT: c_int = 308;
pub const HTTP_BAD_REQUEST: c_int = 400;
pub const HTTP_UNAUTHORIZED: c_int = 401;
pub const HTTP_PAYMENT_REQUIRED: c_int = 402;
pub const HTTP_FORBIDDEN: c_int = 403;
pub const HTTP_NOT_FOUND: c_int = 404;
pub const HTTP_METHOD_NOT_ALLOWED: c_int = 405;
pub const HTTP_NOT_ACCEPTABLE: c_int = 406;
pub const HTTP_PROXY_AUTHENTICATION_REQUIRED: c_int = 407;
pub const HTTP_REQUEST_TIME_OUT: c_int = 408;
pub const HTTP_CONFLICT: c_int = 409;
pub const HTTP_GONE: c_int = 410;
pub const HTTP_LENGTH_REQUIRED: c_int = 411;
pub const HTTP_PRECONDITION_FAILED: c_int = 412;
pub const HTTP_REQUEST_ENTITY_TOO_LARGE: c_int = 413;
pub const HTTP_REQUEST_URI_TOO_LARGE: c_int = 414;
pub const HTTP_UNSUPPORTED_MEDIA_TYPE: c_int = 415;
pub const HTTP_RANGE_NOT_SATISFIABLE: c_int = 416;
pub const HTTP_EXPECTATION_FAILED: c_int = 417;
pub const HTTP_IM_A_TEAPOT: c_int = 418;
pub const HTTP_UNPROCESSABLE_ENTITY: c_int = 422;
pub const HTTP_LOCKED: c_int = 423;
pub const HTTP_FAILED_DEPENDENCY: c_int = 424;
pub const HTTP_UPGRADE_REQUIRED: c_int = 426;
pub const HTTP_PRECONDITION_REQUIRED: c_int = 428;
pub const HTTP_TOO_MANY_REQUESTS: c_int = 429;
pub const HTTP_REQUEST_HEADER_FIELDS_TOO_LARGE: c_int = 431;
pub const HTTP_INTERNAL_SERVER_ERROR: c_int = 500;
pub const HTTP_NOT_IMPLEMENTED: c_int = 501;
pub const HTTP_BAD_GATEWAY: c_int = 502;
pub const HTTP_SERVICE_UNAVAILABLE: c_int = 503;
pub const HTTP_GATEWAY_TIME_OUT: c_int = 504;
pub const HTTP_VERSION_NOT_SUPPORTED: c_int = 505;
pub const HTTP_VARIANT_ALSO_VARIES: c_int = 506;
pub const HTTP_INSUFFICIENT_STORAGE: c_int = 507;
pub const HTTP_LOOP_DETECTED: c_int = 508;
pub const HTTP_NOT_EXTENDED: c_int = 510;
pub const HTTP_NETWORK_AUTHENTICATION_REQUIRED: c_int = 511;
pub const PROXYREQ_NONE: c_int = 0;
pub const PROXYREQ_PROXY: c_int = 1;
pub const PROXYREQ_REVERSE: c_int = 2;
pub const PROXYREQ_RESPONSE: c_int = 3;
pub const RAW_ARGS: c_uint = 0;
pub const TAKE1: c_uint = 1;
pub const TAKE2: c_uint = 2;
pub const ITERATE: c_uint = 3;
pub const ITERATE2: c_uint = 4;
pub const FLAG: c_uint = 5;
pub const NO_ARGS: c_uint = 6;
pub const TAKE12: c_uint = 7;
pub const TAKE3: c_uint = 8;
pub const TAKE23: c_uint = 9;
pub const TAKE123: c_uint = 10;
pub const TAKE13: c_uint = 11;
pub const TAKE_ARGV: c_uint = 12;
pub const OR_NONE: c_int = 0;
pub const OR_LIMIT: c_int = 1;
pub const OR_OPTIONS: c_int = 2;
pub const OR_FILEINFO: c_int = 4;
pub const OR_AUTHCFG: c_int = 8;
pub const OR_INDEXES: c_int = 16;
pub const OR_UNSET: c_int = 32;
pub const ACCESS_CONF: c_int = 64;
pub const RSRC_CONF: c_int = 128;
pub const EXEC_ON_READ: c_int = 256;
pub const NONFATAL_OVERRIDE: c_int = 512;
pub const NONFATAL_UNKNOWN: c_int = 1024;
pub const NONFATAL_ALL: c_int = NONFATAL_OVERRIDE | NONFATAL_UNKNOWN;
pub const OR_ALL: c_int = OR_LIMIT | OR_OPTIONS | OR_FILEINFO | OR_AUTHCFG | OR_INDEXES;
#[repr(C)]
#[derive(Copy, Clone)]
pub struct request_rec {
pub pool: *mut apr_pool_t,
pub connection: *mut conn_rec,
pub server: *mut server_rec,
pub next: *mut request_rec,
pub prev: *mut request_rec,
pub main: *mut request_rec,
pub the_request: *mut c_char,
pub assbackwards: c_int,
pub proxyreq: c_int,
pub header_only: c_int,
pub proto_num: c_int,
pub protocol: *mut c_char,
pub hostname: *const c_char,
pub request_time: apr_time_t,
pub status_line: *const c_char,
pub status: c_int,
pub method_number: c_int,
pub method: *const c_char,
pub allowed: apr_int64_t,
pub allowed_xmethods: *mut apr_array_header_t,
pub allowed_methods: *mut ap_method_list_t,
pub sent_bodyct: apr_off_t,
pub bytes_sent: apr_off_t,
pub mtime: apr_time_t,
pub range: *const c_char,
pub clength: apr_off_t,
pub chunked: c_int,
pub read_body: c_int,
pub read_chunked: c_int,
pub expecting_100: c_uint,
pub kept_body: *mut apr_bucket_brigade,
pub body_table: *mut apr_table_t,
pub remaining: apr_off_t,
pub read_length: apr_off_t,
pub headers_in: *mut apr_table_t,
pub headers_out: *mut apr_table_t,
pub err_headers_out: *mut apr_table_t,
pub subprocess_env: *mut apr_table_t,
pub notes: *mut apr_table_t,
pub content_type: *const c_char,
pub handler: *const c_char,
pub content_encoding: *const c_char,
pub content_languages: *mut apr_array_header_t,
pub vlist_validator: *mut c_char,
pub user: *mut c_char,
pub ap_auth_type: *mut c_char,
pub unparsed_uri: *mut c_char,
pub uri: *mut c_char,
pub filename: *mut c_char,
pub canonical_filename: *mut c_char,
pub path_info: *mut c_char,
pub args: *mut c_char,
pub used_path_info: c_int,
pub eos_sent: c_int,
pub per_dir_config: *mut ap_conf_vector_t,
pub request_config: *mut ap_conf_vector_t,
pub log: *const ap_logconf,
pub log_id: *const c_char,
pub htaccess: *const htaccess_result,
pub output_filters: *mut ap_filter_t,
pub input_filters: *mut ap_filter_t,
pub proto_output_filters: *mut ap_filter_t,
pub proto_input_filters: *mut ap_filter_t,
pub no_cache: c_int,
pub no_local_copy: c_int,
pub invoke_mtx: *mut apr_thread_mutex_t,
pub parsed_uri: apr_uri_t,
pub finfo: apr_finfo_t,
pub useragent_addr: *mut apr_sockaddr_t,
pub useragent_ip: *mut c_char,
pub trailers_in: *mut apr_table_t,
pub trailers_out: *mut apr_table_t,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct conn_rec {
pub pool: *mut apr_pool_t,
pub base_server: *mut server_rec,
pub vhost_lookup_data: *mut c_void,
pub local_addr: *mut apr_sockaddr_t,
pub client_addr: *mut apr_sockaddr_t,
pub client_ip: *mut c_char,
pub remote_host: *mut c_char,
pub remote_logname: *mut c_char,
pub local_ip: *mut c_char,
pub local_host: *mut c_char,
pub id: c_long,
pub conn_config: *mut ap_conf_vector_t,
pub notes: *mut apr_table_t,
pub input_filters: *mut ap_filter_t,
pub output_filters: *mut ap_filter_t,
pub sbh: *mut c_void,
pub bucket_alloc: *mut apr_bucket_alloc_t,
pub cs: *mut conn_state_t,
pub data_in_input_filters: c_int,
pub data_in_output_filters: c_int,
pub _bindgen_bitfield_1_: c_uint,
pub _bindgen_bitfield_2_: c_int,
pub aborted: c_uint,
pub keepalive: ap_conn_keepalive_e,
pub keepalives: c_int,
pub log: *const ap_logconf,
pub log_id: *const c_char,
pub current_thread: *mut apr_thread_t,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct server_rec {
pub process: *mut process_rec,
pub next: *mut server_rec,
pub error_fname: *mut c_char,
pub error_log: *mut apr_file_t,
pub log: ap_logconf,
pub module_config: *mut ap_conf_vector_t,
pub lookup_defaults: *mut ap_conf_vector_t,
pub defn_name: *const c_char,
pub defn_line_number: c_uint,
pub is_virtual: c_char,
pub port: apr_port_t,
pub server_scheme: *const c_char,
pub server_admin: *mut c_char,
pub server_hostname: *mut c_char,
pub addrs: *mut server_addr_rec,
pub timeout: apr_interval_time_t,
pub keep_alive_timeout: apr_interval_time_t,
pub keep_alive_max: c_int,
pub keep_alive: c_int,
pub names: *mut apr_array_header_t,
pub wild_names: *mut apr_array_header_t,
pub path: *const c_char,
pub pathlen: c_int,
pub limit_req_line: c_int,
pub limit_req_fieldsize: c_int,
pub limit_req_fields: c_int,
pub context: *mut c_void,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct ap_logconf {
pub module_levels: *mut c_char,
pub level: c_int,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct module {
pub version: c_int,
pub minor_version: c_int,
pub module_index: c_int,
pub name: *const c_char,
pub dynamic_load_handle: *mut c_void,
pub next: *mut module,
pub magic: c_ulong,
pub rewrite_args: Option<rewrite_args_fn>,
pub create_dir_config: Option<create_dir_config_fn>,
pub merge_dir_config: Option<merge_config_fn>,
pub create_server_config: Option<create_server_config_fn>,
pub merge_server_config: Option<merge_config_fn>,
pub cmds: *const command_rec,
pub register_hooks: Option<register_hooks_fn>
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct cmd_func {
pub _bindgen_data_: [u64; 1usize],
}
impl cmd_func {
pub unsafe fn no_args(&mut self) -> *mut Option<no_args_fn> {
mem::transmute(&self._bindgen_data_)
}
pub unsafe fn raw_args(&mut self) -> *mut Option<raw_args_fn> {
mem::transmute(&self._bindgen_data_)
}
pub unsafe fn take_argv(&mut self) -> *mut Option<take_argv_fn> {
mem::transmute(&self._bindgen_data_)
}
pub unsafe fn take1(&mut self) -> *mut Option<take1_fn> {
mem::transmute(&self._bindgen_data_)
}
pub unsafe fn take2(&mut self) -> *mut Option<take2_fn> {
mem::transmute(&self._bindgen_data_)
}
pub unsafe fn take3(&mut self) -> *mut Option<take3_fn> {
mem::transmute(&self._bindgen_data_)
}
pub unsafe fn flag(&mut self) -> *mut Option<flag_fn> {
mem::transmute(&self._bindgen_data_)
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct command_rec {
pub name: *const c_char,
pub func: cmd_func,
pub cmd_data: *mut c_void,
pub req_override: c_int,
pub args_how: cmd_how,
pub errmsg: *const c_char,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct cmd_parms {
pub info: *mut c_void,
pub _override: c_int,
pub override_opts: c_int,
pub override_list: *mut apr_table_t,
pub limited: apr_int64_t,
pub limited_xmethods: *mut apr_array_header_t,
pub xlimited: *mut ap_method_list_t,
pub config_file: *mut ap_configfile_t,
pub directive: *mut ap_directive_t,
pub pool: *mut apr_pool_t,
pub temp_pool: *mut apr_pool_t,
pub server: *mut server_rec,
pub path: *mut c_char,
pub cmd: *const command_rec,
pub context: *mut ap_conf_vector_t,
pub err_directive: *const ap_directive_t,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct ap_list_provider_names_t {
pub provider_name: *const c_char,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct ap_list_provider_groups_t {
pub provider_group: *const c_char,
pub provider_version: *const c_char,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct ap_method_list_t {
pub method_mask: apr_int64_t,
pub method_list: *mut apr_array_header_t,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct ap_configfile_t {
pub getch: Option<extern "C" fn(
ch: *mut c_char,
param: *mut c_void
) -> apr_status_t>,
pub getstr: Option<extern "C" fn(
buf: *mut c_void,
bufsiz: apr_size_t,
param: *mut c_void
) -> apr_status_t>,
pub close: Option<extern "C" fn(param: *mut c_void) -> apr_status_t>,
pub param: *mut c_void,
pub name: *const c_char,
pub line_number: c_uint,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct ap_directive_t {
pub directive: *const c_char,
pub args: *const c_char,
pub next: *mut ap_directive_t,
pub first_child: *mut ap_directive_t,
pub parent: *mut ap_directive_t,
pub data: *mut c_void,
pub filename: *const c_char,
pub line_num: c_int,
pub last: *mut ap_directive_t,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct htaccess_result {
pub dir: *const c_char,
pub _override: c_int,
pub override_opts: c_int,
pub override_list: *mut apr_table_t,
pub htaccess: *mut ap_conf_vector_t,
pub next: *const htaccess_result,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct process_rec {
pub pool: *mut apr_pool_t,
pub pconf: *mut apr_pool_t,
pub short_name: *const c_char,
pub argv: *const *const c_char,
pub argc: c_int,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct server_addr_rec {
pub next: *mut server_addr_rec,
pub virthost: *mut c_char,
pub host_addr: *mut apr_sockaddr_t,
pub host_port: apr_port_t,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct ap_filter_t {
pub frec: *mut ap_filter_rec_t,
pub ctx: *mut c_void,
pub next: *mut ap_filter_t,
pub r: *mut request_rec,
pub c: *mut conn_rec,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct ap_filter_rec_t {
pub name: *const c_char,
pub filter_func: ap_filter_func,
pub filter_init_func: Option<ap_init_filter_func>,
pub next: *mut ap_filter_rec_t,
pub providers: *mut ap_filter_provider_t,
pub ftype: ap_filter_type,
pub debug: c_int,
pub proto_flags: c_uint,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct ap_filter_func {
pub _bindgen_data_: [u64; 1usize],
}
impl ap_filter_func {
pub unsafe fn out_func(&mut self) -> *mut Option<ap_out_filter_func> {
let raw: *mut u8 = mem::transmute(&self._bindgen_data_);
mem::transmute(raw.offset(0))
}
pub unsafe fn in_func(&mut self) -> *mut Option<ap_in_filter_func> {
let raw: *mut u8 = mem::transmute(&self._bindgen_data_);
mem::transmute(raw.offset(0))
}
}
#[derive(Copy, Clone)]
pub enum ap_conf_vector_t { }
#[derive(Copy, Clone)]
pub enum ap_filter_provider_t { }
pub type cmd_how = c_uint;
pub type ap_conn_keepalive_e = c_uint;
pub type ap_filter_type = c_uint;
pub type ap_input_mode_t = c_uint;
pub type ap_init_filter_func = extern "C" fn(f: *mut ap_filter_t) -> c_int;
pub type ap_out_filter_func = extern "C" fn(
f: *mut ap_filter_t,
b: *mut apr_bucket_brigade
) -> apr_status_t;
pub type ap_in_filter_func = extern "C" fn(
f: *mut ap_filter_t,
b: *mut apr_bucket_brigade,
mode: ap_input_mode_t,
block: apr_read_type_e,
readbytes: apr_off_t
) -> apr_status_t;
pub type rewrite_args_fn = extern "C" fn(
process: *mut process_rec
);
pub type create_dir_config_fn = extern "C" fn(p: *mut apr_pool_t, dir: *mut c_char) -> *mut c_void;
pub type merge_config_fn = extern "C" fn(p: *mut apr_pool_t, base_conf: *mut c_void, new_conf: *mut c_void) -> *mut c_void;
pub type create_server_config_fn = extern "C" fn(p: *mut apr_pool_t, s: *mut server_rec) -> *mut c_void;
pub type register_hooks_fn = extern "C" fn(p: *mut apr_pool_t);
pub type no_args_fn = extern "C" fn(parms: *mut cmd_parms, mconfig: *mut c_void) -> *const c_char;
pub type raw_args_fn = extern "C" fn(parms: *mut cmd_parms, mconfig: *mut c_void, args: *const c_char) -> *const c_char;
pub type take_argv_fn = extern "C" fn(parms: *mut cmd_parms, mconfig: *mut c_void, argc: c_int, argv: *const *mut c_char) -> *const c_char;
pub type take1_fn = extern "C" fn(parms: *mut cmd_parms, mconfig: *mut c_void, w: *const c_char) -> *const c_char;
pub type take2_fn = extern "C" fn(parms: *mut cmd_parms, mconfig: *mut c_void, w: *const c_char, w2: *const c_char) -> *const c_char;
pub type take3_fn = extern "C" fn(parms: *mut cmd_parms, mconfig: *mut c_void, w: *const c_char, w2: *const c_char, w3: *const c_char) -> *const c_char;
pub type flag_fn = extern "C" fn(parms: *mut cmd_parms, mconfig: *mut c_void, on: c_int) -> *const c_char;
pub type hook_handler_fn = extern "C" fn(r: *mut request_rec) -> c_int;
pub type hook_pre_config_fn = extern "C" fn(conf: *mut apr_pool_t, log: *mut apr_pool_t, temp: *mut apr_pool_t) -> c_int;
pub type hook_check_config_fn = extern "C" fn(conf: *mut apr_pool_t, log: *mut apr_pool_t, temp: *mut apr_pool_t, s: *mut server_rec) -> c_int;
pub type hook_test_config_fn = extern "C" fn(conf: *mut apr_pool_t, s: *mut server_rec) -> c_int;
pub type hook_post_config_fn = extern "C" fn(conf: *mut apr_pool_t, log: *mut apr_pool_t, temp: *mut apr_pool_t, s: *mut server_rec) -> c_int;
extern "C" {
pub fn ap_get_server_banner() -> *const c_char;
pub fn ap_get_server_description() -> *const c_char;
pub fn ap_get_server_built() -> *const c_char;
pub fn ap_show_mpm() -> *const c_char;
pub fn ap_escape_html2(p: *mut apr_pool_t, s: *const c_char, toasc: c_int) -> *mut c_char;
pub fn ap_rwrite(buf: *const c_void, nbyte: c_int, r: *const request_rec) -> c_int;
pub fn ap_set_content_type(r: *const request_rec, ct: *const c_char) -> ();
pub fn ap_get_basic_auth_pw(r: *const request_rec, pw: *mut *const c_char) -> c_int;
pub fn ap_context_document_root(r: *const request_rec) -> *const c_char;
pub fn ap_context_prefix(r: *const request_rec) -> *const c_char;
pub fn ap_run_http_scheme(r: *const request_rec) -> *const c_char;
pub fn ap_run_default_port(r: *const request_rec) -> apr_port_t;
pub fn ap_is_initial_req(r: *const request_rec) -> c_int;
pub fn ap_some_auth_required(r: *const request_rec) -> c_int;
pub fn ap_cookie_read(r: *const request_rec, name: *const c_char, val: *mut *const c_char, remove: c_int) -> apr_status_t;
pub fn ap_cookie_write(r: *const request_rec, name: *const c_char, val: *const c_char, attrs: *const c_char, maxage: c_int, ...) -> apr_status_t;
pub fn ap_escape_urlencoded(p: *mut apr_pool_t, s: *const c_char) -> *mut c_char;
pub fn ap_unescape_urlencoded(query: *mut c_char) -> c_int;
pub fn ap_document_root(r: *const request_rec) -> *const c_char;
pub fn ap_get_server_name(r: *const request_rec) -> *const c_char;
pub fn ap_get_server_port(r: *const request_rec) -> apr_port_t;
pub fn ap_auth_name(r: *const request_rec) -> *const c_char;
pub fn ap_set_last_modified(r: *mut request_rec) -> ();
pub fn ap_update_mtime(r: *mut request_rec, dependency_mtime: apr_time_t) -> ();
pub fn ap_get_module_config(cv: *const ap_conf_vector_t, m: *const module) -> *mut c_void;
pub fn ap_set_module_config(cv: *mut ap_conf_vector_t, m: *const module, val: *mut c_void) -> ();
pub fn ap_register_provider(pool: *mut apr_pool_t, provider_group: *const c_char, provider_name: *const c_char, provider_version: *const c_char, provider: *const c_void) -> apr_status_t;
pub fn ap_lookup_provider(provider_group: *const c_char, provider_name: *const c_char, provider_version: *const c_char) -> *mut c_void;
pub fn ap_list_provider_names(pool: *mut apr_pool_t, provider_group: *const c_char, provider_version: *const c_char) -> *mut apr_array_header_t;
pub fn ap_list_provider_groups(pool: *mut apr_pool_t) -> *mut apr_array_header_t;
pub fn ap_hook_handler(f: Option<hook_handler_fn>, pre: *const *const c_char, succ: *const *const c_char, order: c_int);
pub fn ap_hook_pre_config(f: Option<hook_pre_config_fn>, pre: *const *const c_char, succ: *const *const c_char, order: c_int);
pub fn ap_hook_check_config(f: Option<hook_check_config_fn>, pre: *const *const c_char, succ: *const *const c_char, order: c_int);
pub fn ap_hook_test_config(f: Option<hook_test_config_fn>, pre: *const *const c_char, succ: *const *const c_char, order: c_int);
pub fn ap_hook_post_config(f: Option<hook_post_config_fn>, pre: *const *const c_char, succ: *const *const c_char, order: c_int);
pub fn ap_hook_create_request(f: Option<hook_handler_fn>, pre: *const *const c_char, succ: *const *const c_char, order: c_int);
pub fn ap_hook_translate_name(f: Option<hook_handler_fn>, pre: *const *const c_char, succ: *const *const c_char, order: c_int);
pub fn ap_hook_map_to_storage(f: Option<hook_handler_fn>, pre: *const *const c_char, succ: *const *const c_char, order: c_int);
pub fn ap_hook_check_user_id(f: Option<hook_handler_fn>, pre: *const *const c_char, succ: *const *const c_char, order: c_int);
pub fn ap_hook_fixups(f: Option<hook_handler_fn>, pre: *const *const c_char, succ: *const *const c_char, order: c_int);
pub fn ap_hook_type_checker(f: Option<hook_handler_fn>, pre: *const *const c_char, succ: *const *const c_char, order: c_int);
pub fn ap_hook_access_checker(f: Option<hook_handler_fn>, pre: *const *const c_char, succ: *const *const c_char, order: c_int);
pub fn ap_hook_access_checker_ex(f: Option<hook_handler_fn>, pre: *const *const c_char, succ: *const *const c_char, order: c_int);
pub fn ap_hook_auth_checker(f: Option<hook_handler_fn>, pre: *const *const c_char, succ: *const *const c_char, order: c_int);
pub fn ap_hook_insert_error_filter(f: Option<hook_handler_fn>, pre: *const *const c_char, succ: *const *const c_char, order: c_int);
pub fn ap_hook_log_transaction(f: Option<hook_handler_fn>, pre: *const *const c_char, succ: *const *const c_char, order: c_int);
}
|
<|file_name|>fa_icon_definitions.py<|end_file_name|>
fa_icons = {
'fa-glass': u"\uf000",
'fa-music': u"\uf001",
'fa-search': u"\uf002",
'fa-envelope-o': u"\uf003",
'fa-heart': u"\uf004",
'fa-star': u"\uf005",
'fa-star-o': u"\uf006",
'fa-user': u"\uf007",
'fa-film': u"\uf008",
'fa-th-large': u"\uf009",
'fa-th': u"\uf00a",
'fa-th-list': u"\uf00b",
'fa-check': u"\uf00c",
'fa-times': u"\uf00d",
'fa-search-plus': u"\uf00e",
'fa-search-minus': u"\uf010",
'fa-power-off': u"\uf011",
'fa-signal': u"\uf012",
'fa-gear': u"\uf013",
'fa-cog': u"\uf013",
'fa-trash-o': u"\uf014",
'fa-home': u"\uf015",
'fa-file-o': u"\uf016",
'fa-clock-o': u"\uf017",
'fa-road': u"\uf018",
'fa-download': u"\uf019",
'fa-arrow-circle-o-down': u"\uf01a",
'fa-arrow-circle-o-up': u"\uf01b",
'fa-inbox': u"\uf01c",
'fa-play-circle-o': u"\uf01d",
'fa-rotate-right': u"\uf01e",
'fa-repeat': u"\uf01e",
'fa-refresh': u"\uf021",
'fa-list-alt': u"\uf022",
'fa-lock': u"\uf023",
'fa-flag': u"\uf024",
'fa-headphones': u"\uf025",
'fa-volume-off': u"\uf026",
'fa-volume-down': u"\uf027",
'fa-volume-up': u"\uf028",
'fa-qrcode': u"\uf029",
'fa-barcode': u"\uf02a",
'fa-tag': u"\uf02b",
'fa-tags': u"\uf02c",
'fa-book': u"\uf02d",
'fa-bookmark': u"\uf02e",
'fa-print': u"\uf02f",
'fa-camera': u"\uf030",
'fa-font': u"\uf031",
'fa-bold': u"\uf032",
'fa-italic': u"\uf033",
'fa-text-height': u"\uf034",
'fa-text-width': u"\uf035",
'fa-align-left': u"\uf036",
'fa-align-center': u"\uf037",
'fa-align-right': u"\uf038",
'fa-align-justify': u"\uf039",
'fa-list': u"\uf03a",
'fa-dedent': u"\uf03b",
'fa-outdent': u"\uf03b",
'fa-indent': u"\uf03c",
'fa-video-camera': u"\uf03d",
'fa-photo': u"\uf03e",
'fa-image': u"\uf03e",
'fa-picture-o': u"\uf03e",
'fa-pencil': u"\uf040",
'fa-map-marker': u"\uf041",
'fa-adjust': u"\uf042",
'fa-tint': u"\uf043",
'fa-edit': u"\uf044",
'fa-pencil-square-o': u"\uf044",
'fa-share-square-o': u"\uf045",
'fa-check-square-o': u"\uf046",
'fa-arrows': u"\uf047",
'fa-step-backward': u"\uf048",
'fa-fast-backward': u"\uf049",
'fa-backward': u"\uf04a",
'fa-play': u"\uf04b",
'fa-pause': u"\uf04c",
'fa-stop': u"\uf04d",
'fa-forward': u"\uf04e",
'fa-fast-forward': u"\uf050",
'fa-step-forward': u"\uf051",
'fa-eject': u"\uf052",
'fa-chevron-left': u"\uf053",
'fa-chevron-right': u"\uf054",
'fa-plus-circle': u"\uf055",
'fa-minus-circle': u"\uf056",
'fa-times-circle': u"\uf057",
'fa-check-circle': u"\uf058",
'fa-question-circle': u"\uf059",
'fa-info-circle': u"\uf05a",
'fa-crosshairs': u"\uf05b",
'fa-times-circle-o': u"\uf05c",
'fa-check-circle-o': u"\uf05d",
'fa-ban': u"\uf05e",
'fa-arrow-left': u"\uf060",
'fa-arrow-right': u"\uf061",
'fa-arrow-up': u"\uf062",
'fa-arrow-down': u"\uf063",
'fa-mail-forward': u"\uf064",
'fa-share': u"\uf064",
'fa-expand': u"\uf065",
'fa-compress': u"\uf066",
'fa-plus': u"\uf067",
'fa-minus': u"\uf068",
'fa-asterisk': u"\uf069",
'fa-exclamation-circle': u"\uf06a",
'fa-gift': u"\uf06b",
'fa-leaf': u"\uf06c",
'fa-fire': u"\uf06d",
'fa-eye': u"\uf06e",
'fa-eye-slash': u"\uf070",
'fa-warning': u"\uf071",
'fa-exclamation-triangle': u"\uf071",
'fa-plane': u"\uf072",
'fa-calendar': u"\uf073",
'fa-random': u"\uf074",
'fa-comment': u"\uf075",
'fa-magnet': u"\uf076",
'fa-chevron-up': u"\uf077",
'fa-chevron-down': u"\uf078",
'fa-retweet': u"\uf079",
'fa-shopping-cart': u"\uf07a",
'fa-folder': u"\uf07b",
'fa-folder-open': u"\uf07c",
'fa-arrows-v': u"\uf07d",
'fa-arrows-h': u"\uf07e",
'fa-bar-chart-o': u"\uf080",
'fa-twitter-square': u"\uf081",
'fa-facebook-square': u"\uf082",
'fa-camera-retro': u"\uf083",
'fa-key': u"\uf084",
'fa-gears': u"\uf085",
'fa-cogs': u"\uf085",
'fa-comments': u"\uf086",
'fa-thumbs-o-up': u"\uf087",
'fa-thumbs-o-down': u"\uf088",
'fa-star-half': u"\uf089",
'fa-heart-o': u"\uf08a",
'fa-sign-out': u"\uf08b",
'fa-linkedin-square': u"\uf08c",
'fa-thumb-tack': u"\uf08d",
'fa-external-link': u"\uf08e",
'fa-sign-in': u"\uf090",
'fa-trophy': u"\uf091",
'fa-github-square': u"\uf092",
'fa-upload': u"\uf093",
'fa-lemon-o': u"\uf094",
'fa-phone': u"\uf095",
'fa-square-o': u"\uf096",
'fa-bookmark-o': u"\uf097",
'fa-phone-square': u"\uf098",
'fa-twitter': u"\uf099",
'fa-facebook': u"\uf09a",
'fa-github': u"\uf09b",
'fa-unlock': u"\uf09c",
'fa-credit-card': u"\uf09d",
'fa-rss': u"\uf09e",
'fa-hdd-o': u"\uf0a0",
'fa-bullhorn': u"\uf0a1",
'fa-bell': u"\uf0f3",
'fa-certificate': u"\uf0a3",
'fa-hand-o-right': u"\uf0a4",
'fa-hand-o-left': u"\uf0a5",
'fa-hand-o-up': u"\uf0a6",
'fa-hand-o-down': u"\uf0a7",
'fa-arrow-circle-left': u"\uf0a8",
'fa-arrow-circle-right': u"\uf0a9",
'fa-arrow-circle-up': u"\uf0aa",
'fa-arrow-circle-down': u"\uf0ab",
'fa-globe': u"\uf0ac",
'fa-wrench': u"\uf0ad",
'fa-tasks': u"\uf0ae",
'fa-filter': u"\uf0b0",
'fa-briefcase': u"\uf0b1",
'fa-arrows-alt': u"\uf0b2",
'fa-group': u"\uf0c0",
'fa-users': u"\uf0c0",
'fa-chain': u"\uf0c1",
'fa-link': u"\uf0c1",
'fa-cloud': u"\uf0c2",
'fa-flask': u"\uf0c3",
'fa-cut': u"\uf0c4",
'fa-scissors': u"\uf0c4",
'fa-copy': u"\uf0c5",
'fa-files-o': u"\uf0c5",
'fa-paperclip': u"\uf0c6",
'fa-save': u"\uf0c7",
'fa-floppy-o': u"\uf0c7",
'fa-square': u"\uf0c8",
'fa-navicon': u"\uf0c9",
'fa-reorder': u"\uf0c9",
'fa-bars': u"\uf0c9",
'fa-list-ul': u"\uf0ca",
'fa-list-ol': u"\uf0cb",
'fa-strikethrough': u"\uf0cc",
'fa-underline': u"\uf0cd",
'fa-table': u"\uf0ce",
'fa-magic': u"\uf0d0",
'fa-truck': u"\uf0d1",
'fa-pinterest': u"\uf0d2",
'fa-pinterest-square': u"\uf0d3",
'fa-google-plus-square': u"\uf0d4",
'fa-google-plus': u"\uf0d5",
'fa-money': u"\uf0d6",
'fa-caret-down': u"\uf0d7",
'fa-caret-up': u"\uf0d8",
'fa-caret-left': u"\uf0d9",
'fa-caret-right': u"\uf0da",
'fa-columns': u"\uf0db",
'fa-unsorted': u"\uf0dc",
'fa-sort': u"\uf0dc",
'fa-sort-down': u"\uf0dd",
'fa-sort-desc': u"\uf0dd",
'fa-sort-up': u"\uf0de",
'fa-sort-asc': u"\uf0de",
'fa-envelope': u"\uf0e0",
'fa-linkedin': u"\uf0e1",
'fa-rotate-left': u"\uf0e2",
'fa-undo': u"\uf0e2",
'fa-legal': u"\uf0e3",
'fa-gavel': u"\uf0e3",
'fa-dashboard': u"\uf0e4",
'fa-tachometer': u"\uf0e4",
'fa-comment-o': u"\uf0e5",
'fa-comments-o': u"\uf0e6",
'fa-flash': u"\uf0e7",
'fa-bolt': u"\uf0e7",
'fa-sitemap': u"\uf0e8",
'fa-umbrella': u"\uf0e9",
'fa-paste': u"\uf0ea",
'fa-clipboard': u"\uf0ea",
'fa-lightbulb-o': u"\uf0eb",
'fa-exchange': u"\uf0ec",
'fa-cloud-download': u"\uf0ed",
'fa-cloud-upload': u"\uf0ee",
'fa-user-md': u"\uf0f0",
'fa-stethoscope': u"\uf0f1",
'fa-suitcase': u"\uf0f2",
'fa-bell-o': u"\uf0a2",
'fa-coffee': u"\uf0f4",
'fa-cutlery': u"\uf0f5",
'fa-file-text-o': u"\uf0f6",
'fa-building-o': u"\uf0f7",
'fa-hospital-o': u"\uf0f8",
'fa-ambulance': u"\uf0f9",
'fa-medkit': u"\uf0fa",
'fa-fighter-jet': u"\uf0fb",
'fa-beer': u"\uf0fc",
'fa-h-square': u"\uf0fd",
'fa-plus-square': u"\uf0fe",
'fa-angle-double-left': u"\uf100",
'fa-angle-double-right': u"\uf101",
'fa-angle-double-up': u"\uf102",
'fa-angle-double-down': u"\uf103",
'fa-angle-left': u"\uf104",
'fa-angle-right': u"\uf105",
'fa-angle-up': u"\uf106",
'fa-angle-down': u"\uf107",
'fa-desktop': u"\uf108",
'fa-laptop': u"\uf109",
'fa-tablet': u"\uf10a",
'fa-mobile-phone': u"\uf10b",
'fa-mobile': u"\uf10b",
'fa-circle-o': u"\uf10c",
'fa-quote-left': u"\uf10d",
'fa-quote-right': u"\uf10e",
'fa-spinner': u"\uf110",
'fa-circle': u"\uf111",
'fa-mail-reply': u"\uf112",
'fa-reply': u"\uf112",
'fa-github-alt': u"\uf113",
'fa-folder-o': u"\uf114",
'fa-folder-open-o': u"\uf115",
'fa-smile-o': u"\uf118",
'fa-frown-o': u"\uf119",
'fa-meh-o': u"\uf11a",
'fa-gamepad': u"\uf11b",
'fa-keyboard-o': u"\uf11c",
'fa-flag-o': u"\uf11d",
'fa-flag-checkered': u"\uf11e",
'fa-terminal': u"\uf120",
'fa-code': u"\uf121",
'fa-mail-reply-all': u"\uf122",
'fa-reply-all': u"\uf122",
'fa-star-half-empty': u"\uf123",
'fa-star-half-full': u"\uf123",
'fa-star-half-o': u"\uf123",
'fa-location-arrow': u"\uf124",
'fa-crop': u"\uf125",
'fa-code-fork': u"\uf126",
'fa-unlink': u"\uf127",
'fa-chain-broken': u"\uf127",
'fa-question': u"\uf128",
'fa-info': u"\uf129",
'fa-exclamation': u"\uf12a",
'fa-superscript': u"\uf12b",
'fa-subscript': u"\uf12c",
'fa-eraser': u"\uf12d",
'fa-puzzle-piece': u"\uf12e",
'fa-microphone': u"\uf130",
'fa-microphone-slash': u"\uf131",
'fa-shield': u"\uf132",
'fa-calendar-o': u"\uf133",
'fa-fire-extinguisher': u"\uf134",
'fa-rocket': u"\uf135",
'fa-maxcdn': u"\uf136",
'fa-chevron-circle-left': u"\uf137",
'fa-chevron-circle-right': u"\uf138",
'fa-chevron-circle-up': u"\uf139",
'fa-chevron-circle-down': u"\uf13a",
'fa-html5': u"\uf13b",
'fa-css3': u"\uf13c",
'fa-anchor': u"\uf13d",
'fa-unlock-alt': u"\uf13e",
'fa-bullseye': u"\uf140",
'fa-ellipsis-h': u"\uf141",
'fa-ellipsis-v': u"\uf142",
'fa-rss-square': u"\uf143",
'fa-play-circle': u"\uf144",
'fa-ticket': u"\uf145",
'fa-minus-square': u"\uf146",
'fa-minus-square-o': u"\uf147",
'fa-level-up': u"\uf148",
'fa-level-down': u"\uf149",
'fa-check-square': u"\uf14a",
'fa-pencil-square': u"\uf14b",
'fa-external-link-square': u"\uf14c",
'fa-share-square': u"\uf14d",
'fa-compass': u"\uf14e",
'fa-toggle-down': u"\uf150",
'fa-caret-square-o-down': u"\uf150",
'fa-toggle-up': u"\uf151",
'fa-caret-square-o-up': u"\uf151",
'fa-toggle-right': u"\uf152",
'fa-caret-square-o-right': u"\uf152",
'fa-euro': u"\uf153",
'fa-eur': u"\uf153",
'fa-gbp': u"\uf154",
'fa-dollar': u"\uf155",
'fa-usd': u"\uf155",
'fa-rupee': u"\uf156",
'fa-inr': u"\uf156",
'fa-cny': u"\uf157",
'fa-rmb': u"\uf157",
'fa-yen': u"\uf157",
'fa-jpy': u"\uf157",
'fa-ruble': u"\uf158",
'fa-rouble': u"\uf158",
'fa-rub': u"\uf158",<|fim▁hole|> 'fa-won': u"\uf159",
'fa-krw': u"\uf159",
'fa-bitcoin': u"\uf15a",
'fa-btc': u"\uf15a",
'fa-file': u"\uf15b",
'fa-file-text': u"\uf15c",
'fa-sort-alpha-asc': u"\uf15d",
'fa-sort-alpha-desc': u"\uf15e",
'fa-sort-amount-asc': u"\uf160",
'fa-sort-amount-desc': u"\uf161",
'fa-sort-numeric-asc': u"\uf162",
'fa-sort-numeric-desc': u"\uf163",
'fa-thumbs-up': u"\uf164",
'fa-thumbs-down': u"\uf165",
'fa-youtube-square': u"\uf166",
'fa-youtube': u"\uf167",
'fa-xing': u"\uf168",
'fa-xing-square': u"\uf169",
'fa-youtube-play': u"\uf16a",
'fa-dropbox': u"\uf16b",
'fa-stack-overflow': u"\uf16c",
'fa-instagram': u"\uf16d",
'fa-flickr': u"\uf16e",
'fa-adn': u"\uf170",
'fa-bitbucket': u"\uf171",
'fa-bitbucket-square': u"\uf172",
'fa-tumblr': u"\uf173",
'fa-tumblr-square': u"\uf174",
'fa-long-arrow-down': u"\uf175",
'fa-long-arrow-up': u"\uf176",
'fa-long-arrow-left': u"\uf177",
'fa-long-arrow-right': u"\uf178",
'fa-apple': u"\uf179",
'fa-windows': u"\uf17a",
'fa-android': u"\uf17b",
'fa-linux': u"\uf17c",
'fa-dribbble': u"\uf17d",
'fa-skype': u"\uf17e",
'fa-foursquare': u"\uf180",
'fa-trello': u"\uf181",
'fa-female': u"\uf182",
'fa-male': u"\uf183",
'fa-gittip': u"\uf184",
'fa-sun-o': u"\uf185",
'fa-moon-o': u"\uf186",
'fa-archive': u"\uf187",
'fa-bug': u"\uf188",
'fa-vk': u"\uf189",
'fa-weibo': u"\uf18a",
'fa-renren': u"\uf18b",
'fa-pagelines': u"\uf18c",
'fa-stack-exchange': u"\uf18d",
'fa-arrow-circle-o-right': u"\uf18e",
'fa-arrow-circle-o-left': u"\uf190",
'fa-toggle-left': u"\uf191",
'fa-caret-square-o-left': u"\uf191",
'fa-dot-circle-o': u"\uf192",
'fa-wheelchair': u"\uf193",
'fa-vimeo-square': u"\uf194",
'fa-turkish-lira': u"\uf195",
'fa-try': u"\uf195",
'fa-plus-square-o': u"\uf196",
'fa-space-shuttle': u"\uf197",
'fa-slack': u"\uf198",
'fa-envelope-square': u"\uf199",
'fa-wordpress': u"\uf19a",
'fa-openid': u"\uf19b",
'fa-institution': u"\uf19c",
'fa-bank': u"\uf19c",
'fa-university': u"\uf19c",
'fa-mortar-board': u"\uf19d",
'fa-graduation-cap': u"\uf19d",
'fa-yahoo': u"\uf19e",
'fa-google': u"\uf1a0",
'fa-reddit': u"\uf1a1",
'fa-reddit-square': u"\uf1a2",
'fa-stumbleupon-circle': u"\uf1a3",
'fa-stumbleupon': u"\uf1a4",
'fa-delicious': u"\uf1a5",
'fa-digg': u"\uf1a6",
'fa-pied-piper-square': u"\uf1a7",
'fa-pied-piper': u"\uf1a7",
'fa-pied-piper-alt': u"\uf1a8",
'fa-drupal': u"\uf1a9",
'fa-joomla': u"\uf1aa",
'fa-language': u"\uf1ab",
'fa-fax': u"\uf1ac",
'fa-building': u"\uf1ad",
'fa-child': u"\uf1ae",
'fa-paw': u"\uf1b0",
'fa-spoon': u"\uf1b1",
'fa-cube': u"\uf1b2",
'fa-cubes': u"\uf1b3",
'fa-behance': u"\uf1b4",
'fa-behance-square': u"\uf1b5",
'fa-steam': u"\uf1b6",
'fa-steam-square': u"\uf1b7",
'fa-recycle': u"\uf1b8",
'fa-automobile': u"\uf1b9",
'fa-car': u"\uf1b9",
'fa-cab': u"\uf1ba",
'fa-taxi': u"\uf1ba",
'fa-tree': u"\uf1bb",
'fa-spotify': u"\uf1bc",
'fa-deviantart': u"\uf1bd",
'fa-soundcloud': u"\uf1be",
'fa-database': u"\uf1c0",
'fa-file-pdf-o': u"\uf1c1",
'fa-file-word-o': u"\uf1c2",
'fa-file-excel-o': u"\uf1c3",
'fa-file-powerpoint-o': u"\uf1c4",
'fa-file-photo-o': u"\uf1c5",
'fa-file-picture-o': u"\uf1c5",
'fa-file-image-o': u"\uf1c5",
'fa-file-zip-o': u"\uf1c6",
'fa-file-archive-o': u"\uf1c6",
'fa-file-sound-o': u"\uf1c7",
'fa-file-audio-o': u"\uf1c7",
'fa-file-movie-o': u"\uf1c8",
'fa-file-video-o': u"\uf1c8",
'fa-file-code-o': u"\uf1c9",
'fa-vine': u"\uf1ca",
'fa-codepen': u"\uf1cb",
'fa-jsfiddle': u"\uf1cc",
'fa-life-bouy': u"\uf1cd",
'fa-life-saver': u"\uf1cd",
'fa-support': u"\uf1cd",
'fa-life-ring': u"\uf1cd",
'fa-circle-o-notch': u"\uf1ce",
'fa-ra': u"\uf1d0",
'fa-rebel': u"\uf1d0",
'fa-ge': u"\uf1d1",
'fa-empire': u"\uf1d1",
'fa-git-square': u"\uf1d2",
'fa-git': u"\uf1d3",
'fa-hacker-news': u"\uf1d4",
'fa-tencent-weibo': u"\uf1d5",
'fa-qq': u"\uf1d6",
'fa-wechat': u"\uf1d7",
'fa-weixin': u"\uf1d7",
'fa-send': u"\uf1d8",
'fa-paper-plane': u"\uf1d8",
'fa-send-o': u"\uf1d9",
'fa-paper-plane-o': u"\uf1d9",
'fa-history': u"\uf1da",
'fa-circle-thin': u"\uf1db",
'fa-header': u"\uf1dc",
'fa-paragraph': u"\uf1dd",
'fa-sliders': u"\uf1de",
'fa-share-alt': u"\uf1e0",
'fa-share-alt-square': u"\uf1e1",
'fa-bomb': u"\uf1e2",
}<|fim▁end|>
| |
<|file_name|>empty-env.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the<|fim▁hole|># "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test Qt creation from a copied empty environment.
"""
import TestSCons
test = TestSCons.TestSCons()
test.Qt_dummy_installation('qt')
test.write('SConstruct', """\
orig = Environment()
env = orig.Clone(QTDIR = r'%s',
QT_LIB = r'%s',
QT_MOC = r'%s',
QT_UIC = r'%s',
tools=['qt'])
env.Program('main', 'main.cpp', CPPDEFINES=['FOO'], LIBS=[])
""" % (test.QT, test.QT_LIB, test.QT_MOC, test.QT_UIC))
test.write('main.cpp', r"""
#include "foo6.h"
int main() { foo6(); return 0; }
""")
test.write(['qt', 'include', 'foo6.h'], """\
#include <stdio.h>
void
foo6(void)
{
#ifdef FOO
printf("qt/include/foo6.h\\n");
#endif
}
""")
# we can receive warnings about a non detected qt (empty QTDIR)
# these are not critical, but may be annoying.
test.run(stderr=None)
test.run(program = test.workpath('main' + TestSCons._exe),
stderr = None,
stdout = 'qt/include/foo6.h\n')
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:<|fim▁end|>
| |
<|file_name|>max_prod_three.py<|end_file_name|><|fim▁begin|><|fim▁hole|>def solution(a):
l = len(a)
if l < 3: return reduce(lambda x, y: x * y, a)
products = []
for i in xrange(0, l):
for j in xrange(i+1, l):
for k in xrange (j+1, l):
products.append(a[i] * a[j] * a[k])
return max(products)
if __name__ == '__main__':
array = [ -3 , 1 , 2 , -2 , 5 , 6]
print "result: ", solution(array)<|fim▁end|>
|
# correctness: 100%, performance: 0%
|
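A note on the completion above: "correctness: 100%, performance: 0%" reflects the O(n^3) triple loop in solution(). The usual fix is the sort-based version sketched below; the function name and sample values are illustrative only and not part of the original submission.

# Sort-based alternative to the brute-force solution above (illustrative
# sketch). After sorting, the maximum product of three elements is either the
# product of the three largest values or the product of the two smallest
# (possibly both negative) values and the largest one.
def solution_fast(a):
    s = sorted(a)
    return max(s[-1] * s[-2] * s[-3], s[0] * s[1] * s[-1])

if __name__ == '__main__':
    assert solution_fast([-3, 1, 2, -2, 5, 6]) == 60    # 2 * 5 * 6
    assert solution_fast([-10, -10, 1, 3, 2]) == 300    # -10 * -10 * 3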
<|file_name|>cytest.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
from storer import Storer
import sys
s = Storer()
if s.get_value() != 0:
print('Initial value incorrect.')
sys.exit(1)
s.set_value(42)
<|fim▁hole|> sys.exit(1)
try:
s.set_value('not a number')
print('Using wrong argument type did not fail.')
sys.exit(1)
except TypeError:
pass<|fim▁end|>
|
if s.get_value() != 42:
print('Setting value failed.')
|
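cytest.py above imports a compiled storer extension that is not part of this dump. The stand-in below is an assumption about that interface, inferred only from what the test exercises (initial value 0, integer set/get, TypeError on a non-integer argument); the real module is built from Cython/C sources.

# Minimal pure-Python stand-in for the 'storer' module used by cytest.py.
# Only the behaviour the test checks is modelled here.
class Storer:
    def __init__(self):
        self._value = 0

    def get_value(self):
        return self._value

    def set_value(self, value):
        if not isinstance(value, int):
            raise TypeError('Argument must be an integer.')
        self._value = value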
<|file_name|>unify.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::kinds::marker;
use middle::ty::{expected_found, IntVarValue};
use middle::ty;
use middle::typeck::infer::{Bounds, uok, ures};
use middle::typeck::infer::InferCtxt;
use std::cell::RefCell;
use std::fmt::Show;
use std::mem;
use syntax::ast;
use util::ppaux::Repr;
/**
* This trait is implemented by any type that can serve as a type
* variable. We call such variables *unification keys*. For example,
* this trait is implemented by `TyVid`, which represents normal
* type variables, and `IntVid`, which represents integral variables.
*
* Each key type has an associated value type `V`. For example,
* for `TyVid`, this is `Bounds<ty::t>`, representing a pair of
* upper- and lower-bound types.
*
* Implementations of this trait are at the end of this file.
*/
pub trait UnifyKey<V> : Clone + Show + PartialEq + Repr {
fn index(&self) -> uint;
fn from_index(u: uint) -> Self;
/**
* Given an inference context, returns the unification table
* appropriate to this key type.
*/
fn unification_table<'v>(infcx: &'v InferCtxt)
-> &'v RefCell<UnificationTable<Self,V>>;
fn tag(k: Option<Self>) -> &'static str;
}
/**
* Trait for valid types that a type variable can be set to. Note
* that this is typically not the end type that the value will
* take on, but rather some wrapper: for example, for normal type
* variables, the associated type is not `ty::t` but rather
* `Bounds<ty::t>`.
*
* Implementations of this trait are at the end of this file.
*/
pub trait UnifyValue : Clone + Repr + PartialEq {
}
/**
* Value of a unification key. We implement Tarjan's union-find
* algorithm: when two keys are unified, one of them is converted
* into a "redirect" pointing at the other. These redirects form a
* DAG: the roots of the DAG (nodes that are not redirected) are each
* associated with a value of type `V` and a rank. The rank is used
* to keep the DAG relatively balanced, which helps keep the running
* time of the algorithm under control. For more information, see
* <http://en.wikipedia.org/wiki/Disjoint-set_data_structure>.
*/
#[deriving(PartialEq,Clone)]
pub enum VarValue<K,V> {
Redirect(K),
Root(V, uint),
}
/**
* Table of unification keys and their values.
*/
pub struct UnificationTable<K,V> {
/**
* Indicates the current value of each key.
*/
values: Vec<VarValue<K,V>>,
/**
* When a snapshot is active, logs each change made to the table
* so that they can be unrolled.
*/
undo_log: Vec<UndoLog<K,V>>,
}
/**
* At any time, users may snapshot a unification table. The changes
* made during the snapshot may either be *commited* or *rolled back*.
*/
pub struct Snapshot<K> {
// Ensure that this snapshot is keyed to the table type.
marker1: marker::CovariantType<K>,
// Snapshots are tokens that should be created/consumed linearly.
marker2: marker::NoCopy,
// Length of the undo log at the time the snapshot was taken.
length: uint,
}
#[deriving(PartialEq)]
enum UndoLog<K,V> {
/// Indicates where a snapshot started.
OpenSnapshot,
/// Indicates a snapshot that has been committed.
CommittedSnapshot,
/// New variable with given index was created.
NewVar(uint),
/// Variable with given index was changed *from* the given value.
SetVar(uint, VarValue<K,V>),
}
/**
* Internal type used to represent the result of a `get()` operation.
* Conveys the current root and value of the key.
*/
pub struct Node<K,V> {
pub key: K,
pub value: V,
pub rank: uint,
}
// We can't use V:LatticeValue, much as I would like to,
// because frequently the pattern is that V=Bounds<U> for some
// other type parameter U, and we have no way to say
// Bounds<U>:
impl<V:PartialEq+Clone+Repr,K:UnifyKey<V>> UnificationTable<K,V> {
pub fn new() -> UnificationTable<K,V> {
UnificationTable {
values: Vec::new(),
undo_log: Vec::new()
}
}
pub fn in_snapshot(&self) -> bool {
/*! True if a snapshot has been started. */
self.undo_log.len() > 0
}
/**
* Starts a new snapshot. Each snapshot must be either
* rolled back or commited in a "LIFO" (stack) order.
*/
pub fn snapshot(&mut self) -> Snapshot<K> {
let length = self.undo_log.len();
debug!("{}: snapshot at length {}",
UnifyKey::tag(None::<K>),
length);
self.undo_log.push(OpenSnapshot);
Snapshot { length: length,
marker1: marker::CovariantType,
marker2: marker::NoCopy }
}
fn assert_open_snapshot(&self, snapshot: &Snapshot<K>) {
// Or else there was a failure to follow a stack discipline:
assert!(self.undo_log.len() > snapshot.length);
// Invariant established by start_snapshot():
assert!(*self.undo_log.get(snapshot.length) == OpenSnapshot);
}
/**
* Reverses all changes since the last snapshot. Also
* removes any keys that have been created since then.
*/
pub fn rollback_to(&mut self, tcx: &ty::ctxt, snapshot: Snapshot<K>) {
debug!("{}: rollback_to({})",
UnifyKey::tag(None::<K>),
snapshot.length);
self.assert_open_snapshot(&snapshot);
while self.undo_log.len() > snapshot.length + 1 {
match self.undo_log.pop().unwrap() {
OpenSnapshot => {
// This indicates a failure to obey the stack discipline.
tcx.sess.bug("Cannot rollback an uncommited snapshot");
}
CommittedSnapshot => {
// This occurs when there are nested snapshots and
// the inner is commited but outer is rolled back.
}
NewVar(i) => {
assert!(self.values.len() == i);
self.values.pop();
}
SetVar(i, v) => {
*self.values.get_mut(i) = v;
}
}
}
let v = self.undo_log.pop().unwrap();
assert!(v == OpenSnapshot);
assert!(self.undo_log.len() == snapshot.length);
}
/**
* Commits all changes since the last snapshot. Of course, they
* can still be undone if there is a snapshot further out.
*/
pub fn commit(&mut self, snapshot: Snapshot<K>) {
debug!("{}: commit({})",
UnifyKey::tag(None::<K>),
snapshot.length);
self.assert_open_snapshot(&snapshot);
if snapshot.length == 0 {
// The root snapshot.
self.undo_log.truncate(0);
} else {
*self.undo_log.get_mut(snapshot.length) = CommittedSnapshot;
}
}
pub fn new_key(&mut self, value: V) -> K {
let index = self.values.len();
if self.in_snapshot() {
self.undo_log.push(NewVar(index));
}
self.values.push(Root(value, 0));
let k = UnifyKey::from_index(index);
debug!("{}: created new key: {}",
UnifyKey::tag(None::<K>),
k);
k
}
fn swap_value(&mut self,
index: uint,
new_value: VarValue<K,V>)
-> VarValue<K,V>
{
/*!
* Primitive operation to swap a value in the var array.
* Caller should update the undo log if we are in a snapshot.
*/
let loc = self.values.get_mut(index);
mem::replace(loc, new_value)
}
pub fn get(&mut self, tcx: &ty::ctxt, vid: K) -> Node<K,V> {
/*!
* Find the root node for `vid`. This uses the standard
* union-find algorithm with path compression:
* http://en.wikipedia.org/wiki/Disjoint-set_data_structure
*/
let index = vid.index();
let value = (*self.values.get(index)).clone();
match value {
Redirect(redirect) => {
let node: Node<K,V> = self.get(tcx, redirect.clone());
if node.key != redirect {
// Path compression
let old_value =
self.swap_value(index, Redirect(node.key.clone()));
// If we are in a snapshot, record this compression,
// because it's possible that the unification which
// caused it will be rolled back later.
if self.in_snapshot() {
self.undo_log.push(SetVar(index, old_value));
}
}
node
}
Root(value, rank) => {
Node { key: vid, value: value, rank: rank }
}
}
}
fn is_root(&self, key: &K) -> bool {
match *self.values.get(key.index()) {
Redirect(..) => false,
Root(..) => true,
}
}
pub fn set(&mut self,
tcx: &ty::ctxt,
key: K,
new_value: VarValue<K,V>)
{
/*!
* Sets the value for `vid` to `new_value`. `vid` MUST be a
* root node! Also, we must be in the middle of a snapshot.
*/
assert!(self.is_root(&key));
assert!(self.in_snapshot());
debug!("Updating variable {} to {}",
key.repr(tcx),
new_value.repr(tcx));
let index = key.index();
let old_value = self.swap_value(index, new_value);
self.undo_log.push(SetVar(index, old_value));
}
pub fn unify(&mut self,
tcx: &ty::ctxt,
node_a: &Node<K,V>,
node_b: &Node<K,V>)
-> (K, uint)
{
/*!
* Either redirects node_a to node_b or vice versa, depending
* on the relative rank. Returns the new root and rank. You
* should then update the value of the new root to something
* suitable.
*/
debug!("unify(node_a(id={}, rank={}), node_b(id={}, rank={}))",
node_a.key.repr(tcx),
node_a.rank,
node_b.key.repr(tcx),
node_b.rank);
if node_a.rank > node_b.rank {
// a has greater rank, so a should become b's parent,
// i.e., b should redirect to a.
self.set(tcx, node_b.key.clone(), Redirect(node_a.key.clone()));
(node_a.key.clone(), node_a.rank)
} else if node_a.rank < node_b.rank {
// b has greater rank, so a should redirect to b.
self.set(tcx, node_a.key.clone(), Redirect(node_b.key.clone()));
(node_b.key.clone(), node_b.rank)
} else {
// If equal, redirect one to the other and increment the
// other's rank.
assert_eq!(node_a.rank, node_b.rank);
self.set(tcx, node_b.key.clone(), Redirect(node_a.key.clone()));
(node_a.key.clone(), node_a.rank + 1)
}
}
}
///////////////////////////////////////////////////////////////////////////
// Code to handle simple keys like ints, floats---anything that
// doesn't have a subtyping relationship we need to worry about.
/**
* Indicates a type that does not have any kind of subtyping
* relationship.
*/
pub trait SimplyUnifiable : Clone + PartialEq + Repr {
fn to_type_err(expected_found<Self>) -> ty::type_err;
}
pub fn err<V:SimplyUnifiable>(a_is_expected: bool,
a_t: V,
b_t: V) -> ures {
if a_is_expected {
Err(SimplyUnifiable::to_type_err(
ty::expected_found {expected: a_t, found: b_t}))
} else {
Err(SimplyUnifiable::to_type_err(
ty::expected_found {expected: b_t, found: a_t}))
}
}
pub trait InferCtxtMethodsForSimplyUnifiableTypes<V:SimplyUnifiable,
K:UnifyKey<Option<V>>> {
fn simple_vars(&self,
a_is_expected: bool,
a_id: K,
b_id: K)
-> ures;
fn simple_var_t(&self,
a_is_expected: bool,
a_id: K,
b: V)
-> ures;
}
impl<'tcx,V:SimplyUnifiable,K:UnifyKey<Option<V>>>
InferCtxtMethodsForSimplyUnifiableTypes<V,K> for InferCtxt<'tcx>
{
fn simple_vars(&self,
a_is_expected: bool,
a_id: K,
b_id: K)
-> ures
{
/*!
* Unifies two simple keys. Because simple keys do
* not have any subtyping relationships, if both keys
* have already been associated with a value, then those two
* values must be the same.
*/
let tcx = self.tcx;
let table = UnifyKey::unification_table(self);
let node_a = table.borrow_mut().get(tcx, a_id);
let node_b = table.borrow_mut().get(tcx, b_id);
let a_id = node_a.key.clone();
let b_id = node_b.key.clone();
if a_id == b_id { return uok(); }
let combined = {
match (&node_a.value, &node_b.value) {
(&None, &None) => {
None
}
(&Some(ref v), &None) | (&None, &Some(ref v)) => {
Some((*v).clone())
}
(&Some(ref v1), &Some(ref v2)) => {
if *v1 != *v2 {
return err(a_is_expected, (*v1).clone(), (*v2).clone())
}
Some((*v1).clone())
}
}
};
let (new_root, new_rank) = table.borrow_mut().unify(tcx,
&node_a,
&node_b);
table.borrow_mut().set(tcx, new_root, Root(combined, new_rank));
return Ok(())
}
fn simple_var_t(&self,
a_is_expected: bool,
a_id: K,
b: V)
-> ures
{
/*!
* Sets the value of the key `a_id` to `b`. Because
* simple keys do not have any subtyping relationships,
* if `a_id` already has a value, it must be the same as
* `b`.
*/
let tcx = self.tcx;
let table = UnifyKey::unification_table(self);
let node_a = table.borrow_mut().get(tcx, a_id);
let a_id = node_a.key.clone();
match node_a.value {
None => {
table.borrow_mut().set(tcx, a_id, Root(Some(b), node_a.rank));
return Ok(());
}
Some(ref a_t) => {
if *a_t == b {
return Ok(());
} else {
return err(a_is_expected, (*a_t).clone(), b);
}
}
}
}
}
///////////////////////////////////////////////////////////////////////////
// General type keys
impl UnifyKey<Bounds<ty::t>> for ty::TyVid {
fn index(&self) -> uint { self.index }
fn from_index(i: uint) -> ty::TyVid { ty::TyVid { index: i } }
fn unification_table<'v>(infcx: &'v InferCtxt)
-> &'v RefCell<UnificationTable<ty::TyVid, Bounds<ty::t>>>
{
return &infcx.type_unification_table;
}
fn tag(_: Option<ty::TyVid>) -> &'static str {
"TyVid"
}<|fim▁hole|>
impl UnifyValue for Bounds<ty::t> { }
// Integral type keys
impl UnifyKey<Option<IntVarValue>> for ty::IntVid {
fn index(&self) -> uint { self.index }
fn from_index(i: uint) -> ty::IntVid { ty::IntVid { index: i } }
fn unification_table<'v>(infcx: &'v InferCtxt)
-> &'v RefCell<UnificationTable<ty::IntVid, Option<IntVarValue>>>
{
return &infcx.int_unification_table;
}
fn tag(_: Option<ty::IntVid>) -> &'static str {
"IntVid"
}
}
impl SimplyUnifiable for IntVarValue {
fn to_type_err(err: expected_found<IntVarValue>) -> ty::type_err {
return ty::terr_int_mismatch(err);
}
}
impl UnifyValue for Option<IntVarValue> { }
// Floating point type keys
impl UnifyKey<Option<ast::FloatTy>> for ty::FloatVid {
fn index(&self) -> uint { self.index }
fn from_index(i: uint) -> ty::FloatVid { ty::FloatVid { index: i } }
fn unification_table<'v>(infcx: &'v InferCtxt)
-> &'v RefCell<UnificationTable<ty::FloatVid, Option<ast::FloatTy>>>
{
return &infcx.float_unification_table;
}
fn tag(_: Option<ty::FloatVid>) -> &'static str {
"FloatVid"
}
}
impl UnifyValue for Option<ast::FloatTy> {
}
impl SimplyUnifiable for ast::FloatTy {
fn to_type_err(err: expected_found<ast::FloatTy>) -> ty::type_err {
return ty::terr_float_mismatch(err);
}
}<|fim▁end|>
|
}
|
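unify.rs above implements Tarjan's union-find (redirect pointers, ranks, path compression) underneath rustc's inference tables. The sketch below restates just that core algorithm in a self-contained form; the snapshot/rollback bookkeeping and the typed key/value traits of the Rust file are deliberately omitted, and all names here are illustrative.

# Union-find with union by rank and path compression, mirroring the scheme
# described in the unify.rs doc comments: every key is either a root carrying
# a rank or a redirect to another key.
class UnionFind:
    def __init__(self):
        self.parent = []   # parent[i] == i means i is a root
        self.rank = []

    def new_key(self):
        index = len(self.parent)
        self.parent.append(index)
        self.rank.append(0)
        return index

    def find(self, key):
        root = key
        while self.parent[root] != root:
            root = self.parent[root]
        while self.parent[key] != root:          # path compression
            self.parent[key], key = root, self.parent[key]
        return root

    def union(self, a, b):
        ra, rb = self.find(a), self.find(b)
        if ra == rb:
            return ra
        if self.rank[ra] < self.rank[rb]:        # attach lower rank under higher
            ra, rb = rb, ra
        self.parent[rb] = ra
        if self.rank[ra] == self.rank[rb]:       # equal ranks: new root grows
            self.rank[ra] += 1
        return ra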
<|file_name|>YesNoAgent.java<|end_file_name|><|fim▁begin|>package com.almende.dialog.example.agent;
import java.io.Serializable;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Logger;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Response;
import com.almende.dialog.Settings;
import com.almende.dialog.model.Answer;
import com.almende.dialog.model.Question;
import com.almende.util.ParallelInit;
import com.almende.util.twigmongo.QueryResultIterator;
import com.almende.util.twigmongo.TwigCompatibleMongoDatastore;
import com.almende.util.twigmongo.TwigCompatibleMongoDatastore.RootFindCommand;
import com.almende.util.twigmongo.annotations.Id;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
@Path("yesno")
public class YesNoAgent {
static final ObjectMapper om =ParallelInit.getObjectMapper();
private static final String URL = "http://"+Settings.HOST+"/yesno/";
private static final String SOUNDURL = "http://ask4604.ask46.customers.luna.net/rest/";
private static final Logger log = Logger
.getLogger("DialogHandler");
public Question getQuestion(int question_no, String preferred_medium, String phonenumber) {
String questionURL = URL+"questions/"+question_no;
String answerYesURL = URL+"answers/0";
String answerNoURL = URL+"answers/1";
if (preferred_medium != null && preferred_medium.startsWith("audio")){
questionURL = this.getAudioFile(question_no);
answerYesURL= SOUNDURL+"14.wav";
answerNoURL= SOUNDURL+"14.wav";
}
Question question=new Question();
question.setRequester(URL+"id/");
question.setType("closed");
question.setQuestion_text(questionURL);
question.setAnswers(new ArrayList<Answer>(Arrays.asList(
new Answer(answerYesURL, URL+"questions/"+question_no+"?preferred_medium="+preferred_medium+"&pn="+phonenumber+"&answer=yes"),
new Answer(answerNoURL, URL+"questions/"+question_no+"?preferred_medium="+preferred_medium+"&pn="+phonenumber+"&answer=no"))));
return question;
}
@GET
@Path("/id/")
public Response getId(@QueryParam("preferred_language") String preferred_language){
ObjectNode node= om.createObjectNode();
node.put("url", URL);
node.put("nickname", "YesNo");
return Response.ok(node.toString()).build();
}
@GET
@Produces("application/json")
public Response firstQuestion(@QueryParam("preferred_medium") String preferred_medium, @QueryParam("remoteAddress") String responder, @QueryParam("requester") String requester){<|fim▁hole|> if(requester.contains("live") || requester.contains("0107421217")){
questionNo=1;
}
try {
responder = URLDecoder.decode(responder, "UTF-8");
} catch (Exception ex) {
log.severe(ex.getMessage());
}
Question question = getQuestion(questionNo, preferred_medium, responder);
return Response.ok(question.toJSON()).build();
}
@Path("/questions/{question_no}")
@POST
@Produces("application/json")
@Consumes("*/*")
public Response answerQuestion(@PathParam("question_no") String question_no, @QueryParam("preferred_medium") String preferred_medium,
@QueryParam("pn") String phonenumber, @QueryParam("answer") String answer){
Group group = this.getGroup("Group."+question_no+"."+answer);
group.addMember(phonenumber);
TwigCompatibleMongoDatastore datastore = new TwigCompatibleMongoDatastore();
datastore.store(group);
int responseQuestion=99;
String questionURL = URL+"questions/"+responseQuestion;
if (preferred_medium != null && preferred_medium.startsWith("audio")){
questionURL = this.getAudioFile(responseQuestion);
}
Question question=new Question();
question.setRequester(URL+"id/");
question.setType("comment");
question.setQuestion_text(questionURL);
return Response.ok( question.toJSON() ).build();
}
@Path("/questions/{question_no}")
@GET
@Produces("text/plain")
@Consumes("*/*")
public Response getQuestionText(@PathParam("question_no") String question_no ){
Integer questionNo = Integer.parseInt(question_no);
String result = "";
// These messages are now static but should be loaded from the LifeRay Database.
switch (questionNo){
case 0: result="Press 1 if you are available, press 2 if you are unavailable."; break;
case 1: result="Are you available?"; break;
case 99: result="Thank you for your input"; break;
default: result="Sorry, for some strange reason I don't have that question text available...";
}
return Response.ok(result).build();
}
@Path("/answers/{answer_no}")
@GET
@Produces("text/plain")
@Consumes("*/*")
public Response getAnswerText(@PathParam("answer_no") String answer_no, @QueryParam("preferred_medium") String prefered_mimeType){
Integer answerNo = Integer.parseInt(answer_no);
String result="";
// These messages can be static, because they are always the same.
switch (answerNo){
case 0: result="Yes"; break;
case 1: result="No"; break;
default: result="Sorry, for some strange reason I don't have that answer text available...";
}
return Response.ok(result).build();
}
// This urls will present the results
@Path("result")
@GET
public Response getResults() {
String result="";
ArrayList<Group> groups = (ArrayList<Group>) this.getAllGroups();
try {
result = om.writeValueAsString(groups);
} catch(Exception ex) {
ex.printStackTrace();
}
return Response.ok( result ).build();
}
// These functions should get there data from the liferay database.
// These are the audio files linked to the questions
public String getAudioFile(int question_no) {
switch(question_no) {
case 0: return SOUNDURL+"571.wav";
case 1: return SOUNDURL+"572.wav";
case 99: return SOUNDURL+"567.wav";
default: return SOUNDURL+"529.wav";
}
}
// These 2 functions are the group management
public Group getGroup(String id) {
TwigCompatibleMongoDatastore datastore = new TwigCompatibleMongoDatastore();
Group group = datastore.load(Group.class, id);
if(group!=null)
return group;
group = new Group();
group.setId(id);
return group;
}
public List<Group> getAllGroups() {
TwigCompatibleMongoDatastore datastore = new TwigCompatibleMongoDatastore();
RootFindCommand<Group> command = datastore.find()
.type(Group.class);
QueryResultIterator<Group> it = command.now();
List<Group> groups = new ArrayList<Group>();
while (it.hasNext()) {
groups.add(it.next());
}
return groups;
}
}
@SuppressWarnings("serial")
class Group implements Serializable {
public Group() {
this.members=new HashSet<String>();
}
public String getId(){
return id;
}
public void setId(String id){
this.id=id;
}
public Set<String> getMembers() {
return this.members;
}
public void addMember(String member) {
this.members.add(member);
}
@Id private String id=null;
private Set<String> members=null;
}<|fim▁end|>
|
int questionNo=0;
|
<|file_name|>all.go<|end_file_name|><|fim▁begin|>// Original: src/os/path.go
//
// Copyright 2009 The Go Authors. All rights reserved.
// Portions Copyright 2016 Hiroshi Ioka. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package smb2
import (
"io"
"os"
"syscall"
)
// MkdirAll mimics os.MkdirAll
func (fs *Share) MkdirAll(path string, perm os.FileMode) error {
path = normPath(path)
// Fast path: if we can tell whether path is a directory or file, stop with success or error.
dir, err := fs.Stat(path)
if err == nil {
if dir.IsDir() {
return nil
}
return &os.PathError{Op: "mkdir", Path: path, Err: syscall.ENOTDIR}
}
// Slow path: make sure parent exists and then call Mkdir for path.
i := len(path)
for i > 0 && IsPathSeparator(path[i-1]) { // Skip trailing path separator.
i--
}
j := i
for j > 0 && !IsPathSeparator(path[j-1]) { // Scan backward over element.
j--
}
if j > 1 {
// Create parent
err = fs.MkdirAll(path[0:j-1], perm)
if err != nil {
return err
}
}
// Parent now exists; invoke Mkdir and use its result.
err = fs.Mkdir(path, perm)
if err != nil {
// Handle arguments like "foo/." by
// double-checking that directory doesn't exist.
dir, err1 := fs.Lstat(path)
if err1 == nil && dir.IsDir() {
return nil
}
return err
}
return nil
}
// RemoveAll removes path and any children it contains.
// It removes everything it can but returns the first error
// it encounters. If the path does not exist, RemoveAll
// returns nil (no error).
func (fs *Share) RemoveAll(path string) error {
path = normPath(path)
// Simple case: if Remove works, we're done.
err := fs.Remove(path)
if err == nil || os.IsNotExist(err) {
return nil
}
// Otherwise, is this a directory we need to recurse into?
dir, serr := fs.Lstat(path)
if serr != nil {
if serr, ok := serr.(*os.PathError); ok && (os.IsNotExist(serr.Err) || serr.Err == syscall.ENOTDIR) {
return nil
}
return serr
}
if !dir.IsDir() {
// Not a directory; return the error from Remove.
return err
}
// Directory.
fd, err := fs.Open(path)
if err != nil {
if os.IsNotExist(err) {
// Race. It was deleted between the Lstat and Open.
// Return nil per RemoveAll's docs.
return nil
}
return err
}
// Remove contents & return first error.
err = nil
for {
names, err1 := fd.Readdirnames(100)
for _, name := range names {
err1 := fs.RemoveAll(path + string(PathSeparator) + name)
if err == nil {
err = err1<|fim▁hole|> break
}
// If Readdirnames returned an error, use it.
if err == nil {
err = err1
}
if len(names) == 0 {
break
}
}
// Close directory, because windows won't remove opened directory.
fd.Close()
// Remove directory.
err1 := fs.Remove(path)
if err1 == nil || os.IsNotExist(err1) {
return nil
}
if err == nil {
err = err1
}
return err
}<|fim▁end|>
|
}
}
if err1 == io.EOF {
|
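RemoveAll in all.go above ports Go's os.RemoveAll onto the smb2 Share client: try a plain Remove, fall back to recursing into directories, remember the first error, and remove the emptied directory last. The sketch below restates that control flow against the local filesystem using Python's os module; it illustrates the algorithm only and makes no claims about the smb2 package's API.

# Recursive remove-all in the style of the Go code above: fast-path remove,
# recurse into directories, keep the first error, remove the directory last.
import os

def remove_all(path):
    try:
        os.remove(path)                  # fast path: regular file or symlink
        return
    except FileNotFoundError:
        return                           # already gone: not an error
    except OSError as exc:
        if not os.path.isdir(path):
            raise exc                    # not a directory: report the failure
    first_error = None
    for name in os.listdir(path):        # remove children, keep the first error
        try:
            remove_all(os.path.join(path, name))
        except OSError as exc:
            first_error = first_error or exc
    try:
        os.rmdir(path)                   # directory should now be empty
    except FileNotFoundError:
        pass
    except OSError as exc:
        first_error = first_error or exc
    if first_error is not None:
        raise first_error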
<|file_name|>app.modules.js<|end_file_name|><|fim▁begin|>/*global require,module,console,angular */
require("angular/angular");
require("angular-route/angular-route");
angular.module("RegistrationApp", ["ngRoute"]);
angular.module("RegistrationApp").controller("RegistrationCtrl", require("./components/registration/controller"));
angular.module("RegistrationApp").directive("myRegistration", require("./components/registration/directive"));
angular.module("MainApp", [
"RegistrationApp"
]);
<|fim▁hole|><|fim▁end|>
|
angular.module("MainApp").config(["$routeProvider", "$locationProvider", require("./app.routes")]);
|
<|file_name|>player.js<|end_file_name|><|fim▁begin|>var m2pong = require('./m2pong');
Player = function(connection, name, nr){
this.connection = connection;
this.name = name;<|fim▁hole|>
this.x = 0;
this.y = 0;
this.height = 0;
this.width = 0;
this.score = 0;
this.move = function(x, y){
this.x = x;
this.y = y;
m2pong.sendToDisplays('movePlayer', {
nr: this.nr,
x: this.x,
y: this.y
});
};
this.setScore = function(score){
this.score = score;
m2pong.sendToDisplays('setScore', {
nr: this.nr,
score: score
});
};
};
exports.Player = Player;<|fim▁end|>
|
this.nr = nr;
|
<|file_name|>read_multifield_dataset.py<|end_file_name|><|fim▁begin|>import sys
if __name__ == "__main__":
# Parse command line arguments
if len(sys.argv) < 2:
sys.exit("python {} <datasetFilename> {{<maxPoints>}}".format(sys.argv[0]))
datasetFilename = sys.argv[1]
if len(sys.argv) >= 3:
maxPoints = int(sys.argv[2])
else:
maxPoints = None
# Perform initial pass through file to determine line count (i.e. # of points)
lineCount = 0
with open(datasetFilename, "r") as f:
line = f.readline()
while line:
lineCount += 1
line = f.readline()
# Read first line and use to make assumption about the dimensionality of each point
numDimensions = 0
with open(datasetFilename, "r") as f:
firstLine = f.readline()
numDimensions = len(firstLine.split())
# If dimensionality of dataset is 0, print error message and exit
if numDimensions == 0:
sys.exit("Could not determine dimensionality of dataset")
    # Output the dataset header (dimensionality and number of points); the initial pass above already counted the points
if maxPoints:
numPoints = min(lineCount, maxPoints)
else:
numPoints = lineCount
print("{} {}".format(numDimensions, numPoints))
    # Read the entire file again line-by-line, printing out each line as a point
with open(datasetFilename, "r") as f:
pointsRead = 0
line = f.readline()
while line:
fields = line.split()
floatFields = [ str(float(x)) for x in fields ]
print(" ".join(floatFields))
            # Stop reading the file once the maximum number of points has been read
pointsRead += 1
if maxPoints and pointsRead >= maxPoints:
break
# Read next line of file<|fim▁hole|><|fim▁end|>
|
line = f.readline()
|
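The script above re-emits a whitespace-separated dataset as a header line ("<numDimensions> <numPoints>") followed by one normalised point per line. The snippet below walks a made-up 2-D example through the same transformation; the sample values are invented purely for illustration.

# Illustration of the transformation performed by read_multifield_dataset.py
# above, using a hypothetical 2-D dataset with three points.
example_input = """\
1 2
3 4
5 6
"""
expected_output = """\
2 3
1.0 2.0
3.0 4.0
5.0 6.0
"""

if __name__ == "__main__":
    lines = example_input.splitlines()
    header = "{} {}".format(len(lines[0].split()), len(lines))
    body = [" ".join(str(float(x)) for x in line.split()) for line in lines]
    assert "\n".join([header] + body) + "\n" == expected_output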
<|file_name|>pyunit_bernoulli_synthetic_data_GBM_medium.py<|end_file_name|><|fim▁begin|>from builtins import range
import sys, os
sys.path.insert(1, os.path.join("..","..",".."))
import h2o
from tests import pyunit_utils
from h2o import H2OFrame
import numpy as np
import numpy.random
import scipy.stats
from sklearn import ensemble
from sklearn.metrics import roc_auc_score
from h2o.estimators.gbm import H2OGradientBoostingEstimator
def bernoulli_synthetic_data_gbm_medium():
# Generate training dataset (adaptation of http://www.stat.missouri.edu/~speckman/stat461/boost.R)
train_rows = 10000
train_cols = 10
# Generate variables V1, ... V10
X_train = np.random.randn(train_rows, train_cols)
# y = +1 if sum_i x_{ij}^2 > chisq median on 10 df
y_train = np.asarray([1 if rs > scipy.stats.chi2.ppf(0.5, 10) else -1 for rs in [sum(r) for r in
np.multiply(X_train,X_train).tolist()]])
# Train scikit gbm
# TODO: grid-search
distribution = "bernoulli"
ntrees = 150
min_rows = 1
max_depth = 2
learn_rate = .01
nbins = 20
gbm_sci = ensemble.GradientBoostingClassifier(learning_rate=learn_rate, n_estimators=ntrees, max_depth=max_depth,
min_samples_leaf=min_rows, max_features=None)
gbm_sci.fit(X_train,y_train)
# Generate testing dataset
test_rows = 2000<|fim▁hole|>
# Generate variables V1, ... V10
X_test = np.random.randn(test_rows, test_cols)
# y = +1 if sum_i x_{ij}^2 > chisq median on 10 df
y_test = np.asarray([1 if rs > scipy.stats.chi2.ppf(0.5, 10) else -1 for rs in [sum(r) for r in
np.multiply(X_test,X_test).tolist()]])
# Score (AUC) the scikit gbm model on the test data
auc_sci = roc_auc_score(y_test, gbm_sci.predict_proba(X_test)[:,1])
# Compare this result to H2O
xtrain = np.transpose(X_train).tolist()
ytrain = y_train.tolist()
xtest = np.transpose(X_test).tolist()
ytest = y_test.tolist()
train_h2o = H2OFrame(list(zip(*[ytrain]+xtrain)))
test_h2o = H2OFrame(list(zip(*[ytest]+xtest)))
train_h2o["C1"] = train_h2o["C1"].asfactor()
test_h2o["C1"] = test_h2o["C1"].asfactor()
gbm_h2o = H2OGradientBoostingEstimator(distribution=distribution,
ntrees=ntrees,
min_rows=min_rows,
max_depth=max_depth,
learn_rate=learn_rate,
nbins=nbins)
gbm_h2o.train(x=list(range(1,train_h2o.ncol)), y="C1", training_frame=train_h2o)
gbm_perf = gbm_h2o.model_performance(test_h2o)
auc_h2o = gbm_perf.auc()
#Log.info(paste("scikit AUC:", auc_sci, "\tH2O AUC:", auc_h2o))
assert abs(auc_h2o - auc_sci) < 1e-2, "h2o (auc) performance degradation, with respect to scikit. h2o auc: {0} " \
"scickit auc: {1}".format(auc_h2o, auc_sci)
if __name__ == "__main__":
pyunit_utils.standalone_test(bernoulli_synthetic_data_gbm_medium)
else:
bernoulli_synthetic_data_gbm_medium()<|fim▁end|>
|
test_cols = 10
|
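The test above labels each row +1 when the sum of its squared features exceeds the chi-square median with 10 degrees of freedom (as stated in the file's comments). The snippet below is an equivalent vectorised restatement of that labelling rule; it is not part of the H2O test itself.

# Vectorised form of the labelling rule used in the H2O/scikit comparison
# above: a row is +1 when the sum of squares of its standard-normal features
# exceeds the chi-square median for that many degrees of freedom.
import numpy as np
import scipy.stats

def make_labels(X):
    threshold = scipy.stats.chi2.ppf(0.5, df=X.shape[1])
    return np.where((X ** 2).sum(axis=1) > threshold, 1, -1)

if __name__ == "__main__":
    X = np.random.randn(1000, 10)
    y = make_labels(X)          # roughly balanced classes by construction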
<|file_name|>exposure.js<|end_file_name|><|fim▁begin|>/*
Copyright: © 2011 Thomas Stein, CodeLounge.de
<mailto:[email protected]> <http://www.codelounge.de/>
Released under the terms of the GNU General Public License.
You should have received a copy of the GNU General Public License,
along with this software. In the main directory, see: licence.txt
If not, see: <http://www.gnu.org/licenses/>.
*/
/*
* MBP - Mobile boilerplate helper functions
*/
(function(document){
window.MBP = window.MBP || {};
// Fix for iPhone viewport scale bug
// http://www.blog.highub.com/mobile-2/a-fix-for-iphone-viewport-scale-bug/
MBP.viewportmeta = document.querySelector && document.querySelector('meta[name="viewport"]');
MBP.ua = navigator.userAgent;
MBP.scaleFix = function () {
if (MBP.viewportmeta && /iPhone|iPad/.test(MBP.ua) && !/Opera Mini/.test(MBP.ua)) {
MBP.viewportmeta.content = "width=device-width, minimum-scale=1.0, maximum-scale=1.0";
document.addEventListener("gesturestart", MBP.gestureStart, false);
}
};
MBP.gestureStart = function () {
MBP.viewportmeta.content = "width=device-width, minimum-scale=0.25, maximum-scale=1.6";
};
// Hide URL Bar for iOS
// http://remysharp.com/2010/08/05/doing-it-right-skipping-the-iphone-url-bar/
MBP.hideUrlBar = function () {
/iPhone/.test(MBP.ua) && !pageYOffset && !location.hash && setTimeout(function () {
window.scrollTo(0, 1);
}, 1000),
/iPad/.test(MBP.ua) && !pageYOffset && !location.hash && setTimeout(function () {
window.scrollTo(0, 1);
}, 1000);
};
})(document);
jQuery( function() {
$("a.facebox").fancybox();
//$("a.fancybox").prettyPhoto({
// social_tools: false
//});
jQuery('.entenlogo').click(function() {
$('.entenlogo').hide();
});
var current_url = $(location).attr('href');
//console.log($(location).attr('href'));
jQuery('body').bind( 'taphold', function( e ) {
//$('#next_post_link').attr('refresh');
//$('#previous_post_link').attr('refresh');
$('#page').page('refresh');
var next_url = $('#next_post_link').attr('href');
var previous_url = $('#previous_post_link').attr('href');
console.log(next_url + ' --- ' + previous_url);
e.stopImmediatePropagation();
return false;
} );
jQuery('body').bind( 'swipeleft', function( e ) {
var next_url = $('.ui-page-active #next_post_link').attr('href');
var previous_url = $('.ui-page-active #previous_post_link').attr('href');
console.log("Swiped Left: " + next_url + ' --- ' + previous_url);
if (undefined != previous_url) {
//$.mobile.changePage( previous_url,"slide", true);
$.mobile.changePage( previous_url, {
transition: "slide",
reverse: false,
changeHash: true
});
e.stopImmediatePropagation();
return false;
}
} );
jQuery('body').bind( 'swiperight', function( e ) {<|fim▁hole|> console.log("Swiped Right: " + next_url + ' --- ' + previous_url);
if (undefined != next_url) {
//$.mobile.changePage( next_url, "slide", true);
$.mobile.changePage( next_url, {
transition: "slide",
reverse: true,
changeHash: true
});
e.stopImmediatePropagation();
return false;
}
} );
} );<|fim▁end|>
|
var next_url = $('.ui-page-active #next_post_link').attr('href');
var previous_url = $('.ui-page-active #previous_post_link').attr('href');
|
<|file_name|>attribute_test.js<|end_file_name|><|fim▁begin|>"use strict";
import chai from "chai";
import chaiAsPromised from "chai-as-promised";
import sinon from "sinon";
import BusinessElementsClient from "../src";
import uuid from "uuid";
import * as requests from "../src/requests";
chai.use(chaiAsPromised);<|fim▁hole|>const FAKE_SERVER_URL = "http://api.fake-server";
/** @test {Attribute} */
describe("Attribute", () => {
let sandbox, client, attributeId, attribute;
beforeEach(() => {
sandbox = sinon.sandbox.create();
client = new BusinessElementsClient(FAKE_SERVER_URL);
attributeId = uuid.v4();
attribute = client.tenant("example.com").attributes().attribute(attributeId);
});
afterEach(() => {
sandbox.restore();
});
/** @test {Attribute#get} */
describe("#get()", () => {
const data = {id: attributeId};
beforeEach(() => {
sandbox.stub(client, "execute").returns(Promise.resolve(data));
});
it("should get capture", () => {
attribute.get();
sinon.assert.calledWithMatch(client.execute, {
path: `/attributes/${attributeId}`
});
});
it("should return attribute data", () => {
return attribute.get().should.become(data);
});
});
/** @test {Attribute#edit} */
describe("#edit()", () => {
const response = {status: "Ok"};
const schema = {
"type": "object",
"properties": {
"type": {
"title": "type",
"type": "string"
}
}
};
beforeEach(() => {
sandbox.stub(client, "execute").returns(Promise.resolve(response));
sandbox.spy(requests, "updateAttribute");
});
it("should edit the attribute", () => {
attribute.edit(schema, {});
sinon.assert.calledWithMatch(requests.updateAttribute, attributeId, schema);
});
it("should return success", () => {
return attribute.edit(schema, {}).should.eventually.become(response);
});
});
/** @test {Attribute#remove} */
describe("#remove()", () => {
const response = {status: "Ok"};
beforeEach(() => {
sandbox.stub(client, "execute").returns(Promise.resolve(response));
sandbox.spy(requests, "deleteAttribute");
});
it("should delete the attribute", () => {
attribute.remove({});
sinon.assert.calledWithMatch(requests.deleteAttribute, attributeId);
});
it("should return success", () => {
return attribute.remove({}).should.eventually.become(response);
});
});
});<|fim▁end|>
|
chai.should();
chai.config.includeStack = true;
|
<|file_name|>CommandClassHumidityControlSetpointV1.java<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2016-2017 by the respective copyright holders.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package com.zsmartsystems.zwave.commandclass.impl;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Class to implement the Z-Wave command class <b>COMMAND_CLASS_HUMIDITY_CONTROL_SETPOINT</b> version <b>1</b>.
* <p>
* Command Class Humidity Control Setpoint
* <p>
* This class provides static methods for processing received messages (message handler) and
* methods to get a message to send on the Z-Wave network.
* <p>
* Command class key is 0x64.
* <p>
* Note that this code is autogenerated. Manual changes may be overwritten.
*
* @author Chris Jackson - Initial contribution of Java code generator
*/
public class CommandClassHumidityControlSetpointV1 {
private static final Logger logger = LoggerFactory.getLogger(CommandClassHumidityControlSetpointV1.class);
/**
* Integer command class key for COMMAND_CLASS_HUMIDITY_CONTROL_SETPOINT
*/
public final static int COMMAND_CLASS_KEY = 0x64;
/**
* Humidity Control Setpoint Set Command Constant
*/
public final static int HUMIDITY_CONTROL_SETPOINT_SET = 0x01;
/**
* Humidity Control Setpoint Get Command Constant
*/
public final static int HUMIDITY_CONTROL_SETPOINT_GET = 0x02;
/**
* Humidity Control Setpoint Report Command Constant
*/
public final static int HUMIDITY_CONTROL_SETPOINT_REPORT = 0x03;
/**
* Humidity Control Setpoint Supported Get Command Constant
*/
public final static int HUMIDITY_CONTROL_SETPOINT_SUPPORTED_GET = 0x04;
/**
* Humidity Control Setpoint Supported Report Command Constant
*/
public final static int HUMIDITY_CONTROL_SETPOINT_SUPPORTED_REPORT = 0x05;
/**
* Humidity Control Setpoint Scale Supported Get Command Constant
*/
public final static int HUMIDITY_CONTROL_SETPOINT_SCALE_SUPPORTED_GET = 0x06;
/**
* Humidity Control Setpoint Scale Supported Report Command Constant
*/
public final static int HUMIDITY_CONTROL_SETPOINT_SCALE_SUPPORTED_REPORT = 0x07;
/**
* Humidity Control Setpoint Capabilities Get Command Constant
*/
public final static int HUMIDITY_CONTROL_SETPOINT_CAPABILITIES_GET = 0x08;
/**
* Humidity Control Setpoint Capabilities Report Command Constant
*/
public final static int HUMIDITY_CONTROL_SETPOINT_CAPABILITIES_REPORT = 0x09;
/**
* Map holding constants for HumidityControlSetpointGetSetpointType
*/
private static Map<Integer, String> constantHumidityControlSetpointGetSetpointType = new HashMap<Integer, String>();
/**
* Map holding constants for HumidityControlSetpointCapabilitiesGetSetpointType
*/
private static Map<Integer, String> constantHumidityControlSetpointCapabilitiesGetSetpointType = new HashMap<Integer, String>();
/**
* Map holding constants for HumidityControlSetpointSupportedReportBitMask
*/
private static Map<Integer, String> constantHumidityControlSetpointSupportedReportBitMask = new HashMap<Integer, String>();
/**
* Map holding constants for HumidityControlSetpointScaleSupportedGetSetpointType
*/
private static Map<Integer, String> constantHumidityControlSetpointScaleSupportedGetSetpointType = new HashMap<Integer, String>();
/**
* Map holding constants for HumidityControlSetpointSetSetpointType
*/
private static Map<Integer, String> constantHumidityControlSetpointSetSetpointType = new HashMap<Integer, String>();
/**
* Map holding constants for HumidityControlSetpointReportScale
*/
private static Map<Integer, String> constantHumidityControlSetpointReportScale = new HashMap<Integer, String>();
/**
* Map holding constants for HumidityControlSetpointCapabilitiesReportSetpointType
*/
private static Map<Integer, String> constantHumidityControlSetpointCapabilitiesReportSetpointType = new HashMap<Integer, String>();
/**
* Map holding constants for HumidityControlSetpointReportSetpointType
*/
private static Map<Integer, String> constantHumidityControlSetpointReportSetpointType = new HashMap<Integer, String>();
/**
* Map holding constants for HumidityControlSetpointCapabilitiesReportScale2
*/
private static Map<Integer, String> constantHumidityControlSetpointCapabilitiesReportScale2 = new HashMap<Integer, String>();
/**
* Map holding constants for HumidityControlSetpointCapabilitiesReportScale1
*/
private static Map<Integer, String> constantHumidityControlSetpointCapabilitiesReportScale1 = new HashMap<Integer, String>();
/**
* Map holding constants for HumidityControlSetpointScaleSupportedReportScaleBitMask
*/
private static Map<Integer, String> constantHumidityControlSetpointScaleSupportedReportScaleBitMask = new HashMap<Integer, String>();
/**
* Map holding constants for HumidityControlSetpointSetScale
*/
private static Map<Integer, String> constantHumidityControlSetpointSetScale = new HashMap<Integer, String>();
static {
// Constants for HumidityControlSetpointGetSetpointType
constantHumidityControlSetpointGetSetpointType.put(0x00, "HUMIDIFIER");
constantHumidityControlSetpointGetSetpointType.put(0x01, "DEHUMIDIFIER");
// Constants for HumidityControlSetpointCapabilitiesGetSetpointType
constantHumidityControlSetpointCapabilitiesGetSetpointType.put(0x00, "HUMIDIFIER");
constantHumidityControlSetpointCapabilitiesGetSetpointType.put(0x01, "DEHUMIDIFIER");
// Constants for HumidityControlSetpointSupportedReportBitMask
constantHumidityControlSetpointSupportedReportBitMask.put(0x01, "HUMIDIFIER");
constantHumidityControlSetpointSupportedReportBitMask.put(0x02, "DEHUMIDIFIER");
// Constants for HumidityControlSetpointScaleSupportedGetSetpointType
constantHumidityControlSetpointScaleSupportedGetSetpointType.put(0x00, "HUMIDIFIER");
constantHumidityControlSetpointScaleSupportedGetSetpointType.put(0x01, "DEHUMIDIFIER");
// Constants for HumidityControlSetpointSetSetpointType
constantHumidityControlSetpointSetSetpointType.put(0x00, "HUMIDIFIER");
constantHumidityControlSetpointSetSetpointType.put(0x01, "DEHUMIDIFIER");
// Constants for HumidityControlSetpointReportScale
constantHumidityControlSetpointReportScale.put(0x00, "PERCENTAGE");
constantHumidityControlSetpointReportScale.put(0x01, "ABSOLUTE");
// Constants for HumidityControlSetpointCapabilitiesReportSetpointType
constantHumidityControlSetpointCapabilitiesReportSetpointType.put(0x00, "HUMIDIFIER");
constantHumidityControlSetpointCapabilitiesReportSetpointType.put(0x01, "DEHUMIDIFIER");
// Constants for HumidityControlSetpointReportSetpointType
constantHumidityControlSetpointReportSetpointType.put(0x00, "HUMIDIFIER");
constantHumidityControlSetpointReportSetpointType.put(0x01, "DEHUMIDIFIER");
// Constants for HumidityControlSetpointCapabilitiesReportScale2
constantHumidityControlSetpointCapabilitiesReportScale2.put(0x00, "PERCENTAGE");
constantHumidityControlSetpointCapabilitiesReportScale2.put(0x01, "ABSOLUTE");
// Constants for HumidityControlSetpointCapabilitiesReportScale1
constantHumidityControlSetpointCapabilitiesReportScale1.put(0x00, "PERCENTAGE");
constantHumidityControlSetpointCapabilitiesReportScale1.put(0x01, "ABSOLUTE");
// Constants for HumidityControlSetpointScaleSupportedReportScaleBitMask
constantHumidityControlSetpointScaleSupportedReportScaleBitMask.put(0x00, "PERCENTAGE");
constantHumidityControlSetpointScaleSupportedReportScaleBitMask.put(0x01, "ABSOLUTE");
// Constants for HumidityControlSetpointSetScale
constantHumidityControlSetpointSetScale.put(0x00, "PERCENTAGE");
constantHumidityControlSetpointSetScale.put(0x01, "ABSOLUTE");
}
/**
* Creates a new message with the HUMIDITY_CONTROL_SETPOINT_SET command.
* <p>
* Humidity Control Setpoint Set
*
* @param setpointType {@link String}
* Can be one of the following -:
* <p>
* <ul>
* <li>HUMIDIFIER
* <li>DEHUMIDIFIER
* </ul>
* @param scale {@link String}
* Can be one of the following -:
* <p>
* <ul>
* <li>PERCENTAGE
* <li>ABSOLUTE
* </ul>
* @param precision {@link Integer}
* @param value {@link byte[]}
* @return the {@link byte[]} array with the command to send
*/
static public byte[] getHumidityControlSetpointSet(String setpointType, String scale, Integer precision,
byte[] value) {
logger.debug("Creating command message HUMIDITY_CONTROL_SETPOINT_SET version 1");
ByteArrayOutputStream outputData = new ByteArrayOutputStream();
outputData.write(COMMAND_CLASS_KEY);
outputData.write(HUMIDITY_CONTROL_SETPOINT_SET);
// Process 'Properties1'
int varSetpointType = Integer.MAX_VALUE;
for (Integer entry : constantHumidityControlSetpointSetSetpointType.keySet()) {
if (constantHumidityControlSetpointSetSetpointType.get(entry).equals(setpointType)) {
varSetpointType = entry;
break;
}
}
if (varSetpointType == Integer.MAX_VALUE) {
throw new IllegalArgumentException("Unknown constant value '" + setpointType + "' for setpointType");
}
outputData.write(varSetpointType & 0x0F);
// Process 'Properties2'
// Size is used by 'Value'
int size = value.length;
int valProperties2 = 0;
valProperties2 |= size & 0x07;
int varScale = Integer.MAX_VALUE;
for (Integer entry : constantHumidityControlSetpointSetScale.keySet()) {
if (constantHumidityControlSetpointSetScale.get(entry).equals(scale)) {
varScale = entry;
break;
}
}
if (varScale == Integer.MAX_VALUE) {
throw new IllegalArgumentException("Unknown constant value '" + scale + "' for scale");
}
valProperties2 |= varScale << 3 & 0x18;
valProperties2 |= ((precision << 5) & 0xE0);
outputData.write(valProperties2);
// Process 'Value'
if (value != null) {
try {
outputData.write(value);
} catch (IOException e) {
}
}
return outputData.toByteArray();
}
/**
* Processes a received frame with the HUMIDITY_CONTROL_SETPOINT_SET command.
* <p>
* Humidity Control Setpoint Set
* <p>
* The output data {@link Map} has the following properties -:
*
* <ul>
* <li>SETPOINT_TYPE {@link String}
* Can be one of the following -:
* <p>
* <ul>
* <li>HUMIDIFIER
* <li>DEHUMIDIFIER
* </ul>
* <li>SCALE {@link String}
* Can be one of the following -:
* <p>
* <ul>
* <li>PERCENTAGE
* <li>ABSOLUTE
* </ul>
* <li>PRECISION {@link Integer}
* <li>VALUE {@link byte[]}
* </ul>
*
* @param payload the {@link byte[]} payload data to process
* @return a {@link Map} of processed response data
*/
public static Map<String, Object> handleHumidityControlSetpointSet(byte[] payload) {
// Create our response map
Map<String, Object> response = new HashMap<String, Object>();
// We're using variable length fields, so track the offset
int msgOffset = 2;
// Process 'Properties1'
response.put("SETPOINT_TYPE", constantHumidityControlSetpointSetSetpointType.get(payload[msgOffset] & 0x0F));
msgOffset += 1;
// Process 'Properties2'
// Size is used by 'Value'
int varSize = payload[msgOffset] & 0x07;
response.put("SCALE", constantHumidityControlSetpointSetScale.get((payload[msgOffset] & 0x18) >> 3));
response.put("PRECISION", Integer.valueOf(payload[msgOffset] & 0xE0 >> 5));
msgOffset += 1;
// Process 'Value'
ByteArrayOutputStream valValue = new ByteArrayOutputStream();
for (int cntValue = 0; cntValue < varSize; cntValue++) {
valValue.write(payload[msgOffset + cntValue]);
}
response.put("VALUE", valValue.toByteArray());
msgOffset += varSize;
// Return the map of processed response data;
return response;
}
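    /*
     * Worked example of the Properties2 bit packing used by the setpoint SET
     * frame (comment added for clarity; not part of the generated code).
     * Bits 0-2 hold the value size in bytes, bits 3-4 the scale and bits 5-7
     * the precision. For a 2-byte value, scale ABSOLUTE (1) and precision 1,
     * the encoder produces (2 & 0x07) | ((1 << 3) & 0x18) | ((1 << 5) & 0xE0)
     * = 0x2A, and decoding reverses it: size = 0x2A & 0x07 = 2,
     * scale = (0x2A & 0x18) >> 3 = 1, precision = (0x2A & 0xE0) >> 5 = 1.
     */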
/**
* Creates a new message with the HUMIDITY_CONTROL_SETPOINT_GET command.
* <p>
* Humidity Control Setpoint Get
*
* @param setpointType {@link String}
* Can be one of the following -:
* <p>
* <ul>
* <li>HUMIDIFIER
* <li>DEHUMIDIFIER
* </ul>
* @return the {@link byte[]} array with the command to send
*/
static public byte[] getHumidityControlSetpointGet(String setpointType) {
logger.debug("Creating command message HUMIDITY_CONTROL_SETPOINT_GET version 1");
ByteArrayOutputStream outputData = new ByteArrayOutputStream();
outputData.write(COMMAND_CLASS_KEY);
outputData.write(HUMIDITY_CONTROL_SETPOINT_GET);
// Process 'Properties1'
int varSetpointType = Integer.MAX_VALUE;
for (Integer entry : constantHumidityControlSetpointGetSetpointType.keySet()) {
if (constantHumidityControlSetpointGetSetpointType.get(entry).equals(setpointType)) {
varSetpointType = entry;
break;
}
}
if (varSetpointType == Integer.MAX_VALUE) {
throw new IllegalArgumentException("Unknown constant value '" + setpointType + "' for setpointType");
}
outputData.write(varSetpointType & 0x0F);
return outputData.toByteArray();
}
/**
* Processes a received frame with the HUMIDITY_CONTROL_SETPOINT_GET command.
* <p>
* Humidity Control Setpoint Get
* <p>
* The output data {@link Map} has the following properties -:
*
* <ul>
* <li>SETPOINT_TYPE {@link String}
* Can be one of the following -:
* <p>
* <ul>
* <li>HUMIDIFIER
* <li>DEHUMIDIFIER
* </ul>
* </ul>
*
* @param payload the {@link byte[]} payload data to process
* @return a {@link Map} of processed response data
*/
public static Map<String, Object> handleHumidityControlSetpointGet(byte[] payload) {
// Create our response map
Map<String, Object> response = new HashMap<String, Object>();
// Process 'Properties1'
response.put("SETPOINT_TYPE", constantHumidityControlSetpointGetSetpointType.get(payload[2] & 0x0F));
// Return the map of processed response data;
return response;
}
/**
* Creates a new message with the HUMIDITY_CONTROL_SETPOINT_REPORT command.
* <p>
* Humidity Control Setpoint Report
*
* @param setpointType {@link String}
* Can be one of the following -:
* <p>
* <ul>
* <li>HUMIDIFIER
* <li>DEHUMIDIFIER
* </ul>
* @param scale {@link String}
* Can be one of the following -:
* <p>
* <ul>
* <li>PERCENTAGE
* <li>ABSOLUTE
* </ul>
* @param precision {@link Integer}
* @param value {@link byte[]}
* @return the {@link byte[]} array with the command to send
*/
static public byte[] getHumidityControlSetpointReport(String setpointType, String scale, Integer precision,
byte[] value) {
logger.debug("Creating command message HUMIDITY_CONTROL_SETPOINT_REPORT version 1");
ByteArrayOutputStream outputData = new ByteArrayOutputStream();
outputData.write(COMMAND_CLASS_KEY);
outputData.write(HUMIDITY_CONTROL_SETPOINT_REPORT);
// Process 'Properties1'
int varSetpointType = Integer.MAX_VALUE;
for (Integer entry : constantHumidityControlSetpointReportSetpointType.keySet()) {
if (constantHumidityControlSetpointReportSetpointType.get(entry).equals(setpointType)) {
varSetpointType = entry;
break;
}
}
if (varSetpointType == Integer.MAX_VALUE) {
throw new IllegalArgumentException("Unknown constant value '" + setpointType + "' for setpointType");
}
outputData.write(varSetpointType & 0x0F);
// Process 'Properties2'
// Size is used by 'Value'
int size = value.length;
int valProperties2 = 0;
valProperties2 |= size & 0x07;
int varScale = Integer.MAX_VALUE;
for (Integer entry : constantHumidityControlSetpointReportScale.keySet()) {
if (constantHumidityControlSetpointReportScale.get(entry).equals(scale)) {
varScale = entry;
break;
}
}
if (varScale == Integer.MAX_VALUE) {
throw new IllegalArgumentException("Unknown constant value '" + scale + "' for scale");
}
valProperties2 |= varScale << 3 & 0x18;
valProperties2 |= ((precision << 5) & 0xE0);
outputData.write(valProperties2);
// Process 'Value'
if (value != null) {
try {
outputData.write(value);
} catch (IOException e) {
}
}
return outputData.toByteArray();
}
/**
* Processes a received frame with the HUMIDITY_CONTROL_SETPOINT_REPORT command.
* <p>
* Humidity Control Setpoint Report
* <p>
* The output data {@link Map} has the following properties -:
*
* <ul>
* <li>SETPOINT_TYPE {@link String}
* Can be one of the following -:
* <p>
* <ul>
* <li>HUMIDIFIER
* <li>DEHUMIDIFIER
* </ul>
* <li>SCALE {@link String}
* Can be one of the following -:
* <p>
* <ul>
* <li>PERCENTAGE
* <li>ABSOLUTE
* </ul>
* <li>PRECISION {@link Integer}
* <li>VALUE {@link byte[]}
* </ul>
*
* @param payload the {@link byte[]} payload data to process
* @return a {@link Map} of processed response data
*/
public static Map<String, Object> handleHumidityControlSetpointReport(byte[] payload) {
// Create our response map
Map<String, Object> response = new HashMap<String, Object>();
// We're using variable length fields, so track the offset
int msgOffset = 2;
// Process 'Properties1'
response.put("SETPOINT_TYPE", constantHumidityControlSetpointReportSetpointType.get(payload[msgOffset] & 0x0F));
msgOffset += 1;
// Process 'Properties2'
// Size is used by 'Value'
int varSize = payload[msgOffset] & 0x07;
response.put("SCALE", constantHumidityControlSetpointReportScale.get((payload[msgOffset] & 0x18) >> 3));
        response.put("PRECISION", Integer.valueOf((payload[msgOffset] & 0xE0) >> 5));
msgOffset += 1;
// Process 'Value'
ByteArrayOutputStream valValue = new ByteArrayOutputStream();
for (int cntValue = 0; cntValue < varSize; cntValue++) {
valValue.write(payload[msgOffset + cntValue]);
}
response.put("VALUE", valValue.toByteArray());
msgOffset += varSize;
// Return the map of processed response data;
return response;
}
/**
* Creates a new message with the HUMIDITY_CONTROL_SETPOINT_SUPPORTED_GET command.
* <p>
* Humidity Control Setpoint Supported Get
*
* @return the {@link byte[]} array with the command to send
*/
static public byte[] getHumidityControlSetpointSupportedGet() {
logger.debug("Creating command message HUMIDITY_CONTROL_SETPOINT_SUPPORTED_GET version 1");
ByteArrayOutputStream outputData = new ByteArrayOutputStream();
outputData.write(COMMAND_CLASS_KEY);
outputData.write(HUMIDITY_CONTROL_SETPOINT_SUPPORTED_GET);
return outputData.toByteArray();
}
/**
* Processes a received frame with the HUMIDITY_CONTROL_SETPOINT_SUPPORTED_GET command.
* <p>
* Humidity Control Setpoint Supported Get
*
* @param payload the {@link byte[]} payload data to process
* @return a {@link Map} of processed response data
*/
public static Map<String, Object> handleHumidityControlSetpointSupportedGet(byte[] payload) {
// Create our response map
Map<String, Object> response = new HashMap<String, Object>();
// Return the map of processed response data;
return response;
}
/**
* Creates a new message with the HUMIDITY_CONTROL_SETPOINT_SUPPORTED_REPORT command.
* <p>
* Humidity Control Setpoint Supported Report
*
* @param bitMask {@link List<String>}
* Can be one of the following -:
* <p>
* <ul>
* <li>HUMIDIFIER
* <li>DEHUMIDIFIER
* </ul>
* @return the {@link byte[]} array with the command to send
*/
static public byte[] getHumidityControlSetpointSupportedReport(List<String> bitMask) {
logger.debug("Creating command message HUMIDITY_CONTROL_SETPOINT_SUPPORTED_REPORT version 1");
ByteArrayOutputStream outputData = new ByteArrayOutputStream();
outputData.write(COMMAND_CLASS_KEY);
outputData.write(HUMIDITY_CONTROL_SETPOINT_SUPPORTED_REPORT);
// Process 'Bit Mask'
int valBitMask = 0;
for (String value : bitMask) {
boolean foundBitMask = false;
for (Integer entry : constantHumidityControlSetpointSupportedReportBitMask.keySet()) {
if (constantHumidityControlSetpointSupportedReportBitMask.get(entry).equals(value)) {
foundBitMask = true;
valBitMask += entry;
break;
}
}
if (!foundBitMask) {
                throw new IllegalArgumentException("Unknown constant value '" + value + "' for bitMask");
}
}
outputData.write(valBitMask);
return outputData.toByteArray();
}
/**
* Processes a received frame with the HUMIDITY_CONTROL_SETPOINT_SUPPORTED_REPORT command.
* <p>
* Humidity Control Setpoint Supported Report
* <p>
* The output data {@link Map} has the following properties -:
*
* <ul>
* <li>BIT_MASK {@link List}<{@link String}>
* Can be one of the following -:
* <p>
* <ul>
* <li>HUMIDIFIER
* <li>DEHUMIDIFIER
* </ul>
* </ul>
*
* @param payload the {@link byte[]} payload data to process
* @return a {@link Map} of processed response data
*/
public static Map<String, Object> handleHumidityControlSetpointSupportedReport(byte[] payload) {
// Create our response map
Map<String, Object> response = new HashMap<String, Object>();
// Process 'Bit Mask'
List<String> responseBitMask = new ArrayList<String>();
int lenBitMask = 1;
for (int cntBitMask = 0; cntBitMask < lenBitMask; cntBitMask++) {
if ((payload[2 + (cntBitMask / 8)] & (1 << cntBitMask % 8)) == 0) {
continue;
}
responseBitMask.add(constantHumidityControlSetpointSupportedReportBitMask.get(cntBitMask));
}
response.put("BIT_MASK", responseBitMask);
// Return the map of processed response data;
return response;
}
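    // Note on the decoding above: lenBitMask is fixed at 1, so only bit 0 of the first
    // mask byte (payload[2]) is tested; when it is set, the entry mapped to index 0 in
    // constantHumidityControlSetpointSupportedReportBitMask is added to BIT_MASK. Any
    // further mask bytes a device might send are ignored by this generated handler.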
/**
* Creates a new message with the HUMIDITY_CONTROL_SETPOINT_SCALE_SUPPORTED_GET command.
* <p>
* Humidity Control Setpoint Scale Supported Get
*
* @param setpointType {@link String}
* Can be one of the following -:
* <p>
* <ul>
* <li>HUMIDIFIER
* <li>DEHUMIDIFIER
* </ul>
* @return the {@link byte[]} array with the command to send
*/
static public byte[] getHumidityControlSetpointScaleSupportedGet(String setpointType) {
logger.debug("Creating command message HUMIDITY_CONTROL_SETPOINT_SCALE_SUPPORTED_GET version 1");
ByteArrayOutputStream outputData = new ByteArrayOutputStream();
outputData.write(COMMAND_CLASS_KEY);
outputData.write(HUMIDITY_CONTROL_SETPOINT_SCALE_SUPPORTED_GET);
// Process 'Properties1'
int varSetpointType = Integer.MAX_VALUE;
for (Integer entry : constantHumidityControlSetpointScaleSupportedGetSetpointType.keySet()) {
if (constantHumidityControlSetpointScaleSupportedGetSetpointType.get(entry).equals(setpointType)) {
varSetpointType = entry;
break;
}
}
if (varSetpointType == Integer.MAX_VALUE) {
throw new IllegalArgumentException("Unknown constant value '" + setpointType + "' for setpointType");
}
outputData.write(varSetpointType & 0x0F);
return outputData.toByteArray();
}
/**
* Processes a received frame with the HUMIDITY_CONTROL_SETPOINT_SCALE_SUPPORTED_GET command.
* <p>
* Humidity Control Setpoint Scale Supported Get
* <p>
* The output data {@link Map} has the following properties -:
*
* <ul>
* <li>SETPOINT_TYPE {@link String}
* Can be one of the following -:
* <p>
* <ul>
* <li>HUMIDIFIER
* <li>DEHUMIDIFIER
* </ul>
* </ul>
*
* @param payload the {@link byte[]} payload data to process
* @return a {@link Map} of processed response data
*/
public static Map<String, Object> handleHumidityControlSetpointScaleSupportedGet(byte[] payload) {
// Create our response map
Map<String, Object> response = new HashMap<String, Object>();
// Process 'Properties1'
response.put("SETPOINT_TYPE", constantHumidityControlSetpointScaleSupportedGetSetpointType.get(payload[2] & 0x0F));
// Return the map of processed response data;
return response;
}
/**
* Creates a new message with the HUMIDITY_CONTROL_SETPOINT_SCALE_SUPPORTED_REPORT command.
* <p>
* Humidity Control Setpoint Scale Supported Report
*
* @param scaleBitMask {@link String}
* Can be one of the following -:
* <p>
* <ul>
* <li>PERCENTAGE
* <li>ABSOLUTE
* </ul>
* @return the {@link byte[]} array with the command to send
*/
static public byte[] getHumidityControlSetpointScaleSupportedReport(String scaleBitMask) {
logger.debug("Creating command message HUMIDITY_CONTROL_SETPOINT_SCALE_SUPPORTED_REPORT version 1");
ByteArrayOutputStream outputData = new ByteArrayOutputStream();
outputData.write(COMMAND_CLASS_KEY);
outputData.write(HUMIDITY_CONTROL_SETPOINT_SCALE_SUPPORTED_REPORT);
// Process 'Properties1'
int varScaleBitMask = Integer.MAX_VALUE;
for (Integer entry : constantHumidityControlSetpointScaleSupportedReportScaleBitMask.keySet()) {
if (constantHumidityControlSetpointScaleSupportedReportScaleBitMask.get(entry).equals(scaleBitMask)) {
varScaleBitMask = entry;
break;
}
}
if (varScaleBitMask == Integer.MAX_VALUE) {
throw new IllegalArgumentException("Unknown constant value '" + scaleBitMask + "' for scaleBitMask");
}
outputData.write(varScaleBitMask & 0x0F);
return outputData.toByteArray();
}
/**
* Processes a received frame with the HUMIDITY_CONTROL_SETPOINT_SCALE_SUPPORTED_REPORT command.
* <p>
* Humidity Control Setpoint Scale Supported Report
* <p>
* The output data {@link Map} has the following properties -:
*
* <ul>
* <li>SCALE_BIT_MASK {@link String}
* Can be one of the following -:
* <p>
* <ul>
* <li>PERCENTAGE
* <li>ABSOLUTE
* </ul>
* </ul>
*
* @param payload the {@link byte[]} payload data to process
* @return a {@link Map} of processed response data
*/
public static Map<String, Object> handleHumidityControlSetpointScaleSupportedReport(byte[] payload) {
// Create our response map
Map<String, Object> response = new HashMap<String, Object>();
// Process 'Properties1'
response.put("SCALE_BIT_MASK", constantHumidityControlSetpointScaleSupportedReportScaleBitMask.get(payload[2] & 0x0F));
// Return the map of processed response data;
return response;
}
/**
* Creates a new message with the HUMIDITY_CONTROL_SETPOINT_CAPABILITIES_GET command.
* <p>
* Humidity Control Setpoint Capabilities Get
*
* @param setpointType {@link String}
* Can be one of the following -:
* <p>
* <ul>
* <li>HUMIDIFIER
* <li>DEHUMIDIFIER
* </ul>
* @return the {@link byte[]} array with the command to send
*/
static public byte[] getHumidityControlSetpointCapabilitiesGet(String setpointType) {
logger.debug("Creating command message HUMIDITY_CONTROL_SETPOINT_CAPABILITIES_GET version 1");
ByteArrayOutputStream outputData = new ByteArrayOutputStream();
outputData.write(COMMAND_CLASS_KEY);
outputData.write(HUMIDITY_CONTROL_SETPOINT_CAPABILITIES_GET);
// Process 'Properties1'
int varSetpointType = Integer.MAX_VALUE;
for (Integer entry : constantHumidityControlSetpointCapabilitiesGetSetpointType.keySet()) {
if (constantHumidityControlSetpointCapabilitiesGetSetpointType.get(entry).equals(setpointType)) {
varSetpointType = entry;
break;
}
}
if (varSetpointType == Integer.MAX_VALUE) {
throw new IllegalArgumentException("Unknown constant value '" + setpointType + "' for setpointType");
}
outputData.write(varSetpointType & 0x0F);
return outputData.toByteArray();
}
/**
* Processes a received frame with the HUMIDITY_CONTROL_SETPOINT_CAPABILITIES_GET command.
* <p>
* Humidity Control Setpoint Capabilities Get
* <p>
* The output data {@link Map} has the following properties -:
*
* <ul>
* <li>SETPOINT_TYPE {@link String}
* Can be one of the following -:
* <p>
* <ul>
* <li>HUMIDIFIER
* <li>DEHUMIDIFIER
* </ul>
* </ul>
*
* @param payload the {@link byte[]} payload data to process
* @return a {@link Map} of processed response data
*/
public static Map<String, Object> handleHumidityControlSetpointCapabilitiesGet(byte[] payload) {
// Create our response map
Map<String, Object> response = new HashMap<String, Object>();
// Process 'Properties1'
response.put("SETPOINT_TYPE", constantHumidityControlSetpointCapabilitiesGetSetpointType.get(payload[2] & 0x0F));
// Return the map of processed response data;
return response;
}
/**
* Creates a new message with the HUMIDITY_CONTROL_SETPOINT_CAPABILITIES_REPORT command.
* <p>
* Humidity Control Setpoint Capabilities Report
*
* @param setpointType {@link String}
* Can be one of the following -:
* <p>
* <ul>
* <li>HUMIDIFIER
* <li>DEHUMIDIFIER
* </ul>
* @param scale1 {@link String}
* Can be one of the following -:
* <p>
* <ul>
* <li>PERCENTAGE
* <li>ABSOLUTE
* </ul>
* @param precision1 {@link Integer}
* @param minimumValue {@link byte[]}
* @param scale2 {@link String}
* Can be one of the following -:
* <p>
* <ul>
* <li>PERCENTAGE
* <li>ABSOLUTE
* </ul>
* @param precision2 {@link Integer}
* @param maximumValue {@link byte[]}
* @return the {@link byte[]} array with the command to send
*/
static public byte[] getHumidityControlSetpointCapabilitiesReport(String setpointType, String scale1,
Integer precision1, byte[] minimumValue, String scale2, Integer precision2, byte[] maximumValue) {
logger.debug("Creating command message HUMIDITY_CONTROL_SETPOINT_CAPABILITIES_REPORT version 1");
ByteArrayOutputStream outputData = new ByteArrayOutputStream();
outputData.write(COMMAND_CLASS_KEY);
outputData.write(HUMIDITY_CONTROL_SETPOINT_CAPABILITIES_REPORT);
// Process 'Properties1'
int varSetpointType = Integer.MAX_VALUE;
for (Integer entry : constantHumidityControlSetpointCapabilitiesReportSetpointType.keySet()) {
if (constantHumidityControlSetpointCapabilitiesReportSetpointType.get(entry).equals(setpointType)) {
varSetpointType = entry;
break;
}<|fim▁hole|> if (varSetpointType == Integer.MAX_VALUE) {
throw new IllegalArgumentException("Unknown constant value '" + setpointType + "' for setpointType");
}
outputData.write(varSetpointType & 0x0F);
// Process 'Properties2'
// Size1 is used by 'Minimum Value'
int size1 = minimumValue.length;
int valProperties2 = 0;
valProperties2 |= size1 & 0x07;
int varScale1 = Integer.MAX_VALUE;
for (Integer entry : constantHumidityControlSetpointCapabilitiesReportScale1.keySet()) {
if (constantHumidityControlSetpointCapabilitiesReportScale1.get(entry).equals(scale1)) {
varScale1 = entry;
break;
}
}
if (varScale1 == Integer.MAX_VALUE) {
throw new IllegalArgumentException("Unknown constant value '" + scale1 + "' for scale1");
}
valProperties2 |= varScale1 << 3 & 0x18;
valProperties2 |= ((precision1 << 5) & 0xE0);
outputData.write(valProperties2);
// Process 'Minimum Value'
if (minimumValue != null) {
try {
outputData.write(minimumValue);
} catch (IOException e) {
}
}
// Process 'Properties3'
// Size2 is used by 'Maximum Value'
int size2 = maximumValue.length;
int valProperties3 = 0;
valProperties3 |= size2 & 0x07;
int varScale2 = Integer.MAX_VALUE;
for (Integer entry : constantHumidityControlSetpointCapabilitiesReportScale2.keySet()) {
if (constantHumidityControlSetpointCapabilitiesReportScale2.get(entry).equals(scale2)) {
varScale2 = entry;
break;
}
}
if (varScale2 == Integer.MAX_VALUE) {
throw new IllegalArgumentException("Unknown constant value '" + scale2 + "' for scale2");
}
valProperties3 |= varScale2 << 3 & 0x18;
valProperties3 |= ((precision2 << 5) & 0xE0);
outputData.write(valProperties3);
// Process 'Maximum Value'
if (maximumValue != null) {
try {
outputData.write(maximumValue);
} catch (IOException e) {
}
}
return outputData.toByteArray();
}
/**
* Processes a received frame with the HUMIDITY_CONTROL_SETPOINT_CAPABILITIES_REPORT command.
* <p>
* Humidity Control Setpoint Capabilities Report
* <p>
* The output data {@link Map} has the following properties -:
*
* <ul>
* <li>SETPOINT_TYPE {@link String}
* Can be one of the following -:
* <p>
* <ul>
* <li>HUMIDIFIER
* <li>DEHUMIDIFIER
* </ul>
* <li>SCALE1 {@link String}
* Can be one of the following -:
* <p>
* <ul>
* <li>PERCENTAGE
* <li>ABSOLUTE
* </ul>
* <li>PRECISION1 {@link Integer}
* <li>MINIMUM_VALUE {@link byte[]}
* <li>SCALE2 {@link String}
* Can be one of the following -:
* <p>
* <ul>
* <li>PERCENTAGE
* <li>ABSOLUTE
* </ul>
* <li>PRECISION2 {@link Integer}
* <li>MAXIMUM_VALUE {@link byte[]}
* </ul>
*
* @param payload the {@link byte[]} payload data to process
* @return a {@link Map} of processed response data
*/
public static Map<String, Object> handleHumidityControlSetpointCapabilitiesReport(byte[] payload) {
// Create our response map
Map<String, Object> response = new HashMap<String, Object>();
// We're using variable length fields, so track the offset
int msgOffset = 2;
// Process 'Properties1'
response.put("SETPOINT_TYPE", constantHumidityControlSetpointCapabilitiesReportSetpointType.get(payload[msgOffset] & 0x0F));
msgOffset += 1;
// Process 'Properties2'
// Size1 is used by 'Minimum Value'
int varSize1 = payload[msgOffset] & 0x07;
response.put("SCALE1", constantHumidityControlSetpointCapabilitiesReportScale1.get((payload[msgOffset] & 0x18) >> 3));
        response.put("PRECISION1", Integer.valueOf((payload[msgOffset] & 0xE0) >> 5));
msgOffset += 1;
// Process 'Minimum Value'
ByteArrayOutputStream valMinimumValue = new ByteArrayOutputStream();
for (int cntMinimumValue = 0; cntMinimumValue < varSize1; cntMinimumValue++) {
valMinimumValue.write(payload[msgOffset + cntMinimumValue]);
}
response.put("MINIMUM_VALUE", valMinimumValue.toByteArray());
msgOffset += varSize1;
// Process 'Properties3'
// Size2 is used by 'Maximum Value'
int varSize2 = payload[msgOffset] & 0x07;
response.put("SCALE2", constantHumidityControlSetpointCapabilitiesReportScale2.get((payload[msgOffset] & 0x18) >> 3));
        response.put("PRECISION2", Integer.valueOf((payload[msgOffset] & 0xE0) >> 5));
msgOffset += 1;
// Process 'Maximum Value'
ByteArrayOutputStream valMaximumValue = new ByteArrayOutputStream();
for (int cntMaximumValue = 0; cntMaximumValue < varSize2; cntMaximumValue++) {
valMaximumValue.write(payload[msgOffset + cntMaximumValue]);
}
response.put("MAXIMUM_VALUE", valMaximumValue.toByteArray());
msgOffset += varSize2;
// Return the map of processed response data;
return response;
}
}<|fim▁end|>
|
}
|
<|file_name|>Makefile.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# @author Philip
import tarfile as tf
import zipfile as zf
import os, re, shutil, sys, platform
pyversion = platform.python_version()
islinux = platform.system().lower() == 'linux'
if pyversion[:3] in ['2.6', '2.7']:
import urllib as urllib_request
import codecs
open = codecs.open
_unichr = unichr
if sys.maxunicode < 0x10000:
def unichr(i):
if i < 0x10000:
return _unichr(i)
else:
return _unichr( 0xD7C0 + ( i>>10 ) ) + _unichr( 0xDC00 + ( i & 0x3FF ) )
elif pyversion[:2] == '3.':
import urllib.request as urllib_request
unichr = chr
def unichr2( *args ):
return [unichr( int( i.split('<')[0][2:], 16 ) ) for i in args]
def unichr3( *args ):
return [unichr( int( i[2:7], 16 ) ) for i in args if i[2:7]]
# DEFINE
UNIHAN_VER = '5.2.0'
SF_MIRROR = 'cdnetworks-kr-2'
SCIM_TABLES_VER = '0.5.10'
SCIM_PINYIN_VER = '0.5.91'
LIBTABE_VER = '0.2.3'
# END OF DEFINE
def download( url, dest ):
if os.path.isfile( dest ):
print( 'File %s up to date.' % dest )
return
global islinux
if islinux:
        # we use wget instead of urlretrieve under Linux,
        # because wget can display details such as download progress
os.system( 'wget %s -O %s' % ( url, dest ) )
else:
print( 'Downloading from [%s] ...' % url )
urllib_request.urlretrieve( url, dest )
print( 'Download complete.\n' )
return
def uncompress( fp, member, encoding = 'U8' ):
name = member.rsplit( '/', 1 )[-1]
print( 'Extracting %s ...' % name )
fp.extract( member )
shutil.move( member, name )
if '/' in member:
shutil.rmtree( member.split( '/', 1 )[0] )
return open( name, 'rb', encoding, 'ignore' )
unzip = lambda path, member, encoding = 'U8': \
uncompress( zf.ZipFile( path ), member, encoding )
untargz = lambda path, member, encoding = 'U8': \
uncompress( tf.open( path, 'r:gz' ), member, encoding )
def parserCore( fp, pos, beginmark = None, endmark = None ):
if beginmark and endmark:
start = False
else: start = True
mlist = set()
for line in fp:
if beginmark and line.startswith( beginmark ):
start = True
continue
elif endmark and line.startswith( endmark ):
break
if start and not line.startswith( '#' ):
elems = line.split()
if len( elems ) < 2:
continue
elif len( elems[0] ) > 1 and \
len( elems[pos] ) > 1: # words only
mlist.add( elems[pos] )
return mlist
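# parserCore() sketch (hypothetical table contents): given a section such as
#
#   BEGIN_TABLE
#   word1   word2
#   x       word3
#   word4   y
#   END_TABLE
#
# a call with pos=1 and these begin/end markers collects {'word2'}: a row is kept only
# when both the first column and the selected column are longer than one character.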
def tablesParser( path, name ):
""" Read file from scim-tables and parse it. """
global SCIM_TABLES_VER
src = 'scim-tables-%s/tables/zh/%s' % ( SCIM_TABLES_VER, name )
fp = untargz( path, src, 'U8' )
return parserCore( fp, 1, 'BEGIN_TABLE', 'END_TABLE' )
ezbigParser = lambda path: tablesParser( path, 'EZ-Big.txt.in' )
wubiParser = lambda path: tablesParser( path, 'Wubi.txt.in' )
zrmParser = lambda path: tablesParser( path, 'Ziranma.txt.in' )
def phraseParser( path ):
""" Read phrase_lib.txt and parse it. """
global SCIM_PINYIN_VER
src = 'scim-pinyin-%s/data/phrase_lib.txt' % SCIM_PINYIN_VER
dst = 'phrase_lib.txt'
fp = untargz( path, src, 'U8' )
return parserCore( fp, 0 )
def tsiParser( path ):
""" Read tsi.src and parse it. """
src = 'libtabe/tsi-src/tsi.src'
dst = 'tsi.src'
fp = untargz( path, src, 'big5hkscs' )
return parserCore( fp, 0 )
def unihanParser( path ):
""" Read Unihan_Variants.txt and parse it. """
fp = unzip( path, 'Unihan_Variants.txt', 'U8' )
t2s = dict()
s2t = dict()
for line in fp:
if line.startswith( '#' ):
continue
else:
elems = line.split()
if len( elems ) < 3:
continue
type = elems.pop( 1 )
elems = unichr2( *elems )
if type == 'kTraditionalVariant':
s2t[elems[0]] = elems[1:]
elif type == 'kSimplifiedVariant':
t2s[elems[0]] = elems[1:]
fp.close()
return ( t2s, s2t )
def applyExcludes( mlist, path ):
""" Apply exclude rules from path to mlist. """
excludes = open( path, 'rb', 'U8' ).read().split()
excludes = [word.split( '#' )[0].strip() for word in excludes]
excludes = '|'.join( excludes )
excptn = re.compile( '.*(?:%s).*' % excludes )
diff = [mword for mword in mlist if excptn.search( mword )]
mlist.difference_update( diff )
return mlist
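# applyExcludes() sketch (made-up data): if the exclude file contains the single entry
# 'foo', the function builds the pattern '.*(?:foo).*' and drops every phrase containing
# 'foo' from mlist before the word lists are turned into conversion tables.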
def charManualTable( path ):
fp = open( path, 'rb', 'U8' )
ret = {}
for line in fp:
elems = line.split( '#' )[0].split( '|' )
elems = unichr3( *elems )
if len( elems ) > 1:
ret[elems[0]] = elems[1:]
return ret
def toManyRules( src_table ):
tomany = set()
    for ( f, t ) in src_table.items():
for i in range( 1, len( t ) ):
tomany.add( t[i] )
return tomany
def removeRules( path, table ):
fp = open( path, 'rb', 'U8' )
texc = list()
for line in fp:
elems = line.split( '=>' )
f = t = elems[0].strip()
if len( elems ) == 2:
t = elems[1].strip()
f = f.strip('"').strip("'")
t = t.strip('"').strip("'")
if f:
try:
table.pop( f )
except:
pass
if t:
texc.append( t )
texcptn = re.compile( '^(?:%s)$' % '|'.join( texc ) )
    for (tmp_f, tmp_t) in table.copy().items():
if texcptn.match( tmp_t ):
table.pop( tmp_f )
return table
def customRules( path ):
fp = open( path, 'rb', 'U8' )
ret = dict()
for line in fp:
elems = line.split( '#' )[0].split()
if len( elems ) > 1:
ret[elems[0]] = elems[1]
return ret
def dictToSortedList( src_table, pos ):
return sorted( src_table.items(), key = lambda m: m[pos] )
def translate( text, conv_table ):
i = 0
while i < len( text ):
for j in range( len( text ) - i, 0, -1 ):
f = text[i:][:j]
t = conv_table.get( f )
if t:
text = text[:i] + t + text[i:][j:]
i += len(t) - 1
break
i += 1
return text
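# translate() illustration (hypothetical conversion table, not part of the build data):
#
#   translate( u'abc', { u'ab': u'X', u'c': u'Y' } )  # -> u'XY'
#
# At each position the longest substring present in conv_table wins, and the cursor
# advances past the replacement before scanning resumes.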
def manualWordsTable( path, conv_table, reconv_table ):
fp = open( path, 'rb', 'U8' )
reconv_table = {}
wordlist = [line.split( '#' )[0].strip() for line in fp]
wordlist = list( set( wordlist ) )
wordlist.sort( key = len, reverse = True )
while wordlist:
word = wordlist.pop()
new_word = translate( word, conv_table )
rcv_word = translate( word, reconv_table )
if word != rcv_word:
reconv_table[word] = word
reconv_table[new_word] = word
return reconv_table
def defaultWordsTable( src_wordlist, src_tomany, char_conv_table, char_reconv_table ):
wordlist = list( src_wordlist )
wordlist.sort( key = len, reverse = True )
word_conv_table = {}
word_reconv_table = {}
conv_table = char_conv_table.copy()
reconv_table = char_reconv_table.copy()
tomanyptn = re.compile( '(?:%s)' % '|'.join( src_tomany ) )
while wordlist:
conv_table.update( word_conv_table )
reconv_table.update( word_reconv_table )
word = wordlist.pop()
new_word_len = word_len = len( word )
while new_word_len == word_len:
add = False
test_word = translate( word, reconv_table )
new_word = translate( word, conv_table )
if not reconv_table.get( new_word ) \
and ( test_word != word \
or ( tomanyptn.search( word ) \
and word != translate( new_word, reconv_table ) ) ):
word_conv_table[word] = new_word
word_reconv_table[new_word] = word
try:
word = wordlist.pop()
except IndexError:
break
new_word_len = len(word)
return word_reconv_table
def PHPArray( table ):
lines = ['\'%s\' => \'%s\',' % (f, t) for (f, t) in table if f and t]
return '\n'.join(lines)
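# PHPArray() illustration: PHPArray( [(u'A', u'B'), (u'C', u'D')] ) returns the lines
#
#   'A' => 'B',
#   'C' => 'D',
#
# which main() below pastes verbatim between the array( ... ) markers of ZhConversion.php.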
def main():
#Get Unihan.zip:
url = 'http://www.unicode.org/Public/%s/ucd/Unihan.zip' % UNIHAN_VER
han_dest = 'Unihan.zip'
download( url, han_dest )
# Get scim-tables-$(SCIM_TABLES_VER).tar.gz:
url = 'http://%s.dl.sourceforge.net/sourceforge/scim/scim-tables-%s.tar.gz' % ( SF_MIRROR, SCIM_TABLES_VER )
tbe_dest = 'scim-tables-%s.tar.gz' % SCIM_TABLES_VER
download( url, tbe_dest )
# Get scim-pinyin-$(SCIM_PINYIN_VER).tar.gz:
url = 'http://%s.dl.sourceforge.net/sourceforge/scim/scim-pinyin-%s.tar.gz' % ( SF_MIRROR, SCIM_PINYIN_VER )
pyn_dest = 'scim-pinyin-%s.tar.gz' % SCIM_PINYIN_VER
download( url, pyn_dest )
# Get libtabe-$(LIBTABE_VER).tgz:
url = 'http://%s.dl.sourceforge.net/sourceforge/libtabe/libtabe-%s.tgz' % ( SF_MIRROR, LIBTABE_VER )
lbt_dest = 'libtabe-%s.tgz' % LIBTABE_VER
download( url, lbt_dest )
# Unihan.txt
( t2s_1tomany, s2t_1tomany ) = unihanParser( han_dest )
t2s_1tomany.update( charManualTable( 'trad2simp.manual' ) )
s2t_1tomany.update( charManualTable( 'simp2trad.manual' ) )
    t2s_1to1 = dict( [( f, t[0] ) for ( f, t ) in t2s_1tomany.items()] )
    s2t_1to1 = dict( [( f, t[0] ) for ( f, t ) in s2t_1tomany.items()] )
s_tomany = toManyRules( t2s_1tomany )
t_tomany = toManyRules( s2t_1tomany )
# noconvert rules
t2s_1to1 = removeRules( 'trad2simp_noconvert.manual', t2s_1to1 )
s2t_1to1 = removeRules( 'simp2trad_noconvert.manual', s2t_1to1 )
    # the superset used for word-to-word conversion
t2s_1to1_supp = t2s_1to1.copy()
s2t_1to1_supp = s2t_1to1.copy()
t2s_1to1_supp.update( customRules( 'trad2simp_supp_set.manual' ) )
s2t_1to1_supp.update( customRules( 'simp2trad_supp_set.manual' ) )
# word to word manual rules
t2s_word2word_manual = manualWordsTable( 'simpphrases.manual', s2t_1to1_supp, t2s_1to1_supp )
t2s_word2word_manual.update( customRules( 'toSimp.manual' ) )
s2t_word2word_manual = manualWordsTable( 'tradphrases.manual', t2s_1to1_supp, s2t_1to1_supp )
s2t_word2word_manual.update( customRules( 'toTrad.manual' ) )
# word to word rules from input methods
t_wordlist = set()
s_wordlist = set()
t_wordlist.update( ezbigParser( tbe_dest ),
tsiParser( lbt_dest ) )
s_wordlist.update( wubiParser( tbe_dest ),
zrmParser( tbe_dest ),
phraseParser( pyn_dest ) )
# exclude
s_wordlist = applyExcludes( s_wordlist, 'simpphrases_exclude.manual' )
t_wordlist = applyExcludes( t_wordlist, 'tradphrases_exclude.manual' )
s2t_supp = s2t_1to1_supp.copy()
s2t_supp.update( s2t_word2word_manual )
t2s_supp = t2s_1to1_supp.copy()
t2s_supp.update( t2s_word2word_manual )
# parse list to dict
t2s_word2word = defaultWordsTable( s_wordlist, s_tomany, s2t_1to1_supp, t2s_supp )
t2s_word2word.update( t2s_word2word_manual )
s2t_word2word = defaultWordsTable( t_wordlist, t_tomany, t2s_1to1_supp, s2t_supp )
s2t_word2word.update( s2t_word2word_manual )
# Final tables
# sorted list toHans
    t2s_1to1 = dict( [( f, t ) for ( f, t ) in t2s_1to1.items() if f != t] )
toHans = dictToSortedList( t2s_1to1, 0 ) + dictToSortedList( t2s_word2word, 1 )
# sorted list toHant
    s2t_1to1 = dict( [( f, t ) for ( f, t ) in s2t_1to1.items() if f != t] )
toHant = dictToSortedList( s2t_1to1, 0 ) + dictToSortedList( s2t_word2word, 1 )
# sorted list toCN
toCN = dictToSortedList( customRules( 'toCN.manual' ), 1 )
# sorted list toHK
toHK = dictToSortedList( customRules( 'toHK.manual' ), 1 )
# sorted list toSG
toSG = dictToSortedList( customRules( 'toSG.manual' ), 1 )
# sorted list toTW
toTW = dictToSortedList( customRules( 'toTW.manual' ), 1 )
<|fim▁hole|> php = '''<?php
/**
* Simplified / Traditional Chinese conversion tables
*
* Automatically generated using code and data in includes/zhtable/
* Do not modify directly!
*
* @file
*/
$zh2Hant = array(\n'''
php += PHPArray( toHant ) \
+ '\n);\n\n$zh2Hans = array(\n' \
+ PHPArray( toHans ) \
+ '\n);\n\n$zh2TW = array(\n' \
+ PHPArray( toTW ) \
+ '\n);\n\n$zh2HK = array(\n' \
+ PHPArray( toHK ) \
+ '\n);\n\n$zh2CN = array(\n' \
+ PHPArray( toCN ) \
+ '\n);\n\n$zh2SG = array(\n' \
+ PHPArray( toSG ) \
+ '\n);'
f = open( 'ZhConversion.php', 'wb', encoding = 'utf8' )
print ('Writing ZhConversion.php ... ')
f.write( php )
f.close()
#Remove temp files
print ('Deleting temp files ... ')
os.remove('EZ-Big.txt.in')
os.remove('phrase_lib.txt')
os.remove('tsi.src')
os.remove('Unihan_Variants.txt')
os.remove('Wubi.txt.in')
os.remove('Ziranma.txt.in')
if __name__ == '__main__':
main()<|fim▁end|>
|
# Get PHP Array
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import os
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
from flask.ext.openid import OpenID
from config import basedir
from flask_debugtoolbar import DebugToolbarExtension
app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)
lm = LoginManager()<|fim▁hole|>lm.init_app(app)
lm.login_view = 'index'
oid = OpenID(app, os.path.join(basedir, 'tmp'))
toolbar = DebugToolbarExtension(app)
app.debug = True
from app import views, models<|fim▁end|>
| |
<|file_name|>terminalLinkHandler.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as nls from 'vs/nls';
import * as path from 'path';
import * as platform from 'vs/base/common/platform';
import * as pfs from 'vs/base/node/pfs';
import Uri from 'vs/base/common/uri';
import { dispose, IDisposable } from 'vs/base/common/lifecycle';
import { IWorkbenchEditorService } from 'vs/workbench/services/editor/common/editorService';
import { IOpenerService } from 'vs/platform/opener/common/opener';
import { TerminalWidgetManager } from 'vs/workbench/parts/terminal/browser/terminalWidgetManager';
import { TPromise } from 'vs/base/common/winjs.base';
import { IConfigurationService } from 'vs/platform/configuration/common/configuration';
const pathPrefix = '(\\.\\.?|\\~)';
const pathSeparatorClause = '\\/';
// '":; are allowed in paths but they are often separators so ignore them
// Also disallow \\ to prevent a catastrophic backtracking case #24798
const excludedPathCharactersClause = '[^\\0\\s!$`&*()\\[\\]+\'":;\\\\]';
/** A regex that matches paths in the form /foo, ~/foo, ./foo, ../foo, foo/bar */
const unixLocalLinkClause = '((' + pathPrefix + '|(' + excludedPathCharactersClause + ')+)?(' + pathSeparatorClause + '(' + excludedPathCharactersClause + ')+)+)';
const winDrivePrefix = '[a-zA-Z]:';
const winPathPrefix = '(' + winDrivePrefix + '|\\.\\.?|\\~)';
const winPathSeparatorClause = '(\\\\|\\/)';
const winExcludedPathCharactersClause = '[^\\0<>\\?\\|\\/\\s!$`&*()\\[\\]+\'":;]';
/** A regex that matches paths in the form c:\foo, ~\foo, .\foo, ..\foo, foo\bar */
const winLocalLinkClause = '((' + winPathPrefix + '|(' + winExcludedPathCharactersClause + ')+)?(' + winPathSeparatorClause + '(' + winExcludedPathCharactersClause + ')+)+)';
/** As xterm reads from the DOM, a space may appear as a non-breaking space (ASCII code 160),
so the pattern accepts either a non-breaking space or a regular space (ASCII code 32). */
const lineAndColumnClause = [
'((\\S*) on line ((\\d+)(, column (\\d+))?))', // (file path) on line 8, column 13
'((\\S*):line ((\\d+)(, column (\\d+))?))', // (file path):line 8, column 13
'(([^\\s\\(\\)]*)(\\s?[\\(\\[](\\d+)(,\\s?(\\d+))?)[\\)\\]])', // (file path)(45), (file path) (45), (file path)(45,18), (file path) (45,18), (file path)(45, 18), (file path) (45, 18), also with []
'(([^:\\s\\(\\)<>\'\"\\[\\]]*)(:(\\d+))?(:(\\d+))?)' // (file path):336, (file path):336:9
].join('|').replace(/ /g, `[${'\u00A0'} ]`);
// Changing any regex may affect this value, so change this as well if required.
const winLineAndColumnMatchIndex = 12;
const unixLineAndColumnMatchIndex = 23;
// Each line and column clause have 6 groups (ie no. of expressions in round brackets)
const lineAndColumnClauseGroupCount = 6;
/** Higher than local link, lower than hypertext */
const CUSTOM_LINK_PRIORITY = -1;
/** Lowest */
const LOCAL_LINK_PRIORITY = -2;
export type XtermLinkMatcherHandler = (event: MouseEvent, uri: string) => boolean | void;
export type XtermLinkMatcherValidationCallback = (uri: string, element: HTMLElement, callback: (isValid: boolean) => void) => void;
export class TerminalLinkHandler {
private _hoverDisposables: IDisposable[] = [];
private _mouseMoveDisposable: IDisposable;
private _widgetManager: TerminalWidgetManager;
private _localLinkPattern: RegExp;
constructor(
private _xterm: any,
private _platform: platform.Platform,
private _initialCwd: string,
@IOpenerService private _openerService: IOpenerService,
@IWorkbenchEditorService private _editorService: IWorkbenchEditorService,
@IConfigurationService private _configurationService: IConfigurationService
) {
const baseLocalLinkClause = _platform === platform.Platform.Windows ? winLocalLinkClause : unixLocalLinkClause;
// Append line and column number regex
this._localLinkPattern = new RegExp(`${baseLocalLinkClause}(${lineAndColumnClause})`);
this._xterm.setHypertextLinkHandler(this._wrapLinkHandler(uri => {
this._handleHypertextLink(uri);
}));
this._xterm.setHypertextValidationCallback((uri: string, callback: (isValid: boolean) => void) => {
this._validateWebLink(uri, callback);
});
}
public setWidgetManager(widgetManager: TerminalWidgetManager): void {
this._widgetManager = widgetManager;
}
public registerCustomLinkHandler(regex: RegExp, handler: (uri: string) => void, matchIndex?: number, validationCallback?: XtermLinkMatcherValidationCallback): number {
// const wrappedValidationCallback = (uri: string, element: HTMLElement, callback) => {
// this._addTooltipEventListeners(element);
// if (validationCallback) {
// validationCallback(uri, element, callback);
// } else {
// callback(true);
// }
// };
return this._xterm.registerLinkMatcher(regex, this._wrapLinkHandler(handler), {
matchIndex,
validationCallback: (uri, element, callback) => validationCallback(uri, element, callback),
tooltipCallback: (e: MouseEvent, u) => this._widgetManager.showMessage(e.offsetX, e.offsetY, this._getLinkHoverString()),
leaveCallback: () => this._widgetManager.closeMessage(),
priority: CUSTOM_LINK_PRIORITY
});
}
public registerLocalLinkHandler(): number {
const wrappedHandler = this._wrapLinkHandler(url => {
this._handleLocalLink(url);
});
return this._xterm.registerLinkMatcher(this._localLinkRegex, wrappedHandler, {
validationCallback: (link: string, callback: (isValid: boolean) => void) => this._validateLocalLink(link, callback),
tooltipCallback: (e: MouseEvent, u) => this._widgetManager.showMessage(e.offsetX, e.offsetY, this._getLinkHoverString()),
leaveCallback: () => this._widgetManager.closeMessage(),
priority: LOCAL_LINK_PRIORITY
});
}
public dispose(): void {
this._hoverDisposables = dispose(this._hoverDisposables);
this._mouseMoveDisposable = dispose(this._mouseMoveDisposable);
}
private _wrapLinkHandler(handler: (uri: string) => boolean | void): XtermLinkMatcherHandler {
return (event: MouseEvent, uri: string) => {
// Prevent default electron link handling so Alt+Click mode works normally
event.preventDefault();
// Require correct modifier on click
if (!this._isLinkActivationModifierDown(event)) {
return false;
}
return handler(uri);
};
}
protected get _localLinkRegex(): RegExp {
return this._localLinkPattern;
}
private _handleLocalLink(link: string): TPromise<void> {
return this._resolvePath(link).then(resolvedLink => {
if (!resolvedLink) {
return void 0;
}
let normalizedPath = path.normalize(path.resolve(resolvedLink));
const normalizedUrl = this.extractLinkUrl(normalizedPath);
normalizedPath = this._formatLocalLinkPath(normalizedPath);
let resource = Uri.file(normalizedUrl);
resource = resource.with({<|fim▁hole|> return this._openerService.open(resource);
});
}
private _validateLocalLink(link: string, callback: (isValid: boolean) => void): void {
// this._resolvePath(link).then(resolvedLink => {
// if (resolvedLink) {
// this._addTooltipEventListeners(element);
// }
// callback(!!resolvedLink);
// });
this._resolvePath(link).then(resolvedLink => callback(!!resolvedLink));
}
private _validateWebLink(link: string, callback: (isValid: boolean) => void): void {
// this._addTooltipEventListeners(element);
callback(true);
}
private _handleHypertextLink(url: string): void {
let uri = Uri.parse(url);
this._openerService.open(uri);
}
private _isLinkActivationModifierDown(event: MouseEvent): boolean {
const editorConf = this._configurationService.getConfiguration<{ multiCursorModifier: 'ctrlCmd' | 'alt' }>('editor');
if (editorConf.multiCursorModifier === 'ctrlCmd') {
return !!event.altKey;
}
return platform.isMacintosh ? event.metaKey : event.ctrlKey;
}
private _getLinkHoverString(): string {
const editorConf = this._configurationService.getConfiguration<{ multiCursorModifier: 'ctrlCmd' | 'alt' }>('editor');
if (editorConf.multiCursorModifier === 'ctrlCmd') {
return nls.localize('terminalLinkHandler.followLinkAlt', 'Alt + click to follow link');
}
if (platform.isMacintosh) {
return nls.localize('terminalLinkHandler.followLinkCmd', 'Cmd + click to follow link');
}
return nls.localize('terminalLinkHandler.followLinkCtrl', 'Ctrl + click to follow link');
}
// private _addTooltipEventListeners(element: HTMLElement): void {
// let timeout: number = null;
// let isMessageShowing = false;
// this._hoverDisposables.push(dom.addDisposableListener(element, dom.EventType.MOUSE_OVER, e => {
// element.classList.toggle('active', this._isLinkActivationModifierDown(e));
// this._mouseMoveDisposable = dom.addDisposableListener(element, dom.EventType.MOUSE_MOVE, e => {
// element.classList.toggle('active', this._isLinkActivationModifierDown(e));
// });
// timeout = setTimeout(() => {
// this._widgetManager.showMessage(element.offsetLeft, element.offsetTop, this._getLinkHoverString());
// isMessageShowing = true;
// }, 500);
// }));
// this._hoverDisposables.push(dom.addDisposableListener(element, dom.EventType.MOUSE_OUT, () => {
// element.classList.remove('active');
// if (this._mouseMoveDisposable) {
// this._mouseMoveDisposable.dispose();
// }
// clearTimeout(timeout);
// this._widgetManager.closeMessage();
// isMessageShowing = false;
// }));
// }
protected _preprocessPath(link: string): string {
if (this._platform === platform.Platform.Windows) {
// Resolve ~ -> %HOMEDRIVE%\%HOMEPATH%
if (link.charAt(0) === '~') {
if (!process.env.HOMEDRIVE || !process.env.HOMEPATH) {
return null;
}
link = `${process.env.HOMEDRIVE}\\${process.env.HOMEPATH + link.substring(1)}`;
}
// Resolve relative paths (.\a, ..\a, ~\a, a\b)
if (!link.match('^' + winDrivePrefix)) {
if (!this._initialCwd) {
// Abort if no workspace is open
return null;
}
link = path.join(this._initialCwd, link);
}
}
// Resolve workspace path . | .. | <relative_path> -> <path>/. | <path>/.. | <path>/<relative_path>
else if (link.charAt(0) !== '/' && link.charAt(0) !== '~') {
if (!this._initialCwd) {
// Abort if no workspace is open
return null;
}
link = path.join(this._initialCwd, link);
}
return link;
}
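    // Path resolution sketch (hypothetical values): with an initial cwd of /home/user/project,
    // the link "./src/app.ts" resolves to "/home/user/project/src/app.ts"; on Windows, "~\foo"
    // expands to %HOMEDRIVE%\%HOMEPATH%\foo and drive-less links are joined onto the cwd.
    // When no workspace folder (and hence no initial cwd) is available, null is returned.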
private _resolvePath(link: string): TPromise<string> {
link = this._preprocessPath(link);
if (!link) {
return TPromise.as(void 0);
}
const linkUrl = this.extractLinkUrl(link);
if (!linkUrl) {
return TPromise.as(void 0);
}
// Open an editor if the path exists
return pfs.fileExists(linkUrl).then(isFile => {
if (!isFile) {
return null;
}
return link;
});
}
/**
* Appends line number and column number to link if they exists.
* @param link link to format, will become link#line_num,col_num.
*/
private _formatLocalLinkPath(link: string): string {
const lineColumnInfo: LineColumnInfo = this.extractLineColumnInfo(link);
if (lineColumnInfo.lineNumber) {
link += `#${lineColumnInfo.lineNumber}`;
if (lineColumnInfo.columnNumber) {
link += `,${lineColumnInfo.columnNumber}`;
}
}
return link;
}
/**
* Returns line and column number of URl if that is present.
*
* @param link Url link which may contain line and column number.
*/
public extractLineColumnInfo(link: string): LineColumnInfo {
const matches: string[] = this._localLinkRegex.exec(link);
const lineColumnInfo: LineColumnInfo = {};
const lineAndColumnMatchIndex = this._platform === platform.Platform.Windows ? winLineAndColumnMatchIndex : unixLineAndColumnMatchIndex;
for (let i = 0; i < lineAndColumnClause.length; i++) {
const lineMatchIndex = lineAndColumnMatchIndex + (lineAndColumnClauseGroupCount * i);
const rowNumber = matches[lineMatchIndex];
if (rowNumber) {
lineColumnInfo['lineNumber'] = rowNumber;
// Check if column number exists
const columnNumber = matches[lineMatchIndex + 2];
if (columnNumber) {
lineColumnInfo['columnNumber'] = columnNumber;
}
break;
}
}
return lineColumnInfo;
}
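    // Illustrative only (hypothetical link text): on a unix platform a match such as
    // "./src/main.ts:12:34" yields { lineNumber: '12', columnNumber: '34' }; the capture
    // group offsets come from unixLineAndColumnMatchIndex and lineAndColumnClauseGroupCount
    // defined at the top of this file.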
/**
* Returns url from link as link may contain line and column information.
*
* @param link url link which may contain line and column number.
*/
public extractLinkUrl(link: string): string {
const matches: string[] = this._localLinkRegex.exec(link);
if (!matches) {
return null;
}
return matches[1];
}
}
export interface LineColumnInfo {
lineNumber?: string;
columnNumber?: string;
};<|fim▁end|>
|
fragment: Uri.parse(normalizedPath).fragment
});
|
<|file_name|>test-case.ts<|end_file_name|><|fim▁begin|>import type {SpecDirectory, OptionKey} from '../spec-directory';
import {Compiler} from '../compiler';
import {
failures,
TestResult,
getExpectedFiles,
overwriteResults,
SassResult,
} from './util';
import {compareResults} from './compare';
import {getExpectedResult} from './expected';
/**
* A wrapper around a SpecDirectory that represents a sass-spec test case.
*
* Contains methods for running the test and updating the underlying directory
* based on the results.
*/
export default class TestCase {
readonly dir: SpecDirectory;
readonly impl: string;
private compiler: Compiler;
private todoMode?: string;
private _actual?: SassResult;
private _result?: TestResult;
// Private constructor that instantiates properties.
// The only way to create a test case is through the async factory below
private constructor(
dir: SpecDirectory,
impl: string,
compiler: Compiler,
todoMode?: string
) {
this.dir = dir;
this.impl = impl;
this.compiler = compiler;
this.todoMode = todoMode;
}
/**
* Run the spec at the given directory and return a TestCase object representing it
*/
static async create(
dir: SpecDirectory,
impl: string,
compiler: Compiler,
todoMode?: string
): Promise<TestCase> {
const testCase = new TestCase(dir, impl, compiler, todoMode);
try {
testCase._result = await testCase.run();
} catch (caught) {
const error = caught instanceof Error ? caught : new Error(`${caught}`);
testCase._actual = {isSuccess: false, error: error.toString()};
testCase._result = {type: 'error', error};
}
return testCase;
}
/** Return the name of the input file of this test directory. */
private inputFile(): string {
if (this.dir.hasFile('input.sass') && this.dir.hasFile('input.scss')) {
throw new Error(`Multiple input files found in ${this.dir.relPath()}`);
}
return this.dir.hasFile('input.sass') ? 'input.sass' : 'input.scss';
}
/** Get the contents of the input file for this test directory. */
async input(): Promise<string> {
return await this.dir.readFile(this.inputFile());
}
// Run the compiler and calculate the actual result
private async calcActualResult(): Promise<SassResult> {
const precision = (await this.dir.options()).precision();
const cmdArgs = [];
// Pass in the indented option to the command
if (precision) {
cmdArgs.push('--precision');
cmdArgs.push(`${precision}`);
}
cmdArgs.push(this.inputFile());
const {stdout, stderr, status} = await this.compiler.compile(
this.dir.path,
cmdArgs
);
// stderr can contain extra trailing newlines which just clog up the HRX
// files without any particular purpose.
const normalizedStderr = stderr.replace(/(\r?\n)+$/, '\n');
if (status === 0) {
return {isSuccess: true, output: stdout, warning: normalizedStderr};
} else {
return {isSuccess: false, error: normalizedStderr};
}
}
// Do the test run, storing the actual output if there is one, and return the test result
private async run(): Promise<TestResult> {
const options = await this.dir.options();
const mode = options.getMode(this.impl);
const warningTodo = options.isWarningTodo(this.impl);
if (mode === 'ignore') {
return {type: 'skip'};
}
if (mode === 'todo' && !this.todoMode) {
return {type: 'todo'};
}
const [expected, actual] = await Promise.all([
getExpectedResult(this.dir, this.impl),
this.calcActualResult(),
]);
this._actual = actual;
const testResult = compareResults(expected, actual, {
// Compare the full error only for dart-sass
trimErrors: this.impl !== 'dart-sass',
      // Skip warning checks if :warning_todo is enabled and we're not running todos
skipWarning: warningTodo && !this.todoMode,
});
// If we're probing todo
if (this.todoMode === 'probe') {
if (mode === 'todo') {
if (testResult.type === 'pass') {
return failures.UnnecessaryTodo();
} else {
return {type: 'todo'};
}
}
if (warningTodo) {
if (testResult.type === 'pass') {
return failures.UnnecessaryTodo();
} else {
return {type: 'pass'};
}
}
}
return testResult;
}
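  // Outcome sketch (restating the branches above): a spec marked ':todo' for this impl
  // normally short-circuits to { type: 'todo' }; when todo probing is enabled, a todo
  // that now passes is reported as an UnnecessaryTodo failure, and the same applies to
  // ':warning_todo' specs whose warnings already match.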
actual(): SassResult {
if (!this._actual) {
throw new Error(`Test case ${this.dir.relPath()} has not yet run.`);
}
return this._actual;
}
result(): TestResult {
if (!this._result) {
throw new Error(`Test case ${this.dir.relPath()} has not yet run.`);
}
return this._result;
}
// Mutations
/** Add the given option for the given impl */
async addOptionForImpl(option: OptionKey): Promise<void> {
const options = await this.dir.directOptions();
const updatedOptions = options.addImpl(this.impl, option);
await this.dir.writeFile('options.yml', updatedOptions.toYaml());
}
/**
* Overwrite the base results with the actual results
*/
async overwrite(): Promise<void> {
// overwrite the contents of the base files
await overwriteResults(this.dir, this.actual());
// delete any override files for this impl
await Promise.all(
getExpectedFiles(this.impl).map(filename => this.dir.removeFile(filename))
);
this._result = {type: 'pass'};
}
/**
* Migrate a copy of the expected results to pass on impl
*/
async migrateImpl(): Promise<void> {
const actual = this.actual();
await overwriteResults(this.dir, this.actual(), this.impl);
// If a nonempty base warning exists, but the actual result yields no warning,
// create a warning file
if (
this.dir.hasFile('warning') &&<|fim▁hole|> await this.dir.writeFile(`warning-${this.impl}`, '');
}
this._result = {type: 'pass'};
}
/** Mark this test (or its warning) as TODO */
async markTodo(): Promise<void> {
if (this.result().failureType === 'warning_difference') {
await this.addOptionForImpl(':warning_todo');
this._result = {type: 'pass'};
} else {
await this.addOptionForImpl(':todo');
this._result = {type: 'todo'};
}
}
/** Mark this test as ignored for the current implementation */
async markIgnore(): Promise<void> {
await this.addOptionForImpl(':ignore_for');
this._result = {type: 'skip'};
}
}<|fim▁end|>
|
this.dir.readFile('warning') &&
actual.isSuccess &&
!actual.warning
) {
|
<|file_name|>pseudo_element_definition.mako.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
/// Gecko's pseudo-element definition.
#[derive(Clone, Debug, Eq, Hash, MallocSizeOf, PartialEq)]
pub enum PseudoElement {
% for pseudo in PSEUDOS:
/// ${pseudo.value}
% if pseudo.is_tree_pseudo_element():
${pseudo.capitalized_pseudo()}(ThinBoxedSlice<Atom>),
% else:
${pseudo.capitalized_pseudo()},
% endif
% endfor
/// ::-webkit-* that we don't recognize
/// https://github.com/whatwg/compat/issues/103
UnknownWebkit(Atom),
}
/// Important: If you change this, you should also update Gecko's
/// nsCSSPseudoElements::IsEagerlyCascadedInServo.
<% EAGER_PSEUDOS = ["Before", "After", "FirstLine", "FirstLetter"] %>
<% TREE_PSEUDOS = [pseudo for pseudo in PSEUDOS if pseudo.is_tree_pseudo_element()] %>
<% SIMPLE_PSEUDOS = [pseudo for pseudo in PSEUDOS if not pseudo.is_tree_pseudo_element()] %>
/// The number of eager pseudo-elements.
pub const EAGER_PSEUDO_COUNT: usize = ${len(EAGER_PSEUDOS)};
/// The number of non-functional pseudo-elements.
pub const SIMPLE_PSEUDO_COUNT: usize = ${len(SIMPLE_PSEUDOS)};
/// The number of tree pseudo-elements.
pub const TREE_PSEUDO_COUNT: usize = ${len(TREE_PSEUDOS)};
/// The number of all pseudo-elements.
pub const PSEUDO_COUNT: usize = ${len(PSEUDOS)};
/// The list of eager pseudos.
pub const EAGER_PSEUDOS: [PseudoElement; EAGER_PSEUDO_COUNT] = [
% for eager_pseudo_name in EAGER_PSEUDOS:
PseudoElement::${eager_pseudo_name},
% endfor
];
<%def name="pseudo_element_variant(pseudo, tree_arg='..')">\
PseudoElement::${pseudo.capitalized_pseudo()}${"({})".format(tree_arg) if pseudo.is_tree_pseudo_element() else ""}\
</%def>
impl PseudoElement {
/// Get the pseudo-element as an atom.
#[inline]
fn atom(&self) -> Atom {
match *self {
% for pseudo in PSEUDOS:
${pseudo_element_variant(pseudo)} => atom!("${pseudo.value}"),
% endfor
PseudoElement::UnknownWebkit(..) => unreachable!(),
}
}
/// Returns an index of the pseudo-element.
#[inline]
pub fn index(&self) -> usize {
match *self {
% for i, pseudo in enumerate(PSEUDOS):
${pseudo_element_variant(pseudo)} => ${i},
% endfor
PseudoElement::UnknownWebkit(..) => unreachable!(),
}
}
/// Returns an array of `None` values.
///
/// FIXME(emilio): Integer generics can't come soon enough.
pub fn pseudo_none_array<T>() -> [Option<T>; PSEUDO_COUNT] {
[
${",\n ".join(["None" for pseudo in PSEUDOS])}
]
}
/// Whether this pseudo-element is an anonymous box.
#[inline]
pub fn is_anon_box(&self) -> bool {
match *self {
% for pseudo in PSEUDOS:
% if pseudo.is_anon_box():
${pseudo_element_variant(pseudo)} => true,
% endif
% endfor
_ => false,
}
}
/// Whether this pseudo-element is eagerly-cascaded.
#[inline]
pub fn is_eager(&self) -> bool {
matches!(*self,
${" | ".join(map(lambda name: "PseudoElement::{}".format(name), EAGER_PSEUDOS))})
}
/// Whether this pseudo-element is tree pseudo-element.
#[inline]
pub fn is_tree_pseudo_element(&self) -> bool {
match *self {
% for pseudo in TREE_PSEUDOS:
${pseudo_element_variant(pseudo)} => true,
% endfor
_ => false,
}
}
/// Whether this pseudo-element is an unknown Webkit-prefixed pseudo-element.
#[inline]
pub fn is_unknown_webkit_pseudo_element(&self) -> bool {
matches!(*self, PseudoElement::UnknownWebkit(..))
}
/// Gets the flags associated to this pseudo-element, or 0 if it's an
/// anonymous box.
pub fn flags(&self) -> u32 {
match *self {
% for pseudo in PSEUDOS:
${pseudo_element_variant(pseudo)} =>
% if pseudo.is_tree_pseudo_element():
if unsafe { structs::StaticPrefs_sVarCache_layout_css_xul_tree_pseudos_content_enabled } {
0
} else {
structs::CSS_PSEUDO_ELEMENT_ENABLED_IN_UA_SHEETS_AND_CHROME
},
% elif pseudo.is_anon_box():
structs::CSS_PSEUDO_ELEMENT_ENABLED_IN_UA_SHEETS,
% else:
structs::SERVO_CSS_PSEUDO_ELEMENT_FLAGS_${pseudo.pseudo_ident},
% endif
% endfor
PseudoElement::UnknownWebkit(..) => 0,
}
}
/// Construct a pseudo-element from a `CSSPseudoElementType`.
#[inline]
pub fn from_pseudo_type(type_: CSSPseudoElementType) -> Option<Self> {
match type_ {
% for pseudo in PSEUDOS:
% if not pseudo.is_anon_box():
CSSPseudoElementType::${pseudo.pseudo_ident} => {
Some(${pseudo_element_variant(pseudo)})
},
% endif
% endfor
_ => None,
}
}
/// Construct a `CSSPseudoElementType` from a pseudo-element
#[inline]
fn pseudo_type(&self) -> CSSPseudoElementType {
use crate::gecko_bindings::structs::CSSPseudoElementType_InheritingAnonBox;
match *self {
% for pseudo in PSEUDOS:
% if not pseudo.is_anon_box():
PseudoElement::${pseudo.capitalized_pseudo()} => CSSPseudoElementType::${pseudo.pseudo_ident},
% elif pseudo.is_tree_pseudo_element():
PseudoElement::${pseudo.capitalized_pseudo()}(..) => CSSPseudoElementType::XULTree,
% elif pseudo.is_inheriting_anon_box():
PseudoElement::${pseudo.capitalized_pseudo()} => CSSPseudoElementType_InheritingAnonBox,
% else:
PseudoElement::${pseudo.capitalized_pseudo()} => CSSPseudoElementType::NonInheritingAnonBox,
% endif
% endfor
PseudoElement::UnknownWebkit(..) => unreachable!(),
}
}
/// Get a PseudoInfo for a pseudo
pub fn pseudo_info(&self) -> (*mut structs::nsAtom, CSSPseudoElementType) {
(self.atom().as_ptr(), self.pseudo_type())
}
/// Get the argument list of a tree pseudo-element.
#[inline]
    pub fn tree_pseudo_args(&self) -> Option<&[Atom]> {
match *self {
% for pseudo in TREE_PSEUDOS:
PseudoElement::${pseudo.capitalized_pseudo()}(ref args) => Some(args),
% endfor
_ => None,
}
}
/// Construct a pseudo-element from an `Atom`.
#[inline]
pub fn from_atom(atom: &Atom) -> Option<Self> {
% for pseudo in PSEUDOS:
% if pseudo.is_tree_pseudo_element():
// We cannot generate ${pseudo_element_variant(pseudo)} from just an atom.
% else:
if atom == &atom!("${pseudo.value}") {
return Some(${pseudo_element_variant(pseudo)});
}
% endif
% endfor
None
}
/// Construct a pseudo-element from an anonymous box `Atom`.
#[inline]
pub fn from_anon_box_atom(atom: &Atom) -> Option<Self> {
% for pseudo in PSEUDOS:
% if pseudo.is_tree_pseudo_element():
// We cannot generate ${pseudo_element_variant(pseudo)} from just an atom.
% elif pseudo.is_anon_box():
if atom == &atom!("${pseudo.value}") {
return Some(${pseudo_element_variant(pseudo)});
}
% endif
% endfor
None
}
/// Construct a tree pseudo-element from atom and args.
#[inline]
pub fn from_tree_pseudo_atom(atom: &Atom, args: Box<[Atom]>) -> Option<Self> {
% for pseudo in PSEUDOS:
% if pseudo.is_tree_pseudo_element():
if atom == &atom!("${pseudo.value}") {
return Some(PseudoElement::${pseudo.capitalized_pseudo()}(args.into()));
}
% endif
% endfor
None
}
/// Constructs a pseudo-element from a string of text.
///
/// Returns `None` if the pseudo-element is not recognised.
#[inline]
pub fn from_slice(name: &str) -> Option<Self> {
// We don't need to support tree pseudos because functional
        // pseudo-elements need arguments, and thus should be created
// via other methods.
match_ignore_ascii_case! { name,
% for pseudo in SIMPLE_PSEUDOS:
"${pseudo.value[1:]}" => {
return Some(${pseudo_element_variant(pseudo)})
}
% endfor
// Alias "-moz-selection" to "selection" at parse time.
"-moz-selection" => {
return Some(PseudoElement::Selection);
}
"-moz-placeholder" => {
return Some(PseudoElement::Placeholder);
}
_ => {
if starts_with_ignore_ascii_case(name, "-moz-tree-") {
return PseudoElement::tree_pseudo_element(name, Box::new([]))
}
if unsafe {
structs::StaticPrefs_sVarCache_layout_css_unknown_webkit_pseudo_element
} {
const WEBKIT_PREFIX: &str = "-webkit-";
if starts_with_ignore_ascii_case(name, WEBKIT_PREFIX) {
let part = string_as_ascii_lowercase(&name[WEBKIT_PREFIX.len()..]);
return Some(PseudoElement::UnknownWebkit(part.into()));
}
}
}
}
None
}
/// Constructs a tree pseudo-element from the given name and arguments.
/// "name" must start with "-moz-tree-".
///
/// Returns `None` if the pseudo-element is not recognized.
#[inline]
pub fn tree_pseudo_element(name: &str, args: Box<[Atom]>) -> Option<Self> {
debug_assert!(starts_with_ignore_ascii_case(name, "-moz-tree-"));
let tree_part = &name[10..];
% for pseudo in TREE_PSEUDOS:
if tree_part.eq_ignore_ascii_case("${pseudo.value[11:]}") {
return Some(${pseudo_element_variant(pseudo, "args.into()")});
}
% endfor
None
}
}
impl ToCss for PseudoElement {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
dest.write_char(':')?;
match *self {
% for pseudo in PSEUDOS:
${pseudo_element_variant(pseudo)} => dest.write_str("${pseudo.value}")?,
% endfor
PseudoElement::UnknownWebkit(ref atom) => {
dest.write_str(":-webkit-")?;
serialize_atom_identifier(atom, dest)?;
}<|fim▁hole|> if let Some(args) = self.tree_pseudo_args() {
if !args.is_empty() {
dest.write_char('(')?;
let mut iter = args.iter();
if let Some(first) = iter.next() {
serialize_atom_identifier(&first, dest)?;
for item in iter {
dest.write_str(", ")?;
serialize_atom_identifier(item, dest)?;
}
}
dest.write_char(')')?;
}
}
Ok(())
}
}<|fim▁end|>
|
}
|
<|file_name|>structarm__pid__instance__q15.js<|end_file_name|><|fim▁begin|>var structarm__pid__instance__q15 =
[
<|fim▁hole|> [ "Ki", "structarm__pid__instance__q15.html#a0dcc19d5c8f7bc401acea9e8318cd777", null ],
[ "Kp", "structarm__pid__instance__q15.html#ad228aae24a1b6d855c93a8b9bbc1c4f1", null ],
[ "state", "structarm__pid__instance__q15.html#a4a3f0a878b5b6b055e3478a2f244cd30", null ]
];<|fim▁end|>
|
[ "A0", "structarm__pid__instance__q15.html#ad77f3a2823c7f96de42c92a3fbf3246b", null ],
[ "A1", "structarm__pid__instance__q15.html#ad8ac5ff736c0e51180398c31f777f18a", null ],
[ "A2", "structarm__pid__instance__q15.html#a33e8b4c2d3e24b8b494f6edca6a89c1b", null ],
[ "Kd", "structarm__pid__instance__q15.html#af5d4b53091f19eff7536636b7cc43111", null ],
|
<|file_name|>RadioGroupIntegrationTest.java<|end_file_name|><|fim▁begin|>package com.sdl.selenium.extjs6.form;
import com.sdl.selenium.InputData;
import com.sdl.selenium.TestBase;
import com.sdl.selenium.extjs6.panel.Panel;
import com.sdl.selenium.web.SearchType;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.time.Duration;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
public class RadioGroupIntegrationTest extends TestBase {
private Panel radioGroupPanel = new Panel(null, "Radio Group Example").setClasses("x-panel-default-framed");
private RadioGroup radioGroup = new RadioGroup(radioGroupPanel, "Auto Layout:", SearchType.DEEP_CHILD_NODE_OR_SELF);
@BeforeClass
public void startTests() {
driver.get(InputData.EXTJS_EXAMPLE_URL + "#form-radiogroup");
driver.switchTo().frame("examples-iframe");
radioGroup.setVersion(version);
radioGroup.ready(Duration.ofSeconds(20));
}
@Test
public void selectRadioGroup() {
assertThat(radioGroup.selectByLabel("Item 2"), is(true));
assertThat(radioGroup.isSelectedByLabel("Item 2"), is(true));<|fim▁hole|> assertThat(radioGroup.selectByLabel("5", SearchType.CONTAINS), is(true));
assertThat(radioGroup.isSelectedByLabel("Item 5"), is(true));
assertThat(radioGroup.selectByLabel("Item 4"), is(true));
assertThat(radioGroup.isSelectedByLabel("Item 4"), is(true));
assertThat(radioGroup.selectByLabel("Item 1"), is(true));
assertThat(radioGroup.isSelectedByLabel("Item 1"), is(true));
}
@Test
public void getLabelNameRadioGroup() {
assertThat(radioGroup.getLabelName("1"), equalTo("Item 1"));
assertThat(radioGroup.getLabelName("1"), equalTo("Item 1"));
}
}<|fim▁end|>
| |
<|file_name|>javax.management.MBeanNotificationInfo.d.ts<|end_file_name|><|fim▁begin|>declare namespace javax {<|fim▁hole|>
class MBeanNotificationInfo extends javax.management.MBeanFeatureInfo implements java.lang.Cloneable {
static readonly serialVersionUID: long
static readonly NO_NOTIFICATIONS: javax.management.MBeanNotificationInfo[]
public constructor(arg0: java.lang.String[] | string[], arg1: java.lang.String | string, arg2: java.lang.String | string)
public constructor(arg0: java.lang.String[] | string[], arg1: java.lang.String | string, arg2: java.lang.String | string, arg3: javax.management.Descriptor)
public clone(): java.lang.Object
public getNotifTypes(): java.lang.String[]
public toString(): java.lang.String
public equals(arg0: java.lang.Object | any): boolean
public hashCode(): number
}
}
}<|fim▁end|>
|
namespace management {
|
<|file_name|>load_text_file.py<|end_file_name|><|fim▁begin|>def load_text_file(text_file: str) -> str:<|fim▁hole|><|fim▁end|>
|
with open(text_file, 'r') as f:
return f.read()
|
<|file_name|>Frequent.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# coding: utf-8
class Frequent():
def __init__(self):
self.counters = {}
<|fim▁hole|>            self.counters[item] = self.counters[item] + 1
elif len(self.counters) <= k:
self.counters[item] = 1
else:
for key, value in self.counters.copy().items():
if value > 1:
self.counters[key] = value - 1
else:
del self.counters[key]
return key
def returnItems(self):
return self.counters<|fim▁end|>
|
def add(self, item, k, k2, t):
if item in self.counters:
|
<|file_name|>ES5.js<|end_file_name|><|fim▁begin|>var topics = require('../data').topics;
console.log(topics);
var result = topics.filter(function (topic) { //filter renvoie les 'true'
return topic.user.name === 'Leonard'; //? true : false;
});
var result2 = topics.filter(topic=>topic.user.name === 'Leonard');
var titles = topics.map(function (topic) {
return topic.title;
});
var title2 = topics.map(topic=>topic.title);
var hasViolence = topics.some(function (topic) { //returns true for topics tagged with violence
return (topic.tags.includes('violence'));
});
var hasViolence2 = topics.some(topic=>topic.tags.includes('violence'));
console.log('_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_');
console.log(result);
console.log('_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_');
console.log(result2);<|fim▁hole|>console.log(title2);
console.log('_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_');
console.log('hasViolence ', hasViolence);
console.log('_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_');
var SheldonCom = topics.filter(function (topic) {
return (topic.comments.some(function (comment) {
return comment.user.name === 'Sheldon';
}));
}).map(function (topic) {
return (topic.title);
});
var SheldonCom2;
SheldonCom2 = topics.filter(topic=>topic.comments.some(comment=>comment.user.name === 'Sheldon')).map(topic=>topic.title);
console.log('Sheldon has published in ', SheldonCom);
console.log('Sheldon has published in ', SheldonCom2);
console.log('_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_');
var idCommPenny = [];
topics.forEach(function (topic) {
topic.comments.forEach(function (comment) {
if (comment.user.name === 'Penny') {
idCommPenny.push(comment.id);
}
})
});
var sortFunction = (a, b) => a < b ? -1 : 1;
idCommPenny.sort(sortFunction);
console.log('Penny has post in : ', idCommPenny);
console.log('_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_');
var Content = [];
function getCommentByTag(tag, isAdmin) {
topics.forEach(function (topic) {
topic.comments.forEach(function (comment) {
if (comment.tags !== undefined) {
if (!comment.user.admin === isAdmin && comment.tags.includes(tag)) {
Content.push(comment.content);
}
}
});
});
return Content;
};
console.log('Violent tag are present for these non-admin comments : ', getCommentByTag('fun', true));
console.log('_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_');
var searched = [];
function search(term) {
topics.forEach(function (topic) {
topic.comments.forEach(function (comment) {
if (comment.content.toLowerCase().includes(term.toLowerCase())) {
searched.push(comment.content);
}
})
});
return searched;
}
console.log('search is present in :', search('it'));<|fim▁end|>
|
console.log('_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_');
console.log(titles);
console.log('_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_/-\\\_');
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.conf import settings
from django.db import models as django_models
from django.utils.translation import ugettext_lazy as _
from cms import models as cms_models
from djangocms_utils import fields as cms_fields
from shop import models as shop_models
from shop.util import fields as shop_fields
from simple_translation import actions
CMSPLUGIN_BLOG_PLACEHOLDERS = getattr(settings, 'CMSPLUGIN_BLOG_PLACEHOLDERS', ('excerpt', 'content'))
<|fim▁hole|> class Meta:
pass
def get_price(self):
if self.price_set.count() > 0:
return self.price_set.aggregate(django_models.Sum('price')).get('price__sum')
return self.unit_price
class ProductTitle(django_models.Model):
product = django_models.ForeignKey(Product)
language = django_models.CharField(max_length=2, choices=settings.LANGUAGES)
name = django_models.CharField(max_length=255)
slug = django_models.SlugField()
def __unicode__(self):
return self.name
class Meta:
unique_together = ('language', 'slug')
class Item(django_models.Model):
product = django_models.ForeignKey(Product)
item = django_models.CharField(max_length=255)
quantity = django_models.IntegerField(default=1)
has_nodewatcher_firmware = django_models.BooleanField()
class Price(django_models.Model):
product = django_models.ForeignKey(Product)
price = shop_fields.CurrencyField()
price_type = django_models.CharField(max_length=255, choices=((_('Purchase price'), _('Purchase price')), (_('Import tax'), _('Import tax')), ))
class ProductPlugin(cms_models.CMSPlugin):
product = django_models.ForeignKey(Product)<|fim▁end|>
|
class Product(shop_models.Product):
placeholders = cms_fields.M2MPlaceholderField(actions=actions.SimpleTranslationPlaceholderActions(), placeholders=CMSPLUGIN_BLOG_PLACEHOLDERS)
|
<|file_name|>test_images_negative.py<|end_file_name|><|fim▁begin|># Copyright 2013 OpenStack Foundation
# All Rights Reserved.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from tempest_lib import exceptions as lib_exc
from tempest.api.image import base
from tempest import test
class ImagesNegativeTest(base.BaseV2ImageTest):
"""
    Here we have negative tests for the get_image and delete_image APIs.
Tests
** get non-existent image
** get image with image_id=NULL
** get the deleted image
** delete non-existent image
    ** delete image with image_id=NULL
** delete the deleted image
"""
@test.attr(type=['negative'])
@test.idempotent_id('668743d5-08ad-4480-b2b8-15da34f81d9f')<|fim▁hole|> non_existent_id)
@test.attr(type=['negative'])
@test.idempotent_id('ef45000d-0a72-4781-866d-4cb7bf2562ad')
def test_get_image_null_id(self):
# get image with image_id = NULL
image_id = ""
self.assertRaises(lib_exc.NotFound, self.client.get_image, image_id)
@test.attr(type=['negative'])
@test.idempotent_id('e57fc127-7ba0-4693-92d7-1d8a05ebcba9')
def test_get_delete_deleted_image(self):
# get and delete the deleted image
# create and delete image
body = self.client.create_image(name='test',
container_format='bare',
disk_format='raw')
image_id = body['id']
self.client.delete_image(image_id)
self.client.wait_for_resource_deletion(image_id)
# get the deleted image
self.assertRaises(lib_exc.NotFound, self.client.get_image, image_id)
# delete the deleted image
self.assertRaises(lib_exc.NotFound, self.client.delete_image,
image_id)
@test.attr(type=['negative'])
@test.idempotent_id('6fe40f1c-57bd-4918-89cc-8500f850f3de')
def test_delete_non_existing_image(self):
# delete non-existent image
non_existent_image_id = str(uuid.uuid4())
self.assertRaises(lib_exc.NotFound, self.client.delete_image,
non_existent_image_id)
@test.attr(type=['negative'])
@test.idempotent_id('32248db1-ab88-4821-9604-c7c369f1f88c')
def test_delete_image_null_id(self):
# delete image with image_id=NULL
image_id = ""
self.assertRaises(lib_exc.NotFound, self.client.delete_image,
image_id)
@test.attr(type=['negative'])
@test.idempotent_id('292bd310-369b-41c7-a7a3-10276ef76753')
def test_register_with_invalid_container_format(self):
# Negative tests for invalid data supplied to POST /images
self.assertRaises(lib_exc.BadRequest, self.client.create_image,
'test', 'wrong', 'vhd')
@test.attr(type=['negative'])
@test.idempotent_id('70c6040c-5a97-4111-9e13-e73665264ce1')
def test_register_with_invalid_disk_format(self):
self.assertRaises(lib_exc.BadRequest, self.client.create_image,
'test', 'bare', 'wrong')<|fim▁end|>
|
def test_get_non_existent_image(self):
# get the non-existent image
non_existent_id = str(uuid.uuid4())
self.assertRaises(lib_exc.NotFound, self.client.get_image,
|
<|file_name|>testUtils.js<|end_file_name|><|fim▁begin|>// ./index.js contains imports to redisClient, which should be mocked in unit tests.
jest.mock('src/lib/redisClient');
// Avoid loading src/lib/queue, which really connects to redis
jest.mock('src/lib/queues', () => ({}));
import { graphql } from 'graphql';
import { schema } from './';<|fim▁hole|> *
* Usage:
* const result = await gql`query{...}`(variable)
*
* @returns {(variable: Object, context: Object) => Promise<GraphQLResult>}
*/
function gql(query, ...substitutes) {
return (variables, context = {}) =>
graphql(
schema,
String.raw(query, ...substitutes),
null,
context,
variables
);
}
export { gql };<|fim▁end|>
|
/**
* Executes graphql query against the current GraphQL schema.
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from common import Constant
from common import utils
from main.logger_helper import L
__author__ = 'Dan Cristian <[email protected]>'
# saves record to cloud database
def save_to_history_cloud(obj):
try:
L.l.debug('Trying to save historical record to cloud {}'.format(obj))
if Constant.JSON_PUBLISH_GRAPH_X in obj:
# name of x field
axis_x_field = obj[Constant.JSON_PUBLISH_GRAPH_X]
graph_id_field = obj[Constant.JSON_PUBLISH_GRAPH_ID]
graph_legend_field = obj[Constant.JSON_PUBLISH_GRAPH_LEGEND]
graph_shape_fields = obj[Constant.JSON_PUBLISH_GRAPH_SHAPE]
graph_y_fields = obj[Constant.JSON_PUBLISH_GRAPH_Y]
# names of fields that have value changed to record smallest amount of data
changed_fields = obj[Constant.JSON_PUBLISH_FIELDS_CHANGED]
# intersect lists and get only graphable fields that had values changed
list_axis_y = list(set(graph_y_fields) & set(changed_fields))
if len(list_axis_y) == 0:
L.l.info('Ignoring record save graph={} changed fields={} obj={}'.format(graph_y_fields,
changed_fields, obj))
else:
L.l.debug('Trying to save y axis {}'.format(list_axis_y))
if axis_x_field in obj and graph_id_field in obj:
table = obj[Constant.JSON_PUBLISH_TABLE]
trace_unique_id = obj[graph_id_field] # unique record/trace identifier
x_val = obj[axis_x_field]
graph_legend_item_name = obj[graph_legend_field] # unique key for legend
x_val = utils.parse_to_date(x_val)
x = x_val
index = 0
field_pairs = [[axis_x_field, x], [graph_legend_field, graph_legend_item_name],
[Constant.JSON_PUBLISH_RECORD_UUID, obj[Constant.JSON_PUBLISH_RECORD_UUID]],
[Constant.JSON_PUBLISH_SOURCE_HOST, obj[Constant.JSON_PUBLISH_SOURCE_HOST]]]
for axis_y in list_axis_y:
if axis_y in obj:
trace_list = []
y = obj[axis_y]
# add multiple y values for later save in db as a single record
field_pairs.append([axis_y, y])
# upload to cloud if plotly is initialised
#from cloud import graph_plotly
#if graph_plotly.initialised:
# from cloud.graph_plotly import graph_plotly_run
# Log.logger.info('Uploading to cloud field {}'.format(graph_legend_field))
# shape visual type for this trace
# shape = graph_shape_fields[index]
# unique name used for grid on upload
# grid_base_name = str(table)
# graph_plotly_run.add_grid_data(grid_unique_name=grid_base_name, x=x, y=y,
# axis_x_name=axis_x_field, axis_y_name=axis_y,<|fim▁hole|> #Log.logger.debug('Skip upload to cloud, plotly not init')
index += 1
else:
L.l.critical('Missing history axis_x [{}], graph_id [{}], in obj {}'.format(
axis_x_field,graph_id_field,obj))
else:
L.l.critical('Missing history axis X field {}'.format(Constant.JSON_PUBLISH_GRAPH_X))
except Exception as ex:
L.l.exception('General error saving historical cloud record, err {} obj={}'.format(ex, obj))
# saves record to cloud database
def save_to_history_db(obj):
try:
table = obj[Constant.JSON_PUBLISH_TABLE]
# L.l.debug('Trying to save historical record to db={}'.format(table))
# save to local history DB, append history to source table name
dest_table = str(table) + 'History'
# L.l.debug('Saving to local db table {} obj={}'.format(dest_table, obj))
from storage.sqalc import models
# http://stackoverflow.com/questions/4030982/initialise-class-object-by-name
try:
class_table = getattr(models, dest_table)
new_record = class_table()
for field in obj:
if hasattr(new_record, field) and field != "id":
setattr(new_record, field, obj[field])
if new_record.add_commit_record_to_db():
# L.l.debug('Saved OK to local db table {} obj={}'.format(dest_table, new_record))
pass
else:
L.l.critical("Cannot save history db record={}".format(obj))
except Exception as ex:
L.l.critical("Cannot save history db err={} record={}".format(ex, obj))
except Exception as ex:
L.l.exception('General error saving historical db record, err {} obj={}'.format(ex, obj))<|fim▁end|>
|
# record_unique_id_name=graph_legend_field,
# record_unique_id_value=graph_legend_item_name)
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#[macro_use] extern crate log;
extern crate openssl;
extern crate serde;
#[macro_use] extern crate serde_derive;
extern crate serde_json;
extern crate rand;
extern crate url;
mod config;
mod connection;
mod errors;
mod message;
mod parse;
mod subscription;<|fim▁hole|>pub use message::Message;
pub use subscription::AsyncSubscription;
pub use subscription::ChannelSubscription;
pub type Result<T> = std::result::Result<T, errors::Error>;<|fim▁end|>
|
pub use config::Config;
pub use connection::NatsConn;
pub use errors::Error;
|
<|file_name|>NavDef.java<|end_file_name|><|fim▁begin|>package com.rotoplastyc.util;
public class NavDef {
private final String i[];
public final static NavDef ERRO = new NavDef("err","mood_bad","ERRO");
public final static NavDef ERRO2 = new NavDef("err","mood","ERRO");
public NavDef(String contentPointer, String icon, String displayName) {
i = new String[3];
i[0] = contentPointer;
i[1] = icon;
i[2] = displayName;
}
<|fim▁hole|> public String getContentPointer() {
return i[0];
}
public String getIcon() {
return i[1];
}
public String getDisplayName() {
return i[2];
}
}<|fim▁end|>
| |
<|file_name|>CardsPresenceTest.java<|end_file_name|><|fim▁begin|>/*
* Preferanser is a program to simulate and calculate Russian Preferans Card game deals.
*
* Copyright (C) 2013 Yuriy Lazarev <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see [http://www.gnu.org/licenses/].
*/
package com.preferanser.webtest;
import com.preferanser.shared.domain.Card;
import com.preferanser.webtest.requirements.Application;
import net.thucydides.core.annotations.Story;
import org.junit.Test;
import static com.preferanser.shared.domain.TableLocation.*;
@Story(Application.Table.Cards.Presence.class)
public class CardsPresenceTest extends TableTest {
@Test
public void allCardsShouldBePresentOnSouth() {
endUser.onTheTablePage()<|fim▁hole|>}<|fim▁end|>
|
.canSeeCardsAt(SOUTH, Card.values())
.canSeeNoCardsAt(EAST, WEST);
}
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from enum import Enum
from typing import List, Any, cast
import yass
from tutorial.base_types_external import Integer
# shows how to use contract internal base types
class ExpirationHandler(yass.BaseTypeHandler):
def readBase(self, reader: yass.Reader) -> 'Expiration':
return Expiration(
reader.readZigZagInt()
)
def writeBase(self, value: 'Expiration', writer: yass.Writer) -> None:
writer.writeZigZagInt(value.year)
class Expiration:
TYPE_DESC = yass.TypeDesc(yass.FIRST_DESC_ID + 1, ExpirationHandler())
def __init__(self, year: int) -> None:
self.year = year
def __str__(self) -> str:<|fim▁hole|> BID = 0
ASK = 1
class Price:
def __init__(self) -> None:
self.instrumentId: Integer = cast(Integer, None)
self.kind: PriceKind = cast(PriceKind, None)
self.value: Integer = cast(Integer, None)
@yass.abstract
class Instrument:
def __init__(self) -> None:
self.id: Integer = cast(Integer, None)
self.name: str = cast(str, None)
class SystemException(Exception):
def __init__(self) -> None:
self.details: str = cast(str, None)
@yass.abstract
class ApplicationException(Exception):
def __init__(self) -> None:
pass
class UnknownInstrumentsException(ApplicationException):
def __init__(self) -> None:
ApplicationException.__init__(self)
self.instrumentIds: List[Integer] = cast(List[Integer], None)
self.onlyNeededForTests1: Any = cast(Any, None)
self.onlyNeededForTests2: bytes = cast(bytes, None)
self.onlyNeededForTests3: Exception = cast(Exception, None)
class Node:
def __init__(self) -> None:
self.id: float = cast(float, None)
self.links: List[Node] = cast(List[Node], None)
self.next: Node = cast(Node, None)
class EchoService:
def echo(self, value: Any) -> Any:
raise NotImplementedError()
class PriceEngine:
def subscribe(self, instrumentIds: List[Integer]) -> None:
raise NotImplementedError()
class PriceListener:
def newPrices(self, prices: List[Price]) -> None:
raise NotImplementedError()<|fim▁end|>
|
return f"{self.year}"
class PriceKind(Enum):
|
<|file_name|>service.py<|end_file_name|><|fim▁begin|>from spacyThrift import SpacyThrift
from spacyThrift.ttypes import Token
<|fim▁hole|>
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from thrift.server import TServer
import logging
class Handler:
def __init__(self, nlp):
self.nlp = nlp
def tag(self, sentence):
        document = self.nlp(sentence, parse=False, entity=False)
return [Token(element.orth_, element.tag_, element.lemma_)
for element in document]
if __name__ == '__main__':
logging.basicConfig()
logger = logging.getLogger()
logger.info("Loading ...")
nlp = English(parser=False, tagger=True, entity=False)
handler = Handler(nlp)
processor = SpacyThrift.Processor(handler)
transport = TSocket.TServerSocket(port=9090)
tfactory = TTransport.TBufferedTransportFactory()
pfactory = TBinaryProtocol.TBinaryProtocolFactory()
server = TServer.TThreadedServer(processor, transport, tfactory, pfactory)
logger.info("Serving ...")
server.serve()<|fim▁end|>
|
from spacy.en import English
|
<|file_name|>AdvancedSettingsProto.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3beta1/advanced_settings.proto
package com.google.cloud.dialogflow.cx.v3beta1;
public final class AdvancedSettingsProto {
private AdvancedSettingsProto() {}<|fim▁hole|> public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {}
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry);
}
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_AdvancedSettings_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_AdvancedSettings_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_AdvancedSettings_LoggingSettings_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_AdvancedSettings_LoggingSettings_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor descriptor;
static {
java.lang.String[] descriptorData = {
"\n:google/cloud/dialogflow/cx/v3beta1/adv"
+ "anced_settings.proto\022\"google.cloud.dialo"
+ "gflow.cx.v3beta1\032\037google/api/field_behav"
+ "ior.proto\"\315\001\n\020AdvancedSettings\022^\n\020loggin"
+ "g_settings\030\006 \001(\0132D.google.cloud.dialogfl"
+ "ow.cx.v3beta1.AdvancedSettings.LoggingSe"
+ "ttings\032Y\n\017LoggingSettings\022\"\n\032enable_stac"
+ "kdriver_logging\030\002 \001(\010\022\"\n\032enable_interact"
+ "ion_logging\030\003 \001(\010B\335\001\n&com.google.cloud.d"
+ "ialogflow.cx.v3beta1B\025AdvancedSettingsPr"
+ "otoP\001ZDgoogle.golang.org/genproto/google"
+ "apis/cloud/dialogflow/cx/v3beta1;cx\370\001\001\242\002"
+ "\002DF\252\002\"Google.Cloud.Dialogflow.Cx.V3Beta1"
+ "\352\002&Google::Cloud::Dialogflow::CX::V3beta"
+ "1b\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.api.FieldBehaviorProto.getDescriptor(),
});
internal_static_google_cloud_dialogflow_cx_v3beta1_AdvancedSettings_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_google_cloud_dialogflow_cx_v3beta1_AdvancedSettings_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_AdvancedSettings_descriptor,
new java.lang.String[] {
"LoggingSettings",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_AdvancedSettings_LoggingSettings_descriptor =
internal_static_google_cloud_dialogflow_cx_v3beta1_AdvancedSettings_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_dialogflow_cx_v3beta1_AdvancedSettings_LoggingSettings_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_AdvancedSettings_LoggingSettings_descriptor,
new java.lang.String[] {
"EnableStackdriverLogging", "EnableInteractionLogging",
});
com.google.api.FieldBehaviorProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}<|fim▁end|>
| |
<|file_name|>cursor_pair.rs<|end_file_name|><|fim▁begin|>//! A generic cursor implementation merging pairs of different cursors.
use std::cmp::Ordering;
use super::Cursor;
/// A cursor over the combined updates of two different cursors.
///
/// A `CursorPair` wraps two cursors over the same types of updates, and provides navigation
/// through their merged updates.
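///
/// A minimal iteration sketch (hedged: it assumes a `CursorPair` and its combined
/// `storage` pair have already been constructed elsewhere; the snippet is illustrative
/// and not taken from the surrounding crate):
///
/// ```ignore
/// while cursor.key_valid(&storage) {
///     while cursor.val_valid(&storage) {
///         let (key, val) = (cursor.key(&storage), cursor.val(&storage));
///         cursor.map_times(&storage, |time, diff| {
///             // consume the merged (key, val, time, diff) update here
///         });
///         cursor.step_val(&storage);
///     }
///     cursor.step_key(&storage);
/// }
/// ```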
pub struct CursorPair<C1, C2> {
cursor1: C1,
cursor2: C2,
key_order: Ordering, // Invalid keys are `Greater` than all other keys. `Equal` implies both valid.
val_order: Ordering, // Invalid vals are `Greater` than all other vals. `Equal` implies both valid.
}
impl<K, V, T, R, C1, C2> Cursor<K, V, T, R> for CursorPair<C1, C2>
where
K: Ord,
V: Ord,
C1: Cursor<K, V, T, R>,
C2: Cursor<K, V, T, R>,
{
type Storage = (C1::Storage, C2::Storage);
// validation methods
fn key_valid(&self, storage: &Self::Storage) -> bool {
match self.key_order {
Ordering::Less => self.cursor1.key_valid(&storage.0),
Ordering::Equal => true,
Ordering::Greater => self.cursor2.key_valid(&storage.1),
}
}
fn val_valid(&self, storage: &Self::Storage) -> bool {
match (self.key_order, self.val_order) {
(Ordering::Less, _) => self.cursor1.val_valid(&storage.0),
(Ordering::Greater, _) => self.cursor2.val_valid(&storage.1),
(Ordering::Equal, Ordering::Less) => self.cursor1.val_valid(&storage.0),
(Ordering::Equal, Ordering::Greater) => self.cursor2.val_valid(&storage.1),
(Ordering::Equal, Ordering::Equal) => true,
}
}
// accessors
fn key<'a>(&self, storage: &'a Self::Storage) -> &'a K {
match self.key_order {
Ordering::Less => self.cursor1.key(&storage.0),
_ => self.cursor2.key(&storage.1),
}
}
fn val<'a>(&self, storage: &'a Self::Storage) -> &'a V {
if self.key_order == Ordering::Less || (self.key_order == Ordering::Equal && self.val_order != Ordering::Greater) {
self.cursor1.val(&storage.0)
}
else {
self.cursor2.val(&storage.1)
}
}
fn map_times<L: FnMut(&T, &R)>(&mut self, storage: &Self::Storage, mut logic: L) {
if self.key_order == Ordering::Less || (self.key_order == Ordering::Equal && self.val_order != Ordering::Greater) {
self.cursor1.map_times(&storage.0, |t,d| logic(t,d));
}
if self.key_order == Ordering::Greater || (self.key_order == Ordering::Equal && self.val_order != Ordering::Less) {
self.cursor2.map_times(&storage.1, |t,d| logic(t,d));
}
}
// key methods
fn step_key(&mut self, storage: &Self::Storage) {
if self.key_order != Ordering::Greater { self.cursor1.step_key(&storage.0); }
if self.key_order != Ordering::Less { self.cursor2.step_key(&storage.1); }
self.key_order = match (self.cursor1.key_valid(&storage.0), self.cursor2.key_valid(&storage.1)) {
(false, _) => Ordering::Greater,
(_, false) => Ordering::Less,
(true, true) => self.cursor1.key(&storage.0).cmp(self.cursor2.key(&storage.1)),
};
}
fn seek_key(&mut self, storage: &Self::Storage, key: &K) {
self.cursor1.seek_key(&storage.0, key);
self.cursor2.seek_key(&storage.1, key);
self.key_order = match (self.cursor1.key_valid(&storage.0), self.cursor2.key_valid(&storage.1)) {
(false, _) => Ordering::Greater,
(_, false) => Ordering::Less,
(true, true) => self.cursor1.key(&storage.0).cmp(self.cursor2.key(&storage.1)),
};
}
// value methods
fn step_val(&mut self, storage: &Self::Storage) {
match self.key_order {
Ordering::Less => self.cursor1.step_val(&storage.0),
Ordering::Equal => {
if self.val_order != Ordering::Greater { self.cursor1.step_val(&storage.0); }
if self.val_order != Ordering::Less { self.cursor2.step_val(&storage.1); }
self.val_order = match (self.cursor1.val_valid(&storage.0), self.cursor2.val_valid(&storage.1)) {
(false, _) => Ordering::Greater,
(_, false) => Ordering::Less,
(true, true) => self.cursor1.val(&storage.0).cmp(self.cursor2.val(&storage.1)),
};
},
Ordering::Greater => self.cursor2.step_val(&storage.1),
}
}
fn seek_val(&mut self, storage: &Self::Storage, val: &V) {
match self.key_order {
Ordering::Less => self.cursor1.seek_val(&storage.0, val),
Ordering::Equal => {
self.cursor1.seek_val(&storage.0, val);
self.cursor2.seek_val(&storage.1, val);
self.val_order = match (self.cursor1.val_valid(&storage.0), self.cursor2.val_valid(&storage.1)) {
(false, _) => Ordering::Greater,
(_, false) => Ordering::Less,
(true, true) => self.cursor1.val(&storage.0).cmp(self.cursor2.val(&storage.1)),
};
},
Ordering::Greater => self.cursor2.seek_val(&storage.1, val),
}
}
// rewinding methods<|fim▁hole|> }
fn rewind_vals(&mut self, storage: &Self::Storage) {
if self.key_order != Ordering::Greater { self.cursor1.rewind_vals(&storage.0); }
if self.key_order != Ordering::Less { self.cursor2.rewind_vals(&storage.1); }
}
}<|fim▁end|>
|
fn rewind_keys(&mut self, storage: &Self::Storage) {
self.cursor1.rewind_keys(&storage.0);
self.cursor2.rewind_keys(&storage.1);
|
<|file_name|>database.spec.ts<|end_file_name|><|fim▁begin|>import { file as tempfile } from 'tempy'
import { SourceModel } from '../src/models/source-model'
import { SourceLogModel } from '../src/models/source-log-model'
import { ItemModel } from '../src/models/item-model'
import { TypeModel } from '../src/models/type-model'<|fim▁hole|> sources,
items,
types
} from '../src/database'
describe('database', () => {
describe('before init()', () => {
it('should still start with models', () => {
expect(sourceLogs).toEqual(jasmine.any(SourceLogModel))
expect(sources).toEqual(jasmine.any(SourceModel))
expect(items).toEqual(jasmine.any(ItemModel))
expect(types).toEqual(jasmine.any(TypeModel))
})
})
describe('init()', () => {
it('should create the models', () => {
init({
type: 'sqlite3',
filename: tempfile()
})
// checkModel() is private; this is a hack to get around that
return Promise.all([
sourceLogs['checkModel'](),
sources['checkModel'](),
items['checkModel'](),
types['checkModel']()
])
})
it('should use a port when specified', () => {
init({
type: 'sqlite3',
filename: tempfile(),
port: 123456
})
return Promise.all([
sourceLogs['checkModel'](),
sources['checkModel'](),
items['checkModel'](),
types['checkModel']()
])
})
it('should use a socket when specified', () => {
init({
type: 'sqlite3',
filename: tempfile(),
socket: 'testSocket'
})
return Promise.all([
sourceLogs['checkModel'](),
sources['checkModel'](),
items['checkModel'](),
types['checkModel']()
])
})
})
})<|fim▁end|>
|
import {
init,
sourceLogs,
|
<|file_name|>FileAppender.java<|end_file_name|><|fim▁begin|>package SOLID.Exercise.Logger.model.appenders;
import SOLID.Exercise.Logger.api.File;
import SOLID.Exercise.Logger.api.Layout;
import SOLID.Exercise.Logger.model.files.LogFile;
public class FileAppender extends BaseAppender {
private File file;
public FileAppender(Layout layout) {
super(layout);
this.setFile(new LogFile());
}<|fim▁hole|> public void setFile(File file) {
this.file = file;
}
@Override
public void append(String message) {
this.file.write(message);
}
@Override
public String toString() {
return String.format("%s, File size: %d", super.toString(), this.file.getSize());
}
}<|fim▁end|>
| |
<|file_name|>fan.py<|end_file_name|><|fim▁begin|>"""Support for Tasmota fans."""
from hatasmota import const as tasmota_const
from homeassistant.components import fan
from homeassistant.components.fan import FanEntity
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.util.percentage import (
ordered_list_item_to_percentage,
percentage_to_ordered_list_item,
)
from .const import DATA_REMOVE_DISCOVER_COMPONENT
from .discovery import TASMOTA_DISCOVERY_ENTITY_NEW
from .mixins import TasmotaAvailability, TasmotaDiscoveryUpdate
<|fim▁hole|> tasmota_const.FAN_SPEED_LOW,
tasmota_const.FAN_SPEED_MEDIUM,
tasmota_const.FAN_SPEED_HIGH,
] # off is not included
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Tasmota fan dynamically through discovery."""
@callback
def async_discover(tasmota_entity, discovery_hash):
"""Discover and add a Tasmota fan."""
async_add_entities(
[TasmotaFan(tasmota_entity=tasmota_entity, discovery_hash=discovery_hash)]
)
hass.data[
DATA_REMOVE_DISCOVER_COMPONENT.format(fan.DOMAIN)
] = async_dispatcher_connect(
hass,
TASMOTA_DISCOVERY_ENTITY_NEW.format(fan.DOMAIN),
async_discover,
)
class TasmotaFan(
TasmotaAvailability,
TasmotaDiscoveryUpdate,
FanEntity,
):
"""Representation of a Tasmota fan."""
def __init__(self, **kwds):
"""Initialize the Tasmota fan."""
self._state = None
super().__init__(
**kwds,
)
@property
def speed_count(self) -> int:
"""Return the number of speeds the fan supports."""
return len(ORDERED_NAMED_FAN_SPEEDS)
@property
def percentage(self):
"""Return the current speed percentage."""
if self._state is None:
return None
if self._state == 0:
return 0
return ordered_list_item_to_percentage(ORDERED_NAMED_FAN_SPEEDS, self._state)
@property
def supported_features(self):
"""Flag supported features."""
return fan.SUPPORT_SET_SPEED
async def async_set_percentage(self, percentage):
"""Set the speed of the fan."""
if percentage == 0:
await self.async_turn_off()
else:
tasmota_speed = percentage_to_ordered_list_item(
ORDERED_NAMED_FAN_SPEEDS, percentage
)
self._tasmota_entity.set_speed(tasmota_speed)
async def async_turn_on(
self, speed=None, percentage=None, preset_mode=None, **kwargs
):
"""Turn the fan on."""
# Tasmota does not support turning a fan on with implicit speed
await self.async_set_percentage(
percentage
or ordered_list_item_to_percentage(
ORDERED_NAMED_FAN_SPEEDS, tasmota_const.FAN_SPEED_MEDIUM
)
)
async def async_turn_off(self, **kwargs):
"""Turn the fan off."""
self._tasmota_entity.set_speed(tasmota_const.FAN_SPEED_OFF)<|fim▁end|>
|
ORDERED_NAMED_FAN_SPEEDS = [
|
<|file_name|>ConstrutorTesteDrive.java<|end_file_name|><|fim▁begin|>package construtores;
public class ConstrutorTesteDrive {
public static void main(String[] args) {
// Construtor c1 = new Construtor();
// System.out.println("--c1\n" + c1.getNomeERg() + "\n\n");
Construtor c2 = new Construtor("Odair");
<|fim▁hole|>
Construtor c3 = new Construtor("Odair", "123456");
System.out.println("--c3\n" + c3.getNomeERg() + "\n\n");
}
}<|fim▁end|>
|
System.out.println("--c2\n" + c2.getNomeERg() + "\n\n");
|
<|file_name|>gte.js<|end_file_name|><|fim▁begin|>Astro.createValidator({
name: 'gte',
validate: function(fieldValue, fieldName, compareValue) {
if (_.isFunction(compareValue)) {
compareValue = compareValue.call(this);
}
return fieldValue >= compareValue;
},
events: {
validationerror: function(e) {
var fieldName = e.data.field;
var compareValue = e.data.options;
if (_.isFunction(compareValue)) {
compareValue = compareValue.call(this);
}<|fim▁hole|> compareValue + '"';
}
}
});<|fim▁end|>
|
e.data.message = 'The "' + fieldName +
'" field\'s value has to be greater than or equal "' +
|
<|file_name|>file_codec.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Copyright 2014 Google.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A base class for all codecs using encode-to-file."""
import encoder
import filecmp
import json
import os
import re
import subprocess
class FileCodec(encoder.Codec):
"""Base class for file-using codecs.
Subclasses MUST define:
- EncodeCommandLine
- DecodeCommandLine
- ResultData
"""
def __init__(self, name, formatter=None):
super(FileCodec, self).__init__(name, formatter=formatter)
self.extension = 'must-have-extension'
def _EncodeFile(self, parameters, bitrate, videofile, encodedfile):
commandline = self.EncodeCommandLine(
parameters, bitrate, videofile, encodedfile)
print commandline
with open(os.path.devnull, 'r') as nullinput:
times_start = os.times()
returncode = subprocess.call(commandline, shell=True, stdin=nullinput)
times_end = os.times()
subprocess_cpu = times_end[2] - times_start[2]
elapsed_clock = times_end[4] - times_start[4]
print "Encode took %f CPU seconds %f clock seconds" % (
subprocess_cpu, elapsed_clock)
if returncode:
raise Exception("Encode failed with returncode %d" % returncode)
return (subprocess_cpu, elapsed_clock)
def _DecodeFile(self, videofile, encodedfile, workdir):
tempyuvfile = os.path.join(workdir,
videofile.basename + 'tempyuvfile.yuv')
if os.path.isfile(tempyuvfile):
print "Removing tempfile before decode:", tempyuvfile
os.unlink(tempyuvfile)
commandline = self.DecodeCommandLine(videofile, encodedfile, tempyuvfile)
print commandline
with open(os.path.devnull, 'r') as nullinput:
subprocess_cpu_start = os.times()[2]
returncode = subprocess.call(commandline, shell=True,
stdin=nullinput)
if returncode:
raise Exception('Decode failed with returncode %d' % returncode)
subprocess_cpu = os.times()[2] - subprocess_cpu_start
print "Decode took %f seconds" % subprocess_cpu
commandline = encoder.Tool("psnr") + " %s %s %d %d 9999" % (
videofile.filename, tempyuvfile, videofile.width,
videofile.height)
print commandline
psnr = subprocess.check_output(commandline, shell=True, stdin=nullinput)
commandline = ['md5sum', tempyuvfile]
md5 = subprocess.check_output(commandline, shell=False)
yuv_md5 = md5.split(' ')[0]
os.unlink(tempyuvfile)
return psnr, subprocess_cpu, yuv_md5
def Execute(self, parameters, bitrate, videofile, workdir):
encodedfile = os.path.join(workdir,
'%s.%s' % (videofile.basename, self.extension))
subprocess_cpu, elapsed_clock = self._EncodeFile(parameters, bitrate,
videofile, encodedfile)
result = {}
result['encode_cputime'] = subprocess_cpu
result['encode_clocktime'] = elapsed_clock
result['encoder_version'] = self.EncoderVersion()
bitrate = videofile.MeasuredBitrate(os.path.getsize(encodedfile))
psnr, decode_cputime, yuv_md5 = self._DecodeFile(
videofile, encodedfile, workdir)
result['decode_cputime'] = decode_cputime
result['yuv_md5'] = yuv_md5
print "Bitrate", bitrate, "PSNR", psnr
result['bitrate'] = int(bitrate)
result['psnr'] = float(psnr)
result['cliptime'] = videofile.ClipTime()<|fim▁hole|>
return result
# Below are the fallback implementations of the interfaces
# that the subclasses have to implement.
def EncodeCommandLine(self, parameters, bitrate, videofile, encodedfile):
"""This function returns the command line that should be executed
in order to turn an YUV file into an encoded file."""
# pylint: disable=W0613,R0201
raise encoder.Error('EncodeCommandLine not defined')
def DecodeCommandLine(self, videofile, encodedfile, yuvfile):
"""This function returns the command line that should be executed
in order to turn an encoded file into an YUV file."""
# pylint: disable=W0613,R0201
raise encoder.Error('DecodeCommandLine not defined')
def ResultData(self, encodedfile):
"""Returns additional fields that the codec may know how to generate."""
# pylint: disable=W0613,R0201
return {}
def VerifyEncode(self, parameters, bitrate, videofile, workdir):
"""Returns true if a new encode of the file gives exactly the same file."""
old_encoded_file = '%s/%s.%s' % (workdir, videofile.basename,
self.extension)
if not os.path.isfile(old_encoded_file):
raise encoder.Error('Old encoded file missing: %s' % old_encoded_file)
new_encoded_file = '%s/%s_verify.%s' % (workdir, videofile.basename,
self.extension)
self._EncodeFile(parameters, bitrate, videofile,
new_encoded_file)
if not VideoFilesEqual(old_encoded_file, new_encoded_file, self.extension):
# If there is a difference, we leave the new encoded file so that
# they can be compared by hand if desired.
return False
os.unlink(new_encoded_file)
return True
def EncoderVersion(self):
raise encoder.Error('File codecs must define their own version')
# Tools that may be called upon by the codec implementation if needed.
def MatroskaFrameInfo(encodedfile):
# Run the mkvinfo tool across the file to get frame size info.
commandline = 'mkvinfo -v %s' % encodedfile
print commandline
mkvinfo = subprocess.check_output(commandline, shell=True)
frameinfo = []
for line in mkvinfo.splitlines():
match = re.search(r'Frame with size (\d+)', line)
if match:
# The mkvinfo tool gives frame size in bytes. We want bits.
frameinfo.append({'size': int(match.group(1))*8})
return frameinfo
def FfmpegFrameInfo(encodedfile):
# Uses the ffprobe tool to give frame info.
commandline = '%s -loglevel warning -show_frames -of json %s' % (
encoder.Tool('ffprobe'), encodedfile)
ffprobeinfo = subprocess.check_output(commandline, shell=True)
probeinfo = json.loads(ffprobeinfo)
previous_position = 0
frameinfo = []
for frame in probeinfo['frames']:
current_position = int(frame['pkt_pos'])
if previous_position != 0:
frameinfo.append({'size': 8 * (current_position - previous_position)})
previous_position = current_position
frameinfo.append({'size': 8 *
(os.path.getsize(encodedfile) - previous_position)})
return frameinfo
def VideoFilesEqual(old_encoded_file, new_encoded_file, extension):
if extension == 'webm':
# Matroska files contain UIDs that vary even if the video content
# is the same. So we must use vpxdec --md5 instead.
old_checksum = subprocess.check_output((encoder.Tool('vpxdec'),
'--md5',
old_encoded_file))
new_checksum = subprocess.check_output((encoder.Tool('vpxdec'),
'--md5',
new_encoded_file))
return old_checksum == new_checksum
else:
return filecmp.cmp(old_encoded_file, new_encoded_file)<|fim▁end|>
|
result.update(self.ResultData(encodedfile))
|
<|file_name|>main.js<|end_file_name|><|fim▁begin|>#!/usr/bin/env node
require("babel/register")({
"stage": 1
});
var fs = require("fs");
GLOBAL.WALLACEVERSION = "Err";
GLOBAL.PLUGIN_CONTRIBUTORS = [];
try {
var p = JSON.parse(fs.readFileSync(__dirname+"/package.json"));
GLOBAL.WALLACEVERSION = p.version;
}
catch(e) {}
var Core = require("./core/Core.js");
process.on('uncaughtException', function (err) {
console.error(err);
});
<|fim▁hole|><|fim▁end|>
|
GLOBAL.core = new Core();
|
<|file_name|>parseJavaScript.js<|end_file_name|><|fim▁begin|>'use strict';
var ok = require('assert').ok;
const esprima = require('esprima');
function parseExpression(src, builder, isExpression) {
ok(typeof src === 'string', '"src" should be a string expression');
ok(builder, '"builder" is required');
function convert(node) {
if (Array.isArray(node)) {
let nodes = node;
for (let i=0; i<nodes.length; i++) {
var converted = convert(nodes[i]);
if (converted == null) {
return null;
}
nodes[i] = converted;
}
return nodes;
}
switch(node.type) {
case 'ArrayExpression': {
let elements = convert(node.elements);
if (!elements) {
return null;
}
return builder.arrayExpression(elements);
}
case 'AssignmentExpression': {
let left = convert(node.left);
if (!left) {
return null;
}
let right = convert(node.right);
if (!right) {
return null;
}
return builder.assignment(left, right, node.operator);
}
case 'BinaryExpression': {
let left = convert(node.left);
if (!left) {
return null;
}
let right = convert(node.right);
if (!right) {
return null;
}
return builder.binaryExpression(left, node.operator, right);
}
case 'CallExpression': {
let callee = convert(node.callee);
if (!callee) {
return null;
}
let args = convert(node.arguments);
if (!args) {
return null;
}
return builder.functionCall(callee, args);
}
case 'ConditionalExpression': {
let test = convert(node.test);
if (!test) {
return null;
}
let consequent = convert(node.consequent);
if (!consequent) {
return null;
}
let alternate = convert(node.alternate);
if (!alternate) {
return null;
}
return builder.conditionalExpression(test, consequent, alternate);
}
case 'ExpressionStatement': {
return convert(node.expression);
}
case 'FunctionDeclaration':
case 'FunctionExpression': {
let name = null;
if (node.id) {
name = convert(node.id);
if (name == null) {
return null;
}
}
let params = convert(node.params);
if (!params) {
return null;
}
let body = convert(node.body);
if (!body) {<|fim▁hole|> }
return builder.functionDeclaration(name, params, body);
}
case 'Identifier': {
return builder.identifier(node.name);
}
case 'Literal': {
let literalValue;
if (node.regex) {
literalValue = new RegExp(node.regex.pattern, 'gi');
} else {
literalValue = node.value;
}
return builder.literal(literalValue);
}
case 'LogicalExpression': {
let left = convert(node.left);
if (!left) {
return null;
}
let right = convert(node.right);
if (!right) {
return null;
}
return builder.logicalExpression(left, node.operator, right);
}
case 'MemberExpression': {
let object = convert(node.object);
if (!object) {
return null;
}
let property = convert(node.property);
if (!property) {
return null;
}
return builder.memberExpression(object, property, node.computed);
}
case 'NewExpression': {
let callee = convert(node.callee);
if (!callee) {
return null;
}
let args = convert(node.arguments);
if (!args) {
return null;
}
return builder.newExpression(callee, args);
}
case 'Program': {
if (node.body && node.body.length === 1) {
return convert(node.body[0]);
}
return null;
}
case 'ObjectExpression': {
let properties = convert(node.properties);
if (!properties) {
return null;
}
return builder.objectExpression(properties);
}
case 'Property': {
let key = convert(node.key);
if (!key) {
return null;
}
let value = convert(node.value);
if (!value) {
return null;
}
return builder.property(key, value);
}
case 'ThisExpression': {
return builder.thisExpression();
}
case 'UnaryExpression': {
let argument = convert(node.argument);
if (!argument) {
return null;
}
return builder.unaryExpression(argument, node.operator, node.prefix);
}
case 'UpdateExpression': {
let argument = convert(node.argument);
if (!argument) {
return null;
}
return builder.updateExpression(argument, node.operator, node.prefix);
}
default:
return null;
}
}
let jsAST;
try {
if (isExpression) {
src = '(' + src + ')';
}
jsAST = esprima.parse(src);
} catch(e) {
if (e.index == null) {
// Doesn't look like an Esprima parse error... just rethrow the exception
throw e;
}
var errorIndex = e.index;
var errorMessage = '\n' + e.description;
if (errorIndex != null && errorIndex >= 0) {
if (isExpression) {
errorIndex--; // Account for extra paren added to start
}
errorMessage += ': ';
errorMessage += src + '\n'+ new Array(errorMessage.length + errorIndex + 1).join(" ") + '^';
}
var wrappedError = new Error(errorMessage);
wrappedError.index = errorIndex;
wrappedError.src = src;
wrappedError.code = 'ERR_INVALID_JAVASCRIPT_EXPRESSION';
throw wrappedError;
}
var converted = convert(jsAST);
if (converted == null) {
converted = builder.expression(src);
}
return converted;
}
module.exports = parseExpression;<|fim▁end|>
|
return null;
|
<|file_name|>SitzzahlEinschraenkung.java<|end_file_name|><|fim▁begin|>package edu.kit.iti.formal.mandatsverteilung.generierer;
import edu.kit.iti.formal.mandatsverteilung.datenhaltung.Bundestagswahl;
/**
 * Models a constraint on the generator's result that requires the Bundestag
 * to have a specific size.
* <|fim▁hole|> */
public class SitzzahlEinschraenkung extends Einschraenkung {
public SitzzahlEinschraenkung(int wert, int abweichung) {
assert wert > 0;
assert abweichung > 0;
this.wert = wert;
this.abweichung = abweichung;
gewichtung = 1.0;
}
@Override
int ueberpruefeErgebnis(Bundestagswahl b) {
int tatsaechlicheSitzzahl = b.getSitzzahl();
int genauigkeit = RandomisierterGenerierer.getGenauigkeit();
double minD = (minDistance(genauigkeit * tatsaechlicheSitzzahl,
genauigkeit * wert, genauigkeit * abweichung));
return (int) (gewichtung * minD);
}
}<|fim▁end|>
|
* @author Jan
*
|
<|file_name|>search_index.js<|end_file_name|><|fim▁begin|>window.esdocSearchIndex = [
[
"./git/resourcecreator/src/utils/commonmanagers/advencedfilterargmanager/advencedfilterargmanagercategorygroupaddsampledatas.js~advencedfilterargmanagercategorygroupaddsampledatas",
"variable/index.html#static-variable-AdvencedFilterArgManagerCategoryGroupAddSampleDatas",
"<span>AdvencedFilterArgManagerCategoryGroupAddSampleDatas</span> <span class=\"search-result-import-path\">./git/resourcecreator/src/Utils/CommonManagers/AdvencedFilterArgManager/AdvencedFilterArgManagerCategoryGroupAddSampleDatas.js</span>",
"variable"
],
[
"./git/resourcecreator/src/utils/commonmanagers/advencedfilterargmanager/advencedfilterargmanagercategorygroupeditsampledatas.js~advencedfilterargmanagercategorygroupeditsampledatas",
"variable/index.html#static-variable-AdvencedFilterArgManagerCategoryGroupEditSampleDatas",
"<span>AdvencedFilterArgManagerCategoryGroupEditSampleDatas</span> <span class=\"search-result-import-path\">./git/resourcecreator/src/Utils/CommonManagers/AdvencedFilterArgManager/AdvencedFilterArgManagerCategoryGroupEditSampleDatas.js</span>",
"variable"
],
[
"./git/resourcecreator/src/utils/commonmanagers/advencedfilterargmanager/advencedfilterargmanagercategorymovesampledatas.js~advencedfilterargmanagercategorymovesampledatas",
"variable/index.html#static-variable-AdvencedFilterArgManagerCategoryMoveSampleDatas",
"<span>AdvencedFilterArgManagerCategoryMoveSampleDatas</span> <span class=\"search-result-import-path\">./git/resourcecreator/src/Utils/CommonManagers/AdvencedFilterArgManager/AdvencedFilterArgManagerCategoryMoveSampleDatas.js</span>",
"variable"
],
[
"./git/resourcecreator/src/utils/commonmanagers/advencedfilterargmanager/advencedfilterargmanagercategoryremovesampledatas.js~advencedfilterargmanagercategoryremovesampledatas",
"variable/index.html#static-variable-AdvencedFilterArgManagerCategoryRemoveSampleDatas",
"<span>AdvencedFilterArgManagerCategoryRemoveSampleDatas</span> <span class=\"search-result-import-path\">./git/resourcecreator/src/Utils/CommonManagers/AdvencedFilterArgManager/AdvencedFilterArgManagerCategoryRemoveSampleDatas.js</span>",
"variable"
],
[
"./git/resourcecreator/src/utils/commonmanagers/advencedfilterargmanager/advencedfilterargmanagerreadsampledatas.js~advencedfilterargmanagerreadsampledatas",
"variable/index.html#static-variable-AdvencedFilterArgManagerReadSampleDatas",
"<span>AdvencedFilterArgManagerReadSampleDatas</span> <span class=\"search-result-import-path\">./git/resourcecreator/src/Utils/CommonManagers/AdvencedFilterArgManager/AdvencedFilterArgManagerReadSampleDatas.js</span>",
"variable"
],
[
"./git/resourcecreator/src/utils/commonmanagers/advencedfiltermanager/advencedfiltermanagerreadsampledatas.js~advencedfiltermanagerreadsampledatas",
"variable/index.html#static-variable-AdvencedFilterManagerReadSampleDatas",
"<span>AdvencedFilterManagerReadSampleDatas</span> <span class=\"search-result-import-path\">./git/resourcecreator/src/Utils/CommonManagers/AdvencedFilterManager/AdvencedFilterManagerReadSampleDatas.js</span>",
"variable"
],
[
"./git/userdashboard/src/components/advencedfilterquickviewer/advencedfilterquickviewersampledatas.js~advencedfilterquickviewersampledatas",
"variable/index.html#static-variable-AdvencedFilterQuickViewerSampleDatas",
"<span>AdvencedFilterQuickViewerSampleDatas</span> <span class=\"search-result-import-path\">./git/userdashboard/src/Components/AdvencedFilterQuickViewer/AdvencedFilterQuickViewerSampleDatas.js</span>",
"variable"
],
[
"./git/resourcecreator/src/utils/commonmanagers/categorylistmanager/categorylistmanagerreadsampledatas.js~categorylistmanagerreadsampledatas",
"variable/index.html#static-variable-CategoryListManagerReadSampleDatas",
"<span>CategoryListManagerReadSampleDatas</span> <span class=\"search-result-import-path\">./git/resourcecreator/src/Utils/CommonManagers/CategoryListManager/CategoryListManagerReadSampleDatas.js</span>",
"variable"
],
[
"./git/resourcecreator/src/utils/commonmanagers/loginmanager/loginmanagerloginsampledatas.js~loginmanagerloginsampledatas",
"variable/index.html#static-variable-LoginManagerLoginSampleDatas",
"<span>LoginManagerLoginSampleDatas</span> <span class=\"search-result-import-path\">./git/resourcecreator/src/Utils/CommonManagers/LoginManager/LoginManagerLoginSampleDatas.js</span>",
"variable"
],
[
"./git/resourcecreator/src/utils/commonmanagers/loginmanager/loginmanagerlogoutsampledatas.js~loginmanagerlogoutsampledatas",
"variable/index.html#static-variable-LoginManagerLogoutSampleDatas",
"<span>LoginManagerLogoutSampleDatas</span> <span class=\"search-result-import-path\">./git/resourcecreator/src/Utils/CommonManagers/LoginManager/LoginManagerLogoutSampleDatas.js</span>",
"variable"
],
[
"./git/uicontentsmanager/src/components/networkswitcher/networkswitchersampledatas.js~networkswitchersampledatas",
"variable/index.html#static-variable-NetworkSwitcherSampleDatas",
"<span>NetworkSwitcherSampleDatas</span> <span class=\"search-result-import-path\">./git/uicontentsmanager/src/Components/NetworkSwitcher/NetworkSwitcherSampleDatas.js</span>",
"variable"
],
[
"./git/userdashboard/src/components/networkswitcher/networkswitchersampledatas.js~networkswitchersampledatas",
"variable/index.html#static-variable-NetworkSwitcherSampleDatas",
"<span>NetworkSwitcherSampleDatas</span> <span class=\"search-result-import-path\">./git/userdashboard/src/Components/NetworkSwitcher/NetworkSwitcherSampleDatas.js</span>",
"variable"
],
[
"./git/resourcecreator/src/utils/commonmanagers/projectmanager/projectmanageraddsampledatas.js~projectmanageraddsampledatas",
"variable/index.html#static-variable-ProjectManagerAddSampleDatas",
"<span>ProjectManagerAddSampleDatas</span> <span class=\"search-result-import-path\">./git/resourcecreator/src/Utils/CommonManagers/ProjectManager/ProjectManagerAddSampleDatas.js</span>",
"variable"
],
[
"./git/resourcecreator/src/utils/commonmanagers/projectmanager/projectmanagerciuploadsampledatas.js~projectmanagerciuploadsampledatas",
"variable/index.html#static-variable-ProjectManagerCiUploadSampleDatas",
"<span>ProjectManagerCiUploadSampleDatas</span> <span class=\"search-result-import-path\">./git/resourcecreator/src/Utils/CommonManagers/ProjectManager/ProjectManagerCiUploadSampleDatas.js</span>",
"variable"
],
[
"./git/resourcecreator/src/utils/commonmanagers/projectmanager/projectmanagerreadsampledatas.js~projectmanagerreadsampledatas",
"variable/index.html#static-variable-ProjectManagerReadSampleDatas",
"<span>ProjectManagerReadSampleDatas</span> <span class=\"search-result-import-path\">./git/resourcecreator/src/Utils/CommonManagers/ProjectManager/ProjectManagerReadSampleDatas.js</span>",
"variable"
],
[
"./git/resourcecreator/src/utils/commonmanagers/projectmanager/projectmanagerremovesampledatas.js~projectmanagerremovesampledatas",
"variable/index.html#static-variable-ProjectManagerRemoveSampleDatas",
"<span>ProjectManagerRemoveSampleDatas</span> <span class=\"search-result-import-path\">./git/resourcecreator/src/Utils/CommonManagers/ProjectManager/ProjectManagerRemoveSampleDatas.js</span>",
"variable"
],
[
"./git/resourcecreator/src/utils/commonmanagers/projectmanager/projectmanagerthumbnailuploadsampledatas.js~projectmanagerthumbnailuploadsampledatas",
"variable/index.html#static-variable-ProjectManagerThumbnailUploadSampleDatas",
"<span>ProjectManagerThumbnailUploadSampleDatas</span> <span class=\"search-result-import-path\">./git/resourcecreator/src/Utils/CommonManagers/ProjectManager/ProjectManagerThumbnailUploadSampleDatas.js</span>",
"variable"
],
[
"./git/resourcecreator/src/utils/commonmanagers/querystatemanager/querystatemanagerclearsampledatas.js~querystatemanagerclearsampledatas",
"variable/index.html#static-variable-QueryStateManagerClearSampleDatas",
"<span>QueryStateManagerClearSampleDatas</span> <span class=\"search-result-import-path\">./git/resourcecreator/src/Utils/CommonManagers/QueryStateManager/QueryStateManagerClearSampleDatas.js</span>",
"variable"
],
[
"./git/resourcecreator/src/utils/commonmanagers/querystatemanager/querystatemanagerreadsampledatas.js~querystatemanagerreadsampledatas",
"variable/index.html#static-variable-QueryStateManagerReadSampleDatas",
"<span>QueryStateManagerReadSampleDatas</span> <span class=\"search-result-import-path\">./git/resourcecreator/src/Utils/CommonManagers/QueryStateManager/QueryStateManagerReadSampleDatas.js</span>",
"variable"
],
[
"./git/resourcecreator/src/utils/commonmanagers/querystatemanager/querystatemanagerupdatesampledatas.js~querystatemanagerupdatesampledatas",
"variable/index.html#static-variable-QueryStateManagerUpdateSampleDatas",
"<span>QueryStateManagerUpdateSampleDatas</span> <span class=\"search-result-import-path\">./git/resourcecreator/src/Utils/CommonManagers/QueryStateManager/QueryStateManagerUpdateSampleDatas.js</span>",
"variable"
],
[
"./git/resourcecreator/src/utils/commonmanagers/usermanager/usermanagerdeletesampledatas.js~usermanagerdeletesampledatas",
"variable/index.html#static-variable-UserManagerDeleteSampleDatas",
"<span>UserManagerDeleteSampleDatas</span> <span class=\"search-result-import-path\">./git/resourcecreator/src/Utils/CommonManagers/UserManager/UserManagerDeleteSampleDatas.js</span>",
"variable"
],
[
"./git/resourcecreator/src/utils/commonmanagers/usermanager/usermanagernewsampledatas.js~usermanagernewsampledatas",
"variable/index.html#static-variable-UserManagerNewSampleDatas",
"<span>UserManagerNewSampleDatas</span> <span class=\"search-result-import-path\">./git/resourcecreator/src/Utils/CommonManagers/UserManager/UserManagerNewSampleDatas.js</span>",
"variable"
],
[
"./git/resourcecreator/src/utils/commonmanagers/usermanager/usermanagerreadsampledatas.js~usermanagerreadsampledatas",
"variable/index.html#static-variable-UserManagerReadSampleDatas",
"<span>UserManagerReadSampleDatas</span> <span class=\"search-result-import-path\">./git/resourcecreator/src/Utils/CommonManagers/UserManager/UserManagerReadSampleDatas.js</span>",
"variable"
],
[
"./git/resourcecreator/src/utils/commonmanagers/usermanager/usermanagerupdatesampledatas.js~usermanagerupdatesampledatas",
"variable/index.html#static-variable-UserManagerUpdateSampleDatas",
"<span>UserManagerUpdateSampleDatas</span> <span class=\"search-result-import-path\">./git/resourcecreator/src/Utils/CommonManagers/UserManager/UserManagerUpdateSampleDatas.js</span>",
"variable"
],
[
"builtinexternal/ecmascriptexternal.js~array",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~Array",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~arraybuffer",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~ArrayBuffer",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~boolean",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~Boolean",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~dataview",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~DataView",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~date",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~Date",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~error",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~Error",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~evalerror",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~EvalError",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~float32array",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~Float32Array",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~float64array",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~Float64Array",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~function",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~Function",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~generator",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~Generator",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~generatorfunction",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~GeneratorFunction",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~infinity",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~Infinity",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~int16array",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~Int16Array",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~int32array",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~Int32Array",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~int8array",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~Int8Array",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~internalerror",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~InternalError",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~json",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~JSON",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~map",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~Map",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~nan",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~NaN",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~number",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~Number",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~object",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~Object",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~promise",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~Promise",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~proxy",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~Proxy",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~rangeerror",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~RangeError",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~referenceerror",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~ReferenceError",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~reflect",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~Reflect",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~regexp",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~RegExp",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~set",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~Set",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~string",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~String",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~symbol",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~Symbol",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~syntaxerror",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~SyntaxError",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~typeerror",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~TypeError",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~urierror",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~URIError",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~uint16array",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~Uint16Array",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~uint32array",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~Uint32Array",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~uint8array",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~Uint8Array",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~uint8clampedarray",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~Uint8ClampedArray",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~weakmap",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~WeakMap",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~weakset",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~WeakSet",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~boolean",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~boolean",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~function",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~function",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~null",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~null",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~number",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~number",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~object",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~object",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~string",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~string",
"external"
],
[
"builtinexternal/ecmascriptexternal.js~undefined",
"external/index.html",
"BuiltinExternal/ECMAScriptExternal.js~undefined",
"external"
],
[
"builtinexternal/webapiexternal.js~audiocontext",
"external/index.html",
"BuiltinExternal/WebAPIExternal.js~AudioContext",
"external"
],
[
"builtinexternal/webapiexternal.js~canvasrenderingcontext2d",
"external/index.html",
"BuiltinExternal/WebAPIExternal.js~CanvasRenderingContext2D",
"external"
],
[
"builtinexternal/webapiexternal.js~documentfragment",
"external/index.html",
"BuiltinExternal/WebAPIExternal.js~DocumentFragment",
"external"
],
[
"builtinexternal/webapiexternal.js~element",
"external/index.html",
"BuiltinExternal/WebAPIExternal.js~Element",
"external"
],
[
"builtinexternal/webapiexternal.js~event",
"external/index.html",
"BuiltinExternal/WebAPIExternal.js~Event",
"external"
],
[
"builtinexternal/webapiexternal.js~node",
"external/index.html",
"BuiltinExternal/WebAPIExternal.js~Node",
"external"
],
[
"builtinexternal/webapiexternal.js~nodelist",
"external/index.html",
"BuiltinExternal/WebAPIExternal.js~NodeList",
"external"
],
[
"builtinexternal/webapiexternal.js~xmlhttprequest",
"external/index.html",
"BuiltinExternal/WebAPIExternal.js~XMLHttpRequest",
"external"
],
[
"git/resourcecreator/src/components/widgets/cores/widget000/widget000sampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget000/Widget000SampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget000/Widget000SampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget000typea/widget000typeasampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget000TypeA/Widget000TypeASampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget000TypeA/Widget000TypeASampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget001/widget001sampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget001/Widget001SampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget001/Widget001SampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget001typea/widget001typeasampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget001TypeA/Widget001TypeASampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget001TypeA/Widget001TypeASampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget002/widget002sampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget002/Widget002SampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget002/Widget002SampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget003/widget003sampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget003/Widget003SampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget003/Widget003SampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget004/widget004sampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget004/Widget004SampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget004/Widget004SampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget008/widget008sampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget008/Widget008SampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget008/Widget008SampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget009/widget009sampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget009/Widget009SampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget009/Widget009SampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget009typea/widget009typeasampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget009TypeA/Widget009TypeASampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget009TypeA/Widget009TypeASampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget010/widget010sampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget010/Widget010SampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget010/Widget010SampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget028/widget028sampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget028/Widget028SampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget028/Widget028SampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget030/widget030sampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget030/Widget030SampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget030/Widget030SampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget031/widget031sampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget031/Widget031SampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget031/Widget031SampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget031typea/widget031typeasampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget031TypeA/Widget031TypeASampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget031TypeA/Widget031TypeASampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget032/widget032sampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget032/Widget032SampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget032/Widget032SampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget032typea/widget032typeasampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget032TypeA/Widget032TypeASampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget032TypeA/Widget032TypeASampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget033/widget033sampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget033/Widget033SampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget033/Widget033SampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget033typea/widget033typeasampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget033TypeA/Widget033TypeASampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget033TypeA/Widget033TypeASampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget033typeb/widget033typebsampledatas.js",<|fim▁hole|> "git/resourcecreator/src/Components/Widgets/Cores/Widget033TypeB/Widget033TypeBSampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget034/widget034sampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget034/Widget034SampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget034/Widget034SampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget034typea/widget034typeasampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget034TypeA/Widget034TypeASampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget034TypeA/Widget034TypeASampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget034typeb/widget034typebsampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget034TypeB/Widget034TypeBSampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget034TypeB/Widget034TypeBSampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget035/widget035sampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget035/Widget035SampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget035/Widget035SampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget040/widget040sampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget040/Widget040SampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget040/Widget040SampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget041/widget041sampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget041/Widget041SampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget041/Widget041SampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget042/widget042sampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget042/Widget042SampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget042/Widget042SampleDatas.js",
"file"
],
[
"git/resourcecreator/src/components/widgets/cores/widget043/widget043sampledatas.js",
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget043/Widget043SampleDatas.js.html",
"git/resourcecreator/src/Components/Widgets/Cores/Widget043/Widget043SampleDatas.js",
"file"
],
[
"git/resourcecreator/src/redux/projectsampledatas.js",
"file/git/resourcecreator/src/Redux/projectSampleDatas.js.html",
"git/resourcecreator/src/Redux/projectSampleDatas.js",
"file"
],
[
"git/resourcecreator/src/utils/commonmanagers/advencedfilterargmanager/advencedfilterargmanagercategorygroupaddsampledatas.js",
"file/git/resourcecreator/src/Utils/CommonManagers/AdvencedFilterArgManager/AdvencedFilterArgManagerCategoryGroupAddSampleDatas.js.html",
"git/resourcecreator/src/Utils/CommonManagers/AdvencedFilterArgManager/AdvencedFilterArgManagerCategoryGroupAddSampleDatas.js",
"file"
],
[
"git/resourcecreator/src/utils/commonmanagers/advencedfilterargmanager/advencedfilterargmanagercategorygroupeditsampledatas.js",
"file/git/resourcecreator/src/Utils/CommonManagers/AdvencedFilterArgManager/AdvencedFilterArgManagerCategoryGroupEditSampleDatas.js.html",
"git/resourcecreator/src/Utils/CommonManagers/AdvencedFilterArgManager/AdvencedFilterArgManagerCategoryGroupEditSampleDatas.js",
"file"
],
[
"git/resourcecreator/src/utils/commonmanagers/advencedfilterargmanager/advencedfilterargmanagercategorymovesampledatas.js",
"file/git/resourcecreator/src/Utils/CommonManagers/AdvencedFilterArgManager/AdvencedFilterArgManagerCategoryMoveSampleDatas.js.html",
"git/resourcecreator/src/Utils/CommonManagers/AdvencedFilterArgManager/AdvencedFilterArgManagerCategoryMoveSampleDatas.js",
"file"
],
[
"git/resourcecreator/src/utils/commonmanagers/advencedfilterargmanager/advencedfilterargmanagercategoryremovesampledatas.js",
"file/git/resourcecreator/src/Utils/CommonManagers/AdvencedFilterArgManager/AdvencedFilterArgManagerCategoryRemoveSampleDatas.js.html",
"git/resourcecreator/src/Utils/CommonManagers/AdvencedFilterArgManager/AdvencedFilterArgManagerCategoryRemoveSampleDatas.js",
"file"
],
[
"git/resourcecreator/src/utils/commonmanagers/advencedfilterargmanager/advencedfilterargmanagerreadsampledatas.js",
"file/git/resourcecreator/src/Utils/CommonManagers/AdvencedFilterArgManager/AdvencedFilterArgManagerReadSampleDatas.js.html",
"git/resourcecreator/src/Utils/CommonManagers/AdvencedFilterArgManager/AdvencedFilterArgManagerReadSampleDatas.js",
"file"
],
[
"git/resourcecreator/src/utils/commonmanagers/advencedfiltermanager/advencedfiltermanagerreadsampledatas.js",
"file/git/resourcecreator/src/Utils/CommonManagers/AdvencedFilterManager/AdvencedFilterManagerReadSampleDatas.js.html",
"git/resourcecreator/src/Utils/CommonManagers/AdvencedFilterManager/AdvencedFilterManagerReadSampleDatas.js",
"file"
],
[
"git/resourcecreator/src/utils/commonmanagers/categorylistmanager/categorylistmanagerreadsampledatas.js",
"file/git/resourcecreator/src/Utils/CommonManagers/CategoryListManager/CategoryListManagerReadSampleDatas.js.html",
"git/resourcecreator/src/Utils/CommonManagers/CategoryListManager/CategoryListManagerReadSampleDatas.js",
"file"
],
[
"git/resourcecreator/src/utils/commonmanagers/loginmanager/loginmanagerloginsampledatas.js",
"file/git/resourcecreator/src/Utils/CommonManagers/LoginManager/LoginManagerLoginSampleDatas.js.html",
"git/resourcecreator/src/Utils/CommonManagers/LoginManager/LoginManagerLoginSampleDatas.js",
"file"
],
[
"git/resourcecreator/src/utils/commonmanagers/loginmanager/loginmanagerlogoutsampledatas.js",
"file/git/resourcecreator/src/Utils/CommonManagers/LoginManager/LoginManagerLogoutSampleDatas.js.html",
"git/resourcecreator/src/Utils/CommonManagers/LoginManager/LoginManagerLogoutSampleDatas.js",
"file"
],
[
"git/resourcecreator/src/utils/commonmanagers/projectmanager/projectmanageraddsampledatas.js",
"file/git/resourcecreator/src/Utils/CommonManagers/ProjectManager/ProjectManagerAddSampleDatas.js.html",
"git/resourcecreator/src/Utils/CommonManagers/ProjectManager/ProjectManagerAddSampleDatas.js",
"file"
],
[
"git/resourcecreator/src/utils/commonmanagers/projectmanager/projectmanagerciuploadsampledatas.js",
"file/git/resourcecreator/src/Utils/CommonManagers/ProjectManager/ProjectManagerCiUploadSampleDatas.js.html",
"git/resourcecreator/src/Utils/CommonManagers/ProjectManager/ProjectManagerCiUploadSampleDatas.js",
"file"
],
[
"git/resourcecreator/src/utils/commonmanagers/projectmanager/projectmanagerreadsampledatas.js",
"file/git/resourcecreator/src/Utils/CommonManagers/ProjectManager/ProjectManagerReadSampleDatas.js.html",
"git/resourcecreator/src/Utils/CommonManagers/ProjectManager/ProjectManagerReadSampleDatas.js",
"file"
],
[
"git/resourcecreator/src/utils/commonmanagers/projectmanager/projectmanagerremovesampledatas.js",
"file/git/resourcecreator/src/Utils/CommonManagers/ProjectManager/ProjectManagerRemoveSampleDatas.js.html",
"git/resourcecreator/src/Utils/CommonManagers/ProjectManager/ProjectManagerRemoveSampleDatas.js",
"file"
],
[
"git/resourcecreator/src/utils/commonmanagers/projectmanager/projectmanagerthumbnailuploadsampledatas.js",
"file/git/resourcecreator/src/Utils/CommonManagers/ProjectManager/ProjectManagerThumbnailUploadSampleDatas.js.html",
"git/resourcecreator/src/Utils/CommonManagers/ProjectManager/ProjectManagerThumbnailUploadSampleDatas.js",
"file"
],
[
"git/resourcecreator/src/utils/commonmanagers/querystatemanager/querystatemanagerclearsampledatas.js",
"file/git/resourcecreator/src/Utils/CommonManagers/QueryStateManager/QueryStateManagerClearSampleDatas.js.html",
"git/resourcecreator/src/Utils/CommonManagers/QueryStateManager/QueryStateManagerClearSampleDatas.js",
"file"
],
[
"git/resourcecreator/src/utils/commonmanagers/querystatemanager/querystatemanagerreadsampledatas.js",
"file/git/resourcecreator/src/Utils/CommonManagers/QueryStateManager/QueryStateManagerReadSampleDatas.js.html",
"git/resourcecreator/src/Utils/CommonManagers/QueryStateManager/QueryStateManagerReadSampleDatas.js",
"file"
],
[
"git/resourcecreator/src/utils/commonmanagers/querystatemanager/querystatemanagerupdatesampledatas.js",
"file/git/resourcecreator/src/Utils/CommonManagers/QueryStateManager/QueryStateManagerUpdateSampleDatas.js.html",
"git/resourcecreator/src/Utils/CommonManagers/QueryStateManager/QueryStateManagerUpdateSampleDatas.js",
"file"
],
[
"git/resourcecreator/src/utils/commonmanagers/usermanager/usermanagerdeletesampledatas.js",
"file/git/resourcecreator/src/Utils/CommonManagers/UserManager/UserManagerDeleteSampleDatas.js.html",
"git/resourcecreator/src/Utils/CommonManagers/UserManager/UserManagerDeleteSampleDatas.js",
"file"
],
[
"git/resourcecreator/src/utils/commonmanagers/usermanager/usermanagernewsampledatas.js",
"file/git/resourcecreator/src/Utils/CommonManagers/UserManager/UserManagerNewSampleDatas.js.html",
"git/resourcecreator/src/Utils/CommonManagers/UserManager/UserManagerNewSampleDatas.js",
"file"
],
[
"git/resourcecreator/src/utils/commonmanagers/usermanager/usermanagerreadsampledatas.js",
"file/git/resourcecreator/src/Utils/CommonManagers/UserManager/UserManagerReadSampleDatas.js.html",
"git/resourcecreator/src/Utils/CommonManagers/UserManager/UserManagerReadSampleDatas.js",
"file"
],
[
"git/resourcecreator/src/utils/commonmanagers/usermanager/usermanagerupdatesampledatas.js",
"file/git/resourcecreator/src/Utils/CommonManagers/UserManager/UserManagerUpdateSampleDatas.js.html",
"git/resourcecreator/src/Utils/CommonManagers/UserManager/UserManagerUpdateSampleDatas.js",
"file"
],
[
"git/uicontentsmanager/src/components/networkswitcher/networkswitchersampledatas.js",
"file/git/uicontentsmanager/src/Components/NetworkSwitcher/NetworkSwitcherSampleDatas.js.html",
"git/uicontentsmanager/src/Components/NetworkSwitcher/NetworkSwitcherSampleDatas.js",
"file"
],
[
"git/userdashboard/src/components/advencedfilterquickviewer/advencedfilterquickviewersampledatas.js",
"file/git/userdashboard/src/Components/AdvencedFilterQuickViewer/AdvencedFilterQuickViewerSampleDatas.js.html",
"git/userdashboard/src/Components/AdvencedFilterQuickViewer/AdvencedFilterQuickViewerSampleDatas.js",
"file"
],
[
"git/userdashboard/src/components/detailplayer/detailplayersampledatas.js",
"file/git/userdashboard/src/Components/DetailPlayer/DetailPlayerSampleDatas.js.html",
"git/userdashboard/src/Components/DetailPlayer/DetailPlayerSampleDatas.js",
"file"
],
[
"git/userdashboard/src/components/networkswitcher/networkswitchersampledatas.js",
"file/git/userdashboard/src/Components/NetworkSwitcher/NetworkSwitcherSampleDatas.js.html",
"git/userdashboard/src/Components/NetworkSwitcher/NetworkSwitcherSampleDatas.js",
"file"
]
]<|fim▁end|>
|
"file/git/resourcecreator/src/Components/Widgets/Cores/Widget033TypeB/Widget033TypeBSampleDatas.js.html",
|
<|file_name|>meta.js<|end_file_name|><|fim▁begin|>import { respond } from "theme/styles/mixins";
export default `
.resource-meta,
.resource-meta-mobile {
.resource-type {
margin-bottom: 14px;
${respond(`margin-bottom: 2px;`, 65)}
}
/* <ul> */<|fim▁hole|> margin-bottom: 22px;
&:not(:first-child) {
margin-top: 10px;
}
}
/* Only shown on mobile */
.meta-list-primary {
margin-bottom: 22px;
}
}
`;<|fim▁end|>
|
.meta-list-secondary {
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""The initialization file for the Pywikibot framework."""
#
# (C) Pywikibot team, 2008-2015
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
__release__ = '2.0b3'
__version__ = '$Id$'
__url__ = 'https://www.mediawiki.org/wiki/Special:MyLanguage/Manual:Pywikibot'
import datetime
import math
import re
import sys
import threading
import json
if sys.version_info[0] > 2:
from queue import Queue
long = int
else:
from Queue import Queue
from warnings import warn
# Use pywikibot. prefix for all in-package imports; this is to prevent
# confusion with similarly-named modules in version 1 framework, for users
# who want to continue using both
from pywikibot import config2 as config
from pywikibot.bot import (
output, warning, error, critical, debug, stdout, exception,
input, input_choice, input_yn, inputChoice, handle_args, showHelp, ui, log,
calledModuleName, Bot, CurrentPageBot, WikidataBot,
# the following are flagged as deprecated on usage
handleArgs,
)
from pywikibot.exceptions import (
Error, InvalidTitle, BadTitle, NoPage, NoMoveTarget, SectionError,
SiteDefinitionError, NoSuchSite, UnknownSite, UnknownFamily,
UnknownExtension,
NoUsername, UserBlocked,
PageRelatedError, IsRedirectPage, IsNotRedirectPage,
PageSaveRelatedError, PageNotSaved, OtherPageSaveError,
LockedPage, CascadeLockedPage, LockedNoPage, NoCreateError,
EditConflict, PageDeletedConflict, PageCreatedConflict,
ServerError, FatalServerError, Server504Error,
CaptchaError, SpamfilterError, CircularRedirect, InterwikiRedirectPage,
WikiBaseError, CoordinateGlobeUnknownException,
)
from pywikibot.tools import PY2, UnicodeMixin, redirect_func
from pywikibot.i18n import translate
from pywikibot.data.api import UploadWarning
from pywikibot.diff import PatchManager
import pywikibot.textlib as textlib
import pywikibot.tools
textlib_methods = (
'unescape', 'replaceExcept', 'removeDisabledParts', 'removeHTMLParts',
'isDisabled', 'interwikiFormat', 'interwikiSort',
'getLanguageLinks', 'replaceLanguageLinks',
'removeLanguageLinks', 'removeLanguageLinksAndSeparator',
'getCategoryLinks', 'categoryFormat', 'replaceCategoryLinks',
'removeCategoryLinks', 'removeCategoryLinksAndSeparator',
'replaceCategoryInPlace', 'compileLinkR', 'extract_templates_and_params',
'TimeStripper',
)
__all__ = (
'config', 'ui', 'UnicodeMixin', 'translate',
'Page', 'FilePage', 'Category', 'Link', 'User',
'ItemPage', 'PropertyPage', 'Claim',
'html2unicode', 'url2unicode', 'unicode2html',
'stdout', 'output', 'warning', 'error', 'critical', 'debug',
'exception', 'input_choice', 'input', 'input_yn', 'inputChoice',
'handle_args', 'handleArgs', 'showHelp', 'ui', 'log',
'calledModuleName', 'Bot', 'CurrentPageBot', 'WikidataBot',
'Error', 'InvalidTitle', 'BadTitle', 'NoPage', 'NoMoveTarget',
'SectionError',
'SiteDefinitionError', 'NoSuchSite', 'UnknownSite', 'UnknownFamily',
'UnknownExtension',
'NoUsername', 'UserBlocked', 'UserActionRefuse',
'PageRelatedError', 'IsRedirectPage', 'IsNotRedirectPage',
'PageSaveRelatedError', 'PageNotSaved', 'OtherPageSaveError',
'LockedPage', 'CascadeLockedPage', 'LockedNoPage', 'NoCreateError',
'EditConflict', 'PageDeletedConflict', 'PageCreatedConflict',
'UploadWarning',
'ServerError', 'FatalServerError', 'Server504Error',
'CaptchaError', 'SpamfilterError', 'CircularRedirect',
'InterwikiRedirectPage',
'WikiBaseError', 'CoordinateGlobeUnknownException',
'QuitKeyboardInterrupt',
)
__all__ += textlib_methods
if PY2:
# T111615: Python 2 requires __all__ is bytes
globals()['__all__'] = tuple(bytes(item) for item in __all__)
for _name in textlib_methods:
target = getattr(textlib, _name)
wrapped_func = redirect_func(target)
globals()[_name] = wrapped_func
deprecated = redirect_func(pywikibot.tools.deprecated)
deprecate_arg = redirect_func(pywikibot.tools.deprecate_arg)
class Timestamp(datetime.datetime):
"""Class for handling MediaWiki timestamps.
This inherits from datetime.datetime, so it can use all of the methods
and operations of a datetime object. To ensure that the results of any
operation are also a Timestamp object, be sure to use only Timestamp
objects (and datetime.timedeltas) in any operation.
Use Timestamp.fromISOformat() and Timestamp.fromtimestampformat() to
create Timestamp objects from MediaWiki string formats.
As these constructors are typically used to create objects using data
passed provided by site and page methods, some of which return a Timestamp
when previously they returned a MediaWiki string representation, these
methods also accept a Timestamp object, in which case they return a clone.
Use Site.getcurrenttime() for the current time; this is more reliable
than using Timestamp.utcnow().
"""
mediawikiTSFormat = "%Y%m%d%H%M%S"
ISO8601Format = "%Y-%m-%dT%H:%M:%SZ"
def clone(self):
"""Clone this instance."""
return self.replace(microsecond=self.microsecond)
@classmethod
def fromISOformat(cls, ts):
"""Convert an ISO 8601 timestamp to a Timestamp object."""
# If inadvertently passed a Timestamp object, use replace()
# to create a clone.
if isinstance(ts, cls):
return ts.clone()
return cls.strptime(ts, cls.ISO8601Format)
@classmethod
def fromtimestampformat(cls, ts):
"""Convert a MediaWiki internal timestamp to a Timestamp object."""
# If inadvertently passed a Timestamp object, use replace()
# to create a clone.
if isinstance(ts, cls):
return ts.clone()
return cls.strptime(ts, cls.mediawikiTSFormat)
def isoformat(self):
"""
Convert object to an ISO 8601 timestamp accepted by MediaWiki.
datetime.datetime.isoformat does not postfix the ISO formatted date
with a 'Z' unless a timezone is included, which causes MediaWiki
~1.19 and earlier to fail.
"""
return self.strftime(self.ISO8601Format)
toISOformat = redirect_func(isoformat, old_name='toISOformat',
class_name='Timestamp')
def totimestampformat(self):
"""Convert object to a MediaWiki internal timestamp."""
return self.strftime(self.mediawikiTSFormat)
def __str__(self):
"""Return a string format recognized by the API."""
return self.isoformat()
def __add__(self, other):
"""Perform addition, returning a Timestamp instead of datetime."""
newdt = super(Timestamp, self).__add__(other)
if isinstance(newdt, datetime.datetime):
return Timestamp(newdt.year, newdt.month, newdt.day, newdt.hour,
newdt.minute, newdt.second, newdt.microsecond,
newdt.tzinfo)
else:
return newdt
def __sub__(self, other):
"""Perform substraction, returning a Timestamp instead of datetime."""
newdt = super(Timestamp, self).__sub__(other)
if isinstance(newdt, datetime.datetime):
return Timestamp(newdt.year, newdt.month, newdt.day, newdt.hour,
newdt.minute, newdt.second, newdt.microsecond,
newdt.tzinfo)
else:
return newdt
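# Illustrative usage sketch for the constructors described in the class
# docstring above; the timestamp values are invented examples.
#
#   >>> ts = Timestamp.fromISOformat('2015-08-01T12:00:00Z')
#   >>> ts.totimestampformat()
#   '20150801120000'
#   >>> later = ts + datetime.timedelta(hours=1)   # arithmetic keeps the type
#   >>> str(later)
#   '2015-08-01T13:00:00Z'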
class Coordinate(object):
"""
Class for handling and storing Coordinates.
<|fim▁hole|> in the future we can use it for the GeoData extension.
"""
def __init__(self, lat, lon, alt=None, precision=None, globe='earth',
typ="", name="", dim=None, site=None, entity=''):
"""
Represent a geo coordinate.
@param lat: Latitude
@type lat: float
@param lon: Longitude
@type lon: float
@param alt: Altitude? TODO FIXME
@param precision: precision
@type precision: float
@param globe: Which globe the point is on
@type globe: str
@param typ: The type of coordinate point
@type typ: str
@param name: The name
@type name: str
@param dim: Dimension (in meters)
@type dim: int
@param entity: The URL entity of a Wikibase item
@type entity: str
"""
self.lat = lat
self.lon = lon
self.alt = alt
self._precision = precision
if globe:
globe = globe.lower()
self.globe = globe
self._entity = entity
self.type = typ
self.name = name
self._dim = dim
if not site:
self.site = Site().data_repository()
else:
self.site = site
def __repr__(self):
string = 'Coordinate(%s, %s' % (self.lat, self.lon)
if self.globe != 'earth':
string += ', globe="%s"' % self.globe
string += ')'
return string
@property
def entity(self):
if self._entity:
return self._entity
return self.site.globes()[self.globe]
def toWikibase(self):
"""
Export the data to a JSON object for the Wikibase API.
FIXME: Should this be in the DataSite object?
"""
if self.globe not in self.site.globes():
raise CoordinateGlobeUnknownException(
u"%s is not supported in Wikibase yet."
% self.globe)
return {'latitude': self.lat,
'longitude': self.lon,
'altitude': self.alt,
'globe': self.entity,
'precision': self.precision,
}
@classmethod
def fromWikibase(cls, data, site):
"""Constructor to create an object from Wikibase's JSON output."""
globes = {}
for k in site.globes():
globes[site.globes()[k]] = k
globekey = data['globe']
if globekey:
globe = globes.get(data['globe'])
else:
# Default to earth or should we use None here?
globe = 'earth'
return cls(data['latitude'], data['longitude'],
data['altitude'], data['precision'],
globe, site=site, entity=data['globe'])
@property
def precision(self):
u"""
Return the precision of the geo coordinate.
The biggest error (in degrees) will be given by the longitudinal error;
the same error in meters becomes larger (in degrees) further up north.
We can thus ignore the latitudinal error.
The longitudinal error can be derived as follows:
In small angle approximation (and thus in radians):
M{Δλ ≈ Δpos / r_φ}, where r_φ is the radius of earth at the given latitude.
Δλ is the error in longitude.
M{r_φ = r cos φ}, where r is the radius of earth, φ the latitude
Therefore::
precision = math.degrees(self._dim/(radius*math.cos(math.radians(self.lat))))
"""
if not self._precision:
radius = 6378137 # TODO: Support other globes
self._precision = math.degrees(
self._dim / (radius * math.cos(math.radians(self.lat))))
return self._precision
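# Worked instance of the formula above (values invented): for dim=1000 m at
# the equator, precision = degrees(1000 / (6378137 * cos(0))) ≈ 0.0090 deg;
# at latitude 60 the same 1000 m gives ≈ 0.0180 deg, since cos(60 deg) = 0.5.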
def precisionToDim(self):
"""Convert precision from Wikibase to GeoData's dim."""
raise NotImplementedError
class WbTime(object):
"""A Wikibase time representation."""
PRECISION = {'1000000000': 0,
'100000000': 1,
'10000000': 2,
'1000000': 3,
'100000': 4,
'10000': 5,
'millenia': 6,
'century': 7,
'decade': 8,
'year': 9,
'month': 10,
'day': 11,
'hour': 12,
'minute': 13,
'second': 14
}
FORMATSTR = '{0:+012d}-{1:02d}-{2:02d}T{3:02d}:{4:02d}:{5:02d}Z'
def __init__(self, year=None, month=None, day=None,
hour=None, minute=None, second=None,
precision=None, before=0, after=0,
timezone=0, calendarmodel=None, site=None):
"""
Create a new WbTime object.
The precision can be set by the Wikibase int value (0-14) or by a human
readable string, e.g., 'hour'. If no precision is given, it is set
according to the given time units.
"""
if year is None:
raise ValueError('no year given')
self.precision = self.PRECISION['second']
if second is None:
self.precision = self.PRECISION['minute']
second = 0
if minute is None:
self.precision = self.PRECISION['hour']
minute = 0
if hour is None:
self.precision = self.PRECISION['day']
hour = 0
if day is None:
self.precision = self.PRECISION['month']
day = 1
if month is None:
self.precision = self.PRECISION['year']
month = 1
self.year = long(year)
self.month = month
self.day = day
self.hour = hour
self.minute = minute
self.second = second
self.after = after
self.before = before
self.timezone = timezone
if calendarmodel is None:
if site is None:
site = Site().data_repository()
calendarmodel = site.calendarmodel()
self.calendarmodel = calendarmodel
# if precision is given it overwrites the autodetection above
if precision is not None:
if (isinstance(precision, int) and
precision in self.PRECISION.values()):
self.precision = precision
elif precision in self.PRECISION:
self.precision = self.PRECISION[precision]
else:
raise ValueError('Invalid precision: "%s"' % precision)
@classmethod
def fromTimestr(cls, datetimestr, precision=14, before=0, after=0,
timezone=0, calendarmodel=None, site=None):
match = re.match(r'([-+]?\d+)-(\d+)-(\d+)T(\d+):(\d+):(\d+)Z',
datetimestr)
if not match:
raise ValueError(u"Invalid format: '%s'" % datetimestr)
t = match.groups()
return cls(long(t[0]), int(t[1]), int(t[2]),
int(t[3]), int(t[4]), int(t[5]),
precision, before, after, timezone, calendarmodel, site)
def toTimestr(self):
"""
Convert the data to a UTC date/time string.
@return: str
"""
return self.FORMATSTR.format(self.year, self.month, self.day,
self.hour, self.minute, self.second)
def toWikibase(self):
"""
Convert the data to a JSON object for the Wikibase API.
@return: dict
"""
json = {'time': self.toTimestr(),
'precision': self.precision,
'after': self.after,
'before': self.before,
'timezone': self.timezone,
'calendarmodel': self.calendarmodel
}
return json
@classmethod
def fromWikibase(cls, ts):
return cls.fromTimestr(ts[u'time'], ts[u'precision'],
ts[u'before'], ts[u'after'],
ts[u'timezone'], ts[u'calendarmodel'])
def __str__(self):
return json.dumps(self.toWikibase(), indent=4, sort_keys=True,
separators=(',', ': '))
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
return u"WbTime(year=%(year)d, month=%(month)d, day=%(day)d, " \
u"hour=%(hour)d, minute=%(minute)d, second=%(second)d, " \
u"precision=%(precision)d, before=%(before)d, after=%(after)d, " \
u"timezone=%(timezone)d, calendarmodel='%(calendarmodel)s')" \
% self.__dict__
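# Illustrative sketch of the precision auto-detection described in the class
# docstring above (values invented):
#
#   >>> WbTime(year=2015).precision            # no month given -> 'year'
#   9
#   >>> WbTime(year=2015, month=3).precision   # no day given -> 'month'
#   10
#   >>> WbTime.fromTimestr('+00000002015-03-01T00:00:00Z').precision
#   14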
class WbQuantity(object):
"""A Wikibase quantity representation."""
def __init__(self, amount, unit=None, error=None):
u"""
Create a new WbQuantity object.
@param amount: number representing this quantity
@type amount: float
@param unit: not used (only unit-less quantities are supported)
@param error: the uncertainty of the amount (e.g. ±1)
@type error: float, or tuple of two floats, where the first value is
the upper error and the second is the lower error value.
"""
if amount is None:
raise ValueError('no amount given')
if unit is None:
unit = '1'
self.amount = amount
self.unit = unit
upperError = lowerError = 0
if isinstance(error, tuple):
upperError, lowerError = error
elif error is not None:
upperError = lowerError = error
self.upperBound = self.amount + upperError
self.lowerBound = self.amount - lowerError
def toWikibase(self):
"""Convert the data to a JSON object for the Wikibase API."""
json = {'amount': self.amount,
'upperBound': self.upperBound,
'lowerBound': self.lowerBound,
'unit': self.unit
}
return json
@classmethod
def fromWikibase(cls, wb):
"""
Create a WbQuantity from the JSON data given by the Wikibase API.
@param wb: Wikibase JSON
"""
amount = eval(wb['amount'])
upperBound = eval(wb['upperBound'])
lowerBound = eval(wb['lowerBound'])
error = (upperBound - amount, amount - lowerBound)
return cls(amount, wb['unit'], error)
def __str__(self):
return json.dumps(self.toWikibase(), indent=4, sort_keys=True,
separators=(',', ': '))
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
return (u"WbQuantity(amount=%(amount)s, upperBound=%(upperBound)s, "
u"lowerBound=%(lowerBound)s, unit=%(unit)s)" % self.__dict__)
_sites = {}
_url_cache = {} # The code/fam pair for each URL
def Site(code=None, fam=None, user=None, sysop=None, interface=None, url=None):
"""A factory method to obtain a Site object.
Site objects are cached and reused by this method.
By default rely on config settings. These defaults may all be overridden
using the method parameters.
@param code: language code (override config.mylang)
@type code: string
@param fam: family name or object (override config.family)
@type fam: string or Family
@param user: bot user name to use on this site (override config.usernames)
@type user: unicode
@param sysop: sysop user to use on this site (override config.sysopnames)
@type sysop: unicode
@param interface: site class or name of class in pywikibot.site
(override config.site_interface)
@type interface: subclass of L{pywikibot.site.BaseSite} or string
@param url: Instead of code and fam, tries to get a Site based on the
URL. Still requires that the family supporting that URL exists.
@type url: string
"""
# Either code and fam or only url
if url and (code or fam):
raise ValueError('URL to the wiki OR a pair of code and family name '
'should be provided')
_logger = "wiki"
if url:
if url not in _url_cache:
matched_sites = []
# Iterate through all families and look, which does apply to
# the given URL
for fam in config.family_files:
family = pywikibot.family.Family.load(fam)
code = family.from_url(url)
if code is not None:
matched_sites += [(code, fam)]
if matched_sites:
if len(matched_sites) > 1:
pywikibot.warning(
'Found multiple matches for URL "{0}": {1} (use first)'
.format(url, ', '.join(str(s) for s in matched_sites)))
_url_cache[url] = matched_sites[0]
else:
# TODO: As soon as AutoFamily is ready, try and use an
# AutoFamily
_url_cache[url] = None
cached = _url_cache[url]
if cached:
code = cached[0]
fam = cached[1]
else:
raise SiteDefinitionError("Unknown URL '{0}'.".format(url))
else:
# Fallback to config defaults
code = code or config.mylang
fam = fam or config.family
interface = interface or config.site_interface
# config.usernames is initialised with a dict for each family name
family_name = str(fam)
if family_name in config.usernames:
user = user or config.usernames[family_name].get(code) \
or config.usernames[family_name].get('*')
sysop = sysop or config.sysopnames[family_name].get(code) \
or config.sysopnames[family_name].get('*')
if not isinstance(interface, type):
# If it isn't a class, assume it is a string
try:
tmp = __import__('pywikibot.site', fromlist=[interface])
interface = getattr(tmp, interface)
except ImportError:
raise ValueError("Invalid interface name '%(interface)s'" % locals())
if not issubclass(interface, pywikibot.site.BaseSite):
warning('Site called with interface=%s' % interface.__name__)
user = pywikibot.tools.normalize_username(user)
key = '%s:%s:%s:%s' % (interface.__name__, fam, code, user)
if key not in _sites or not isinstance(_sites[key], interface):
_sites[key] = interface(code=code, fam=fam, user=user, sysop=sysop)
debug(u"Instantiated %s object '%s'"
% (interface.__name__, _sites[key]), _logger)
if _sites[key].code != code:
warn('Site %s instantiated using different code "%s"'
% (_sites[key], code), UserWarning, 2)
return _sites[key]
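# Illustrative calls of the factory documented above; the language codes and
# family names are examples only and depend on the local configuration.
#
#   >>> site = pywikibot.Site()                    # config.mylang / config.family
#   >>> site = pywikibot.Site('en', 'wikipedia')   # explicit code and family
#   >>> site = pywikibot.Site(url='https://en.wikipedia.org/w/index.php')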
# alias for backwards-compatibility
getSite = pywikibot.tools.redirect_func(Site, old_name='getSite')
from pywikibot.page import (
Page,
FilePage,
Category,
Link,
User,
ItemPage,
PropertyPage,
Claim,
)
from pywikibot.page import html2unicode, url2unicode, unicode2html
link_regex = re.compile(r'\[\[(?P<title>[^\]|[<>{}]*)(\|.*?)?\]\]')
@pywikibot.tools.deprecated("comment parameter for page saving method")
def setAction(s):
"""Set a summary to use for changed page submissions."""
config.default_edit_summary = s
def showDiff(oldtext, newtext, context=0):
"""
Output a string showing the differences between oldtext and newtext.
The differences are highlighted (only on compatible systems) to show which
changes were made.
"""
PatchManager(oldtext, newtext, context=context).print_hunks()
# Throttle and thread handling
stopped = False
def stopme():
"""Drop this process from the throttle log, after pending threads finish.
Can be called manually if desired, but if not, will be called automatically
at Python exit.
"""
global stopped
_logger = "wiki"
if not stopped:
debug(u"stopme() called", _logger)
def remaining():
remainingPages = page_put_queue.qsize() - 1
# -1 because we added a None element to stop the queue
remainingSeconds = datetime.timedelta(
seconds=(remainingPages * config.put_throttle))
return (remainingPages, remainingSeconds)
page_put_queue.put((None, [], {}))
stopped = True
if page_put_queue.qsize() > 1:
num, sec = remaining()
format_values = dict(num=num, sec=sec)
output(u'\03{lightblue}'
u'Waiting for %(num)i pages to be put. '
u'Estimated time remaining: %(sec)s'
u'\03{default}' % format_values)
while(_putthread.isAlive()):
try:
_putthread.join(1)
except KeyboardInterrupt:
if input_yn('There are %i pages remaining in the queue. '
'Estimated time remaining: %s\nReally exit?'
% remaining(), default=False, automatic_quit=False):
return
# only need one drop() call because all throttles use the same global pid
try:
list(_sites.values())[0].throttle.drop()
log(u"Dropped throttle(s).")
except IndexError:
pass
import atexit
atexit.register(stopme)
# Create a separate thread for asynchronous page saves (and other requests)
def async_manager():
"""Daemon; take requests from the queue and execute them in background."""
while True:
(request, args, kwargs) = page_put_queue.get()
if request is None:
break
request(*args, **kwargs)
page_put_queue.task_done()
def async_request(request, *args, **kwargs):
"""Put a request on the queue, and start the daemon if necessary."""
if not _putthread.isAlive():
try:
page_put_queue.mutex.acquire()
try:
_putthread.start()
except (AssertionError, RuntimeError):
pass
finally:
page_put_queue.mutex.release()
page_put_queue.put((request, args, kwargs))
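# Illustrative use of the queue above (the page object and edit summary are
# invented): any callable can be deferred to the put thread, e.g.
#   async_request(page.put, newtext, 'bot: update')
# stopme(), registered via atexit above, then waits for the queue to drain.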
# queue to hold pending requests
page_put_queue = Queue(config.max_queue_size)
# set up the background thread
_putthread = threading.Thread(target=async_manager)
# identification for debugging purposes
_putthread.setName('Put-Thread')
_putthread.setDaemon(True)
wrapper = pywikibot.tools.ModuleDeprecationWrapper(__name__)
wrapper._add_deprecated_attr('ImagePage', FilePage)
wrapper._add_deprecated_attr(
'PageNotFound', pywikibot.exceptions.DeprecatedPageNotFoundError,
warning_message=('{0}.{1} is deprecated, and no longer '
'used by pywikibot; use http.fetch() instead.'))
wrapper._add_deprecated_attr(
'UserActionRefuse', pywikibot.exceptions._EmailUserError,
warning_message='UserActionRefuse is deprecated; '
'use UserRightsError and/or NotEmailableError')
wrapper._add_deprecated_attr(
'QuitKeyboardInterrupt', pywikibot.bot.QuitKeyboardInterrupt,
warning_message='pywikibot.QuitKeyboardInterrupt is deprecated; '
'use pywikibot.bot.QuitKeyboardInterrupt instead')<|fim▁end|>
|
For now it's just being used for DataSite, but
|
<|file_name|>sass.js<|end_file_name|><|fim▁begin|>var nodeSass = require('node-sass'),
fs = require('fs');
module.exports = class Sass {
constructor(config){
this.config = config;
if (this.config.compileAtBootup) {
this.compile();
}
if (this.config.watch) {
this.startWatch();
}
}
get timeTaken(){
return parseInt(process.hrtime(this.timeBegun)[1] / 1000000) + 'ms';
}
writeOutputFile(css) {
let dst = this.config.arguments.outFile;
fs.writeFile(dst, css, (err)=>{
err
? console.warn(this.getFormattedTime(), 'Error writing compiled SASS to outFile:', err)
: console.log(this.getFormattedTime(), 'SASS re-compiled in', this.timeTaken);
});
}
compile() {
this.timeBegun = process.hrtime();
nodeSass.render(this.config.arguments, (err, result)=>{
err
? console.warn(this.getFormattedTime(), 'Error compiling SASS:', err)
: this.writeOutputFile(result.css.toString());
});
}
startWatch() {
let throttleId;
fs.watch(this.config.arguments.watchFolder, { recursive: true }, (eventType, filename) => {
if (throttleId) { clearTimeout(throttleId); }
throttleId = setTimeout(() => {
throttleId = null;
this.compile();
}, 50);
});
}
getFormattedTime(){
let d = new Date();
return d.getHours() + ':' + d.getMinutes() + ':' + d.getSeconds();<|fim▁hole|><|fim▁end|>
|
}
}
|
<|file_name|>message_icon.ts<|end_file_name|><|fim▁begin|>/**
* This file is part of Threema Web.
*
* Threema Web is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or (at
* your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
* General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Threema Web. If not, see <http://www.gnu.org/licenses/>.
*/
export default [
function() {
return {
restrict: 'EA',
scope: {},
bindToController: {
message: '=eeeMessage',
},
controllerAs: 'ctrl',
controller: [function() {
// Return icon depending on message type.
const getIcon = (msgType: threema.MessageType) => {
switch (msgType) {
case 'image':
return 'ic_image_24px.svg';
case 'video':
return 'ic_movie_24px.svg';
case 'audio':
return 'ic_mic_24px.svg';
case 'location':
return 'ic_location_on_24px.svg';
case 'file':
if (this.message.file.type === 'image/gif') {
return 'ic_image_24px.svg';
}
return 'ic_insert_drive_file_24px.svg';
case 'ballot':<|fim▁hole|> default:
return null;
}
};
this.icon = getIcon(this.message.type);
}],
template: `
<img ng-if="ctrl.icon !== null" ng-src="img/{{ ctrl.icon }}" alt="{{ ctrl.message.type }} icon">
`,
};
},
];<|fim▁end|>
|
return 'ic_poll_24px.svg';
|
<|file_name|>FillGeneratorTool.java<|end_file_name|><|fim▁begin|>/* -*- tab-width: 4 -*-
*
* Electric(tm) VLSI Design System
*
* File: FillGeneratorTool.java
*
* Copyright (c) 2006 Sun Microsystems and Static Free Software
*
* Electric(tm) is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* Electric(tm) is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Electric(tm); see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 59 Temple Place, Suite 330,
* Boston, Mass 02111-1307, USA.
*/
package com.sun.electric.tool.generator.layout.fill;
import com.sun.electric.database.geometry.EPoint;
import com.sun.electric.database.hierarchy.Cell;
import com.sun.electric.database.hierarchy.Library;
import com.sun.electric.database.prototype.PortProto;
import com.sun.electric.database.topology.NodeInst;
import com.sun.electric.database.topology.PortInst;
import com.sun.electric.technology.ArcProto;
import com.sun.electric.technology.PrimitiveNode;
import com.sun.electric.tool.Job;
import com.sun.electric.tool.JobException;
import com.sun.electric.tool.Tool;
import com.sun.electric.tool.generator.layout.Gallery;
import com.sun.electric.tool.generator.layout.LayoutLib;
import com.sun.electric.tool.generator.layout.TechType;
import java.lang.reflect.Constructor;
import java.util.*;
abstract class MetalFloorplanBase extends Floorplan
{
/** width Vdd wires */ public double vddWidth;
/** width Gnd wires */ public double gndWidth;
MetalFloorplanBase(double cellWidth, double cellHeight, boolean horiz)
{
super(cellWidth, cellHeight, horiz);
vddWidth = gndWidth = 0;
}
}
// ------------------------------ MetalFloorplanFlex ------------------------------
// Similar to Metalfloor but number of power/gnd lines is determined by cell size
class MetalFloorplanFlex extends MetalFloorplanBase {
public final double minWidth, space, vddReserve, gndReserve;
MetalFloorplanFlex(double cellWidth, double cellHeight,
double vddReserve, double gndReserve, double space,
double vddW, double gndW,
boolean horiz)
{
super(cellWidth, cellHeight, horiz);
this.vddWidth = vddW; //27;
this.gndWidth = gndW; //20;
this.space = space;
this.vddReserve = vddReserve;
this.gndReserve = gndReserve;
minWidth = vddReserve + gndReserve + 2*space + 2*gndWidth + 2*vddWidth;
}
}
// ------------------------------ MetalFloorplan ------------------------------
// Floor plan:
//
// half of Gnd reserved
// gggggggggggggggggggg
// wide space
// vvvvvvvvvvvvvvvvvvvv
// Vdd reserved
// vvvvvvvvvvvvvvvvvvvv
// wide space
// gggggggggggggggggggg
// half of Gnd reserved
class MetalFloorplan extends MetalFloorplanBase {
/** no gap between Vdd wires */ public final boolean mergedVdd;
/** if horizontal then y coordinate of top Vdd wire
* if vertical then x coordinate of right Vdd wire */
public final double vddCenter;
/** if horizontal then y coordinate of top Gnd wire
* if vertical then x coordinate of right Gnd wire */
public final double gndCenter;
public final double coverage;
private double roundDownOneLambda(double x) {
return Math.floor(x);
}
// Round metal widths down to multiples of 1 lambda resolution.
// Then metal center can be on 1/2 lambda grid without problems.
MetalFloorplan(double cellWidth, double cellHeight,
double vddReserve, double gndReserve,
double space, boolean horiz) {
super(cellWidth, cellHeight, horiz);
mergedVdd = vddReserve==0;
double cellSpace = horiz ? cellHeight : cellWidth;
double metalSpace = cellSpace - 2*space - vddReserve - gndReserve;
// gnd is always in two pieces
gndWidth = roundDownOneLambda(metalSpace / 4);
gndCenter = cellSpace/2 - gndReserve/2 - gndWidth/2;
// vdd may be one or two pieces
if (mergedVdd) {
vddWidth = gndWidth*2;
vddCenter = 0;
} else {
vddWidth = gndWidth;
vddCenter = vddReserve/2 + vddWidth/2;
}
// compute coverage statistics
double cellArea = cellWidth * cellHeight;
double strapLength = horiz ? cellWidth : cellHeight;
double vddArea = (mergedVdd ? 1 : 2) * vddWidth * strapLength;
double gndArea = 2 * gndWidth * strapLength;
coverage = (vddArea + gndArea)/cellArea;
}
// Save this code in case I need to replicate LoCo FillCell exactly
// MetalFloorplan(double cellWidth, double cellHeight,
// double vddReserve, double gndReserve,
// double space, boolean horiz) {
// super(cellWidth, cellHeight, horiz);
// mergedVdd = vddReserve==0;
// double cellSpace = horiz ? cellHeight : cellWidth;
// if (mergedVdd) {
// double w = cellSpace/2 - space - vddReserve;
// vddWidth = roundDownOneLambda(w);
// vddCenter = 0;
// } else {
// double w = (cellSpace/2 - space - vddReserve) / 2;
// vddWidth = roundDownOneLambda(w);
// vddCenter = vddReserve/2 + vddWidth/2;
// }
// double vddEdge = vddCenter + vddWidth/2;
// double w = cellSpace/2 - vddEdge - space - gndReserve/2;
// gndWidth = roundDownOneLambda(w);
// gndCenter = vddEdge + space + gndWidth/2;
//
// // compute coverage statistics
// double cellArea = cellWidth * cellHeight;
// double strapLength = horiz ? cellWidth : cellHeight;
// double vddArea = (mergedVdd ? 1 : 2) * vddWidth * strapLength;
// double gndArea = 2 * gndWidth * strapLength;
// coverage = (vddArea + gndArea)/cellArea;
// }
}
// ------------------------------- ExportBars ---------------------------------
class ExportBar
{
PortInst[] ports = null;
Double center = null;
ExportBar(PortInst p1, PortInst p2, double c)
{
ports = new PortInst[2];
ports[0] = p1;
ports[1] = p2;
center = (c); // autoboxing
}
}
class MetalLayer extends VddGndStraps {
protected MetalFloorplanBase plan;
protected int layerNum;
protected PrimitiveNode pin;
protected ArcProto metal;
protected ArrayList<ExportBar> vddBars = new ArrayList<ExportBar>();
protected ArrayList<ExportBar> gndBars = new ArrayList<ExportBar>();
public boolean addExtraArc() { return true; }
private void buildGnd(Cell cell) {
double pinX, pinY;
MetalFloorplan plan = (MetalFloorplan)this.plan;
if (plan.horizontal) {
pinX = plan.cellWidth/2; // - plan.gndWidth/2;
pinY = plan.gndCenter;
} else {
pinX = plan.gndCenter;
pinY = plan.cellHeight/2; // - plan.gndWidth/2;
}
PortInst tl = LayoutLib.newNodeInst(pin, -pinX, pinY, G.DEF_SIZE,
G.DEF_SIZE, 0, cell
).getOnlyPortInst();
PortInst tr = LayoutLib.newNodeInst(pin, pinX, pinY, G.DEF_SIZE,
G.DEF_SIZE, 0, cell
).getOnlyPortInst();
PortInst bl = LayoutLib.newNodeInst(pin, -pinX, -pinY, G.DEF_SIZE,
G.DEF_SIZE, 0, cell
).getOnlyPortInst();
PortInst br = LayoutLib.newNodeInst(pin, pinX, -pinY, G.DEF_SIZE,
G.DEF_SIZE, 0, cell
).getOnlyPortInst();
if (plan.horizontal) {
G.noExtendArc(metal, plan.gndWidth, tl, tr);
G.noExtendArc(metal, plan.gndWidth, bl, br);
gndBars.add(new ExportBar(bl, br, -plan.gndCenter));
gndBars.add(new ExportBar(tl, tr, plan.gndCenter));
} else {
G.noExtendArc(metal, plan.gndWidth, bl, tl);
G.noExtendArc(metal, plan.gndWidth, br, tr);
gndBars.add(new ExportBar(bl, tl, -plan.gndCenter));
gndBars.add(new ExportBar(br, tr, plan.gndCenter));
}
}
private void buildVdd(Cell cell) {
double pinX, pinY;
MetalFloorplan plan = (MetalFloorplan)this.plan;
if (plan.horizontal) {
pinX = plan.cellWidth/2; // - plan.vddWidth/2;
pinY = plan.vddCenter;
} else {
pinX = plan.vddCenter;
pinY = plan.cellHeight/2; // - plan.vddWidth/2;
}
if (plan.mergedVdd) {
PortInst tr = LayoutLib.newNodeInst(pin, pinX, pinY, G.DEF_SIZE,
G.DEF_SIZE, 0, cell
).getOnlyPortInst();
PortInst bl = LayoutLib.newNodeInst(pin, -pinX, -pinY, G.DEF_SIZE,
G.DEF_SIZE, 0, cell
).getOnlyPortInst();
G.noExtendArc(metal, plan.vddWidth, bl, tr);
vddBars.add(new ExportBar(bl, tr, plan.vddCenter));
} else {
PortInst tl = LayoutLib.newNodeInst(pin, -pinX, pinY, G.DEF_SIZE,
G.DEF_SIZE, 0, cell
).getOnlyPortInst();
PortInst tr = LayoutLib.newNodeInst(pin, pinX, pinY, G.DEF_SIZE,
G.DEF_SIZE, 0, cell
).getOnlyPortInst();
PortInst bl = LayoutLib.newNodeInst(pin, -pinX, -pinY, G.DEF_SIZE,
G.DEF_SIZE, 0, cell
).getOnlyPortInst();
PortInst br = LayoutLib.newNodeInst(pin, pinX, -pinY, G.DEF_SIZE,
G.DEF_SIZE, 0, cell
).getOnlyPortInst();
if (plan.horizontal) {
G.noExtendArc(metal, plan.vddWidth, tl, tr);
G.noExtendArc(metal, plan.vddWidth, bl, br);
vddBars.add(new ExportBar(bl, br, -plan.vddCenter));
vddBars.add(new ExportBar(tl, tr, plan.vddCenter));
} else {
G.noExtendArc(metal, plan.vddWidth, bl, tl);
G.noExtendArc(metal, plan.vddWidth, br, tr);
vddBars.add(new ExportBar(bl, tl, -plan.vddCenter));
vddBars.add(new ExportBar(br, tr, plan.vddCenter));
}
}
}
/** It has to be protected to be overwritten by sub classes */
protected void buildGndAndVdd(Cell cell)
{
buildGnd(cell);
buildVdd(cell);
}
public MetalLayer(TechType t, int layerNum, Floorplan plan, Cell cell)
{
super(t);
this.plan = (MetalFloorplanBase)plan;
this.layerNum = layerNum;
metal = METALS[layerNum];
pin = PINS[layerNum];
buildGndAndVdd(cell);
}
public boolean isHorizontal() {return plan.horizontal;}
public int numVdd() {return vddBars.size();}
public double getVddCenter(int n) {
return (vddBars.get(n).center); // autoboxing
}
public PortInst getVdd(int n, int pos)
{return vddBars.get(n).ports[pos];}
public double getVddWidth(int n) {return plan.vddWidth;}
public int numGnd() {return gndBars.size();}
public double getGndCenter(int n) {
return (gndBars.get(n).center); // autoboxing
}
public PortInst getGnd(int n, int pos) {return gndBars.get(n).ports[pos];}
public double getGndWidth(int n) {return (plan).gndWidth;}
public PrimitiveNode getPinType() {return pin;}
public ArcProto getMetalType() {return metal;}
public double getCellWidth() {return plan.cellWidth;}
public double getCellHeight() {return plan.cellHeight;}
public int getLayerNumber() {return layerNum;}
}
// ------------------------------- MetalLayerFlex -----------------------------
class MetalLayerFlex extends MetalLayer {
public MetalLayerFlex(TechType t, int layerNum, Floorplan plan, Cell cell) {
super(t, layerNum, plan, cell);
}
public boolean addExtraArc() { return false; } // For automatic fill generator no extra arcs are wanted.
protected void buildGndAndVdd(Cell cell) {
double pinX, pinY;
double limit = 0;
MetalFloorplanFlex plan = (MetalFloorplanFlex)this.plan;
if (plan.horizontal)
{
limit = plan.cellHeight/2;
}
else
{
limit = plan.cellWidth/2;
}
double position = 0;
int i = 0;
while (position < limit)
{
boolean even = (i%2==0);
double maxDelta = 0, pos = 0;
if (even)
{
maxDelta = plan.vddReserve/2 + plan.vddWidth;
pos = plan.vddReserve/2 + plan.vddWidth/2 + position;
}
else
{
maxDelta = plan.gndReserve/2 + plan.gndWidth;
pos = plan.gndReserve/2 + plan.gndWidth/2 + position;
}
if (position + maxDelta > limit) return; // border was reached
if (plan.horizontal)
{
pinY = pos;
pinX = plan.cellWidth/2;
}
else
{
pinX = pos;
pinY = plan.cellHeight/2;
}
// Vdd if even, gnd if odd
if (!even)
addBars(cell, pinX, pinY, plan.gndWidth, gndBars);
else
addBars(cell, pinX, pinY, plan.vddWidth, vddBars);
if (even)
{
maxDelta = plan.vddReserve/2 + plan.vddWidth + plan.space + plan.gndWidth;
pos = plan.vddReserve/2 + plan.vddWidth + plan.space + plan.gndWidth/2 + position;
}
else
{
maxDelta = plan.gndReserve/2 + plan.gndWidth + plan.space + plan.vddWidth;
pos = plan.gndReserve/2 + plan.gndWidth + plan.space + plan.vddWidth/2 + position;
}
if (position + maxDelta > limit) return; // border was reached
if (plan.horizontal)
pinY = pos;
else
pinX = pos;
// Gnd if even, vdd if odd
if (!even)
{
addBars(cell, pinX, pinY, plan.vddWidth, vddBars);
position = ((plan.horizontal)?pinY:pinX) + plan.vddWidth/2 + plan.vddReserve/2;
}
else
{
addBars(cell, pinX, pinY, plan.gndWidth, gndBars);
position = ((plan.horizontal)?pinY:pinX) + plan.gndWidth/2 + plan.gndReserve/2;
}
i++;
}
}
private void addBars(Cell cell, double pinX, double pinY, double width, ArrayList<ExportBar> bars)
{
PortInst tl = LayoutLib.newNodeInst(pin, -pinX, pinY, G.DEF_SIZE,
G.DEF_SIZE, 0, cell
).getOnlyPortInst();
PortInst tr = LayoutLib.newNodeInst(pin, pinX, pinY, G.DEF_SIZE,
G.DEF_SIZE, 0, cell
).getOnlyPortInst();
PortInst bl = LayoutLib.newNodeInst(pin, -pinX, -pinY, G.DEF_SIZE,
G.DEF_SIZE, 0, cell
).getOnlyPortInst();
PortInst br = LayoutLib.newNodeInst(pin, pinX, -pinY, G.DEF_SIZE,
G.DEF_SIZE, 0, cell
).getOnlyPortInst();
double center = 0;
if (plan.horizontal) {
G.noExtendArc(metal, width, tl, tr);
G.noExtendArc(metal, width, bl, br);
center = pinY;
bars.add(new ExportBar(bl, br, -center));
bars.add(new ExportBar(tl, tr, center));
} else {
G.noExtendArc(metal, width, bl, tl);
G.noExtendArc(metal, width, br, tr);
center = pinX;
bars.add(new ExportBar(bl, tl, -center));
bars.add(new ExportBar(br, tr, center));
}
}
}
//---------------------------------- CapLayer ---------------------------------
class CapLayer extends VddGndStraps {
private CapCell capCell;
private NodeInst capCellInst;
private CapFloorplan plan;
public boolean addExtraArc() { return true; }
public CapLayer(TechType t, CapFloorplan plan, CapCell capCell, Cell cell)
{
super(t);
this.plan = plan;
this.capCell = capCell;
double angle = plan.horizontal ? 0 : 90;
if (capCell != null)
capCellInst = LayoutLib.newNodeInst(capCell.getCell(), 0, 0, G.DEF_SIZE,
G.DEF_SIZE, angle, cell);
}
public boolean isHorizontal() {return plan.horizontal;}
public int numVdd() {return (capCell != null) ? capCell.numVdd() : 0;}
public PortInst getVdd(int n, int pos) {
return capCellInst.findPortInst(FillCell.VDD_NAME+"_"+n);
}
public double getVddCenter(int n) {
EPoint center = getVdd(n, 0).getCenter();
return plan.horizontal ? center.getY() : center.getX();
}
public double getVddWidth(int n) {return capCell.getVddWidth();}
public int numGnd() {return (capCell != null) ? capCell.numGnd() : 0;}
public PortInst getGnd(int n, int pos) {
return capCellInst.findPortInst(FillCell.GND_NAME+"_"+n);
}
public double getGndCenter(int n) {
EPoint center = getGnd(n, 0).getCenter();
return plan.horizontal ? center.getY() : center.getX();
}
public double getGndWidth(int n) {return capCell.getGndWidth();}
public PrimitiveNode getPinType() {return tech.m1pin();}
public ArcProto getMetalType() {return tech.m1();}
public double getCellWidth() {return plan.cellWidth;}
public double getCellHeight() {return plan.cellHeight;}
public int getLayerNumber() {return 1;}
}
class FillRouter {
private HashMap<String,List<PortInst>> portMap = new HashMap<String,List<PortInst>>();
private TechType tech;
private String makeKey(PortInst pi) {
EPoint center = pi.getCenter();
String x = ""+center.getX(); // LayoutLib.roundCenterX(pi);
String y = ""+center.getY(); // LayoutLib.roundCenterY(pi);
return x+"x"+y;
}
// private boolean bothConnect(ArcProto a, PortProto pp1, PortProto pp2) {
// return pp1.connectsTo(a) && pp2.connectsTo(a);
// }
private ArcProto findCommonArc(PortInst p1, PortInst p2) {
ArcProto[] metals = {tech.m6(), tech.m5(), tech.m4(), tech.m3(), tech.m2(), tech.m1()};
PortProto pp1 = p1.getPortProto();
PortProto pp2 = p2.getPortProto();
for (int i=0; i<metals.length; i++) {
if (pp1.connectsTo(metals[i]) && pp2.connectsTo(metals[i])) {
return metals[i];
}
}
return null;
}
private void connectPorts(List<PortInst> ports) {
for (Iterator<PortInst> it=ports.iterator(); it.hasNext(); ) {
PortInst first = it.next();
double width = LayoutLib.widestWireWidth(first);
it.remove();
for (PortInst pi : ports) {
ArcProto a = findCommonArc(first, pi);
if (a!=null) LayoutLib.newArcInst(a, width, first, pi);
}
}
}
private FillRouter(TechType t, ArrayList<PortInst> ports)
{
tech = t;
for (PortInst pi : ports) {
String key = makeKey(pi);
List<PortInst> l = portMap.get(key);
if (l==null) {
l = new LinkedList<PortInst>();
portMap.put(key, l);
}
l.add(pi);
}
// to guarantee deterministic results
List<String> keys = new ArrayList<String>();
keys.addAll(portMap.keySet());
Collections.sort(keys);
for (String str : keys) {
connectPorts(portMap.get(str));
}
}
public static void connectCoincident(TechType t, ArrayList<PortInst> ports) {
new FillRouter(t, ports);
}
}
/**
* Object for building fill libraries
*/
public class FillGeneratorTool extends Tool {
public FillGenConfig config;
protected Library lib;
private boolean libInitialized;
public List<Cell> masters;
protected CapCell capCell;
protected Floorplan[] plans;
/** the fill generator tool. */ private static FillGeneratorTool tool = getTool();
// Depending on generator plugin available
public static FillGeneratorTool getTool()
{
if (tool != null) return tool;
FillGeneratorTool tool;
try
{
Class<?> extraClass = Class.forName("com.sun.electric.plugins.generator.FillCellTool");
            Constructor instance = extraClass.getDeclaredConstructor(); // varargs
Object obj = instance.newInstance(); // varargs;
tool = (FillGeneratorTool)obj;
} catch (Exception e)
{
if (Job.getDebug())
System.out.println("GNU Release can't find Fill Cell Generator plugin");
tool = new FillGeneratorTool();
}
return tool;
}
public FillGeneratorTool() {
super("Fill Generator");
}
public void setConfig(FillGenConfig config)
{
this.config = config;
this.libInitialized = false;
}
public enum Units {NONE, LAMBDA, TRACKS}
protected boolean getOrientation() {return plans[plans.length-1].horizontal;}
/** Reserve space in the middle of the Vdd and ground straps for signals.
* @param layer the layer number. This may be 2, 3, 4, 5, or 6. The layer
* number 1 is reserved to mean "capacitor between Vdd and ground".
* @param reserved space to reserve in the middle of the central
* strap in case of Vdd. The value 0 makes the Vdd strap one large strap instead of two smaller
* adjacent straps.
* Space to reserve between the ground strap of this
* cell and the ground strap of the adjacent fill cell. The value 0 means
* that these two ground straps should abut to form a single large strap
* instead of two smaller adjacent straps.
* */
private double reservedToLambda(int layer, double reserved, Units units) {
if (units==LAMBDA) return reserved;
double nbTracks = reserved;
if (nbTracks==0) return 0;
return config.getTechType().reservedToLambda(layer, nbTracks);
}
private Floorplan[] makeFloorplans(boolean metalFlex, boolean hierFlex) {
        Job.error(Double.isNaN(config.width),
                  "width hasn't been specified. use setWidth()");
        Job.error(Double.isNaN(config.height),
                  "height hasn't been specified. use setHeight()");
double w = config.width;
double h = config.height;
int numLayers = config.getTechType().getNumMetals() + 1; // one extra for the cap
double[] vddRes = new double[numLayers]; //{0,0,0,0,0,0,0};
double[] gndRes = new double[numLayers]; //{0,0,0,0,0,0,0};
double[] vddW = new double[numLayers]; //{0,0,0,0,0,0,0};
double[] gndW = new double[numLayers]; //{0,0,0,0,0,0,0};
// set given values
for (FillGenConfig.ReserveConfig c : config.reserves)
{
vddRes[c.layer] = reservedToLambda(c.layer, c.vddReserved, c.vddUnits);
gndRes[c.layer] = reservedToLambda(c.layer, c.gndReserved, c.gndUnits);
if (c.vddWUnits != Units.NONE)
vddW[c.layer] = reservedToLambda(c.layer, c.vddWidth, c.vddWUnits);
if (c.gndWUnits != Units.NONE)
gndW[c.layer] = reservedToLambda(c.layer, c.gndWidth, c.gndWUnits);<|fim▁hole|> boolean evenHor = config.evenLayersHorizontal;
boolean alignedMetals = true;
double[] spacing = new double[numLayers];
for (int i = 0; i < numLayers; i++) spacing[i] = config.drcSpacingRule;
// {config.drcSpacingRule,config.drcSpacingRule,
// config.drcSpacingRule,config.drcSpacingRule,
// config.drcSpacingRule,config.drcSpacingRule,config.drcSpacingRule};
if (alignedMetals)
{
double maxVddRes = 0, maxGndRes = 0, maxSpacing = 0, maxVddW = 0, maxGndW = 0;
for (int i = 0; i < vddRes.length; i++)
{
boolean vddOK = false, gndOK = false;
if (vddRes[i] > 0)
{
vddOK = true;
if (maxVddRes < vddRes[i]) maxVddRes = vddRes[i];
}
if (gndRes[i] > 0)
{
gndOK = true;
if (maxGndRes < gndRes[i]) maxGndRes = gndRes[i];
}
if (gndOK || vddOK) // checking max spacing rule
{
if (maxSpacing < config.drcSpacingRule) maxSpacing = config.drcSpacingRule; //drcRules[i];
}
if (maxVddW < vddW[i])
maxVddW = vddW[i];
if (maxGndW < gndW[i])
maxGndW = gndW[i];
}
// correct the values
for (int i = 0; i < vddRes.length; i++)
{
vddRes[i] = maxVddRes;
gndRes[i] = maxGndRes;
spacing[i] = maxSpacing;
vddW[i] = maxVddW;
gndW[i] = maxGndW;
}
}
Floorplan[] thePlans = new Floorplan[numLayers];
// 0 is always null
thePlans[1] = new CapFloorplan(w, h, !evenHor);
if (metalFlex)
{
if (!hierFlex)
{
for (int i = 2; i < numLayers; i++)
{
boolean horiz = (i%2==0);
thePlans[i] = new MetalFloorplanFlex(w, h, vddRes[i], gndRes[i], spacing[i], vddW[i], gndW[i], horiz);
}
return thePlans;
}
w = config.width = config.minTileSizeX;
h = config.height = config.minTileSizeY;
}
for (int i = 2; i < numLayers; i++)
{
boolean horiz = (i%2==0);
thePlans[i] = new MetalFloorplan(w, h, vddRes[i], gndRes[i], spacing[i], horiz);
}
return thePlans;
}
private void printCoverage(Floorplan[] plans) {
for (int i=2; i<plans.length; i++) {
System.out.println("metal-"+i+" coverage: "+
((MetalFloorplan)plans[i]).coverage);
}
}
private static CapCell getCMOS90CapCell(Library lib, CapFloorplan plan)
{
CapCell c = null;
try
{
Class<?> cmos90Class = Class.forName("com.sun.electric.plugins.tsmc.fill90nm.CapCellCMOS90");
Constructor capCellC = cmos90Class.getDeclaredConstructor(Library.class, CapFloorplan.class); // varargs
Object cell = capCellC.newInstance(lib, plan);
c = (CapCell)cell;
} catch (Exception e)
{
assert(false); // runtime error
}
return c;
}
protected void initFillParameters(boolean metalFlex, boolean hierFlex) {
if (libInitialized) return;
Job.error(config.fillLibName==null, "no library specified. Use setFillLibrary()");
        Job.error(Double.isNaN(config.width) || config.width<=0, "no width specified. Use setFillCellWidth()");
        Job.error(Double.isNaN(config.height) || config.height<=0, "no height specified. Use setFillCellHeight()");
plans = makeFloorplans(metalFlex, hierFlex);
if (!metalFlex) printCoverage(plans);
lib = LayoutLib.openLibForWrite(config.fillLibName);
if (!metalFlex) // don't do transistors
{
if (config.is180Tech())
{
capCell = new CapCellMosis(lib, (CapFloorplan) plans[1], config.getTechType());
}
else
{
capCell = getCMOS90CapCell(lib, (CapFloorplan) plans[1]);
}
}
libInitialized = true;
}
private void makeTiledCells(Cell cell, Floorplan[] plans, Library lib,
int[] tiledSizes) {
if (tiledSizes==null) return;
for (int num : tiledSizes)
{
TiledCell.makeTiledCell(num, num, cell, plans, lib);
}
}
public static Cell makeFillCell(Library lib, Floorplan[] plans,
int botLayer, int topLayer, CapCell capCell,
TechType tech,
ExportConfig expCfg, boolean metalFlex, boolean hierFlex) {
FillCell fc = new FillCell(tech);
return fc.makeFillCell1(lib, plans, botLayer, topLayer, capCell,
expCfg, metalFlex, hierFlex);
}
/**
* Method to create standard set of tiled cells.
*/
private Cell standardMakeAndTileCell(Library lib, Floorplan[] plans, int lowLay,
int hiLay, CapCell capCell,
TechType tech,
ExportConfig expCfg,
int[] tiledSizes, boolean metalFlex)
{
Cell master = makeFillCell(lib, plans, lowLay, hiLay, capCell,
tech, expCfg, metalFlex, false);
masters = new ArrayList<Cell>();
masters.add(master);
makeTiledCells(master, plans, lib, tiledSizes);
return master;
}
public static final Units LAMBDA = Units.LAMBDA;
public static final Units TRACKS = Units.TRACKS;
//public static final PowerType POWER = PowerType.POWER;
//public static final PowerType VDD = PowerType.VDD;
public static final ExportConfig PERIMETER = ExportConfig.PERIMETER;
public static final ExportConfig PERIMETER_AND_INTERNAL = ExportConfig.PERIMETER_AND_INTERNAL;
/** Reserve space in the middle of the Vdd and ground straps for signals.
* @param layer the layer number. This may be 2, 3, 4, 5, or 6. The layer
* number 1 is reserved to mean "capacitor between Vdd and ground".
* @param vddReserved space to reserve in the middle of the central Vdd
* strap.
* The value 0 makes the Vdd strap one large strap instead of two smaller
* adjacent straps.
* @param vddUnits LAMBDA or TRACKS
* @param gndReserved space to reserve between the ground strap of this
* cell and the ground strap of the adjacent fill cell. The value 0 means
* that these two ground straps should abut to form a single large strap
* instead of two smaller adjacent straps.
* @param gndUnits LAMBDA or TRACKS
* param tiledSizes an array of sizes. The default value is null. The
* value null means don't generate anything. */
// public void reserveSpaceOnLayer(int layer,
// double vddReserved, Units vddUnits,
// double gndReserved, Units gndUnits) {
// LayoutLib.error(layer<2 || layer>6,
// "Bad layer. Layers must be between 2 and 6 inclusive: "+
// layer);
// this.vddReserved[layer] = reservedToLambda(layer, vddReserved, vddUnits);
// this.gndReserved[layer] = reservedToLambda(layer, gndReserved, gndUnits);
// }
/** Create a fill cell using the current library, fill cell width, fill cell
* height, layer orientation, and reserved spaces for each layer. Then
* generate larger fill cells by tiling that fill cell according to the
* current tiled cell sizes.
* @param loLayer the lower layer. This may be 1 through 6. Layer 1 means
* build a capacitor using MOS transistors between Vdd and ground.
* @param hiLayer the upper layer. This may be 2 through 6. Note that hiLayer
* must be >= loLayer.
* @param exportConfig may be PERIMETER in which case exports are
* placed along the perimeter of the cell for the top two layers. Otherwise
* exportConfig must be PERIMETER_AND_INTERNAL in which case exports are
* placed inside the perimeter of the cell for the bottom layer.
* @param tiledSizes Array specifying composite Cells we should build by
     * concatenating fill cells. For example int[] {2, 4, 7} means we should
     * build 2x2, 4x4, and 7x7 tiled versions of the fill cell.
     * */
public Cell standardMakeFillCell(int loLayer, int hiLayer,
TechType tech,
ExportConfig exportConfig,
int[] tiledSizes, boolean metalFlex) {
initFillParameters(metalFlex, false);
Job.error(loLayer<1, "loLayer must be >=1");
int maxNumMetals = config.getTechType().getNumMetals();
Job.error(hiLayer>maxNumMetals, "hiLayer must be <=" + maxNumMetals);
Job.error(loLayer>hiLayer, "loLayer must be <= hiLayer");
Cell cell = null;
cell = standardMakeAndTileCell(lib, plans, loLayer, hiLayer, capCell,
tech, exportConfig,
tiledSizes, metalFlex);
return cell;
}
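    // Illustrative usage sketch only (not part of the original source): it
    // assumes a FillGenConfig has already been populated by the caller, which
    // is not shown in this excerpt.
    //
    //   FillGenConfig cfg = ...;                        // hypothetical config
    //   FillGeneratorTool tool = FillGeneratorTool.getTool();
    //   tool.setConfig(cfg);
    //   Cell fill = tool.standardMakeFillCell(2, 6, cfg.getTechType(),
    //                                         FillGeneratorTool.PERIMETER,
    //                                         new int[] {2, 4}, false);
    //   tool.makeGallery();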
public void makeGallery() {
Gallery.makeGallery(lib);
}
public void writeLibrary(int backupScheme) throws JobException {
LayoutLib.writeLibrary(lib, backupScheme);
}
public enum FillTypeEnum {INVALID,TEMPLATE,CELL}
}<|fim▁end|>
|
}
|
<|file_name|>main.cpp<|end_file_name|><|fim▁begin|>#include <QtGui/QApplication>
#include "xmlparser.h"
#include "myfiledialog.h"
#include <iostream>
#include <QMessageBox>
using namespace std;
int main(int argc, char *argv[])
{
QApplication a(argc, argv);/*
MainWindow w;
w.show();*/
MyFileDialog my;//Create dialog
    QString name=my.openFile();//Open dialog, and choose file. We get file path and file name as result
    cout<<name.toUtf8().constData()<<"Podaci uspješno učitani!"; // "Data loaded successfully!"
<|fim▁hole|>
}<|fim▁end|>
|
return 0;
|
<|file_name|>shopping-list.commands.ts<|end_file_name|><|fim▁begin|>import { Aggregate } from '@resolve-js/core'
import {
SHOPPING_LIST_CREATED,
SHOPPING_LIST_RENAMED,
SHOPPING_LIST_REMOVED,
SHOPPING_ITEM_CREATED,
SHOPPING_ITEM_TOGGLED,
SHOPPING_ITEM_REMOVED,
} from '../event-types'
const aggregate: Aggregate = {
createShoppingList: (state, { payload: { name } }) => {
if (state.createdAt) {
throw new Error('Shopping List already exists')
}
if (!name) {
throw new Error(`The "name" field is required`)
}
return {
type: SHOPPING_LIST_CREATED,
payload: { name },
}
},
renameShoppingList: (state, { payload: { name } }) => {
if (!state.createdAt) {
throw new Error('Shopping List does not exist')
}
if (!name) {
throw new Error(`The "name" field is required`)
}
return {
type: SHOPPING_LIST_RENAMED,
payload: { name },
}
},
removeShoppingList: (state) => {
if (!state.createdAt) {
throw new Error('Shopping List does not exist')
}
return {
type: SHOPPING_LIST_REMOVED,
}
},
createShoppingItem: (state, { payload: { id, text } }) => {
if (!state.createdAt) {
throw new Error('Shopping List does not exist')
}
if (!id) {
throw new Error(`The "id" field is required`)
}
if (!text) {
throw new Error(`The "text" field is required`)
}
return {
type: SHOPPING_ITEM_CREATED,
payload: { id, text },
}
},
toggleShoppingItem: (state, { payload: { id } }) => {
if (!state.createdAt) {
throw new Error('Shopping List does not exist')
}
if (!id) {
throw new Error(`The "id" field is required`)
}
return {
type: SHOPPING_ITEM_TOGGLED,
payload: { id },
}
},
removeShoppingItem: (state, { payload: { id } }) => {<|fim▁hole|> throw new Error('Shopping List does not exist')
}
if (!id) {
throw new Error(`The "id" field is required`)
}
return {
type: SHOPPING_ITEM_REMOVED,
payload: { id },
}
},
}
export default aggregate<|fim▁end|>
|
if (!state.createdAt) {
|
<|file_name|>EncodedGraph.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package org.graalvm.compiler.nodes;
import java.util.List;
import org.graalvm.compiler.graph.NodeClass;
import jdk.vm.ci.meta.Assumptions;
import jdk.vm.ci.meta.ResolvedJavaMethod;
/**
* A {@link StructuredGraph} encoded in a compact binary representation as a byte[] array. See
* {@link GraphEncoder} for a description of the encoding format. Use {@link GraphDecoder} for
* decoding.
*/
public class EncodedGraph {
private final byte[] encoding;
private final long startOffset;
private final Object[] objects;
private final NodeClass<?>[] types;
private final Assumptions assumptions;
private final List<ResolvedJavaMethod> inlinedMethods;<|fim▁hole|>
/**
* The "table of contents" of the encoded graph, i.e., the mapping from orderId numbers to the
* offset in the encoded byte[] array. Used as a cache during decoding.
*/
protected long[] nodeStartOffsets;
public EncodedGraph(byte[] encoding, long startOffset, Object[] objects, NodeClass<?>[] types, Assumptions assumptions, List<ResolvedJavaMethod> inlinedMethods) {
this.encoding = encoding;
this.startOffset = startOffset;
this.objects = objects;
this.types = types;
this.assumptions = assumptions;
this.inlinedMethods = inlinedMethods;
}
public byte[] getEncoding() {
return encoding;
}
public long getStartOffset() {
return startOffset;
}
public Object[] getObjects() {
return objects;
}
public NodeClass<?>[] getNodeClasses() {
return types;
}
public Assumptions getAssumptions() {
return assumptions;
}
public List<ResolvedJavaMethod> getInlinedMethods() {
return inlinedMethods;
}
}<|fim▁end|>
| |
<|file_name|>sslcontext.py<|end_file_name|><|fim▁begin|>from ctypes import *
import ctypes
from ctypes.wintypes import *
from _ssl import SSLError
import sys
CRYPT32 = windll.Crypt32
SCHANNEL = windll.SChannel
# Lots of "Magic" constants, mainly from schannel.h
SCH_CRED_NO_SYSTEM_MAPPER = 0x00000002
SCH_CRED_NO_DEFAULT_CREDS = 0x00000010
SCH_CRED_REVOCATION_CHECK_CHAIN = 0x00000200
SCH_CRED_REVOCATION_CHECK_CHAIN_EXCLUDE_ROOT = 0x00000400
SCH_CRED_IGNORE_NO_REVOCATION_CHECK = 0x00000800
SECPKG_ATTR_REMOTE_CERT_CONTEXT = 0x53
SECPKG_ATTR_STREAM_SIZES = 4
SP_PROT_SSL3_CLIENT = 0x00000020
SP_PROT_SSL2_CLIENT = 0x00000008
SP_PROT_TLS1_1_CLIENT = 0x00000200
SCHANNEL_CRED_VERSION = 0x00000004
UNISP_NAME = "Microsoft Unified Security Protocol Provider"
SECPKG_CRED_OUTBOUND = 0x00000002
SECURITY_NATIVE_DREP = 0x00000010
SECBUFFER_VERSION = 0
SECBUFFER_EMPTY = 0
SECBUFFER_DATA = 1
SECBUFFER_TOKEN = 2
SECBUFFER_EXTRA = 5
SECBUFFER_STREAM_TRAILER = 6
SECBUFFER_STREAM_HEADER = 7
ISC_REQ_SEQUENCE_DETECT = 0x00000008
ISC_REQ_REPLAY_DETECT = 0x00000004
ISC_REQ_CONFIDENTIALITY = 0x00000010
ISC_REQ_EXTENDED_ERROR = 0x00008000
ISC_REQ_ALLOCATE_MEMORY = 0x00000100
ISC_REQ_STREAM = 0x00010000
SEC_I_CONTINUE_NEEDED = 0x00090312
SEC_I_INCOMPLETE_CREDENTIALS = 0x00090320
SEC_I_RENEGOTIATE = 0x00090321
SEC_E_INCOMPLETE_MESSAGE = 0x80090318
SEC_E_INTERNAL_ERROR = 0x80090304
SEC_E_OK = 0x00000000
class SecPkgContext_StreamSizes(Structure):
_fields_ = [("cbHeader", ULONG),
("cbTrailer", ULONG),
("cbMaximumMessage", ULONG),
("cBuffers", ULONG),
("cbBlockSize", ULONG)]
class CERT_CONTEXT(Structure):
_fields_ = [("dwCertEncodingType", DWORD),
("pbCertEncoded", c_char_p),
("cbCertEncoded", DWORD),
("pCertInfo", c_void_p),
("hCertStore", c_void_p)]
class SecBuffer(Structure):
_fields_ = [("cbBuffer", ULONG),
("BufferType", ULONG),
("pvBuffer", c_void_p)]
class SecBufferDesc(Structure):
_fields_ = [("ulVersion", ULONG),
("cBuffers", ULONG),
("pBuffers", POINTER(SecBuffer))]
class _SecHandle(Structure):
_fields_ = [("dwLower", ULONG ),
("dwUpper", ULONG )]
class SCHANNEL_CRED(Structure):
_fields_ = [("dwVersion", DWORD),
("cCreds", DWORD),
("paCred", POINTER(HANDLE)),
("hRootStore", HANDLE),
("cMappers", DWORD),
("aphMappers", POINTER(HANDLE)),
("cSupportedAlgs", DWORD),
("palgSupportedAlgs", POINTER(HANDLE)),
("grbitEnabledProtocols", DWORD),
("dwMinimumCipherStrength", DWORD),
("dwMaximumCipherStrength", DWORD),
("dwSessionLifespan", DWORD),
("dwFlags", DWORD),
("dwCredFormat", DWORD),
]
class SecurityFunctionTable(Structure):
_fields_ = [("dwVersion", ULONG),
("EnumerateSecurityPackages", WINFUNCTYPE(LONG)),
("QueryCredentialsAttributes", WINFUNCTYPE(LONG)),
("AcquireCredentialsHandle", WINFUNCTYPE(ULONG, c_void_p, c_wchar_p, ULONG, HANDLE, c_void_p, c_void_p, c_void_p, HANDLE, PULONG)),
("FreeCredentialsHandle", WINFUNCTYPE(LONG)),
("Reserved2", c_void_p),
("InitializeSecurityContext", WINFUNCTYPE(ULONG, c_void_p, c_void_p, c_wchar_p, ULONG, ULONG, ULONG, c_void_p, ULONG, c_void_p, c_void_p, POINTER(ULONG), POINTER(ULONG))),
("AcceptSecurityContext", WINFUNCTYPE(ULONG)),
("CompleteAuthToken", WINFUNCTYPE(LONG)),
("DeleteSecurityContext", WINFUNCTYPE(LONG, c_void_p)),
("ApplyControlToken", WINFUNCTYPE(LONG)),
("QueryContextAttributes", WINFUNCTYPE(LONG, c_void_p, ULONG, c_void_p)),
("ImpersonateSecurityContext", WINFUNCTYPE(LONG)),
("RevertSecurityContext", WINFUNCTYPE(LONG)),
("MakeSignature", WINFUNCTYPE(LONG)),
("VerifySignature", WINFUNCTYPE(LONG)),
("FreeContextBuffer", WINFUNCTYPE(LONG, c_void_p)),
("QuerySecurityPackageInfo", WINFUNCTYPE(LONG)),
("Reserved3", c_void_p),
("Reserved4", c_void_p),
("ExportSecurityContext", WINFUNCTYPE(LONG)),
("ImportSecurityContext", WINFUNCTYPE(LONG)),
("AddCredentials", WINFUNCTYPE(LONG)),
("Reserved8", c_void_p),
("QuerySecurityContextToken", WINFUNCTYPE(LONG)),
("EncryptMessage", WINFUNCTYPE(ULONG, HANDLE, ULONG, HANDLE, ULONG)),
("DecryptMessage", WINFUNCTYPE(ULONG, HANDLE, HANDLE, ULONG, PULONG)),
("SetContextAttributes", WINFUNCTYPE(LONG)),]
class SSLContext(object):
def __init__(self):
self._InitSecurityInterface()
self._creds = None
self._context = _SecHandle()
self._SchannelCred = None
self.reset()
def reset(self):
if self._creds is not None:
windll.Secur32.FreeCredentialsHandle(byref(self._creds))
self._creds = _SecHandle()
self._creds.dwUpper = 0;
self._creds.dwLower = 0;
self._context.dwUpper = 0;
self._context.dwLower = 0;
self._SchannelCred = SCHANNEL_CRED()
self._intialized = False
self._recv_buffer = b'' # Raw socket data
self._recv_buffer_decrypted = b'' # socket data that is decrypted
def do_handshake(self):
self.reset()
self._ClientCreateCredentials()
self._ClientHandshake()
#TODO: validate remote certificate
self._intialized = True #all communications should now be encrypted
def _ClientHandshake(self):
buffer = SecBuffer()
buffer.pvBuffer = None
buffer.BufferType = SECBUFFER_TOKEN
buffer.cbBuffer = 0
bufferGroup = SecBufferDesc()
bufferGroup.cBuffers = 1
bufferGroup.pBuffers = pointer(buffer)
bufferGroup.ulVersion = SECBUFFER_VERSION
dwSSPIFlags = ISC_REQ_SEQUENCE_DETECT | ISC_REQ_REPLAY_DETECT | ISC_REQ_CONFIDENTIALITY | ISC_REQ_EXTENDED_ERROR | ISC_REQ_ALLOCATE_MEMORY | ISC_REQ_STREAM
dwSSPIOutFlags = DWORD()
Status = self._securityFunc.InitializeSecurityContext(byref(self._creds),
None,
c_wchar_p(self._server_hostname),
dwSSPIFlags,
0,
SECURITY_NATIVE_DREP,
None,
0,
byref(self._context),
byref(bufferGroup),
byref(dwSSPIOutFlags),
POINTER(ULONG)() )
if Status != SEC_I_CONTINUE_NEEDED and Status != SEC_E_OK:
raise SSLError(WinError(c_long(Status).value))
if Status == SEC_I_CONTINUE_NEEDED:
if buffer.cbBuffer != 0 and buffer.pvBuffer is not None:
data = string_at(buffer.pvBuffer, buffer.cbBuffer)
if self.send(data, plaintext = True) == 0:
self._securityFunc.FreeContextBuffer(buffer.pvBuffer)
self._securityFunc.DeleteSecurityContext(byref(self._context))
else:
self._securityFunc.FreeContextBuffer(buffer.pvBuffer)
(Status,extraData) = self._ClientHandshakeLoop(True)
if Status != SEC_E_OK:
raise SSLError(WinError(c_long(Status).value))
def _ClientHandshakeLoop(self, doRead):
Status = SEC_I_CONTINUE_NEEDED
dwSSPIFlags = ISC_REQ_SEQUENCE_DETECT | ISC_REQ_REPLAY_DETECT | ISC_REQ_CONFIDENTIALITY | ISC_REQ_EXTENDED_ERROR | ISC_REQ_ALLOCATE_MEMORY | ISC_REQ_STREAM
dwSSPIOutFlags = DWORD()
recv_data = b''
while Status == SEC_I_CONTINUE_NEEDED or Status == SEC_E_INCOMPLETE_MESSAGE or Status == SEC_I_INCOMPLETE_CREDENTIALS:
if len(recv_data) == 0 or Status == SEC_E_INCOMPLETE_MESSAGE:
if doRead:
data = self._sock.recv(2048, raw = True)
recv_data += data
else:
doRead = True
inBufferGroup = SecBufferDesc()
inBufferGroup.cBuffers = 2
inBufferGroup.ulVersion = SECBUFFER_VERSION
buffers = (SecBuffer * 2)()
buffers[0].pvBuffer = cast(c_char_p(recv_data), c_void_p)
buffers[0].cbBuffer = len(recv_data)
buffers[0].BufferType = SECBUFFER_TOKEN
buffers[1].pvBuffer = None
buffers[1].cbBuffers = 0
buffers[1].BufferType = SECBUFFER_EMPTY
inBufferGroup.pBuffers = buffers
outBufferGroup = SecBufferDesc()
outBufferGroup.cBuffers = 1
outBufferGroup.ulVersion = SECBUFFER_VERSION
buffers = (SecBuffer * 1)()
buffers[0].pvBuffer = None
buffers[0].BufferType = SECBUFFER_TOKEN
buffers[0].cbBuffer = 0
outBufferGroup.pBuffers = buffers
Status = self._securityFunc.InitializeSecurityContext(byref(self._creds),
byref(self._context),
c_wchar_p(self._server_hostname),
dwSSPIFlags,
0,
SECURITY_NATIVE_DREP,
byref(inBufferGroup),
0,
None,
byref(outBufferGroup),
byref(dwSSPIOutFlags),
POINTER(ULONG)()
)
if Status == SEC_E_OK or Status == SEC_I_CONTINUE_NEEDED:
if outBufferGroup.pBuffers[0].cbBuffer != 0 and outBufferGroup.pBuffers[0].pvBuffer is not None:
data = string_at(outBufferGroup.pBuffers[0].pvBuffer, outBufferGroup.pBuffers[0].cbBuffer)
if self._sock.sendall(data, raw = True) == 0:
self._securityFunc.FreeContextBuffer(outBufferGroup.pBuffers[0].pvBuffer)
self._securityFunc.DeleteSecurityContext(byref(self._context))
return (SEC_E_INTERNAL_ERROR, None)
else:
self._securityFunc.FreeContextBuffer(outBufferGroup.pBuffers[0].pvBuffer)
outBufferGroup.pBuffers[0].pvBuffer = None
if Status == SEC_E_INCOMPLETE_MESSAGE:
continue
if Status == SEC_E_OK:
if inBufferGroup.pBuffers[1].BufferType == SECBUFFER_EXTRA:
return (Status, recv_data[-inBufferGroup.pBuffers[1].cbBuffer:])
else:
return (Status, None)
if inBufferGroup.pBuffers[1].BufferType == SECBUFFER_EXTRA:
recv_data = recv_data[-inBufferGroup.pBuffers[1].cbBuffer:]
else:
recv_data = b""
if Status == SEC_I_INCOMPLETE_CREDENTIALS:
#return (Status, None)
doRead = False
continue
return (Status, None)
def _InitSecurityInterface(self):
func = SCHANNEL.InitSecurityInterfaceW
func.restype = POINTER(SecurityFunctionTable)
self._securityFunc = func().contents
def _wrap_socket(self, sock, server_side, server_hostname, client_certificate = None):
self._sock = sock
self._server_hostname = server_hostname
self._client_certificate = client_certificate
return self
def _ClientCreateCredentials(self):
if self._client_certificate is not None:
self._SchannelCred.cCreds = 1
self._SchannelCred.paCred = pointer(self._client_certificate)
self._SchannelCred.grbitEnabledProtocols = SP_PROT_TLS1_1_CLIENT #| SP_PROT_TLS1_1_CLIENT | SP_PROT_SSL2_CLIENT
self._SchannelCred.dwVersion = SCHANNEL_CRED_VERSION
self._SchannelCred.dwFlags |= SCH_CRED_NO_DEFAULT_CREDS | \
SCH_CRED_NO_SYSTEM_MAPPER | \
SCH_CRED_REVOCATION_CHECK_CHAIN_EXCLUDE_ROOT | \
SCH_CRED_IGNORE_NO_REVOCATION_CHECK
<|fim▁hole|> Status = self._securityFunc.AcquireCredentialsHandle(None,
c_wchar_p(UNISP_NAME),
SECPKG_CRED_OUTBOUND,
None,
byref(self._SchannelCred),
None,
None,
byref(self._creds),
POINTER(ULONG)())
if Status != SEC_E_OK:
raise SSLError(WinError(Status))
def send(self, data, flags = 0, plaintext = False):
if self._intialized is False and plaintext is True:
return self._sock.sendall(data, flags, raw = True)
else:
Sizes = SecPkgContext_StreamSizes()
Status = self._securityFunc.QueryContextAttributes(byref(self._context), SECPKG_ATTR_STREAM_SIZES, byref(Sizes))
if Status != SEC_E_OK:
raise SSLError(WinError(c_long(Status).value))
bufferValue = b'\x00' * Sizes.cbHeader + data + b'\x00' * Sizes.cbTrailer + (b'\x00' *(Sizes.cbMaximumMessage - len(data)))
allocatedBuffer = create_string_buffer(bufferValue)
messageBuffers = SecBufferDesc()
messageBuffers.cBuffers = 4
messageBuffers.ulVersion = SECBUFFER_VERSION
buffers = (SecBuffer * 4)()
buffers[0].BufferType = SECBUFFER_STREAM_HEADER
buffers[0].cbBuffer = Sizes.cbHeader
buffers[0].pvBuffer = cast(byref(allocatedBuffer), c_void_p)
buffers[1].BufferType = SECBUFFER_DATA
buffers[1].cbBuffer = len(data)
buffers[1].pvBuffer = cast(byref(allocatedBuffer, Sizes.cbHeader), c_void_p)
buffers[2].BufferType = SECBUFFER_STREAM_TRAILER
buffers[2].cbBuffer = Sizes.cbTrailer
buffers[2].pvBuffer = cast(byref(allocatedBuffer, Sizes.cbHeader + len(data)), c_void_p)
buffers[3].BufferType = SECBUFFER_EMPTY
messageBuffers.pBuffers = buffers
Status = self._securityFunc.EncryptMessage(byref(self._context),0, byref(messageBuffers), 0)
if Status != SEC_E_OK:
raise SSLError(WinError(c_long(Status).value))
encrypted_data = string_at(buffers[0].pvBuffer, buffers[0].cbBuffer + buffers[1].cbBuffer + buffers[2].cbBuffer)
return self._sock.sendall(encrypted_data, flags, raw=True)
def recv(self, buffersize, flags=0, plaintext=False):
if self._intialized is False and plaintext is True:
return self._sock.recv(buffersize, flags, raw=True)
else:
if len(self._recv_buffer_decrypted) > 0:
decrypted_data = self._recv_buffer_decrypted[:buffersize]
self._recv_buffer_decrypted = self._recv_buffer_decrypted[buffersize:]
return decrypted_data
decrypted_data = self._recv_buffer_decrypted
shouldContinue = True
while shouldContinue:
self._recv_buffer += self._sock.recv(buffersize, flags, raw = True)
messageBuffers = SecBufferDesc()
messageBuffers.cBuffers = 4
messageBuffers.ulVersion = SECBUFFER_VERSION
buffers = (SecBuffer * 4)()
buffers[0].pvBuffer = cast(c_char_p(self._recv_buffer), c_void_p)
buffers[0].cbBuffer = len(self._recv_buffer)
buffers[0].BufferType = SECBUFFER_DATA
buffers[1].BufferType = SECBUFFER_EMPTY
buffers[2].BufferType = SECBUFFER_EMPTY
buffers[3].BufferType = SECBUFFER_EMPTY
messageBuffers.pBuffers = buffers
Status = self._securityFunc.DecryptMessage(byref(self._context), byref(messageBuffers), 0, None)
if Status == SEC_E_INCOMPLETE_MESSAGE:
continue
if Status != SEC_E_OK and Status != SEC_I_RENEGOTIATE:
raise SSLError(WinError(c_long(Status).value))
for idx in range(1,4):
if messageBuffers.pBuffers[idx].BufferType == SECBUFFER_DATA:
decrypted_data += string_at(messageBuffers.pBuffers[idx].pvBuffer, messageBuffers.pBuffers[idx].cbBuffer)
break
extra_data = b''
for idx in range(1,4):
if messageBuffers.pBuffers[idx].BufferType == SECBUFFER_EXTRA:
extra_data = string_at(messageBuffers.pBuffers[idx].pvBuffer, messageBuffers.pBuffers[idx].cbBuffer)
break
if len(extra_data) > 0:
self._recv_buffer = extra_data
continue
else:
self._recv_buffer = b''
shouldContinue = False
if Status == SEC_I_RENEGOTIATE:
(Status, _) = self._ClientHandshakeLoop(doRead = False)
shouldContinue = True
if Status != SEC_E_OK:
raise SSLError(WinError(c_long(Status).value))
elif Status != SEC_E_OK:
raise SSLError(WinError(c_long(Status).value))
self._recv_buffer_decrypted = decrypted_data[buffersize:]
return decrypted_data[:buffersize]<|fim▁end|>
| |
<|file_name|>oauth2.py<|end_file_name|><|fim▁begin|>"""
The MIT License
Copyright (c) 2007 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import urllib
import time
import random
import urlparse
import hmac
import binascii
import httplib2
try:
from urlparse import parse_qs, parse_qsl
except ImportError:
from cgi import parse_qs, parse_qsl
VERSION = '1.0' # Hi Blaine!
HTTP_METHOD = 'GET'
SIGNATURE_METHOD = 'PLAINTEXT'
class Error(RuntimeError):
"""Generic exception class."""
    def __init__(self, message='OAuth error occurred.'):
self._message = message
@property
def message(self):
"""A hack to get around the deprecation errors in 2.6."""
return self._message
def __str__(self):
return self._message
class MissingSignature(Error):
pass
def build_authenticate_header(realm=''):
"""Optional WWW-Authenticate header (401 error)"""
return {'WWW-Authenticate': 'OAuth realm="%s"' % realm}
def escape(s):
"""Escape a URL including any /."""
return urllib.quote(s, safe='~')
def generate_timestamp():
"""Get seconds since epoch (UTC)."""
return int(time.time())
def generate_nonce(length=8):
"""Generate pseudorandom number."""
return ''.join([str(random.randint(0, 9)) for i in range(length)])
def generate_verifier(length=8):
"""Generate pseudorandom number."""
return ''.join([str(random.randint(0, 9)) for i in range(length)])
class Consumer(object):
"""A consumer of OAuth-protected services.
The OAuth consumer is a "third-party" service that wants to access
protected resources from an OAuth service provider on behalf of an end
user. It's kind of the OAuth client.
Usually a consumer must be registered with the service provider by the
developer of the consumer software. As part of that process, the service
provider gives the consumer a *key* and a *secret* with which the consumer
software can identify itself to the service. The consumer will include its
key in each request to identify itself, but will use its secret only when
signing requests, to prove that the request is from that particular
registered consumer.
Once registered, the consumer can then use its consumer credentials to ask
the service provider for a request token, kicking off the OAuth
authorization process.
"""
key = None
secret = None
def __init__(self, key, secret):
self.key = key
self.secret = secret
if self.key is None or self.secret is None:
raise ValueError("Key and secret must be set.")
def __str__(self):
data = {
'oauth_consumer_key': self.key,
'oauth_consumer_secret': self.secret
}
return urllib.urlencode(data)
class Token(object):
"""An OAuth credential used to request authorization or a protected
resource.
Tokens in OAuth comprise a *key* and a *secret*. The key is included in
requests to identify the token being used, but the secret is used only in
the signature, to prove that the requester is who the server gave the
token to.
When first negotiating the authorization, the consumer asks for a *request
token* that the live user authorizes with the service provider. The
consumer then exchanges the request token for an *access token* that can
be used to access protected resources.
"""
key = None
secret = None
callback = None
callback_confirmed = None
verifier = None
def __init__(self, key, secret):
self.key = key
self.secret = secret
if self.key is None or self.secret is None:
raise ValueError("Key and secret must be set.")
def set_callback(self, callback):
self.callback = callback
self.callback_confirmed = 'true'
def set_verifier(self, verifier=None):
if verifier is not None:
self.verifier = verifier
else:
self.verifier = generate_verifier()
def get_callback_url(self):
if self.callback and self.verifier:
# Append the oauth_verifier.
parts = urlparse.urlparse(self.callback)
scheme, netloc, path, params, query, fragment = parts[:6]
if query:
query = '%s&oauth_verifier=%s' % (query, self.verifier)
else:
query = 'oauth_verifier=%s' % self.verifier
return urlparse.urlunparse((scheme, netloc, path, params,
query, fragment))
return self.callback
def to_string(self):
"""Returns this token as a plain string, suitable for storage.
The resulting string includes the token's secret, so you should never
send or store this string where a third party can read it.
"""
data = {
'oauth_token': self.key,
'oauth_token_secret': self.secret,
}
if self.callback_confirmed is not None:
data['oauth_callback_confirmed'] = self.callback_confirmed
return urllib.urlencode(data)
@staticmethod
def from_string(s):
"""Deserializes a token from a string like one returned by
`to_string()`."""
if not len(s):
raise ValueError("Invalid parameter string.")
params = parse_qs(s, keep_blank_values=False)
if not len(params):
raise ValueError("Invalid parameter string.")
try:
key = params['oauth_token'][0]
except Exception:
raise ValueError("'oauth_token' not found in OAuth request.")
try:
secret = params['oauth_token_secret'][0]
except Exception:
raise ValueError("'oauth_token_secret' not found in "
"OAuth request.")
token = Token(key, secret)
try:
token.callback_confirmed = params['oauth_callback_confirmed'][0]
except KeyError:
pass # 1.0, no callback confirmed.
return token
def __str__(self):
return self.to_string()
def setter(attr):
name = attr.__name__
def getter(self):
try:
return self.__dict__[name]
except KeyError:
raise AttributeError(name)
def deleter(self):
del self.__dict__[name]
return property(getter, attr, deleter)
class Request(dict):
"""The parameters and information for an HTTP request, suitable for
authorizing with OAuth credentials.
When a consumer wants to access a service's protected resources, it does
so using a signed HTTP request identifying itself (the consumer) with its
key, and providing an access token authorized by the end user to access
those resources.
"""
http_method = HTTP_METHOD
http_url = None
version = VERSION
def __init__(self, method=HTTP_METHOD, url=None, parameters=None):
if method is not None:
self.method = method
if url is not None:
self.url = url
if parameters is not None:
self.update(parameters)
@setter
def url(self, value):
parts = urlparse.urlparse(value)
scheme, netloc, path = parts[:3]
# Exclude default port numbers.
if scheme == 'http' and netloc[-3:] == ':80':
netloc = netloc[:-3]
elif scheme == 'https' and netloc[-4:] == ':443':
netloc = netloc[:-4]
if scheme != 'http' and scheme != 'https':
raise ValueError("Unsupported URL %s (%s)." % (value, scheme))
value = '%s://%s%s' % (scheme, netloc, path)
self.__dict__['url'] = value
@setter
def method(self, value):
self.__dict__['method'] = value.upper()
def _get_timestamp_nonce(self):
return self['oauth_timestamp'], self['oauth_nonce']
def get_nonoauth_parameters(self):
"""Get any non-OAuth parameters."""
return dict([(k, v) for k, v in self.iteritems()
if not k.startswith('oauth_')])
def to_header(self, realm=''):
"""Serialize as a header for an HTTPAuth request."""
oauth_params = ((k, v) for k, v in self.items()
if k.startswith('oauth_'))
stringy_params = ((k, escape(str(v))) for k, v in oauth_params)
header_params = ('%s="%s"' % (k, v) for k, v in stringy_params)
params_header = ', '.join(header_params)
auth_header = 'OAuth realm="%s"' % realm
if params_header:
auth_header = "%s, %s" % (auth_header, params_header)
return {'Authorization': auth_header}
def to_postdata(self):
"""Serialize as post data for a POST request."""
return self.encode_postdata(self)
def encode_postdata(self, data):
# tell urlencode to deal with sequence values and map them correctly
# to resulting querystring. for example self["k"] = ["v1", "v2"] will
# result in 'k=v1&k=v2' and not k=%5B%27v1%27%2C+%27v2%27%5D
return urllib.urlencode(data, True)
def to_url(self):
"""Serialize as a URL for a GET request."""
return '%s?%s' % (self.url, self.to_postdata())
def get_parameter(self, parameter):
ret = self.get(parameter)
if ret is None:
raise Error('Parameter not found: %s' % parameter)
return ret
def get_normalized_parameters(self):
"""Return a string that contains the parameters that must be signed."""
items = [(k, v) for k, v in self.items() if k != 'oauth_signature']
encoded_str = urllib.urlencode(sorted(items), True)
# Encode signature parameters per Oauth Core 1.0 protocol
# spec draft 7, section 3.6
# (http://tools.ietf.org/html/draft-hammer-oauth-07#section-3.6)
# Spaces must be encoded with "%20" instead of "+"
return encoded_str.replace('+', '%20')
def sign_request(self, signature_method, consumer, token):
"""Set the signature parameter to the result of sign."""
if 'oauth_consumer_key' not in self:
self['oauth_consumer_key'] = consumer.key
if token and 'oauth_token' not in self:
self['oauth_token'] = token.key
self['oauth_signature_method'] = signature_method.name
self['oauth_signature'] = signature_method.sign(self, consumer, token)
@classmethod
def make_timestamp(cls):
"""Get seconds since epoch (UTC)."""
return str(int(time.time()))
@classmethod
def make_nonce(cls):
"""Generate pseudorandom number."""
return str(random.randint(0, 100000000))
@classmethod
def from_request(cls, http_method, http_url, headers=None, parameters=None,
query_string=None):
"""Combines multiple parameter sources."""
if parameters is None:
parameters = {}
# Headers
if headers and 'Authorization' in headers:
auth_header = headers['Authorization']
# Check that the authorization header is OAuth.
if auth_header[:6] == 'OAuth ':
auth_header = auth_header[6:]
try:
# Get the parameters from the header.
header_params = cls._split_header(auth_header)
parameters.update(header_params)
except:
raise Error('Unable to parse OAuth parameters from '
'Authorization header.')
# GET or POST query string.
if query_string:
query_params = cls._split_url_string(query_string)
parameters.update(query_params)
# URL parameters.
param_str = urlparse.urlparse(http_url)[4] # query
url_params = cls._split_url_string(param_str)
parameters.update(url_params)
if parameters:
return cls(http_method, http_url, parameters)
return None
@classmethod
def from_consumer_and_token(cls, consumer, token=None,
http_method=HTTP_METHOD, http_url=None, parameters=None):
if not parameters:
parameters = {}
defaults = {
'oauth_consumer_key': consumer.key,
'oauth_timestamp': cls.make_timestamp(),
'oauth_nonce': cls.make_nonce(),
'oauth_version': cls.version,
}
defaults.update(parameters)
parameters = defaults
if token:
parameters['oauth_token'] = token.key
return Request(http_method, http_url, parameters)
@classmethod
def from_token_and_callback(cls, token, callback=None,
http_method=HTTP_METHOD, http_url=None, parameters=None):
if not parameters:
parameters = {}
parameters['oauth_token'] = token.key
if callback:
parameters['oauth_callback'] = callback
return cls(http_method, http_url, parameters)
@staticmethod
def _split_header(header):
"""Turn Authorization: header into parameters."""
params = {}
parts = header.split(',')
for param in parts:
# Ignore realm parameter.
if param.find('realm') > -1:
continue
# Remove whitespace.
param = param.strip()
# Split key-value.
param_parts = param.split('=', 1)
# Remove quotes and unescape the value.
params[param_parts[0]] = urllib.unquote(param_parts[1].strip('\"'))
return params
@staticmethod
def _split_url_string(param_str):
"""Turn URL string into parameters."""
parameters = parse_qs(param_str, keep_blank_values=False)
for k, v in parameters.iteritems():
parameters[k] = urllib.unquote(v[0])
return parameters
class Server(object):
"""A skeletal implementation of a service provider, providing protected
resources to requests from authorized consumers.
This class implements the logic to check requests for authorization. You
can use it with your web server or web framework to protect certain
resources with OAuth.<|fim▁hole|>
timestamp_threshold = 300 # In seconds, five minutes.
version = VERSION
signature_methods = None
def __init__(self, signature_methods=None):
self.signature_methods = signature_methods or {}
def add_signature_method(self, signature_method):
self.signature_methods[signature_method.name] = signature_method
return self.signature_methods
def verify_request(self, request, consumer, token):
"""Verifies an api call and checks all the parameters."""
version = self._get_version(request)
self._check_signature(request, consumer, token)
parameters = request.get_nonoauth_parameters()
return parameters
def build_authenticate_header(self, realm=''):
"""Optional support for the authenticate header."""
return {'WWW-Authenticate': 'OAuth realm="%s"' % realm}
def _get_version(self, request):
"""Verify the correct version request for this server."""
try:
version = request.get_parameter('oauth_version')
except:
version = VERSION
if version and version != self.version:
raise Error('OAuth version %s not supported.' % str(version))
return version
def _get_signature_method(self, request):
"""Figure out the signature with some defaults."""
try:
signature_method = request.get_parameter('oauth_signature_method')
except:
signature_method = SIGNATURE_METHOD
try:
# Get the signature method object.
signature_method = self.signature_methods[signature_method]
except:
signature_method_names = ', '.join(self.signature_methods.keys())
raise Error('Signature method %s not supported; try one of the following: %s' % (signature_method, signature_method_names))
return signature_method
def _get_verifier(self, request):
return request.get_parameter('oauth_verifier')
def _check_signature(self, request, consumer, token):
timestamp, nonce = request._get_timestamp_nonce()
self._check_timestamp(timestamp)
signature_method = self._get_signature_method(request)
try:
signature = request.get_parameter('oauth_signature')
except:
raise MissingSignature('Missing oauth_signature.')
# Validate the signature.
valid = signature_method.check(request, consumer, token, signature)
if not valid:
key, base = signature_method.signing_base(request, consumer, token)
raise Error('Invalid signature. Expected signature base '
'string: %s' % base)
def _check_timestamp(self, timestamp):
"""Verify that timestamp is recentish."""
timestamp = int(timestamp)
now = int(time.time())
lapsed = now - timestamp
if lapsed > self.timestamp_threshold:
raise Error('Expired timestamp: given %d and now %d differ by more '
'than the threshold of %d seconds.' % (timestamp, now, self.timestamp_threshold))
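# Usage sketch for the Server class above (illustrative only; `consumer` and
# `token` are assumed to be looked up from your own storage using the
# oauth_consumer_key / oauth_token values carried by the incoming request):
#
#     server = Server()
#     server.add_signature_method(SignatureMethod_HMAC_SHA1())
#     oauth_request = Request.from_request(http_method, http_url,
#         headers=headers, query_string=query_string)
#     params = server.verify_request(oauth_request, consumer, token)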
class Client(httplib2.Http):
"""OAuthClient is a worker to attempt to execute a request."""
def __init__(self, consumer, token=None, cache=None, timeout=None,
proxy_info=None):
if consumer is not None and not isinstance(consumer, Consumer):
raise ValueError("Invalid consumer.")
if token is not None and not isinstance(token, Token):
raise ValueError("Invalid token.")
self.consumer = consumer
self.token = token
self.method = SignatureMethod_HMAC_SHA1()
httplib2.Http.__init__(self, cache=cache, timeout=timeout,
proxy_info=proxy_info)
def set_signature_method(self, method):
if not isinstance(method, SignatureMethod):
raise ValueError("Invalid signature method.")
self.method = method
def request(self, uri, method="GET", body=None, headers=None,
redirections=httplib2.DEFAULT_MAX_REDIRECTS, connection_type=None,
force_auth_header=False):
if not isinstance(headers, dict):
headers = {}
if body and method == "POST":
parameters = dict(parse_qsl(body))
elif method == "GET":
parsed = urlparse.urlparse(uri)
parameters = parse_qs(parsed.query)
else:
parameters = None
req = Request.from_consumer_and_token(self.consumer, token=self.token,
http_method=method, http_url=uri, parameters=parameters)
req.sign_request(self.method, self.consumer, self.token)
if force_auth_header:
# ensure we always send Authorization
headers.update(req.to_header())
if method == "POST":
if not force_auth_header:
body = req.to_postdata()
else:
body = req.encode_postdata(req.get_nonoauth_parameters())
headers['Content-Type'] = 'application/x-www-form-urlencoded'
elif method == "GET":
if not force_auth_header:
uri = req.to_url()
else:
if not force_auth_header:
# don't call update twice.
headers.update(req.to_header())
return httplib2.Http.request(self, uri, method=method, body=body,
headers=headers, redirections=redirections,
connection_type=connection_type)
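# Usage sketch for the Client class above (illustrative only; the key, secret
# and URL are placeholders, and Consumer is the class defined earlier in this
# module):
#
#     consumer = Consumer('consumer-key', 'consumer-secret')
#     client = Client(consumer)
#     resp, content = client.request('https://api.example.com/resource', 'GET')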
class SignatureMethod(object):
"""A way of signing requests.
The OAuth protocol lets consumers and service providers pick a way to sign
requests. This interface shows the methods expected by the other `oauth`
modules for signing requests. Subclass it and implement its methods to
provide a new way to sign requests.
"""
def signing_base(self, request, consumer, token):
"""Calculates the string that needs to be signed.
This method returns a 2-tuple containing the starting key for the
signing and the message to be signed. The latter may be used in error
messages to help clients debug their software.
"""
raise NotImplementedError
def sign(self, request, consumer, token):
"""Returns the signature for the given request, based on the consumer
and token also provided.
You should use your implementation of `signing_base()` to build the
message to sign. Otherwise it may be less useful for debugging.
"""
raise NotImplementedError
def check(self, request, consumer, token, signature):
"""Returns whether the given signature is the correct signature for
the given consumer and token signing the given request."""
built = self.sign(request, consumer, token)
return built == signature
class SignatureMethod_HMAC_SHA1(SignatureMethod):
name = 'HMAC-SHA1'
def signing_base(self, request, consumer, token):
sig = (
escape(request.method),
escape(request.url),
escape(request.get_normalized_parameters()),
)
key = '%s&' % escape(consumer.secret)
if token:
key += escape(token.secret)
raw = '&'.join(sig)
return key, raw
def sign(self, request, consumer, token):
"""Builds the base signature string."""
key, raw = self.signing_base(request, consumer, token)
# HMAC object.
try:
import hashlib # 2.5
hashed = hmac.new(key, raw, hashlib.sha1)
except ImportError:
import sha # Deprecated
hashed = hmac.new(key, raw, sha)
# Calculate the digest base 64.
return binascii.b2a_base64(hashed.digest())[:-1]
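# For reference, the `raw` string built by signing_base() above is the standard
# OAuth 1.0 signature base string: the escaped HTTP method, URL and normalized
# parameters joined by '&', e.g. (illustrative values only):
#
#     GET&http%3A%2F%2Fexample.com%2Fapi&oauth_consumer_key%3Dkey%26oauth_nonce%3D1234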
class SignatureMethod_PLAINTEXT(SignatureMethod):
name = 'PLAINTEXT'
def signing_base(self, request, consumer, token):
"""Concatenates the consumer key and secret with the token's
secret."""
sig = '%s&' % escape(consumer.secret)
if token:
sig = sig + escape(token.secret)
return sig, sig
def sign(self, request, consumer, token):
key, raw = self.signing_base(request, consumer, token)
return raw<|fim▁end|>
|
"""
|
<|file_name|>dlist.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A doubly-linked list with owned nodes.
//!
//! The `DList` allows pushing and popping elements at either end.
//!
//! `DList` implements the trait `Deque`. It should be imported with
//! `use collections::Deque`.
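//!
//! A minimal construction-and-iteration sketch (illustrative; mirrors the
//! per-method examples further down in this module):
//!
//! ```rust
//! use std::collections::DList;
//!
//! let mut list = DList::new();
//! list.push(1i);
//! list.push(2);
//! list.push(3);
//!
//! for e in list.iter() {
//!     println!("{}", e); // prints 1, then 2, then 3
//! }
//! ```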
// DList is constructed like a singly-linked list over the field `next`,
// including the last link being None; each Node owns its `next` field.
//
// Backlinks over DList::prev are raw pointers that form a full chain in
// the reverse direction.
use core::prelude::*;
use alloc::boxed::Box;
use core::default::Default;
use core::fmt;
use core::iter;
use core::mem;
use core::ptr;
use std::hash::{Writer, Hash};
use {Mutable, Deque, MutableSeq};
/// A doubly-linked list.
pub struct DList<T> {
length: uint,
list_head: Link<T>,
list_tail: Rawlink<Node<T>>,
}
type Link<T> = Option<Box<Node<T>>>;
struct Rawlink<T> { p: *mut T }
struct Node<T> {
next: Link<T>,
prev: Rawlink<Node<T>>,
value: T,
}
/// An iterator over references to the items of a `DList`.
pub struct Items<'a, T:'a> {
head: &'a Link<T>,
tail: Rawlink<Node<T>>,
nelem: uint,
}
// FIXME #11820: the &'a Option<> of the Link stops clone working.
impl<'a, T> Clone for Items<'a, T> {
fn clone(&self) -> Items<'a, T> { *self }
}
/// An iterator over mutable references to the items of a `DList`.
pub struct MutItems<'a, T:'a> {
list: &'a mut DList<T>,
head: Rawlink<Node<T>>,
tail: Rawlink<Node<T>>,
nelem: uint,
}
/// An iterator over the items of a `DList`, yielded by value (the list is consumed).
#[deriving(Clone)]
pub struct MoveItems<T> {
list: DList<T>
}
/// Rawlink is a type like Option<T> but for holding a raw pointer
impl<T> Rawlink<T> {
/// Like Option::None for Rawlink
fn none() -> Rawlink<T> {
Rawlink{p: ptr::mut_null()}
}
/// Like Option::Some for Rawlink
fn some(n: &mut T) -> Rawlink<T> {
Rawlink{p: n}
}
/// Convert the `Rawlink` into an `Option` containing an immutable reference
fn resolve_immut<'a>(&self) -> Option<&'a T> {
unsafe {
mem::transmute(self.p.to_option())
}
}
/// Convert the `Rawlink` into an `Option` containing a mutable reference
fn resolve<'a>(&mut self) -> Option<&'a mut T> {
if self.p.is_null() {
None
} else {
Some(unsafe { mem::transmute(self.p) })
}
}
/// Return the `Rawlink` and replace with `Rawlink::none()`
fn take(&mut self) -> Rawlink<T> {
mem::replace(self, Rawlink::none())
}
}
impl<T> Clone for Rawlink<T> {
#[inline]
fn clone(&self) -> Rawlink<T> {
Rawlink{p: self.p}
}
}
impl<T> Node<T> {
fn new(v: T) -> Node<T> {
Node{value: v, next: None, prev: Rawlink::none()}
}
}
/// Set the .prev field on `next`, then return `Some(next)`
fn link_with_prev<T>(mut next: Box<Node<T>>, prev: Rawlink<Node<T>>)
-> Link<T> {
next.prev = prev;
Some(next)
}
impl<T> Collection for DList<T> {
/// Returns `true` if the `DList` is empty.
///
/// This operation should compute in O(1) time.
#[inline]
fn is_empty(&self) -> bool {
self.list_head.is_none()
}
/// Returns the length of the `DList`.
///
/// This operation should compute in O(1) time.
#[inline]
fn len(&self) -> uint {
self.length
}
}
impl<T> Mutable for DList<T> {
/// Removes all elements from the `DList`.
///
/// This operation should compute in O(n) time.
#[inline]
fn clear(&mut self) {
*self = DList::new()
}
}
// private methods
impl<T> DList<T> {
/// Add a Node first in the list
#[inline]
fn push_front_node(&mut self, mut new_head: Box<Node<T>>) {
match self.list_head {
None => {
self.list_tail = Rawlink::some(&mut *new_head);
self.list_head = link_with_prev(new_head, Rawlink::none());
}
Some(ref mut head) => {
new_head.prev = Rawlink::none();
head.prev = Rawlink::some(&mut *new_head);
mem::swap(head, &mut new_head);
head.next = Some(new_head);
}
}
self.length += 1;
}
/// Remove the first Node and return it, or None if the list is empty
#[inline]
fn pop_front_node(&mut self) -> Option<Box<Node<T>>> {
self.list_head.take().map(|mut front_node| {
self.length -= 1;
match front_node.next.take() {
Some(node) => self.list_head = link_with_prev(node, Rawlink::none()),
None => self.list_tail = Rawlink::none()
}
front_node
})
}
/// Add a Node last in the list
#[inline]
fn push_back_node(&mut self, mut new_tail: Box<Node<T>>) {
match self.list_tail.resolve() {
None => return self.push_front_node(new_tail),
Some(tail) => {
self.list_tail = Rawlink::some(&mut *new_tail);
tail.next = link_with_prev(new_tail, Rawlink::some(tail));
}
}
self.length += 1;
}
/// Remove the last Node and return it, or None if the list is empty
#[inline]
fn pop_back_node(&mut self) -> Option<Box<Node<T>>> {
self.list_tail.resolve().map_or(None, |tail| {
self.length -= 1;
self.list_tail = tail.prev;
match tail.prev.resolve() {
None => self.list_head.take(),
Some(tail_prev) => tail_prev.next.take()
}
})
}
}
impl<T> Deque<T> for DList<T> {
/// Provides a reference to the front element, or `None` if the list is
/// empty.
#[inline]
fn front<'a>(&'a self) -> Option<&'a T> {
self.list_head.as_ref().map(|head| &head.value)
}
/// Provides a mutable reference to the front element, or `None` if the list
/// is empty.
#[inline]
fn front_mut<'a>(&'a mut self) -> Option<&'a mut T> {
self.list_head.as_mut().map(|head| &mut head.value)
}
/// Provides a reference to the back element, or `None` if the list is
/// empty.
#[inline]
fn back<'a>(&'a self) -> Option<&'a T> {
self.list_tail.resolve_immut().as_ref().map(|tail| &tail.value)
}
/// Provides a mutable reference to the back element, or `None` if the list
/// is empty.
#[inline]
fn back_mut<'a>(&'a mut self) -> Option<&'a mut T> {
self.list_tail.resolve().map(|tail| &mut tail.value)
}
/// Adds an element first in the list.
///
/// This operation should compute in O(1) time.
fn push_front(&mut self, elt: T) {
self.push_front_node(box Node::new(elt))
}
/// Removes the first element and returns it, or `None` if the list is
/// empty.
///
/// This operation should compute in O(1) time.
fn pop_front(&mut self) -> Option<T> {
self.pop_front_node().map(|box Node{value, ..}| value)
}
}
impl<T> MutableSeq<T> for DList<T> {
fn push(&mut self, elt: T) {
self.push_back_node(box Node::new(elt))
}
fn pop(&mut self) -> Option<T> {
self.pop_back_node().map(|box Node{value, ..}| value)
}
}
impl<T> Default for DList<T> {
#[inline]
fn default() -> DList<T> { DList::new() }
}
impl<T> DList<T> {
/// Creates an empty `DList`.
#[inline]
pub fn new() -> DList<T> {
DList{list_head: None, list_tail: Rawlink::none(), length: 0}
}
/// Moves the last element to the front of the list.
///
/// If the list is empty, does nothing.
///
/// # Example
///
/// ```rust
/// use std::collections::DList;
///
/// let mut dl = DList::new();
/// dl.push(1i);
/// dl.push(2);
/// dl.push(3);
///
/// dl.rotate_forward();
///
/// for e in dl.iter() {
/// println!("{}", e); // prints 3, then 1, then 2
/// }
/// ```
#[inline]
pub fn rotate_forward(&mut self) {
self.pop_back_node().map(|tail| {
self.push_front_node(tail)
});
}
/// Moves the first element to the back of the list.
///
/// If the list is empty, does nothing.
///
/// # Example
///
/// ```rust
/// use std::collections::DList;
///
/// let mut dl = DList::new();
/// dl.push(1i);
/// dl.push(2);
/// dl.push(3);
///
/// dl.rotate_backward();
///
/// for e in dl.iter() {
/// println!("{}", e); // prints 2, then 3, then 1
/// }
/// ```
#[inline]
pub fn rotate_backward(&mut self) {
self.pop_front_node().map(|head| {
self.push_back_node(head)
});
}
/// Adds all elements from `other` to the end of the list.
///
/// This operation should compute in O(1) time.
///
/// # Example
///
/// ```rust
/// use std::collections::DList;
///
/// let mut a = DList::new();
/// let mut b = DList::new();
/// a.push(1i);
/// a.push(2);
/// b.push(3i);
/// b.push(4);
///
/// a.append(b);
///
/// for e in a.iter() {
/// println!("{}", e); // prints 1, then 2, then 3, then 4
/// }
/// ```
pub fn append(&mut self, mut other: DList<T>) {
match self.list_tail.resolve() {
None => *self = other,
Some(tail) => {
// Carefully empty `other`.
let o_tail = other.list_tail.take();
let o_length = other.length;
match other.list_head.take() {
None => return,
Some(node) => {
tail.next = link_with_prev(node, self.list_tail);
self.list_tail = o_tail;
self.length += o_length;
}
}
}
}
}
/// Adds all elements from `other` to the beginning of the list.
///
/// This operation should compute in O(1) time.
///
/// # Example
///
/// ```rust
/// use std::collections::DList;
///
/// let mut a = DList::new();
/// let mut b = DList::new();
/// a.push(1i);
/// a.push(2);
/// b.push(3i);
/// b.push(4);
///
/// a.prepend(b);
///
/// for e in a.iter() {
/// println!("{}", e); // prints 3, then 4, then 1, then 2
/// }
/// ```
#[inline]
pub fn prepend(&mut self, mut other: DList<T>) {
mem::swap(self, &mut other);
self.append(other);
}
/// Inserts `elt` before the first `x` in the list where `f(x, elt)` is
/// true, or at the end.
///
/// This operation should compute in O(N) time.
///
/// # Example
///
/// ```rust
/// use std::collections::DList;
///
/// let mut a: DList<int> = DList::new();
/// a.push(2i);
/// a.push(4);
/// a.push(7);
/// a.push(8);
///
/// // insert 11 before the first odd number in the list
/// a.insert_when(11, |&e, _| e % 2 == 1);
///
/// for e in a.iter() {
/// println!("{}", e); // prints 2, then 4, then 11, then 7, then 8
/// }
/// ```
pub fn insert_when(&mut self, elt: T, f: |&T, &T| -> bool) {
{
let mut it = self.mut_iter();
loop {
match it.peek_next() {
None => break,
Some(x) => if f(x, &elt) { break }
}
it.next();
}
it.insert_next(elt);
}
}
/// Merges `other` into this `DList`, using the function `f`.
///
/// Iterates both `DList`s with `a` from self and `b` from `other`, and
/// puts `a` in the result if `f(a, b)` is true, otherwise `b`.
///
/// This operation should compute in O(max(N, M)) time.
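///
/// # Example
///
/// A sketch of merging two ascending lists (illustrative only):
///
/// ```rust
/// use std::collections::DList;
///
/// let mut a = DList::new();
/// let mut b = DList::new();
/// a.push(1i);
/// a.push(3);
/// b.push(2i);
/// b.push(4);
///
/// a.merge(b, |x, y| x <= y);
///
/// for e in a.iter() {
///     println!("{}", e); // prints 1, then 2, then 3, then 4
/// }
/// ```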
pub fn merge(&mut self, mut other: DList<T>, f: |&T, &T| -> bool) {
{
let mut it = self.mut_iter();
loop {
let take_a = match (it.peek_next(), other.front()) {
(_ , None) => return,
(None, _ ) => break,
(Some(ref mut x), Some(y)) => f(*x, y),
};
if take_a {
it.next();
} else {
it.insert_next_node(other.pop_front_node().unwrap());
}
}
}
self.append(other);
}
/// Provides a forward iterator.
#[inline]
pub fn iter<'a>(&'a self) -> Items<'a, T> {
Items{nelem: self.len(), head: &self.list_head, tail: self.list_tail}
}
/// Provides a forward iterator with mutable references.
#[inline]
pub fn mut_iter<'a>(&'a mut self) -> MutItems<'a, T> {
let head_raw = match self.list_head {
Some(ref mut h) => Rawlink::some(&mut **h),
None => Rawlink::none(),
};
MutItems{
nelem: self.len(),
head: head_raw,
tail: self.list_tail,
list: self
}
}
/// Consumes the list into an iterator yielding elements by value.
#[inline]
pub fn move_iter(self) -> MoveItems<T> {
MoveItems{list: self}
}
}
impl<T: Ord> DList<T> {
/// Inserts `elt` sorted in ascending order.
///
/// This operation should compute in O(N) time.
#[inline]
pub fn insert_ordered(&mut self, elt: T) {
self.insert_when(elt, |a, b| a >= b)
}
}
#[unsafe_destructor]
impl<T> Drop for DList<T> {
fn drop(&mut self) {
// Dissolve the dlist back to front. Just dropping the list_head can lead
// to stack exhaustion when the length is >> 1_000_000, because each Box
// drop would recurse into the next node.
let mut tail = self.list_tail;
loop {
match tail.resolve() {
None => break,
Some(prev) => {
prev.next.take(); // release Box<Node<T>>
tail = prev.prev;
}
}
}
self.length = 0;
self.list_head = None;
self.list_tail = Rawlink::none();
}
}
impl<'a, A> Iterator<&'a A> for Items<'a, A> {
#[inline]
fn next(&mut self) -> Option<&'a A> {
if self.nelem == 0 {
return None;
}
self.head.as_ref().map(|head| {
self.nelem -= 1;
self.head = &head.next;
&head.value
})
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
(self.nelem, Some(self.nelem))
}
}
impl<'a, A> DoubleEndedIterator<&'a A> for Items<'a, A> {
#[inline]
fn next_back(&mut self) -> Option<&'a A> {
if self.nelem == 0 {
return None;
}
self.tail.resolve_immut().as_ref().map(|prev| {
self.nelem -= 1;
self.tail = prev.prev;
&prev.value
})
}
}
impl<'a, A> ExactSize<&'a A> for Items<'a, A> {}
impl<'a, A> Iterator<&'a mut A> for MutItems<'a, A> {
#[inline]
fn next(&mut self) -> Option<&'a mut A> {
if self.nelem == 0 {
return None;
}
self.head.resolve().map(|next| {
self.nelem -= 1;
self.head = match next.next {
Some(ref mut node) => Rawlink::some(&mut **node),
None => Rawlink::none(),
};
&mut next.value
})
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
(self.nelem, Some(self.nelem))
}
}
impl<'a, A> DoubleEndedIterator<&'a mut A> for MutItems<'a, A> {
#[inline]
fn next_back(&mut self) -> Option<&'a mut A> {
if self.nelem == 0 {
return None;
}
self.tail.resolve().map(|prev| {
self.nelem -= 1;
self.tail = prev.prev;
&mut prev.value
})
}
}
impl<'a, A> ExactSize<&'a mut A> for MutItems<'a, A> {}
/// Allows mutating a `DList` while iterating.
pub trait ListInsertion<A> {
/// Inserts `elt` just after the element most recently returned by
/// `.next()`
///
/// The inserted element does not appear in the iteration.
fn insert_next(&mut self, elt: A);
/// Provides a reference to the next element, without changing the iterator
fn peek_next<'a>(&'a mut self) -> Option<&'a mut A>;
}
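// A sketch of `ListInsertion` in use (illustrative only; the same mechanism
// backs the `insert_when` example above):
//
//     let mut list: DList<int> = vec![1i, 2, 4].move_iter().collect();
//     {
//         let mut it = list.mut_iter();
//         it.next();            // yields 1
//         it.next();            // yields 2
//         it.insert_next(3);    // list is now 1, 2, 3, 4; the 3 is not yielded
//     }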
// private methods for MutItems
impl<'a, A> MutItems<'a, A> {
fn insert_next_node(&mut self, mut ins_node: Box<Node<A>>) {
// Insert before `self.head` so that it is between the
// previously yielded element and self.head.
//
// The inserted node will not appear in further iteration.
match self.head.resolve() {
None => { self.list.push_back_node(ins_node); }
Some(node) => {
let prev_node = match node.prev.resolve() {
None => return self.list.push_front_node(ins_node),
Some(prev) => prev,
};
let node_own = prev_node.next.take().unwrap();
ins_node.next = link_with_prev(node_own, Rawlink::some(&mut *ins_node));
prev_node.next = link_with_prev(ins_node, Rawlink::some(prev_node));
self.list.length += 1;
}
}
}
}
impl<'a, A> ListInsertion<A> for MutItems<'a, A> {
#[inline]
fn insert_next(&mut self, elt: A) {
self.insert_next_node(box Node::new(elt))
}
#[inline]
fn peek_next<'a>(&'a mut self) -> Option<&'a mut A> {
if self.nelem == 0 {
return None
}
self.head.resolve().map(|head| &mut head.value)
}
}
impl<A> Iterator<A> for MoveItems<A> {
#[inline]
fn next(&mut self) -> Option<A> { self.list.pop_front() }
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
(self.list.length, Some(self.list.length))
}
}
impl<A> DoubleEndedIterator<A> for MoveItems<A> {
#[inline]
fn next_back(&mut self) -> Option<A> { self.list.pop() }
}
impl<A> FromIterator<A> for DList<A> {
fn from_iter<T: Iterator<A>>(iterator: T) -> DList<A> {
let mut ret = DList::new();
ret.extend(iterator);
ret
}
}
impl<A> Extendable<A> for DList<A> {
fn extend<T: Iterator<A>>(&mut self, mut iterator: T) {
for elt in iterator { self.push(elt); }
}
}
impl<A: PartialEq> PartialEq for DList<A> {
fn eq(&self, other: &DList<A>) -> bool {
self.len() == other.len() &&
iter::order::eq(self.iter(), other.iter())
}
fn ne(&self, other: &DList<A>) -> bool {
self.len() != other.len() ||
iter::order::ne(self.iter(), other.iter())
}
}
impl<A: Eq> Eq for DList<A> {}
impl<A: PartialOrd> PartialOrd for DList<A> {
fn partial_cmp(&self, other: &DList<A>) -> Option<Ordering> {
iter::order::partial_cmp(self.iter(), other.iter())
}
}
impl<A: Ord> Ord for DList<A> {
#[inline]
fn cmp(&self, other: &DList<A>) -> Ordering {
iter::order::cmp(self.iter(), other.iter())
}
}
impl<A: Clone> Clone for DList<A> {
fn clone(&self) -> DList<A> {
self.iter().map(|x| x.clone()).collect()
}
}
impl<A: fmt::Show> fmt::Show for DList<A> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(write!(f, "["));
for (i, e) in self.iter().enumerate() {
if i != 0 { try!(write!(f, ", ")); }
try!(write!(f, "{}", *e));
}
write!(f, "]")
}
}
impl<S: Writer, A: Hash<S>> Hash<S> for DList<A> {
fn hash(&self, state: &mut S) {
self.len().hash(state);
for elt in self.iter() {
elt.hash(state);
}
}
}
#[cfg(test)]
mod tests {
use std::prelude::*;
use std::rand;
use std::hash;
use test::Bencher;
use test;
use {Deque, MutableSeq};
use super::{DList, Node, ListInsertion};
use vec::Vec;
pub fn check_links<T>(list: &DList<T>) {
let mut len = 0u;
let mut last_ptr: Option<&Node<T>> = None;
let mut node_ptr: &Node<T>;
match list.list_head {
None => { assert_eq!(0u, list.length); return }
Some(ref node) => node_ptr = &**node,
}
loop {
match (last_ptr, node_ptr.prev.resolve_immut()) {
(None , None ) => {}
(None , _ ) => fail!("prev link for list_head"),
(Some(p), Some(pptr)) => {
assert_eq!(p as *const Node<T>, pptr as *const Node<T>);
}
_ => fail!("prev link is none, not good"),
}
match node_ptr.next {
Some(ref next) => {
last_ptr = Some(node_ptr);
node_ptr = &**next;
len += 1;
}
None => {
len += 1;
break;
}
}
}
assert_eq!(len, list.length);
}
#[test]
fn test_basic() {
let mut m: DList<Box<int>> = DList::new();
assert_eq!(m.pop_front(), None);
assert_eq!(m.pop(), None);
assert_eq!(m.pop_front(), None);
m.push_front(box 1);
assert_eq!(m.pop_front(), Some(box 1));
m.push(box 2);
m.push(box 3);
assert_eq!(m.len(), 2);
assert_eq!(m.pop_front(), Some(box 2));
assert_eq!(m.pop_front(), Some(box 3));
assert_eq!(m.len(), 0);
assert_eq!(m.pop_front(), None);
m.push(box 1);
m.push(box 3);
m.push(box 5);
m.push(box 7);
assert_eq!(m.pop_front(), Some(box 1));
let mut n = DList::new();
n.push_front(2i);
n.push_front(3);
{
assert_eq!(n.front().unwrap(), &3);
let x = n.front_mut().unwrap();
assert_eq!(*x, 3);
*x = 0;
}
{
assert_eq!(n.back().unwrap(), &2);
let y = n.back_mut().unwrap();
assert_eq!(*y, 2);
*y = 1;
}
assert_eq!(n.pop_front(), Some(0));
assert_eq!(n.pop_front(), Some(1));
}
#[cfg(test)]
fn generate_test() -> DList<int> {
list_from(&[0i,1,2,3,4,5,6])
}
#[cfg(test)]
fn list_from<T: Clone>(v: &[T]) -> DList<T> {
v.iter().map(|x| (*x).clone()).collect()
}
#[test]
fn test_append() {
{
let mut m = DList::new();
let mut n = DList::new();
n.push(2i);
m.append(n);
assert_eq!(m.len(), 1);
assert_eq!(m.pop(), Some(2));
check_links(&m);
}
{
let mut m = DList::new();
let n = DList::new();
m.push(2i);
m.append(n);
assert_eq!(m.len(), 1);
assert_eq!(m.pop(), Some(2));
check_links(&m);
}
let v = vec![1i,2,3,4,5];
let u = vec![9i,8,1,2,3,4,5];
let mut m = list_from(v.as_slice());
m.append(list_from(u.as_slice()));
check_links(&m);
let sum = v.append(u.as_slice());
assert_eq!(sum.len(), m.len());
for elt in sum.move_iter() {
assert_eq!(m.pop_front(), Some(elt))
}
}
#[test]
fn test_prepend() {
{
let mut m = DList::new();
let mut n = DList::new();
n.push(2i);
m.prepend(n);
assert_eq!(m.len(), 1);
assert_eq!(m.pop(), Some(2));
check_links(&m);
}
let v = vec![1i,2,3,4,5];
let u = vec![9i,8,1,2,3,4,5];
let mut m = list_from(v.as_slice());
m.prepend(list_from(u.as_slice()));
check_links(&m);
let sum = u.append(v.as_slice());
assert_eq!(sum.len(), m.len());
for elt in sum.move_iter() {
assert_eq!(m.pop_front(), Some(elt))
}
}
#[test]
fn test_rotate() {
let mut n: DList<int> = DList::new();
n.rotate_backward(); check_links(&n);
assert_eq!(n.len(), 0);
n.rotate_forward(); check_links(&n);
assert_eq!(n.len(), 0);
let v = vec![1i,2,3,4,5];
let mut m = list_from(v.as_slice());
m.rotate_backward(); check_links(&m);
m.rotate_forward(); check_links(&m);
assert_eq!(v.iter().collect::<Vec<&int>>(), m.iter().collect());
m.rotate_forward(); check_links(&m);
m.rotate_forward(); check_links(&m);
m.pop_front(); check_links(&m);
m.rotate_forward(); check_links(&m);
m.rotate_backward(); check_links(&m);
m.push_front(9); check_links(&m);
m.rotate_forward(); check_links(&m);
assert_eq!(vec![3i,9,5,1,2], m.move_iter().collect());
}
#[test]
fn test_iterator() {
let m = generate_test();
for (i, elt) in m.iter().enumerate() {
assert_eq!(i as int, *elt);
}
let mut n = DList::new();
assert_eq!(n.iter().next(), None);
n.push_front(4i);
let mut it = n.iter();
assert_eq!(it.size_hint(), (1, Some(1)));
assert_eq!(it.next().unwrap(), &4);
assert_eq!(it.size_hint(), (0, Some(0)));
assert_eq!(it.next(), None);
}
#[test]
fn test_iterator_clone() {
let mut n = DList::new();
n.push(2i);
n.push(3);
n.push(4);
let mut it = n.iter();
it.next();
let mut jt = it.clone();
assert_eq!(it.next(), jt.next());
assert_eq!(it.next_back(), jt.next_back());
assert_eq!(it.next(), jt.next());
}
#[test]
fn test_iterator_double_end() {
let mut n = DList::new();
assert_eq!(n.iter().next(), None);
n.push_front(4i);
n.push_front(5);
n.push_front(6);
let mut it = n.iter();
assert_eq!(it.size_hint(), (3, Some(3)));
assert_eq!(it.next().unwrap(), &6);
assert_eq!(it.size_hint(), (2, Some(2)));
assert_eq!(it.next_back().unwrap(), &4);
assert_eq!(it.size_hint(), (1, Some(1)));
assert_eq!(it.next_back().unwrap(), &5);
assert_eq!(it.next_back(), None);
assert_eq!(it.next(), None);
}
#[test]
fn test_rev_iter() {
let m = generate_test();
for (i, elt) in m.iter().rev().enumerate() {
assert_eq!((6 - i) as int, *elt);
}
let mut n = DList::new();
assert_eq!(n.iter().rev().next(), None);
n.push_front(4i);
let mut it = n.iter().rev();
assert_eq!(it.size_hint(), (1, Some(1)));
assert_eq!(it.next().unwrap(), &4);
assert_eq!(it.size_hint(), (0, Some(0)));
assert_eq!(it.next(), None);
}
#[test]
fn test_mut_iter() {
let mut m = generate_test();
let mut len = m.len();
for (i, elt) in m.mut_iter().enumerate() {
assert_eq!(i as int, *elt);
len -= 1;
}
assert_eq!(len, 0);
let mut n = DList::new();
assert!(n.mut_iter().next().is_none());
n.push_front(4i);
n.push(5);
let mut it = n.mut_iter();
assert_eq!(it.size_hint(), (2, Some(2)));
assert!(it.next().is_some());
assert!(it.next().is_some());
assert_eq!(it.size_hint(), (0, Some(0)));
assert!(it.next().is_none());
}
#[test]
fn test_iterator_mut_double_end() {
let mut n = DList::new();
assert!(n.mut_iter().next_back().is_none());
n.push_front(4i);
n.push_front(5);
n.push_front(6);
let mut it = n.mut_iter();
assert_eq!(it.size_hint(), (3, Some(3)));
assert_eq!(*it.next().unwrap(), 6);
assert_eq!(it.size_hint(), (2, Some(2)));
assert_eq!(*it.next_back().unwrap(), 4);
assert_eq!(it.size_hint(), (1, Some(1)));
assert_eq!(*it.next_back().unwrap(), 5);
assert!(it.next_back().is_none());
assert!(it.next().is_none());
}
#[test]
fn test_insert_prev() {
let mut m = list_from(&[0i,2,4,6,8]);
let len = m.len();
{
let mut it = m.mut_iter();
it.insert_next(-2);
loop {
match it.next() {
None => break,
Some(elt) => {
it.insert_next(*elt + 1);
match it.peek_next() {
Some(x) => assert_eq!(*x, *elt + 2),
None => assert_eq!(8, *elt),
}
}
}
}
it.insert_next(0);
it.insert_next(1);
}
check_links(&m);
assert_eq!(m.len(), 3 + len * 2);
assert_eq!(m.move_iter().collect::<Vec<int>>(), vec![-2,0,1,2,3,4,5,6,7,8,9,0,1]);
}
#[test]
fn test_merge() {
let mut m = list_from([0i, 1, 3, 5, 6, 7, 2]);
let n = list_from([-1i, 0, 0, 7, 7, 9]);
let len = m.len() + n.len();
m.merge(n, |a, b| a <= b);
assert_eq!(m.len(), len);
check_links(&m);
let res = m.move_iter().collect::<Vec<int>>();
assert_eq!(res, vec![-1, 0, 0, 0, 1, 3, 5, 6, 7, 2, 7, 7, 9]);
}
#[test]
fn test_insert_ordered() {
let mut n = DList::new();
n.insert_ordered(1i);
assert_eq!(n.len(), 1);
assert_eq!(n.pop_front(), Some(1));
let mut m = DList::new();
m.push(2i);
m.push(4);
m.insert_ordered(3);
check_links(&m);
assert_eq!(vec![2,3,4], m.move_iter().collect::<Vec<int>>());
}
#[test]
fn test_mut_rev_iter() {<|fim▁hole|> let mut m = generate_test();
for (i, elt) in m.mut_iter().rev().enumerate() {
assert_eq!((6-i) as int, *elt);
}
let mut n = DList::new();
assert!(n.mut_iter().rev().next().is_none());
n.push_front(4i);
let mut it = n.mut_iter().rev();
assert!(it.next().is_some());
assert!(it.next().is_none());
}
#[test]
fn test_send() {
let n = list_from([1i,2,3]);
spawn(proc() {
check_links(&n);
let a: &[_] = &[&1,&2,&3];
assert_eq!(a, n.iter().collect::<Vec<&int>>().as_slice());
});
}
#[test]
fn test_eq() {
let mut n: DList<u8> = list_from([]);
let mut m = list_from([]);
assert!(n == m);
n.push_front(1);
assert!(n != m);
m.push(1);
assert!(n == m);
let n = list_from([2i,3,4]);
let m = list_from([1i,2,3]);
assert!(n != m);
}
#[test]
fn test_hash() {
let mut x = DList::new();
let mut y = DList::new();
assert!(hash::hash(&x) == hash::hash(&y));
x.push(1i);
x.push(2);
x.push(3);
y.push_front(3i);
y.push_front(2);
y.push_front(1);
assert!(hash::hash(&x) == hash::hash(&y));
}
#[test]
fn test_ord() {
let n: DList<int> = list_from([]);
let m = list_from([1i,2,3]);
assert!(n < m);
assert!(m > n);
assert!(n <= n);
assert!(n >= n);
}
#[test]
fn test_ord_nan() {
let nan = 0.0f64/0.0;
let n = list_from([nan]);
let m = list_from([nan]);
assert!(!(n < m));
assert!(!(n > m));
assert!(!(n <= m));
assert!(!(n >= m));
let n = list_from([nan]);
let one = list_from([1.0f64]);
assert!(!(n < one));
assert!(!(n > one));
assert!(!(n <= one));
assert!(!(n >= one));
let u = list_from([1.0f64,2.0,nan]);
let v = list_from([1.0f64,2.0,3.0]);
assert!(!(u < v));
assert!(!(u > v));
assert!(!(u <= v));
assert!(!(u >= v));
let s = list_from([1.0f64,2.0,4.0,2.0]);
let t = list_from([1.0f64,2.0,3.0,2.0]);
assert!(!(s < t));
assert!(s > one);
assert!(!(s <= one));
assert!(s >= one);
}
#[test]
fn test_fuzz() {
for _ in range(0u, 25) {
fuzz_test(3);
fuzz_test(16);
fuzz_test(189);
}
}
#[test]
fn test_show() {
let list: DList<int> = range(0i, 10).collect();
assert!(list.to_string().as_slice() == "[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]");
let list: DList<&str> = vec!["just", "one", "test", "more"].iter()
.map(|&s| s)
.collect();
assert!(list.to_string().as_slice() == "[just, one, test, more]");
}
#[cfg(test)]
fn fuzz_test(sz: int) {
let mut m: DList<int> = DList::new();
let mut v = vec![];
for i in range(0, sz) {
check_links(&m);
let r: u8 = rand::random();
match r % 6 {
0 => {
m.pop();
v.pop();
}
1 => {
m.pop_front();
v.remove(0);
}
2 | 4 => {
m.push_front(-i);
v.insert(0, -i);
}
3 | 5 | _ => {
m.push(i);
v.push(i);
}
}
}
check_links(&m);
let mut i = 0u;
for (a, &b) in m.move_iter().zip(v.iter()) {
i += 1;
assert_eq!(a, b);
}
assert_eq!(i, v.len());
}
#[bench]
fn bench_collect_into(b: &mut test::Bencher) {
let v = &[0i, ..64];
b.iter(|| {
let _: DList<int> = v.iter().map(|x| *x).collect();
})
}
#[bench]
fn bench_push_front(b: &mut test::Bencher) {
let mut m: DList<int> = DList::new();
b.iter(|| {
m.push_front(0);
})
}
#[bench]
fn bench_push_back(b: &mut test::Bencher) {
let mut m: DList<int> = DList::new();
b.iter(|| {
m.push(0);
})
}
#[bench]
fn bench_push_back_pop_back(b: &mut test::Bencher) {
let mut m: DList<int> = DList::new();
b.iter(|| {
m.push(0);
m.pop();
})
}
#[bench]
fn bench_push_front_pop_front(b: &mut test::Bencher) {
let mut m: DList<int> = DList::new();
b.iter(|| {
m.push_front(0);
m.pop_front();
})
}
#[bench]
fn bench_rotate_forward(b: &mut test::Bencher) {
let mut m: DList<int> = DList::new();
m.push_front(0i);
m.push_front(1);
b.iter(|| {
m.rotate_forward();
})
}
#[bench]
fn bench_rotate_backward(b: &mut test::Bencher) {
let mut m: DList<int> = DList::new();
m.push_front(0i);
m.push_front(1);
b.iter(|| {
m.rotate_backward();
})
}
#[bench]
fn bench_iter(b: &mut test::Bencher) {
let v = &[0i, ..128];
let m: DList<int> = v.iter().map(|&x|x).collect();
b.iter(|| {
assert!(m.iter().count() == 128);
})
}
#[bench]
fn bench_iter_mut(b: &mut test::Bencher) {
let v = &[0i, ..128];
let mut m: DList<int> = v.iter().map(|&x|x).collect();
b.iter(|| {
assert!(m.mut_iter().count() == 128);
})
}
#[bench]
fn bench_iter_rev(b: &mut test::Bencher) {
let v = &[0i, ..128];
let m: DList<int> = v.iter().map(|&x|x).collect();
b.iter(|| {
assert!(m.iter().rev().count() == 128);
})
}
#[bench]
fn bench_iter_mut_rev(b: &mut test::Bencher) {
let v = &[0i, ..128];
let mut m: DList<int> = v.iter().map(|&x|x).collect();
b.iter(|| {
assert!(m.mut_iter().rev().count() == 128);
})
}
}<|fim▁end|>
| |
<|file_name|>errors.generated.go<|end_file_name|><|fim▁begin|>package internal
import "v2ray.com/core/common/errors"<|fim▁hole|>
func newError(values ...interface{}) *errors.Error {
return errors.New(values...).Path("Transport", "Internet", "Internal")
}<|fim▁end|>
|