prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>InvalidParameterValueExceptionUnmarshaller.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.glacier.model.transform;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.transform.GlacierErrorUnmarshaller;
import com.amazonaws.util.json.JSONObject;
import com.amazonaws.services.glacier.model.InvalidParameterValueException;
public class InvalidParameterValueExceptionUnmarshaller extends GlacierErrorUnmarshaller {
public InvalidParameterValueExceptionUnmarshaller() {
super(InvalidParameterValueException.class);
}
@Override
public boolean match(String errorTypeFromHeader, JSONObject json) throws Exception {
if (errorTypeFromHeader == null) {
// Parse error type from the JSON content if it's not available in the response headers
String errorCodeFromContent = parseErrorCode(json);
return (errorCodeFromContent != null && errorCodeFromContent.equals("InvalidParameterValueException"));
} else {<|fim▁hole|> }
}
@Override
public AmazonServiceException unmarshall(JSONObject json) throws Exception {
InvalidParameterValueException e = (InvalidParameterValueException)super.unmarshall(json);
e.setErrorCode("InvalidParameterValueException");
e.setType(parseMember("Type", json));
e.setCode(parseMember("Code", json));
return e;
}
}<|fim▁end|> | return errorTypeFromHeader.equals("InvalidParameterValueException"); |
<|file_name|>phone_number_extractor.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import urllib.request
import re
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("url", help="the URL whose HTML you want to extract telephone numbers from", type=str)
args = parser.parse_args()
with urllib.request.urlopen(args.url) as response:
html = response.read().decode('utf-8')
<|fim▁hole|># Naive, simple regex; can be further refined (overinclusive in some respects (e.g., any 10-digit numerical string), no detection when non-parentheses phone number first in a parenthetical clause, no real international support, no extension support, no letters-as-numbers support)
regex = re.compile(r'0?0?1?-?\(?[0-9]{3}\)?\s?-?[0-9]{3}-?[0-9]{4}')
print(regex.findall(html))<|fim▁end|> | |
<|file_name|>testcase_create_input_incomplete_data.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from bitcodin import create_input
from bitcodin import Input
from bitcodin.exceptions import BitcodinBadRequestError
from bitcodin.test.bitcodin_test_case import BitcodinTestCase
class CreateInputIncompleteDataTestCase(BitcodinTestCase):
def setUp(self):
super(CreateInputIncompleteDataTestCase, self).setUp()
self.inputUrl = ''
def runTest(self):
input = Input(self.inputUrl)
with self.assertRaises(BitcodinBadRequestError):
result = create_input(input)
def tearDown(self):
super(CreateInputIncompleteDataTestCase, self).tearDown()
if __name__ == '__main__':
unittest.main()<|fim▁end|> | __author__ = 'Dominic Miglar <[email protected]>'
import unittest |
<|file_name|>image.service.spec.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>import { TestBed, async, inject } from '@angular/core/testing';
import { ImageService } from './image.service';
describe('ImageService', () => {
beforeEach(() => {
TestBed.configureTestingModule({
providers: [ImageService]
});
});
it('should ...', inject([ImageService], (service: ImageService) => {
expect(service).toBeTruthy();
}));
});<|fim▁end|> | /* tslint:disable:no-unused-variable */
|
<|file_name|>10_class.test.js<|end_file_name|><|fim▁begin|>test('has a constructor for initialization', () => {
// Create an Animal class
// Add a constructor that takes one param, the name.
// Set this.name to the name passed in
const animal = new Animal()
const dog = new Animal('Dog')
expect(animal.name).toBeUndefined()
expect(dog.name).toBe('Dog')
})
test('constructor can have default param values', () => {
// Create an Animal class with a constructor
// Make your class default (using default params) the name to 'Honey Badger'
const animal = new Animal()
const dog = new Animal('Dog')
expect(animal.name).toBe('Honey Badger')
expect(dog.name).toBe('Dog')
})
test('can have instance methods', () => {
// Create an Animal class, pass in the name to the constructor, and add a sayName function to the class definition
const animal = new Animal()
expect(animal.sayName).toBeDefined()
expect(Animal.sayName).toBeUndefined()
expect(animal.sayName()).toBe('My name is: Honey Badger')
})
test('can have static methods', () => {
// Create an Animal class, pass in the name to the constructor,
// and add a create method that takes a name and returns an instance<|fim▁hole|> const animal = new Animal()
expect(animal.create).toBeUndefined()
expect(Animal.create).toBeDefined()
})
test('can extend another class', () => {
// Create an Animal class
// Create a Dog class that extends Animal
// Add sayName to Animal
const dog = new Dog('Fido')
expect(dog instanceof Dog).toBe(true)
expect(dog instanceof Animal).toBe(true)
})
test('can use property setters and getters', () => {
// Create an Animal class (don't pass name into constructor)
// Add property setter for name
// Add property getter for name
const animal = new Animal()
animal.name = 'Dog'
expect(animal.name).toBe('Dog type of animal')
animal.name = 'Cat'
expect(animal.name).toBe('Cat type of animal')
})
//////// EXTRA CREDIT ////////
// If you get this far, try adding a few more tests, then file a pull request to add them to the extra credit!
// Learn more here: https://github.com/kentcdodds/es6-workshop/blob/master/CONTRIBUTING.md#development<|fim▁end|> | |
<|file_name|>request.rs<|end_file_name|><|fim▁begin|>//! Client Requests
use std::marker::PhantomData;
use std::io::{self, Write, BufWriter};
use url::Url;
use method::{self, Method};
use header::Headers;
use header::{self, Host};
use net::{NetworkStream, NetworkConnector, HttpConnector, Fresh, Streaming};
use http::{HttpWriter, LINE_ENDING};
use http::HttpWriter::{ThroughWriter, ChunkedWriter, SizedWriter, EmptyWriter};
use version;
use HttpResult;
use client::{Response, get_host_and_port};
/// A client request to a remote server.
pub struct Request<W> {
/// The target URI for this request.
pub url: Url,
/// The HTTP version of this request.
pub version: version::HttpVersion,
body: HttpWriter<BufWriter<Box<NetworkStream + Send>>>,
headers: Headers,
method: method::Method,
_marker: PhantomData<W>,
}
impl<W> Request<W> {
/// Read the Request headers.
#[inline]
pub fn headers(&self) -> &Headers { &self.headers }
/// Read the Request method.
#[inline]
pub fn method(&self) -> method::Method { self.method.clone() }
}
impl Request<Fresh> {
/// Create a new client request.
pub fn new(method: method::Method, url: Url) -> HttpResult<Request<Fresh>> {
let mut conn = HttpConnector(None);
Request::with_connector(method, url, &mut conn)
}
/// Create a new client request with a specific underlying NetworkStream.
pub fn with_connector<C, S>(method: method::Method, url: Url, connector: &mut C)
-> HttpResult<Request<Fresh>> where
C: NetworkConnector<Stream=S>,
S: Into<Box<NetworkStream + Send>> {
debug!("{} {}", method, url);
let (host, port) = try!(get_host_and_port(&url));
let stream = try!(connector.connect(&*host, port, &*url.scheme)).into();
let stream = ThroughWriter(BufWriter::new(stream));
let mut headers = Headers::new();
headers.set(Host {
hostname: host,
port: Some(port),
});
Ok(Request {
method: method,
headers: headers,
url: url,
version: version::HttpVersion::Http11,
body: stream,
_marker: PhantomData,
})
}
/// Consume a Fresh Request, writing the headers and method,
/// returning a Streaming Request.
pub fn start(mut self) -> HttpResult<Request<Streaming>> {
let mut uri = self.url.serialize_path().unwrap();
//TODO: this needs a test
if let Some(ref q) = self.url.query {
uri.push('?');
uri.push_str(&q[..]);
}
debug!("writing head: {:?} {:?} {:?}", self.method, uri, self.version);
try!(write!(&mut self.body, "{} {} {}{}",
self.method, uri, self.version, LINE_ENDING));
let stream = match self.method {
Method::Get | Method::Head => {
debug!("headers [\n{:?}]", self.headers);
try!(write!(&mut self.body, "{}{}", self.headers, LINE_ENDING));
EmptyWriter(self.body.into_inner())
},
_ => {
let mut chunked = true;
let mut len = 0;
match self.headers.get::<header::ContentLength>() {
Some(cl) => {
chunked = false;
len = **cl;
},
None => ()
};
// cant do in match above, thanks borrowck
if chunked {
let encodings = match self.headers.get_mut::<header::TransferEncoding>() {
Some(&mut header::TransferEncoding(ref mut encodings)) => {
//TODO: check if chunked is already in encodings. use HashSet?
encodings.push(header::Encoding::Chunked);
false
},
None => true
};
if encodings {
self.headers.set::<header::TransferEncoding>(
header::TransferEncoding(vec![header::Encoding::Chunked]))
}
}
debug!("headers [\n{:?}]", self.headers);
try!(write!(&mut self.body, "{}{}", self.headers, LINE_ENDING));
<|fim▁hole|> } else {
SizedWriter(self.body.into_inner(), len)
}
}
};
Ok(Request {
method: self.method,
headers: self.headers,
url: self.url,
version: self.version,
body: stream,
_marker: PhantomData,
})
}
/// Get a mutable reference to the Request headers.
#[inline]
pub fn headers_mut(&mut self) -> &mut Headers { &mut self.headers }
}
impl Request<Streaming> {
/// Completes writing the request, and returns a response to read from.
///
/// Consumes the Request.
pub fn send(self) -> HttpResult<Response> {
let raw = try!(self.body.end()).into_inner().unwrap(); // end() already flushes
Response::new(raw)
}
}
impl Write for Request<Streaming> {
#[inline]
fn write(&mut self, msg: &[u8]) -> io::Result<usize> {
self.body.write(msg)
}
#[inline]
fn flush(&mut self) -> io::Result<()> {
self.body.flush()
}
}
#[cfg(test)]
mod tests {
use std::str::from_utf8;
use url::Url;
use method::Method::{Get, Head};
use mock::{MockStream, MockConnector};
use super::Request;
#[test]
fn test_get_empty_body() {
let req = Request::with_connector(
Get, Url::parse("http://example.dom").unwrap(), &mut MockConnector
).unwrap();
let req = req.start().unwrap();
let stream = *req.body.end().unwrap()
.into_inner().unwrap().downcast::<MockStream>().ok().unwrap();
let bytes = stream.write;
let s = from_utf8(&bytes[..]).unwrap();
assert!(!s.contains("Content-Length:"));
assert!(!s.contains("Transfer-Encoding:"));
}
#[test]
fn test_head_empty_body() {
let req = Request::with_connector(
Head, Url::parse("http://example.dom").unwrap(), &mut MockConnector
).unwrap();
let req = req.start().unwrap();
let stream = *req.body.end().unwrap()
.into_inner().unwrap().downcast::<MockStream>().ok().unwrap();
let bytes = stream.write;
let s = from_utf8(&bytes[..]).unwrap();
assert!(!s.contains("Content-Length:"));
assert!(!s.contains("Transfer-Encoding:"));
}
}<|fim▁end|> | if chunked {
ChunkedWriter(self.body.into_inner()) |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns, url
from rai00base.raccordement import getModeleRaccordement, createRaccordement, deleteRaccordement, listRaccordement
urlpatterns = patterns('',
url('getModeleRaccordement/$', getModeleRaccordement),
url('createRaccordement/$', createRaccordement),
url('deleteRaccordement/$', deleteRaccordement),
url('listRaccordement/$', listRaccordement),<|fim▁hole|><|fim▁end|> | ) |
<|file_name|>iacolorhist.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-<|fim▁hole|> import numpy as np
from iahistogram import iahistogram
WFRAME=5
f = np.asarray(f)
if len(f.shape) == 1: f = f[np.newaxis,:]
if not f.dtype == 'uint8':
raise Exception,'error, can only process uint8 images'
if not f.shape[0] == 3:
raise Exception, 'error, can only process 3-band images'
r,g,b = f[0].astype(np.int), f[1].astype(np.int), f[2].astype(np.int)
n_zeros = 0
if mask:
n_zeros = f.shape[0]*f.shape[1]-len(np.nonzero(np.ravel(mask)))
r,g,b = mask*r, mask*g, mask*b
hrg = np.zeros((256,256), np.int32); hbg=hrg+0; hrb=hrg+0
img = 256*r + g; m1 = img.max()
aux = iahistogram(img.astype(np.int32)); aux[0] = aux[0] - n_zeros
np.put(np.ravel(hrg), range(m1+1), aux)
img = 256*b + g; m2 = img.max()
aux = iahistogram(img.astype(np.int32)); aux[0] = aux[0] - n_zeros
np.put(np.ravel(hbg), range(m2+1), aux)
img = 256*r + b; m3 = img.max()
aux = iahistogram(img.astype(np.int32)); aux[0] = aux[0] - n_zeros
np.put(np.ravel(hrb), range(m3+1), aux)
m=max(hrg.max(),hbg.max(),hrb.max())
hc=m*np.ones((3*WFRAME+2*256,3*WFRAME+2*256))
hc[WFRAME:WFRAME+256,WFRAME:WFRAME+256] = np.transpose(hrg)
hc[WFRAME:WFRAME+256,2*WFRAME+256:2*WFRAME+512] = np.transpose(hbg)
hc[2*WFRAME+256:2*WFRAME+512,WFRAME:WFRAME+256] = np.transpose(hrb)
return hc<|fim▁end|> | # Module iacolorhist
def iacolorhist(f, mask=None): |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from frontend import app
from flask import render_template
from flask import send_from_directory
from flask import request
from flask import redirect
from flask import url_for
from flask import flash
from flask import abort
import os
import models
import forms
from wtfpeewee.orm import model_form
@app.route('/register/', methods=['GET', 'POST'])
def register():
Form = forms.ManualRegisterForm(request.values)
if request.method == 'POST':
if Form.submit.data:
saveFormsToModels(Form)
return redirect(url_for('register'))
return render_template('frontpage.html',
form = Form,
)
@app.route('/add/<modelname>/', methods=['GET', 'POST'])
def add(modelname):
kwargs = listAndEdit(modelname)
return render_template('editpage.html', **kwargs)
@app.route('/add/<modelname>/to/<foreign_table>/<foreign_key>', methods=['GET', 'POST'])
def addto(modelname, foreign_table, foreign_key):
kwargs = listAndEdit(modelname,
action = 'AddTo',
foreign_table = foreign_table,
foreign_key = foreign_key)
return render_template('editpage.html', **kwargs)
@app.route('/edit/<modelname>/<entryid>', methods=['GET', 'POST'])
def edit(modelname, entryid):
kwargs = listAndEdit(modelname, entryid)
#print kwargs
return render_template('editpage.html', **kwargs)
def saveFormsToModels(form):
<|fim▁hole|> # needs the form fields to be named modelname_fieldname
editedModels = {}
foreignKeys = []
for formfield in form.data:
if formfield in ['csrf_token']:
continue
try:
modelname, field = formfield.split('_')
except:
continue
value = form[formfield].data
try:
functionName, foreignKeyName = value.split('_')
if functionName == 'ForeignKey':
foreignKeys.append(
dict(
modelname = modelname,
field = field,
foreignKeyName = foreignKeyName,
)
)
continue
except:
pass
try:
setattr(editedModels[modelname], field, value)
except:
editedModels[modelname] = models.ALL_MODELS_DICT[modelname]()
setattr(editedModels[modelname], field, value)
for model in editedModels:
editedModels[model].save()
for key in foreignKeys:
setattr(
editedModels[key['modelname']],
key['field'],
editedModels[key['foreignKeyName']])
print 'start'
print 'Set attr: {}, {}, {}'.format(
editedModels[key['modelname']],
key['field'],
editedModels[key['foreignKeyName']])
for model in editedModels:
editedModels[model].save()
def getFields(model, exclude=['id']):
foreignKeys = {x.column : x.dest_table for x in models.db.get_foreign_keys(model.__name__)}
#fields = [(x, type(model._meta.fields[x]).__name__, foreignKeys) for x in model._meta.sorted_field_names if not x in exclude]
#print foreignKeys
fields = []
for field in model._meta.sorted_field_names:
if not field in exclude:
fieldtype = type(model._meta.fields[field]).__name__
foreignFieldName = '{}_id'.format(field)
if foreignFieldName in foreignKeys:
foreignKeyModelName = foreignKeys[foreignFieldName].title()
else:
foreignKeyModelName = False
fields.append(
(field, fieldtype, foreignKeyModelName))
#print "Field: {}\nType: {}\nModelname: {}\n".format(field, fieldtype, foreignKeyModelName)
return fields
def getRelatedModels(entry):
entries = {}
models = []
try:
for query, fk in reversed(list(entry.dependencies())):
#for x in dir(fk):
#print x
for x in fk.model_class.select().where(query):
#print 'here:'
#print x
modelname = fk.model_class.__name__
try:
entries[modelname].append(x)
except:
models.append(modelname)
entries[modelname] = []
entries[modelname].append(x)
#entries.append((fk.model_class.__name__, x))
except:
pass
return (models, entries)
def listAndEdit(modelname, entryid = 0, entries = False, action = False, **kwargs):
try:
model = models.ALL_MODELS_DICT[modelname]
except KeyError:
abort(404)
if not entries:
entries = model.select()
modelForm = model_form(model)
fields = getFields(model)
try:
entry = model.get(id=int(entryid))
dependencies = getRelatedModels(entry)
except:
entry = model()
dependencies = False
form = modelForm(obj = entry)
if request.method == 'POST':
if request.form['submit'] == 'Save':
form = modelForm(request.values, obj = entry)
if form.validate():
form.populate_obj(entry)
entry.save()
if action == 'AddTo':
addForeignKey(model, entry, kwargs['foreign_table'], kwargs['foreign_key'])
redirect(url_for('edit', modelname = model, entryid = kwargs['foreign_key']))
flash('Your entry has been saved')
print 'saved'
elif request.form['submit'] == 'Delete':
try:
model.get(model.id == int(entryid)).delete_instance(recursive = True)
#redirect(url_for('add', modelname = modelname))
except:
pass
finally:
entry = model()
form = modelForm(obj = entry)
kwargs = dict(
links = [x.__name__ for x in models.ALL_MODELS],
header = model.__name__,
form=form,
entry=entry,
entries=entries,
fields = fields,
dependencies = dependencies,
)
return kwargs
def addForeignKey(model, entry, foreign_table, foreign_key):
foreignModel = models.ALL_MODELS_DICT[foreign_table]
foreignItem = foreignModel.get(foreignModel.id == int(foreign_key))
foreignFieldName = model.__name__.lower()
print "entry = {}".format(foreignModel)
print "item = {}".format(foreignItem)
print "fieldName = {}".format(foreignFieldName)
print "id = {}".format(entry.id)
setattr(foreignItem, foreignFieldName, entry.id)
foreignItem.save()
@app.route('/favicon.ico')
def favicon():
return send_from_directory(
os.path.join(app.root_path, 'static'), 'favicon.png', mimetype='image/vnd.microsoft.icon')<|fim▁end|> | |
<|file_name|>type_extras.py<|end_file_name|><|fim▁begin|>from collections.abc import Iterable
from django import template
from django.db.models import Model
register = template.Library()
@register.filter
def get_type(value):
# inspired by: https://stackoverflow.com/a/12028864
return type(value)
@register.filter<|fim▁hole|> return isinstance(value, Model)
@register.filter
def is_iterable(value):
return isinstance(value, Iterable)
@register.filter
def is_str(value):
return isinstance(value, str)
@register.filter
def is_bool(value):
return isinstance(value, bool)<|fim▁end|> | def is_model(value): |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
## This file is part of Invenio.
## Copyright (C) 2011, 2012, 2013, 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
invenio.ext.sqlalchemy
----------------------
This module provides initialization and configuration for
`flask.ext.sqlalchemy` module.
"""
import sqlalchemy
from flask.ext.registry import RegistryProxy, ModuleAutoDiscoveryRegistry
from flask.ext.sqlalchemy import SQLAlchemy as FlaskSQLAlchemy
from sqlalchemy import event
from sqlalchemy.ext.hybrid import hybrid_property, Comparator
from sqlalchemy.pool import Pool
from sqlalchemy_utils import JSONType
from invenio.utils.hash import md5
from .expressions import AsBINARY
from .types import MarshalBinary, PickleBinary, GUID
from .utils import get_model_type
def _include_sqlalchemy(obj, engine=None):
#for module in sqlalchemy, sqlalchemy.orm:
# for key in module.__all__:
# if not hasattr(obj, key):
# setattr(obj, key,
# getattr(module, key))
if engine == 'mysql':
from sqlalchemy.dialects import mysql as engine_types
else:
from sqlalchemy import types as engine_types
# Length is provided to JSONType to ensure MySQL uses LONGTEXT instead
# of TEXT which only provides for 64kb storage compared to 4gb for
# LONGTEXT.
setattr(obj, 'JSON', JSONType(length=2**32-2))
setattr(obj, 'Char', engine_types.CHAR)
try:
setattr(obj, 'TinyText', engine_types.TINYTEXT)
except:
setattr(obj, 'TinyText', engine_types.TEXT)
setattr(obj, 'hybrid_property', hybrid_property)
try:
setattr(obj, 'Double', engine_types.DOUBLE)
except:
setattr(obj, 'Double', engine_types.FLOAT)
setattr(obj, 'Integer', engine_types.INTEGER)
setattr(obj, 'SmallInteger', engine_types.SMALLINT)
try:
setattr(obj, 'MediumInteger', engine_types.MEDIUMINT)
except:
setattr(obj, 'MediumInteger', engine_types.INT)
setattr(obj, 'BigInteger', engine_types.BIGINT)
try:
setattr(obj, 'TinyInteger', engine_types.TINYINT)
except:
setattr(obj, 'TinyInteger', engine_types.INT)
setattr(obj, 'Binary', sqlalchemy.types.LargeBinary)
setattr(obj, 'iBinary', sqlalchemy.types.LargeBinary)
setattr(obj, 'iLargeBinary', sqlalchemy.types.LargeBinary)
setattr(obj, 'iMediumBinary', sqlalchemy.types.LargeBinary)
setattr(obj, 'UUID', GUID)
if engine == 'mysql':
from .engines import mysql as dummy_mysql # noqa
# module = invenio.sqlalchemyutils_mysql
# for key in module.__dict__:
# setattr(obj, key,
# getattr(module, key))
obj.AsBINARY = AsBINARY
obj.MarshalBinary = MarshalBinary
obj.PickleBinary = PickleBinary
## Overwrite :meth:`MutableDick.update` to detect changes.
from sqlalchemy.ext.mutable import MutableDict
def update_mutable_dict(self, *args, **kwargs):
super(MutableDict, self).update(*args, **kwargs)
self.changed()
MutableDict.update = update_mutable_dict
obj.MutableDict = MutableDict
class PasswordComparator(Comparator):
def __eq__(self, other):
return self.__clause_element__() == self.hash(other)
def hash(self, password):
if db.engine.name != 'mysql':<|fim▁hole|>
def autocommit_on_checkin(dbapi_con, con_record):
"""Calls autocommit on raw mysql connection for fixing bug in MySQL 5.5"""
try:
dbapi_con.autocommit(True)
except:
pass
#FIXME
#from invenio.ext.logging import register_exception
#register_exception()
## Possibly register globally.
#event.listen(Pool, 'checkin', autocommit_on_checkin)
class SQLAlchemy(FlaskSQLAlchemy):
"""Database object."""
PasswordComparator = PasswordComparator
def init_app(self, app):
super(self.__class__, self).init_app(app)
engine = app.config.get('CFG_DATABASE_TYPE', 'mysql')
self.Model = get_model_type(self.Model)
if engine == 'mysql':
self.Model.__table_args__ = {'keep_existing': True,
'extend_existing': False,
'mysql_engine': 'MyISAM',
'mysql_charset': 'utf8'}
_include_sqlalchemy(self, engine=engine)
def __getattr__(self, name):
# This is only called when the normal mechanism fails, so in practice
# should never be called.
# It is only provided to satisfy pylint that it is okay not to
# raise E1101 errors in the client code.
# :see http://stackoverflow.com/a/3515234/780928
raise AttributeError("%r instance has no attribute %r" % (self, name))
def schemadiff(self, excludeTables=None):
from migrate.versioning import schemadiff
return schemadiff.getDiffOfModelAgainstDatabase(self.metadata,
self.engine,
excludeTables=excludeTables)
def apply_driver_hacks(self, app, info, options):
"""
This method is called before engine creation.
"""
# Don't forget to apply hacks defined on parent object.
super(self.__class__, self).apply_driver_hacks(app, info, options)
if info.drivername == 'mysql':
options.setdefault('execution_options', {'autocommit': True,
'use_unicode': False,
'charset': 'utf8mb4',
})
event.listen(Pool, 'checkin', autocommit_on_checkin)
db = SQLAlchemy()
"""
Provides access to :class:`~.SQLAlchemy` instance.
"""
models = RegistryProxy('models', ModuleAutoDiscoveryRegistry, 'models')
def setup_app(app):
"""Setup SQLAlchemy extension."""
if 'SQLALCHEMY_DATABASE_URI' not in app.config:
from sqlalchemy.engine.url import URL
cfg = app.config
app.config['SQLALCHEMY_DATABASE_URI'] = URL(
cfg.get('CFG_DATABASE_TYPE', 'mysql'),
username=cfg.get('CFG_DATABASE_USER'),
password=cfg.get('CFG_DATABASE_PASS'),
host=cfg.get('CFG_DATABASE_HOST'),
database=cfg.get('CFG_DATABASE_NAME'),
port=cfg.get('CFG_DATABASE_PORT'),
)
## Let's initialize database.
db.init_app(app)
return app<|fim▁end|> | return md5(password).digest()
email = self.__clause_element__().table.columns.email
return db.func.aes_encrypt(email, password)
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | __all__ = ["user_controller", "plant_controller"] |
<|file_name|>trait_no_fields.rs<|end_file_name|><|fim▁begin|>use juniper::graphql_union;
#[graphql_union]
trait Character {}<|fim▁hole|>fn main() {}<|fim▁end|> | |
<|file_name|>property_group_actions.cpp<|end_file_name|><|fim▁begin|>#include <GsTLAppli/actions/property_group_actions.h>
#include <GsTLAppli/actions/defines.h>
#include <GsTLAppli/utils/string_manipulation.h>
#include <GsTLAppli/utils/error_messages_handler.h>
#include <GsTLAppli/appli/manager_repository.h>
#include <GsTLAppli/appli/project.h>
#include <GsTLAppli/grid/grid_model/geostat_grid.h>
#include <GsTLAppli/grid/grid_model/grid_property.h>
#include <GsTLAppli/grid/grid_model/grid_property_set.h>
/**
* New_property_group
*/
Named_interface* New_property_group::create_new_interface( std::string& ){
return new New_property_group;
}
bool New_property_group::init( std::string& parameters, GsTL_project* proj,
Error_messages_handler* errors ){
std::vector< std::string > params =
String_Op::decompose_string( parameters, Actions::separator,
Actions::unique );
if( params.size() < 2 ) {
errors->report( "Must have at least 2 parameters, name of the grid and name the group" );
return false;
}
// Get the grid
std::string grid_name = params[0];
SmartPtr<Named_interface> ni = Root::instance()->interface( gridModels_manager + "/" + grid_name);
Geostat_grid* grid = dynamic_cast<Geostat_grid*>( ni.raw_ptr() );
if(!grid) {
errors->report( "The grid "+params[0]+" does not exist" );
return false;
}
GsTLGridPropertyGroup* group = grid->get_group(params[1]);
if(group) {
errors->report( "The goup "+params[1]+" already exist; hence cannot be created" );
return false;
}
std::string type = "";
if( params.size() == 3 ) {
if( params[2] == "General" ) type = "";
else type = params[2];
}
group = grid->add_group(params[1],type);
if(!group) {
errors->report( "The goup "+params[1]+" could no be created; possibly type undefined" );
return false;
}
for(int i=3; i< params.size(); i++) {
GsTLGridProperty* prop = grid->property(params[i]);
if(prop == NULL) {
errors->report( "The property "+params[i]+" does not exist" );
return false;
}
}
for(int i=3; i< params.size(); i++) {
group->add_property(grid->property(params[i]));
}
proj->update();
return true;
}
bool New_property_group::exec(){
return true;
}
/**
* Add_properties_to_group::
*/
Named_interface* Add_properties_to_group::create_new_interface( std::string& ){
return new Add_properties_to_group;
}
bool Add_properties_to_group::init( std::string& parameters, GsTL_project* proj,
Error_messages_handler* errors ){
std::vector< std::string > params =
String_Op::decompose_string( parameters, Actions::separator,
Actions::unique );
if( params.size() < 3 ) {
errors->report( "Must have at least 3 parameters, name of the grid and name the group and at least one property" );
return false;
}
// Get the grid
SmartPtr<Named_interface> ni = Root::instance()->interface( gridModels_manager + "/" + params[0] );
Geostat_grid* grid = dynamic_cast<Geostat_grid*>( ni.raw_ptr() );
if(!grid) {
errors->report( "The grid "+params[0]+" does not exist" );
return false;
}
GsTLGridPropertyGroup* group = grid->get_group(params[1]);
if(!group) {
errors->report( "The goup "+params[1]+" does not exist" );
return false;
}
for(int i=2; i< params.size(); i++) {
GsTLGridProperty* prop = grid->property(params[i]);
if(prop == NULL) {
errors->report( "The property "+params[i]+" does not exist" );
return false;
}
}
for(int i=2; i< params.size(); i++) {
if( !group->is_member_property( params[i] ) )
group->add_property(grid->property(params[i]));
}
return true;
}
bool Add_properties_to_group::exec(){
return true;
}
/*---------------------------*/
/**
* Add_properties_to_group::
*/
Named_interface* Remove_properties_from_group::create_new_interface( std::string& ){
return new Remove_properties_from_group;
}
bool Remove_properties_from_group::init( std::string& parameters, GsTL_project* proj,
Error_messages_handler* errors ){
std::vector< std::string > params =
String_Op::decompose_string( parameters, Actions::separator,
Actions::unique );
if( params.size() < 3 ) {
errors->report( "Must have at least 3 parameters, name of the grid and name the group and at least one property" );
return false;
}
// Get the grid
SmartPtr<Named_interface> ni = Root::instance()->interface( gridModels_manager + "/" + params[0] );
Geostat_grid* grid = dynamic_cast<Geostat_grid*>( ni.raw_ptr() );
if(!grid) {
errors->report( "The grid "+params[0]+" does not exist" );
return false;
}
GsTLGridPropertyGroup* group = grid->get_group(params[1]);
if(!group) {
errors->report( "The goup "+params[1]+" does not exist" );
return false;
}
for(int i=2; i< params.size(); i++) {
GsTLGridProperty* prop = grid->property(params[i]);
if(prop == NULL) {
errors->report( "The property "+params[i]+" does not exist" );
return false;
}
}
for(int i=2; i< params.size(); i++) {
if( group->is_member_property( params[i] ) )
group->remove_property(grid->property(params[i]));
}
return true;
}
bool Remove_properties_from_group::exec(){
return true;
}
/*-------------------*/
Named_interface*
Delete_property_in_group::create_new_interface( std::string& ){
return new Delete_property_in_group;
}
bool Delete_property_in_group::init( std::string& parameters, GsTL_project* proj,
Error_messages_handler* errors ){
std::vector< std::string > params =
String_Op::decompose_string( parameters, Actions::separator,
Actions::unique );
if( params.size() != 2 ) {
errors->report( "Must have 2 parameters, name of the grid and name the group to be deleted" );
return false;
}
// Get the grid
SmartPtr<Named_interface> ni = Root::instance()->interface( gridModels_manager + "/" + params[0] );
Geostat_grid* grid = dynamic_cast<Geostat_grid*>( ni.raw_ptr() );
if(!grid) {
errors->report( "The grid "+params[0]+" does not exist" );
return false;
}
GsTLGridPropertyGroup* group = grid->get_group(params[1]);
if(!group) {
errors->report( "The goup "+params[1]+" does not exist" );
return false;
}
//GsTLGridPropertyGroup::property_map::iterator it = group->begin_property();
std::vector<std::string> names = group->property_names();
std::vector<std::string>::const_iterator it = names.begin();
for(; it != names.end(); ++it){
grid->remove_property(*it);
}
grid->remove_group(params[1]);
return true;
}
bool Delete_property_in_group::exec(){
return true;
}
/*
* --------------------------------
*/
Named_interface*
Remove_group::create_new_interface( std::string& ){
return new Remove_group;
}
bool Remove_group::init( std::string& parameters, GsTL_project* proj,
Error_messages_handler* errors ){
std::vector< std::string > params =
String_Op::decompose_string( parameters, Actions::separator,
Actions::unique );
if( params.size() != 2 ) {
errors->report( "Must have 2 parameters, name of the grid and name the group to be removed" );
return false;
}
// Get the grid
SmartPtr<Named_interface> ni = Root::instance()->interface( gridModels_manager + "/" + params[0] );
Geostat_grid* grid = dynamic_cast<Geostat_grid*>( ni.raw_ptr() );
if(!grid) {
errors->report( "The grid "+params[0]+" does not exist" );
return false;
}
GsTLGridPropertyGroup* group = grid->get_group(params[1]);
if(!group) {
errors->report( "The group "+params[1]+" does not exist" );
return false;
}
<|fim▁hole|> return true;
}
bool Remove_group::exec(){
return true;
}<|fim▁end|> | grid->remove_group(params[1]);
|
<|file_name|>pelicanconf.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python<|fim▁hole|># -*- coding: utf-8 -*- #
from __future__ import unicode_literals
import os
AUTHOR = u'Eric Carmichael'
SITENAME = u"Eric Carmichael's Nerdery"
SITEURL = os.environ.get("PELICAN_SITE_URL", "")
TIMEZONE = 'Europe/Paris'
DEFAULT_LANG = u'en'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
DEFAULT_PAGINATION = 2
# WITH_FUTURE_DATES = True
GITHUB_URL = 'http://github.com/ckcollab/'
THEME = "themes/mintheme"
PATH = "content"
PLUGINS = ["plugins.assets", "plugins.sitemap"]
MARKUP = (('rst', 'md', 'html'))
WEBASSETS = True
SITEMAP = {
"format": "xml",
"priorities": {
"articles": 1,
"pages": 1,
"indexes": 0
},
"changefreqs": {
"articles": "daily",
"pages": "daily",
"indexes": "daily",
}
}
STATIC_PATHS = [
'images',
'extra/robots.txt',
]
EXTRA_PATH_METADATA = {
'extra/robots.txt': {'path': 'robots.txt'},
}
# Make the site display full articles instead of summaries by setting this to 0
# SUMMARY_MAX_LENGTH = 0<|fim▁end|> | |
<|file_name|>hook-vispy.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | from PyInstaller.utils.hooks import collect_submodules, collect_data_files
datas = collect_data_files('vispy') |
<|file_name|>stream.rs<|end_file_name|><|fim▁begin|>// Copyright © 2017-2018 Mozilla Foundation
//
// This program is made available under an ISC-style license. See the
// accompanying file LICENSE for details.
use callbacks::cubeb_device_changed_callback;
use channel::cubeb_channel_layout;
use device::cubeb_device;
use format::cubeb_sample_format;
use std::{fmt, mem};
use std::os::raw::{c_float, c_int, c_uint, c_void, c_char};
cubeb_enum! {
pub enum cubeb_stream_prefs {
CUBEB_STREAM_PREF_NONE = 0x00,
CUBEB_STREAM_PREF_LOOPBACK = 0x01,
CUBEB_STREAM_PREF_DISABLE_DEVICE_SWITCHING = 0x02,
CUBEB_STREAM_PREF_VOICE = 0x04,
}
}
cubeb_enum! {
pub enum cubeb_state {
CUBEB_STATE_STARTED,
CUBEB_STATE_STOPPED,
CUBEB_STATE_DRAINED,
CUBEB_STATE_ERROR,
}
}
pub enum cubeb_stream {}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct cubeb_stream_params {
pub format: cubeb_sample_format,
pub rate: c_uint,
pub channels: c_uint,
pub layout: cubeb_channel_layout,
pub prefs: cubeb_stream_prefs,
}
impl Default for cubeb_stream_params {
fn default() -> Self {
unsafe { mem::zeroed() }
}
}
// Explicit Debug impl to work around bug in ctest
impl fmt::Debug for cubeb_stream_params {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("cubeb_stream_params")
.field("format", &self.format)
.field("rate", &self.rate)
.field("channels", &self.channels)
.field("layout", &self.layout)
.field("prefs", &self.prefs)
.finish()
}
}
extern "C" {
pub fn cubeb_stream_destroy(stream: *mut cubeb_stream);
pub fn cubeb_stream_start(stream: *mut cubeb_stream) -> c_int;<|fim▁hole|> pub fn cubeb_stream_get_latency(stream: *mut cubeb_stream, latency: *mut c_uint) -> c_int;
pub fn cubeb_stream_get_input_latency(stream: *mut cubeb_stream, latency: *mut c_uint) -> c_int;
pub fn cubeb_stream_set_volume(stream: *mut cubeb_stream, volume: c_float) -> c_int;
pub fn cubeb_stream_set_name(stream: *mut cubeb_stream, name: *const c_char) -> c_int;
pub fn cubeb_stream_get_current_device(
stream: *mut cubeb_stream,
device: *mut *mut cubeb_device,
) -> c_int;
pub fn cubeb_stream_device_destroy(
stream: *mut cubeb_stream,
devices: *mut cubeb_device,
) -> c_int;
pub fn cubeb_stream_register_device_changed_callback(
stream: *mut cubeb_stream,
device_changed_callback: cubeb_device_changed_callback,
) -> c_int;
pub fn cubeb_stream_user_ptr(stream: *mut cubeb_stream) -> *mut c_void;
}<|fim▁end|> | pub fn cubeb_stream_stop(stream: *mut cubeb_stream) -> c_int;
pub fn cubeb_stream_reset_default_device(stream: *mut cubeb_stream) -> c_int;
pub fn cubeb_stream_get_position(stream: *mut cubeb_stream, position: *mut u64) -> c_int; |
<|file_name|>types.py<|end_file_name|><|fim▁begin|>import typing
import twittback<|fim▁hole|>TweetSequence = typing.Sequence[twittback.Tweet]
UserSequence = typing.Sequence[twittback.User]<|fim▁end|> | |
<|file_name|>streaming.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""Functions for video streaming."""
import cStringIO
import fcntl
import os
import signal
import struct
import subprocess
import sys
import time
import types
import json
from PIL import Image
import v4l2
VIDEO_DEVICE = None
VIDEO_STREAM_PROCESS = None
VIDEO_INITIALIZED = False
VIDEO_SIZE = "-1,-1"
VIDEO_RESTART = False
VIDEO_ORIGINAL_SIZE = 0,0
def open_video_device(path="/dev/video0"):
global VIDEO_DEVICE
if os.path.exists(path):
# binary, unbuffered write
device = open(path, "wb", 0)
VIDEO_DEVICE = device
else:
msg = "Cannot open video device %s, path do not exist. " % path
msg += "Make sure that the v4l2loopback kernel module is loaded (modprobe v4l2loopback). "
msg += "Falling back to MJPEG."
raise RuntimeError(msg)
return VIDEO_DEVICE
def initialize_video_device(pixel_format, width, height, channels):
f = v4l2.v4l2_format()
f.type = v4l2.V4L2_BUF_TYPE_VIDEO_OUTPUT
f.fmt.pix.pixelformat = pixel_format
f.fmt.pix.width = width
f.fmt.pix.height = height
f.fmt.pix.field = v4l2.V4L2_FIELD_NONE
f.fmt.pix.bytesperline = width * channels
f.fmt.pix.sizeimage = width * height * channels
f.fmt.pix.colorspace = v4l2.V4L2_COLORSPACE_SRGB
<|fim▁hole|>
return True
def set_video_size(width=-1, height=-1):
global VIDEO_SIZE
global VIDEO_RESTART
VIDEO_SIZE = "%s,%s" % (width, height)
VIDEO_RESTART = True
def video_size():
current_size = VIDEO_SIZE.split(",")
scale = float(current_size[0]) / VIDEO_ORIGINAL_SIZE[0]
return current_size + list((scale,))
def new_frame_received(img, width, height, *args, **kwargs):
"""
Executed when a new image is received, (new frame received callback).
"""
pixel_format = v4l2.V4L2_PIX_FMT_RGB24
channels = 3
global VIDEO_INITIALIZED
global VIDEO_STREAM_PROCESS
global VIDEO_RESTART
# Assume that we are getting a qimage if we are not getting a str,
# to be able to handle data sent by hardware objects used in MxCuBE 2.x
if not isinstance(img, str):
# 4 Channels with alpha
channels = 4
pixel_format = v4l2.V4L2_PIX_FMT_RGB32
rawdata = img.bits().asstring(img.numBytes())
img = rawdata
else:
# Is the image on JPEG format get the RGB data otherwise assume its
# already RGB and do nothing with the data
if img.startswith('\xff\xd8\xff\xe0\x00\x10JFIF'):
# jpeg image
strbuf = cStringIO.StringIO(img)
img = Image.open(strbuf)
img = img.tobytes()
if VIDEO_DEVICE:
if not VIDEO_INITIALIZED:
VIDEO_INITIALIZED = \
initialize_video_device(pixel_format, width, height, channels)
VIDEO_DEVICE.write(img)
if VIDEO_RESTART and VIDEO_STREAM_PROCESS:
os.system('pkill -TERM -P {pid}'.format(pid=VIDEO_STREAM_PROCESS.pid))
VIDEO_RESTART = False
VIDEO_STREAM_PROCESS = None
# start the streaming process if not started or restart if terminated
if not VIDEO_STREAM_PROCESS or VIDEO_STREAM_PROCESS.poll() is not None:
sfpath = os.path.join(os.path.dirname(__file__), "streaming_processes.py")
python_executable = os.sep.join(os.path.dirname(os.__file__).split(os.sep)[:-2]+["bin", "python"])
VIDEO_STREAM_PROCESS = subprocess.Popen([python_executable, sfpath, VIDEO_DEVICE.name, VIDEO_SIZE], close_fds=True)
def get_available_sizes(camera):
try:
w, h = camera.getWidth(), camera.getHeight()
# Some video decoders have difficulties to decode videos with odd image dimensions
# (JSMPEG beeing one of them) so we make sure that the size is even
w = w if w % 2 == 0 else w + 1
h = h if h % 2 == 0 else h + 1
# Calculate half the size and quarter of the size if MPEG streaming is used
# otherwise just return the orignal size.
if VIDEO_STREAM_PROCESS:
video_sizes = [(w, h), (w/2, h/2), (w/4, h/4)]
else:
video_sizes = [(w, h)]
except (ValueError, AttributeError):
video_sizes = []
return video_sizes
def set_initial_stream_size(camera, video_device_path):
global VIDEO_SIZE
global VIDEO_ORIGINAL_SIZE
w, h = camera.getWidth(), camera.getHeight()
w = w if w % 2 == 0 else w + 1
h = h if h % 2 == 0 else h + 1
VIDEO_ORIGINAL_SIZE = w, h
VIDEO_SIZE = "%s,%s" % VIDEO_ORIGINAL_SIZE
def tango_lima_video_plugin(camera, video_device):
"""
Configures video frame handling for TangoLimaVideo devices.
:param HardwareObject camera: Object providing frames to encode and stream
:param str video_device: Video loopback path
"""
if camera.__class__.__name__ == 'TangoLimaVideo':
# patch hardware object to set acquisition to the right mode
# and to get the right frames out of the video device
if camera.isReady():
camera.setLive(False)
camera.device.video_mode = "RGB24"
time.sleep(0.1)
camera.setLive(True)
def parse_image_data(self, img_data):
hfmt = ">IHHqiiHHHH"
hsize = struct.calcsize(hfmt)
_, _, img_mode, frame_number, width, height, _, _, _, _ = \
struct.unpack(hfmt, img_data[1][:hsize])
raw_data = img_data[1][hsize:]
return width, height, raw_data
def do_polling(self, sleep_time):
hfmt = ">IHHqiiHHHH"
hsize = struct.calcsize(hfmt)
while True:
width, height, raw_data = \
self.parse_image_data(self.device.video_last_image)
self.emit("imageReceived", raw_data, width, height, False)
time.sleep(sleep_time)
def take_snapshot(self, path, bw=False):
width, height, raw_data = \
self.parse_image_data(self.device.video_last_image)
img = Image.frombytes("RGB", (width, height), raw_data)
if bw:
img.convert("1")
img.save(path)
camera._do_polling = types.MethodType(do_polling, camera)
camera.takeSnapshot = types.MethodType(take_snapshot, camera)
camera.parse_image_data = types.MethodType(parse_image_data, camera)
def init(camera, video_device_path):
"""
Initialize video loopback device.
:param HardwareObject camera: Object providing frames to encode and stream
:param str video_device_path: Video loopback path
"""
set_initial_stream_size(camera, video_device_path)
tango_lima_video_plugin(camera, video_device_path)
video_device = open_video_device(video_device_path)
camera.connect("imageReceived", new_frame_received)
return video_device<|fim▁end|> | res = fcntl.ioctl(VIDEO_DEVICE, v4l2.VIDIOC_S_FMT, f)
if res != 0:
raise RuntimeError("Could not initialize video device: %d" % res) |
<|file_name|>ShopOrder.ts<|end_file_name|><|fim▁begin|>import {ShopOrderDetail} from './ShopOrderDetail';
export class ShopOrder {
order_id: string;
user_id: number;
username: string;
is_vip: number;
payment: number;
order_no: number;
shopway: number;
status: number;<|fim▁hole|> orderdealtime: Date;
phone: string;
address: string;
soft_delete: string;
subPrice:number;
shopOrderDetails: ShopOrderDetail[];
}<|fim▁end|> | employee_id: string;
buytime: Date; |
<|file_name|>send_via_pyxmpp.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3
# https://bugzilla.altlinux.org/show_bug.cgi?id=33532
#!/usr/bin/env -S python3 -u
# -*- coding: utf-8 -*-
#
# A simple message-sending script
# TODO: When error: No handlers could be found for logger "pyxmpp.Client"
import os, sys
# python-module-pyxmpp
from pyxmpp2.jid import JID
from pyxmpp2.jabber.simple import send_message
# set in korinf config file
jid = os.environ['KORINFERJID']
password = os.environ['KORINFERJIDPASSWD']<|fim▁hole|> print("example:")
print("\t%s test1@localhost Test 'this is test'" % (sys.argv[0],))
sys.exit(1)
recpt,subject,body=sys.argv[1:]
jid = JID(jid)
if not jid.resource:
jid = JID(jid.node,jid.domain,"korinf")
recpt = JID(recpt)
send_message(jid,password,recpt,body,subject)<|fim▁end|> |
if len(sys.argv)!=4:
print("Usage:")
print("\t%s recipient_jid subject body" % (sys.argv[0],)) |
<|file_name|>IndexManagerTest.java<|end_file_name|><|fim▁begin|>/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.fuerve.villageelder.indexing;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.lang.reflect.Field;
import org.apache.lucene.analysis.miscellaneous.PerFieldAnalyzerWrapper;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.LongField;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.junit.Test;
/**
* Unit tests for {@link IndexManager}.
* @author lparker
*
*/
public class IndexManagerTest {
/**
* Test method for {@link com.fuerve.villageelder.indexing.IndexManager#IndexManager(org.apache.lucene.store.Directory, org.apache.lucene.store.Directory)}.
* @throws Exception
*/
@Test
public final void testIndexManagerDirectoryDirectory() throws Exception {
RAMDirectory indexDirectory = new RAMDirectory();
RAMDirectory taxonomyDirectory = new RAMDirectory();
Field idField = IndexManager.class.getDeclaredField("indexDirectory");
Field tdField = IndexManager.class.getDeclaredField("taxonomyDirectory");
Field iwField = IndexManager.class.getDeclaredField("indexWriter");
Field twField = IndexManager.class.getDeclaredField("taxonomyWriter");
Field stField = IndexManager.class.getDeclaredField("stringDirectories");
Field initField = IndexManager.class.getDeclaredField("initialized");
idField.setAccessible(true);
tdField.setAccessible(true);
iwField.setAccessible(true);
twField.setAccessible(true);
stField.setAccessible(true);
initField.setAccessible(true);
IndexManager target = new IndexManager(indexDirectory, taxonomyDirectory);
// TEST 1: A newly constructed IndexManager believes itself
// to be uninitialized, as indicated by the 'initialized'
// field.
boolean initActual = initField.getBoolean(target);
assertFalse(initActual);
target.initializeIndex();
Directory idActual = (Directory) idField.get(target);
Directory tdActual = (Directory) tdField.get(target);
IndexWriter iwActual = (IndexWriter) iwField.get(target);
TaxonomyWriter twActual = (TaxonomyWriter) twField.get(target);
boolean stActual = (Boolean) stField.get(target);
initActual = initField.getBoolean(target);
// TEST 2: The IndexManager's index directory is what was passed in.
assertEquals(indexDirectory, idActual);
// TEST 3: The IndexManager's taxonomy directory is what was passed in.
assertEquals(taxonomyDirectory, tdActual);
// TEST 4: The IndexWriter's directory is what was passed in.
assertEquals(indexDirectory, iwActual.getDirectory());
// TEST 5: The taxonomy index is initialized afresh with no categories
// in it.
assertEquals(1, twActual.getSize());
// TEST 6: An IndexManager constructed with Directories does not
// believe that it needs to construct new Directories from string
// pathnames.
assertEquals(false, stActual);
// TEST 7: The IndexManager's initialized field is true after it
// has been initialized.<|fim▁hole|>
target.dispose();
// TEST 8: The IndexManager's index writer is null after it has
// been disposed.
iwActual = (IndexWriter) iwField.get(target);
assertEquals(null, iwActual);
// TEST 9: The IndexManager's taxonomy writer is null after it
// has been disposed.
twActual = (TaxonomyWriter) twField.get(target);
assertEquals(null, twActual);
// TEST 10: The IndexManager's initialized flag is false after
// it has been disposed.
initActual = initField.getBoolean(target);
assertEquals(false, initActual);
}
/**
* Test method for {@link com.fuerve.villageelder.indexing.IndexManager#IndexManager(org.apache.lucene.store.Directory, org.apache.lucene.store.Directory, org.apache.lucene.index.IndexWriterConfig.OpenMode)}.
*/
@Test
public final void testIndexManagerDirectoryDirectoryOpenMode() throws Exception {
RAMDirectory indexDirectory = new RAMDirectory();
RAMDirectory taxonomyDirectory = new RAMDirectory();
Field idField = IndexManager.class.getDeclaredField("indexDirectory");
Field tdField = IndexManager.class.getDeclaredField("taxonomyDirectory");
Field iwField = IndexManager.class.getDeclaredField("indexWriter");
Field twField = IndexManager.class.getDeclaredField("taxonomyWriter");
Field stField = IndexManager.class.getDeclaredField("stringDirectories");
Field initField = IndexManager.class.getDeclaredField("initialized");
idField.setAccessible(true);
tdField.setAccessible(true);
iwField.setAccessible(true);
twField.setAccessible(true);
stField.setAccessible(true);
initField.setAccessible(true);
IndexManager target = new IndexManager(indexDirectory, taxonomyDirectory, OpenMode.CREATE);
target.initializeIndex();
TaxonomyWriter tw = (TaxonomyWriter) twField.get(target);
IndexWriter iw = (IndexWriter) iwField.get(target);
tw.addCategory(new CategoryPath("test/stuff", '/'));
Document doc = new Document();
doc.add(new LongField("testfield", 1000L, Store.YES));
iw.addDocument(doc);
target.dispose();
// TEST: Initializing an index, disposing it and initializing another
// index instance on the same Directories results in loading the same
// index.
IndexManager target2 = new IndexManager(indexDirectory, taxonomyDirectory, OpenMode.APPEND);
target2.initializeIndex();
iw = (IndexWriter) iwField.get(target2);
tw = (TaxonomyWriter) twField.get(target2);
assertEquals(1, iw.numDocs());
assertEquals(3, tw.getSize());
target2.dispose();
}
/**
* Test method for {@link com.fuerve.villageelder.indexing.IndexManager#getIndexWriter()}.
*/
@Test
public final void testGetIndexWriter() throws Exception {
RAMDirectory indexDirectory = new RAMDirectory();
RAMDirectory taxonomyDirectory = new RAMDirectory();
IndexManager target = new IndexManager(indexDirectory, taxonomyDirectory);
target.initializeIndex();
Document doc = new Document();
doc.add(new LongField("testfield", 1000L, Store.YES));
target.getIndexWriter().addDocument(doc);
assertEquals(1, target.getIndexWriter().numDocs());
target.dispose();
}
/**
* Test method for {@link com.fuerve.villageelder.indexing.IndexManager#getTaxonomyWriter()}.
*/
@Test
public final void testGetTaxonomyWriter() throws Exception {
RAMDirectory indexDirectory = new RAMDirectory();
RAMDirectory taxonomyDirectory = new RAMDirectory();
IndexManager target = new IndexManager(indexDirectory, taxonomyDirectory);
target.initializeIndex();
target.getTaxonomyWriter().addCategory(new CategoryPath("test/stuff", '/'));
assertEquals(3, target.getTaxonomyWriter().getSize());
target.dispose();
}
/**
* Test method for {@link com.fuerve.villageelder.indexing.IndexManager#getAnalyzer()}.
*/
@Test
public final void testGetAnalyzer() throws Exception {
RAMDirectory indexDirectory = new RAMDirectory();
RAMDirectory taxonomyDirectory = new RAMDirectory();
IndexManager target = new IndexManager(indexDirectory, taxonomyDirectory);
target.initializeIndex();
assertTrue(target.getAnalyzer() instanceof PerFieldAnalyzerWrapper);
target.dispose();
}
}<|fim▁end|> | assertEquals(true, initActual); |
<|file_name|>is-implemented.js<|end_file_name|><|fim▁begin|>'use strict';
var re = /\|/;
module.exports = function() {<|fim▁hole|> return re.split( 'bar|foo' )[1] === 'foo';
};<|fim▁end|> | if( typeof re.split !== 'function' ) return false; |
<|file_name|>MainPresenterImpl.java<|end_file_name|><|fim▁begin|>package com.ipvans.flickrgallery.ui.main;
import android.util.Log;
import com.ipvans.flickrgallery.data.SchedulerProvider;
import com.ipvans.flickrgallery.di.PerActivity;
import com.ipvans.flickrgallery.domain.FeedInteractor;<|fim▁hole|>
import java.util.concurrent.TimeUnit;
import javax.inject.Inject;
import io.reactivex.Observable;
import io.reactivex.disposables.CompositeDisposable;
import io.reactivex.disposables.Disposable;
import io.reactivex.subjects.BehaviorSubject;
import io.reactivex.subjects.PublishSubject;
import static com.ipvans.flickrgallery.ui.main.MainViewState.*;
@PerActivity
public class MainPresenterImpl implements MainPresenter<MainViewState> {
private final FeedInteractor interactor;
private final SchedulerProvider schedulers;
private BehaviorSubject<MainViewState> stateSubject = BehaviorSubject.createDefault(empty());
private PublishSubject<UpdateEvent> searchSubject = PublishSubject.create();
private Disposable disposable = new CompositeDisposable();
@Inject
public MainPresenterImpl(FeedInteractor interactor, SchedulerProvider schedulers) {
this.interactor = interactor;
this.schedulers = schedulers;
}
@Override
public void onAttach() {
Observable.combineLatest(searchSubject
.debounce(150, TimeUnit.MILLISECONDS, schedulers.io())
.doOnNext(interactor::getFeed),
interactor.observe(),
(tags, feed) -> new MainViewState(feed.isLoading(),
feed.getError(), feed.getData(), tags.getTags()))
.withLatestFrom(stateSubject,
(newState, oldState) -> new MainViewState(
newState.isLoading(), newState.getError(),
newState.getData() != null ? newState.getData() : oldState.getData(),
newState.getTags()
))
.observeOn(schedulers.io())
.subscribeWith(stateSubject)
.onSubscribe(disposable);
}
@Override
public void onDetach() {
disposable.dispose();
}
@Override
public void restoreState(MainViewState data) {
stateSubject.onNext(data);
}
@Override
public Observable<MainViewState> observe() {
return stateSubject;
}
@Override
public MainViewState getLatestState() {
return stateSubject.getValue();
}
@Override
public void search(String tags, boolean force) {
searchSubject.onNext(new UpdateEvent(tags, force));
}
}<|fim▁end|> | import com.ipvans.flickrgallery.domain.UpdateEvent; |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>pub mod data;
pub mod value;<|fim▁hole|>pub mod builtins;
pub mod lambda;<|fim▁end|> | pub mod list;
pub mod lexer;
pub mod parser;
pub mod scope; |
<|file_name|>RecorderVideoActivity.java<|end_file_name|><|fim▁begin|>/************************************************************
* * EaseMob CONFIDENTIAL
* __________________
* Copyright (C) 2013-2014 EaseMob Technologies. All rights reserved.
*
* NOTICE: All information contained herein is, and remains
* the property of EaseMob Technologies.
* Dissemination of this information or reproduction of this material
* is strictly forbidden unless prior written permission is obtained
* from EaseMob Technologies.
*/
package com.easemob.chatuidemo.activity;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import android.annotation.SuppressLint;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.graphics.Bitmap;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.Size;
import android.media.MediaRecorder;
import android.media.MediaRecorder.OnErrorListener;
import android.media.MediaRecorder.OnInfoListener;
import android.media.MediaScannerConnection;
import android.media.MediaScannerConnection.MediaScannerConnectionClient;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.os.PowerManager;
import android.os.SystemClock;
import android.text.TextUtils;
import android.view.SurfaceHolder;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.Chronometer;
import android.widget.ImageView;
import android.widget.Toast;
import android.widget.VideoView;
import com.easemob.chatuidemo.utils.CommonUtils;
import com.easemob.chatuidemo.video.util.Utils;
import com.easemob.qixin.R;
import com.easemob.util.EMLog;
import com.easemob.util.PathUtil;
public class RecorderVideoActivity extends BaseActivity implements
OnClickListener, SurfaceHolder.Callback, OnErrorListener,
OnInfoListener {
private static final String TAG = "RecorderVideoActivity";
private final static String CLASS_LABEL = "RecordActivity";
private PowerManager.WakeLock mWakeLock;
private ImageView btnStart;// 开始录制按钮
private ImageView btnStop;// 停止录制按钮
private MediaRecorder mediaRecorder;// 录制视频的类
private VideoView mVideoView;// 显示视频的控件
String localPath = "";// 录制的视频路径
private Camera mCamera;
// 预览的宽高
private int previewWidth = 480;
private int previewHeight = 480;
private Chronometer chronometer;
private int frontCamera = 0;// 0是后置摄像头,1是前置摄像头
private Button btn_switch;
Parameters cameraParameters = null;
private SurfaceHolder mSurfaceHolder;
int defaultVideoFrameRate = -1;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);// 去掉标题栏
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);// 设置全屏
// 选择支持半透明模式,在有surfaceview的activity中使用
getWindow().setFormat(PixelFormat.TRANSLUCENT);
setContentView(R.layout.recorder_activity);
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK,
CLASS_LABEL);
mWakeLock.acquire();
initViews();
}
private void initViews() {
btn_switch = (Button) findViewById(R.id.switch_btn);
btn_switch.setOnClickListener(this);
btn_switch.setVisibility(View.VISIBLE);
mVideoView = (VideoView) findViewById(R.id.mVideoView);
btnStart = (ImageView) findViewById(R.id.recorder_start);
btnStop = (ImageView) findViewById(R.id.recorder_stop);
btnStart.setOnClickListener(this);
btnStop.setOnClickListener(this);
mSurfaceHolder = mVideoView.getHolder();
mSurfaceHolder.addCallback(this);
mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
chronometer = (Chronometer) findViewById(R.id.chronometer);
}
public void back(View view) {
releaseRecorder();
releaseCamera();
finish();
}
@Override
protected void onResume() {
super.onResume();
if (mWakeLock == null) {
// 获取唤醒锁,保持屏幕常亮
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK,
CLASS_LABEL);
mWakeLock.acquire();
}
// if (!initCamera()) {
// showFailDialog();
// }
}
@SuppressLint("NewApi")
private boolean initCamera() {
try {
if (frontCamera == 0) {
mCamera = Camera.open(CameraInfo.CAMERA_FACING_BACK);
} else {
mCamera = Camera.open(CameraInfo.CAMERA_FACING_FRONT);
}
Camera.Parameters camParams = mCamera.getParameters();
mCamera.lock();
mSurfaceHolder = mVideoView.getHolder();
mSurfaceHolder.addCallback(this);
mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setDisplayOrientation(90);
} catch (RuntimeException ex) {
EMLog.e("video", "init Camera fail " + ex.getMessage());
return false;
}
return true;
}
private void handleSurfaceChanged() {
if (mCamera == null) {
finish();
return;
}
boolean hasSupportRate = false;
List<Integer> supportedPreviewFrameRates = mCamera.getParameters()
.getSupportedPreviewFrameRates();
if (supportedPreviewFrameRates != null
&& supportedPreviewFrameRates.size() > 0) {
Collections.sort(supportedPreviewFrameRates);
for (int i = 0; i < supportedPreviewFrameRates.size(); i++) {
int supportRate = supportedPreviewFrameRates.get(i);
if (supportRate == 15) {
hasSupportRate = true;
}
}
if (hasSupportRate) {
defaultVideoFrameRate = 15;
} else {
defaultVideoFrameRate = supportedPreviewFrameRates.get(0);
}
}
// 获取摄像头的所有支持的分辨率
List<Camera.Size> resolutionList = Utils.getResolutionList(mCamera);
if (resolutionList != null && resolutionList.size() > 0) {
Collections.sort(resolutionList, new Utils.ResolutionComparator());
Camera.Size previewSize = null;
boolean hasSize = false;
// 如果摄像头支持640*480,那么强制设为640*480
for (int i = 0; i < resolutionList.size(); i++) {
Size size = resolutionList.get(i);
if (size != null && size.width == 640 && size.height == 480) {
previewSize = size;
previewWidth = previewSize.width;
previewHeight = previewSize.height;
hasSize = true;
break;
}
}
// 如果不支持设为中间的那个
if (!hasSize) {
int mediumResolution = resolutionList.size() / 2;
if (mediumResolution >= resolutionList.size())
mediumResolution = resolutionList.size() - 1;
previewSize = resolutionList.get(mediumResolution);
previewWidth = previewSize.width;
previewHeight = previewSize.height;
}
}
}
@Override
protected void onPause() {
super.onPause();
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
@Override
public void onClick(View view) {
switch (view.getId()) {
case R.id.switch_btn:
switchCamera();
break;
case R.id.recorder_start:
// start recording
if(!startRecording())
return;
Toast.makeText(this, R.string.The_video_to_start, Toast.LENGTH_SHORT).show();
btn_switch.setVisibility(View.INVISIBLE);
btnStart.setVisibility(View.INVISIBLE);
btnStart.setEnabled(false);
btnStop.setVisibility(View.VISIBLE);
// 重置其他
chronometer.setBase(SystemClock.elapsedRealtime());
chronometer.start();
break;
case R.id.recorder_stop:
btnStop.setEnabled(false);
// 停止拍摄
stopRecording();
btn_switch.setVisibility(View.VISIBLE);
chronometer.stop();
btnStart.setVisibility(View.VISIBLE);
btnStop.setVisibility(View.INVISIBLE);
new AlertDialog.Builder(this)
.setMessage(R.string.Whether_to_send)
.setPositiveButton(R.string.ok,
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog,
int which) {
dialog.dismiss();
sendVideo(null);
}
})
.setNegativeButton(R.string.cancel,
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog,
int which) {
if(localPath != null){
File file = new File(localPath);
if(file.exists())
file.delete();
}
finish();
}
}).setCancelable(false).show();
break;
default:
break;
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
// 将holder,这个holder为开始在oncreat里面取得的holder,将它赋给surfaceHolder
mSurfaceHolder = holder;
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
if (mCamera == null){
if(!initCamera()){
showFailDialog();
return;
}
}
try {
mCamera.setPreviewDisplay(mSurfaceHolder);
mCamera.startPreview();
handleSurfaceChanged();
} catch (Exception e1) {
EMLog.e("video", "start preview fail " + e1.getMessage());
showFailDialog();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder arg0) {
EMLog.v("video", "surfaceDestroyed");
}
public boolean startRecording(){
if (mediaRecorder == null){
if(!initRecorder())
return false;
}
mediaRecorder.setOnInfoListener(this);
mediaRecorder.setOnErrorListener(this);
mediaRecorder.start();
return true;
}
@SuppressLint("NewApi")
private boolean initRecorder(){
if(!CommonUtils.isExitsSdcard()){
showNoSDCardDialog();
return false;
}
if (mCamera == null) {
if(!initCamera()){
showFailDialog();
return false;
}
}
mVideoView.setVisibility(View.VISIBLE);
// TODO init button
mCamera.stopPreview();
mediaRecorder = new MediaRecorder();
mCamera.unlock();
mediaRecorder.setCamera(mCamera);
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
// 设置录制视频源为Camera(相机)
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
if (frontCamera == 1) {
mediaRecorder.setOrientationHint(270);
} else {
mediaRecorder.setOrientationHint(90);
}
// 设置录制完成后视频的封装格式THREE_GPP为3gp.MPEG_4为mp4
mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
// 设置录制的视频编码h263 h264
mediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
// 设置视频录制的分辨率。必须放在设置编码和格式的后面,否则报错
mediaRecorder.setVideoSize(previewWidth, previewHeight);
// 设置视频的比特率
mediaRecorder.setVideoEncodingBitRate(384 * 1024);
// // 设置录制的视频帧率。必须放在设置编码和格式的后面,否则报错
if (defaultVideoFrameRate != -1) {
mediaRecorder.setVideoFrameRate(defaultVideoFrameRate);
}
// 设置视频文件输出的路径
localPath = PathUtil.getInstance().getVideoPath() + "/"
+ System.currentTimeMillis() + ".mp4";
mediaRecorder.setOutputFile(localPath);
mediaRecorder.setMaxDuration(30000);
mediaRecorder.setPreviewDisplay(mSurfaceHolder.getSurface());
try {
mediaRecorder.prepare();
} catch (IllegalStateException e) {
e.printStackTrace();
return false;
} catch (IOException e) {
e.printStackTrace();
return false;
}
return true;
}
public void stopRecording() {
if (mediaRecorder != null) {
mediaRecorder.setOnErrorListener(null);
mediaRecorder.setOnInfoListener(null);
try {
mediaRecorder.stop();
} catch (IllegalStateException e) {
EMLog.e("video", "stopRecording error:" + e.getMessage());
}
}
releaseRecorder();
if (mCamera != null) {
mCamera.stopPreview();
releaseCamera();
}
}
private void releaseRecorder() {
if (mediaRecorder != null) {
mediaRecorder.release();
mediaRecorder = null;
}
}
protected void releaseCamera() {
try {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
} catch (Exception e) {
}
}
@SuppressLint("NewApi")
public void switchCamera() {
if (mCamera == null) {
return;
}
if (Camera.getNumberOfCameras() >= 2) {
btn_switch.setEnabled(false);
if (mCamera != null) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
switch (frontCamera) {
case 0:
mCamera = Camera.open(CameraInfo.CAMERA_FACING_FRONT);
frontCamera = 1;
break;
case 1:
mCamera = Camera.open(CameraInfo.CAMERA_FACING_BACK);
frontCamera = 0;
break;
}
try {
mCamera.lock();
mCamera.setDisplayOrientation(90);
mCamera.setPreviewDisplay(mVideoView.getHolder());
mCamera.startPreview();
} catch (IOException e) {
mCamera.release();
mCamera = null;
}
btn_switch.setEnabled(true);
}
}
MediaScannerConnection msc = null;
ProgressDialog progressDialog = null;
public void sendVideo(View view) {
if (TextUtils.isEmpty(localPath)) {
EMLog.e("Recorder", "recorder fail please try again!");
return;
}
if(msc == null)
msc = new MediaScannerConnection(this,
new MediaScannerConnectionClient() {
@Override
public void onScanCompleted(String path, Uri uri) {
EMLog.d(TAG, "scanner completed");
msc.disconnect();
progressDialog.dismiss();
setResult(RESULT_OK, getIntent().putExtra("uri", uri));
finish();
}
@Override
public void onMediaScannerConnected() {
msc.scanFile(localPath, "video/*");
}
});
if(progressDialog == null){
progressDialog = new ProgressDialog(this);
progressDialog.setMessage("processing...");
progressDialog.setCancelable(false);
}
progressDialog.show();<|fim▁hole|>
}
@Override
public void onInfo(MediaRecorder mr, int what, int extra) {
EMLog.v("video", "onInfo");
if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED) {
EMLog.v("video", "max duration reached");
stopRecording();
btn_switch.setVisibility(View.VISIBLE);
chronometer.stop();
btnStart.setVisibility(View.VISIBLE);
btnStop.setVisibility(View.INVISIBLE);
chronometer.stop();
if (localPath == null) {
return;
}
String st3 = getResources().getString(R.string.Whether_to_send);
new AlertDialog.Builder(this)
.setMessage(st3)
.setPositiveButton(R.string.ok,
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface arg0,
int arg1) {
arg0.dismiss();
sendVideo(null);
}
}).setNegativeButton(R.string.cancel, null)
.setCancelable(false).show();
}
}
@Override
public void onError(MediaRecorder mr, int what, int extra) {
EMLog.e("video", "recording onError:");
stopRecording();
Toast.makeText(this,
"Recording error has occurred. Stopping the recording",
Toast.LENGTH_SHORT).show();
}
public void saveBitmapFile(Bitmap bitmap) {
File file = new File(Environment.getExternalStorageDirectory(), "a.jpg");
try {
BufferedOutputStream bos = new BufferedOutputStream(
new FileOutputStream(file));
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, bos);
bos.flush();
bos.close();
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
protected void onDestroy() {
super.onDestroy();
releaseCamera();
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
@Override
public void onBackPressed() {
back(null);
}
private void showFailDialog() {
new AlertDialog.Builder(this)
.setTitle(R.string.prompt)
.setMessage(R.string.Open_the_equipment_failure)
.setPositiveButton(R.string.ok,
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog,
int which) {
finish();
}
}).setCancelable(false).show();
}
private void showNoSDCardDialog() {
new AlertDialog.Builder(this)
.setTitle(R.string.prompt)
.setMessage("No sd card!")
.setPositiveButton(R.string.ok,
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog,
int which) {
finish();
}
}).setCancelable(false).show();
}
}<|fim▁end|> | msc.connect(); |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""Utility modules used throughout the rest of the codebase.
<|fim▁hole|>Note that these modules should in general not depend (at compile time)
on any other modules, to avoid cyclic dependencies. They could be
imported (at the top level) into any other module so should not have
any top-level imports from other modules.
"""<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | from checkpy.assertlib.basic import * |
<|file_name|>page-share.module.ts<|end_file_name|><|fim▁begin|>/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {NgModule} from '@angular/core';
import {CommonModule} from '@common/common.module';
import {PagePivotComponent} from './page-pivot/page-pivot.component';
import {DatasourceService} from '../datasource/service/datasource.service';
import {ChartModule} from '@common/chart.module';
import {WidgetService} from '../dashboard/service/widget.service';
import {ImageService} from '@common/service/image.service';
import {DashboardShareModule} from '../dashboard/dashboard-share.module';
import {PageFilterPanelComponent} from './filter/filter-panel.component';
import {DashboardService} from '../dashboard/service/dashboard.service';
import {DragulaModule} from '../../lib/ng2-dragula';
import {PageDataContextComponent} from './page-data/page-data-context.component';
import {FormatItemComponent} from './chart-style/format/format-item.component';
import {DataPreviewModule} from '@common/data.preview.module';
import {AnalysisModule} from './component/analysis/analysis.module';
import {PopupValueAliasComponent} from './page-pivot/popup-value-alias.component';
import {PageComponent} from './page.component';
import {CommonOptionComponent} from './chart-style/common-option.component';
import {DataLabelOptionComponent} from './chart-style/datalabel-option.component';
import {FormatOptionComponent} from './chart-style/format-option.component';
import {LegendOptionComponent} from './chart-style/legend-option.component';
import {TooltipOptionComponent} from './chart-style/tooltip-option.component';
import {XAxisOptionComponent} from './chart-style/xaxis-option.component';
import {YAxisOptionComponent} from './chart-style/yaxis-option.component';
import {ColorOptionComponent} from './chart-style/color-option.component';
import {AxisValueOptionComponent} from './chart-style/axis-value-option.component';
import {AxisCategoryOptionComponent} from './chart-style/axis-category-option.component';
import {CalculatedRowOptionComponent} from './chart-style/calc-option.component';
import {SecondaryIndicatorComponent} from './chart-style/secondary-indicator.component';
import {MetadataService} from '../meta-data-management/metadata/service/metadata.service';
import {MapPagePivotComponent} from './page-pivot/map/map-page-pivot.component';
import {MapLayerOptionComponent} from './chart-style/map/map-layer-option.component';
import {MapLegendOptionComponent} from './chart-style/map/map-legend-option.component';
import {MapFormatOptionComponent} from './chart-style/map/map-format-option.component';
import {MapTooltipOptionComponent} from './chart-style/map/map-tooltip-option.component';
import {MapCommonOptionComponent} from './chart-style/map/map-common-option.component';
import {PivotContextComponent} from './page-pivot/pivot-context.component';
import {SecondaryAxisOptionComponent} from './chart-style/secondary-axis-option.component';
import {CalculatedOptionSliderComponent} from './chart-style/calc-option-slider.component';
import {PagePivotFormatComponent} from './page-pivot/page-pivot-format.component';
import {ColorTemplateComponent} from './component/color/color-template.component';
@NgModule({
imports: [
CommonModule,
DragulaModule,
ChartModule,
DashboardShareModule,
DataPreviewModule,
AnalysisModule
],
declarations: [
PageComponent,
PagePivotComponent,
FormatItemComponent,
PageFilterPanelComponent,
PageDataContextComponent,
PopupValueAliasComponent,
CommonOptionComponent,
LegendOptionComponent,<|fim▁hole|> XAxisOptionComponent,
YAxisOptionComponent,
SecondaryAxisOptionComponent,
AxisValueOptionComponent,
AxisCategoryOptionComponent,
DataLabelOptionComponent,
TooltipOptionComponent,
SecondaryIndicatorComponent,
FormatOptionComponent,
ColorOptionComponent,
CalculatedRowOptionComponent,
CalculatedOptionSliderComponent,
MapPagePivotComponent,
MapCommonOptionComponent,
MapLayerOptionComponent,
MapLegendOptionComponent,
MapTooltipOptionComponent,
MapFormatOptionComponent,
PivotContextComponent,
PagePivotFormatComponent,
ColorTemplateComponent
],
exports: [
PageComponent,
PagePivotComponent,
FormatItemComponent,
PageFilterPanelComponent,
PageDataContextComponent,
PopupValueAliasComponent,
CommonOptionComponent,
LegendOptionComponent,
XAxisOptionComponent,
YAxisOptionComponent,
SecondaryAxisOptionComponent,
AxisValueOptionComponent,
AxisCategoryOptionComponent,
DataLabelOptionComponent,
TooltipOptionComponent,
SecondaryIndicatorComponent,
FormatOptionComponent,
ColorOptionComponent,
CalculatedRowOptionComponent,
CalculatedOptionSliderComponent,
MapPagePivotComponent,
MapCommonOptionComponent,
MapLayerOptionComponent,
MapLegendOptionComponent,
MapTooltipOptionComponent,
MapFormatOptionComponent,
PivotContextComponent
],
providers: [
DatasourceService,
DashboardService,
MetadataService,
WidgetService,
ImageService
]
})
export class PageShareModule {
}<|fim▁end|> | |
<|file_name|>textarea.sandbox.ts<|end_file_name|><|fim▁begin|>import Vue, { PluginObject } from 'vue';
import { Component } from 'vue-property-decorator';
import { TEXTAREA_NAME } from '../component-names';<|fim▁hole|>
@WithRender
@Component
export class MTextareaSandbox extends Vue {
public test4Model: string = '';
}
const TextareaSandboxPlugin: PluginObject<any> = {
install(v, options): void {
v.use(TextareaPlugin);
v.component(`${TEXTAREA_NAME}-sandbox`, MTextareaSandbox);
}
};
export default TextareaSandboxPlugin;<|fim▁end|> | import WithRender from './textarea.sandbox.html';
import TextareaPlugin from './textarea'; |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
description='RESTful Nagios/Icinga Livestatus API',
author='Christoph Oelmueller',
url='https://github.com/zwopiR/lsapi',
download_url='https://github.com/zwopiR/lsapi',
author_email='[email protected]',
version='0.1',
install_requires=['flask', 'ConfigParser'],
tests_require=['mock', 'nose'],
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
scripts=[],
name='lsapi'<|fim▁hole|><|fim▁end|> | ) |
<|file_name|>0013_auto__add_field_tttrip_shape.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'TtTrip.shape'
db.add_column(u'timetable_tttrip', 'shape',
self.gf('django.db.models.fields.related.ForeignKey')(to=orm['timetable.TtShape'], null=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'TtTrip.shape'
db.delete_column(u'timetable_tttrip', 'shape_id')
models = {
u'timetable.ttshape': {<|fim▁hole|> 'Meta': {'object_name': 'TtShape'},
'gtfs_shape_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'points': ('django.db.models.fields.TextField', [], {})
},
u'timetable.ttstop': {
'Meta': {'object_name': 'TtStop'},
'gtfs_stop_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'stop_lat': ('django.db.models.fields.FloatField', [], {}),
'stop_lon': ('django.db.models.fields.FloatField', [], {}),
'stop_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'stop_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
u'timetable.ttstoptime': {
'Meta': {'object_name': 'TtStopTime'},
'exp_arrival': ('django.db.models.fields.DateTimeField', [], {}),
'exp_departure': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'stop': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['timetable.TtStop']"}),
'stop_sequence': ('django.db.models.fields.IntegerField', [], {}),
'trip': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['timetable.TtTrip']"})
},
u'timetable.tttrip': {
'Meta': {'object_name': 'TtTrip'},
'date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'gtfs_trip_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'shape': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['timetable.TtShape']", 'null': 'True'})
}
}
complete_apps = ['timetable']<|fim▁end|> | |
<|file_name|>Pipelines.java<|end_file_name|><|fim▁begin|>/**
* Copyright (C) 2015 Agro-Know, Deutsches Forschungszentrum für Künstliche Intelligenz, iMinds,
* Institut für Angewandte Informatik e. V. an der Universität Leipzig,
* Istituto Superiore Mario Boella, Tilde, Vistatec, WRIPL (http://freme-project.eu)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.freme.broker.eservices;
import com.google.gson.JsonSyntaxException;
import com.mashape.unirest.http.exceptions.UnirestException;
import eu.freme.broker.exception.*;
import eu.freme.common.conversion.rdf.RDFConstants;
import eu.freme.common.exception.OwnedResourceNotFoundException;
import eu.freme.common.persistence.dao.PipelineDAO;
import eu.freme.common.persistence.dao.UserDAO;
import eu.freme.common.persistence.model.OwnedResource;
import eu.freme.common.persistence.model.Pipeline;
import eu.freme.common.persistence.model.User;
import eu.freme.eservices.pipelines.core.PipelineResponse;
import eu.freme.eservices.pipelines.core.PipelineService;
import eu.freme.eservices.pipelines.core.ServiceException;
import eu.freme.eservices.pipelines.core.WrappedPipelineResponse;
import eu.freme.eservices.pipelines.requests.RequestBuilder;
import eu.freme.eservices.pipelines.requests.RequestFactory;
import eu.freme.eservices.pipelines.requests.SerializedRequest;
import eu.freme.eservices.pipelines.serialization.Serializer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Profile;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.access.annotation.Secured;
import org.springframework.security.authentication.InsufficientAuthenticationException;
import org.springframework.util.MultiValueMap;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author Gerald Haesendonck
*/
@RestController
@SuppressWarnings("unused")
@Profile("broker")
public class Pipelines extends BaseRestController {
@Autowired
PipelineService pipelineAPI;
@Autowired
PipelineDAO pipelineDAO;
@Autowired
UserDAO userDAO;
/**
* <p>Calls the pipelining service.</p>
* <p>Some predefined Requests can be formed using the class {@link RequestFactory}. It also converts request objects
* from and to JSON.</p>
* <p><To create custom requests, use the {@link RequestBuilder}.</p>
* <p>Examples can be found in the unit tests in {@link eu/freme/broker/integration_tests/pipelines}.</p>
* @param requests The requests to send to the service.
* @param stats If "true": wrap the response of the last request and add timing statistics.
* @return The response of the last request.
* @throws BadRequestException The contents of the request is not valid.
* @throws InternalServerErrorException Something goes wrong that shouldn't go wrong.
*/
@RequestMapping(value = "pipelining/chain",
method = RequestMethod.POST,
consumes = "application/json",
produces = {"text/turtle", "application/json", "application/ld+json", "application/n-triples", "application/rdf+xml", "text/n3", "text/html"}
)
@Secured({"ROLE_USER", "ROLE_ADMIN"})
public ResponseEntity<String> pipeline(@RequestBody String requests, @RequestParam (value = "stats", defaultValue = "false", required = false) String stats) {
try {
boolean wrapResult = Boolean.parseBoolean(stats);
List<SerializedRequest> serializedRequests = Serializer.fromJson(requests);
WrappedPipelineResponse pipelineResult = pipelineAPI.chain(serializedRequests);
MultiValueMap<String, String> headers = new HttpHeaders();
if (wrapResult) {
headers.add(HttpHeaders.CONTENT_TYPE, RDFConstants.RDFSerialization.JSON.contentType());
return new ResponseEntity<>(Serializer.toJson(pipelineResult), headers, HttpStatus.OK);
} else {
headers.add(HttpHeaders.CONTENT_TYPE, pipelineResult.getContent().getContentType());
PipelineResponse lastResponse = pipelineResult.getContent();
return new ResponseEntity<>(lastResponse.getBody(), headers, HttpStatus.OK);
}
} catch (ServiceException serviceError) {
// TODO: see if this can be replaced by excsption(s) defined in the broker.
logger.error(serviceError.getMessage(), serviceError);
MultiValueMap<String, String> headers = new HttpHeaders();
headers.add(HttpHeaders.CONTENT_TYPE, serviceError.getResponse().getContentType());
return new ResponseEntity<>(serviceError.getMessage(), headers, serviceError.getStatus());
} catch (JsonSyntaxException jsonException) {
logger.error(jsonException.getMessage(), jsonException);
String errormsg = jsonException.getCause() != null ? jsonException.getCause().getMessage() : jsonException.getMessage();
throw new BadRequestException("Error detected in the JSON body contents: " + errormsg);
} catch (UnirestException unirestException) {
logger.error(unirestException.getMessage(), unirestException);
throw new BadRequestException(unirestException.getMessage());
} catch (Throwable t) {
logger.error(t.getMessage(), t);
// throw an Internal Server exception if anything goes really wrong...
throw new InternalServerErrorException(t.getMessage());
}
}
/**
* Calls the pipelining service using an existing template.
* @param body The contents to send to the pipeline. This can be a NIF or plain text document.
* @param id The id of the pipeline template to use.
* @param stats If "true": wrap the response of the last request and add timing statistics.
* @return The response of the latest request defined in the template.
* @throws AccessDeniedException The pipeline template is not visible by the current user.
* @throws BadRequestException The contents of the request is not valid.
* @throws InternalServerErrorException Something goes wrong that shouldn't go wrong.
* @throws TemplateNotFoundException The pipeline template does not exist.
*/
@RequestMapping(value = "pipelining/chain/{id}",
method = RequestMethod.POST,
consumes = {"text/turtle", "application/json", "application/ld+json", "application/n-triples", "application/rdf+xml", "text/n3", "text/plain"},
produces = {"text/turtle", "application/json", "application/ld+json", "application/n-triples", "application/rdf+xml", "text/n3"}
)
public ResponseEntity<String> pipeline(@RequestBody String body, @PathVariable long id, @RequestParam (value = "stats", defaultValue = "false", required = false) String stats) {
try {
Pipeline pipeline = pipelineDAO.findOneById(id);
List<SerializedRequest> serializedRequests = Serializer.fromJson(pipeline.getSerializedRequests());
serializedRequests.get(0).setBody(body);
return pipeline(Serializer.toJson(serializedRequests), stats);
} catch (org.springframework.security.access.AccessDeniedException | InsufficientAuthenticationException ex) {
logger.error(ex.getMessage(), ex);
throw new AccessDeniedException(ex.getMessage());
} catch (JsonSyntaxException jsonException) {
logger.error(jsonException.getMessage(), jsonException);
String errormsg = jsonException.getCause() != null ? jsonException.getCause().getMessage() : jsonException.getMessage();
throw new BadRequestException("Error detected in the JSON body contents: " + errormsg);
} catch (OwnedResourceNotFoundException ex) {
logger.error(ex.getMessage(), ex);
throw new TemplateNotFoundException("Could not find the pipeline template with id " + id);
}
}
/**
* Creates and stores a pipeline template.
* @param pipelineInfo A JSON string containing the fields "label", "description", "serializedRequests", which
* define the pipeline template.
* @param visibility The visibility of the template. Can be {@literal PUBLIC} or {@literal PRIVATE}. PUBLIC means visible to anyone,
* PRIVATE means only visible to the currently authenticated user.
* @param persist {@literal true}: store the template until deleted by someone, {@literal false} to guarantee
* it to be stored for one week.
* @return A JSON string containing the full pipeline info, i.e. the fields "id", "label", "description",
* "persist", "visibility", "owner", "serializedRequests".
* @throws AccessDeniedException The pipeline template is not visible by the current user.
* @throws BadRequestException The contents of the request is not valid.
* @throws InternalServerErrorException Something goes wrong that shouldn't go wrong.
*/
@RequestMapping(value = "pipelining/templates",
method = RequestMethod.POST,
consumes = "application/json",
produces = "application/json"
)
@Secured({"ROLE_USER", "ROLE_ADMIN"})
public ResponseEntity<String> create(
@RequestBody String pipelineInfo,
@RequestParam(value = "visibility", required = false) String visibility,
@RequestParam (value = "persist", defaultValue = "false", required = false) String persist
) {
try {
// just to perform a first validation of the pipeline...
eu.freme.eservices.pipelines.serialization.Pipeline pipelineInfoObj = Serializer.templateFromJson(pipelineInfo);
//List<SerializedRequest> serializedRequests = RequestFactory.fromJson(requests);
boolean toPersist = Boolean.parseBoolean(persist);
Pipeline pipeline = new Pipeline(
OwnedResource.Visibility.getByString(visibility),
pipelineInfoObj.getLabel(),
pipelineInfoObj.getDescription(),
Serializer.toJson(pipelineInfoObj.getSerializedRequests()),
toPersist);
pipeline = pipelineDAO.save(pipeline);
String response = Serializer.toJson(pipeline);
return createOKJSONResponse(response);
} catch (JsonSyntaxException jsonException) {
logger.error(jsonException.getMessage(), jsonException);
String errormsg = jsonException.getCause() != null ? jsonException.getCause().getMessage() : jsonException.getMessage();
throw new BadRequestException("Error detected in the JSON body contents: " + errormsg);
} catch (eu.freme.common.exception.BadRequestException e) {
logger.error(e.getMessage(), e);
throw new BadRequestException(e.getMessage());
} catch (org.springframework.security.access.AccessDeniedException | InsufficientAuthenticationException ex) {
logger.error(ex.getMessage(), ex);
throw new AccessDeniedException(ex.getMessage());
} catch (Throwable t) {
logger.error(t.getMessage(), t);
// throw an Internal Server exception if anything goes really wrong...
throw new InternalServerErrorException(t.getMessage());
}
}
/**
* Updates an existing pipeline template.
* @param id The id of the pipeline template to update.
* @param ownerName The name of the new owner.
* @param visibility The visibility of the template. Can be {@literal PUBLIC} or {@literal PRIVATE}. PUBLIC means visible to anyone,
* PRIVATE means only visible to the currently authenticated user.
* @param persist {@literal true}: store the template until deleted by someone, {@literal false} to guarantee
* it to be stored for one week.
* @param pipelineInfo A JSON string containing updated pipeline template info. The fields "label", "description", "serializedRequests"
* define the pipeline template.
* @return A JSON string containing the updated full pipeline info, i.e. the fields "id", "label", "description",
* "persist", "visibility", "owner", "serializedRequests".
* @throws ForbiddenException The pipeline template is not visible by the current user.
* @throws BadRequestException The contents of the request is not valid.
* @throws TemplateNotFoundException The pipeline template does not exist.
* @throws InternalServerErrorException Something goes wrong that shouldn't go wrong.
*/
@RequestMapping(
value = "pipelining/templates/{id}",
method = RequestMethod.PUT,
consumes = "application/json",
produces = "application/json"
)
public ResponseEntity<String> update(
@PathVariable(value = "id") long id,
@RequestParam(value = "owner", required=false) String ownerName,
@RequestParam(value = "visibility", required = false) String visibility,
@RequestParam(value = "persist", required = false) String persist,
@RequestBody(required = false) String pipelineInfo
) {
try {
Pipeline pipeline = pipelineDAO.findOneById(id);
if (pipelineInfo != null && !pipelineInfo.isEmpty()) {
eu.freme.eservices.pipelines.serialization.Pipeline pipelineInfoObj = Serializer.templateFromJson(pipelineInfo);
String newLabel = pipelineInfoObj.getLabel();
if (newLabel != null && !newLabel.equals(pipeline.getLabel())) {
pipeline.setLabel(newLabel);
}
String newDescription = pipelineInfoObj.getDescription();
if (newDescription != null && !newDescription.equals(pipeline.getDescription())) {
pipeline.setDescription(newDescription);
}
List<SerializedRequest> oldRequests = Serializer.fromJson(pipeline.getSerializedRequests());
List<SerializedRequest> newRequests = pipelineInfoObj.getSerializedRequests();
if (newRequests != null && !newRequests.equals(oldRequests)) {
pipeline.setSerializedRequests(Serializer.toJson(newRequests));
}
}
if (visibility != null && !visibility.equals(pipeline.getVisibility().name())) {
pipeline.setVisibility(OwnedResource.Visibility.getByString(visibility));
}
if (persist != null) {
boolean toPersist = Boolean.parseBoolean(persist);
if (toPersist != pipeline.isPersistent()) {
pipeline.setPersist(toPersist);
}
}
if (ownerName != null && !ownerName.equals(pipeline.getOwner().getName())) {
User newOwner = userDAO.getRepository().findOneByName(ownerName);
if (newOwner == null) {
throw new BadRequestException("Can not change owner of the dataset. User \"" + ownerName + "\" does not exist.");
}
pipeline.setOwner(newOwner);
}
pipeline = pipelineDAO.save(pipeline);
String response = Serializer.toJson(pipeline);
return createOKJSONResponse(response);
} catch (org.springframework.security.access.AccessDeniedException | InsufficientAuthenticationException ex) {
logger.error(ex.getMessage(), ex);
throw new ForbiddenException(ex.getMessage());
} catch (OwnedResourceNotFoundException ex) {
logger.error(ex.getMessage(), ex);
throw new TemplateNotFoundException("Could not find the pipeline template with id " + id);
} catch (JsonSyntaxException jsonException) {
logger.error(jsonException.getMessage(), jsonException);
String errormsg = jsonException.getCause() != null ? jsonException.getCause().getMessage() : jsonException.getMessage();
throw new BadRequestException("Error detected in the JSON body contents: " + errormsg);
} catch (eu.freme.common.exception.BadRequestException e) {
logger.error(e.getMessage(), e);
throw new BadRequestException(e.getMessage());
} catch (Throwable t) {
logger.error(t.getMessage(), t);
// throw an Internal Server exception if anything goes really wrong...
throw new InternalServerErrorException(t.getMessage());
}
}
/**
* Reads (gets) the pipeline template with the given id.
* @param id The id of the pipeline template to get.
* @return The pipeline templatewith the given id as a JSON string.
* @throws AccessDeniedException The pipeline template is not visible by the current user.
* @throws TemplateNotFoundException The pipeline template does not exist.
* @throws InternalServerErrorException Something goes wrong that shouldn't go wrong.
*/
@RequestMapping(
value = "pipelining/templates/{id}",
method = RequestMethod.GET,
produces = "application/json"
)
@Secured({"ROLE_USER", "ROLE_ADMIN"})
public ResponseEntity<String> read(@PathVariable(value = "id") long id) {
try {
Pipeline pipeline = pipelineDAO.findOneById(id);
String serializedPipeline = Serializer.toJson(pipeline);
return createOKJSONResponse(serializedPipeline);
} catch (org.springframework.security.access.AccessDeniedException | InsufficientAuthenticationException ex) {
logger.error(ex.getMessage(), ex);
throw new AccessDeniedException(ex.getMessage());
} catch (OwnedResourceNotFoundException ex) {
logger.error(ex.getMessage(), ex);
throw new TemplateNotFoundException("Could not find the pipeline template with id " + id);
} catch (Throwable t) {
logger.error(t.getMessage(), t);
// throw an Internal Server exception if anything goes really wrong...
throw new InternalServerErrorException(t.getMessage());
}
}
/**
* Reads (gets) all visible pipelines.
* @return all visible pipelines as a JSON string.
*/
@RequestMapping(
value = "pipelining/templates",
method = RequestMethod.GET,
produces = "application/json"
)
@Secured({"ROLE_USER", "ROLE_ADMIN"})
public ResponseEntity<String> read() {
try {
List<Pipeline> readablePipelines = pipelineDAO.findAllReadAccessible();
String serializedPipelines = Serializer.templatesToJson(readablePipelines);
return createOKJSONResponse(serializedPipelines);
} catch (Throwable t) {
logger.error(t.getMessage(), t);
// throw an Internal Server exception if anything goes really wrong...
throw new InternalServerErrorException(t.getMessage());
}
}
/**
* Deletes the pipeline template with the given id.
* @param id The id of the template to delete.
* @return The message "The pipeline was sucessfully removed."
* @throws ForbiddenException The pipeline template cannot be deleted by the current user.
* @throws TemplateNotFoundException The pipeline template does not exist.
* @throws InternalServerErrorException Something goes wrong that shouldn't go wrong.
*/
@RequestMapping(
value = "pipelining/templates/{id}",
method = RequestMethod.DELETE
)
@Secured({"ROLE_USER", "ROLE_ADMIN"})
public ResponseEntity<String> delete(@PathVariable("id") long id) {
try {
pipelineDAO.delete(pipelineDAO.findOneById(id));
return new ResponseEntity<>("The pipeline was sucessfully removed.", HttpStatus.OK);<|fim▁hole|> } catch (OwnedResourceNotFoundException ex) {
logger.error(ex.getMessage(), ex);
throw new TemplateNotFoundException("Could not find the pipeline template with id " + id);
} catch (Throwable t) {
logger.error(t.getMessage(), t);
// throw an Internal Server exception if anything goes really wrong...
throw new InternalServerErrorException(t.getMessage());
}
}
private ResponseEntity<String> createOKJSONResponse(final String contents) {
MultiValueMap<String, String> headers = new HttpHeaders();
headers.add(HttpHeaders.CONTENT_TYPE, RDFConstants.RDFSerialization.JSON.contentType());
return new ResponseEntity<>(contents, headers, HttpStatus.OK);
}
}<|fim▁end|> | } catch (org.springframework.security.access.AccessDeniedException | InsufficientAuthenticationException ex) {
logger.error(ex.getMessage(), ex);
throw new ForbiddenException(ex.getMessage()); |
<|file_name|>searchbar-compiled.js<|end_file_name|><|fim▁begin|>'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
var SearchBar = function (_Component) {
_inherits(SearchBar, _Component);<|fim▁hole|>
function SearchBar(props) {
_classCallCheck(this, SearchBar);
var _this = _possibleConstructorReturn(this, (SearchBar.__proto__ || Object.getPrototypeOf(SearchBar)).call(this, props));
_this.state = { term: '' };
_this.onInputChange = _this.onInputChange.bind(_this);
return _this;
}
_createClass(SearchBar, [{
key: 'onInputChange',
value: function onInputChange(term) {
this.setState({ term: term });
this.props.getResults(term);
}
}, {
key: 'render',
value: function render() {
var _this2 = this;
return _react2.default.createElement(
'div',
{ className: 'searchBarContainer' },
_react2.default.createElement('input', { type: 'text',
value: this.state.term,
onChange: function onChange(event) {
return _this2.onInputChange(event.target.value);
}
})
);
}
}]);
return SearchBar;
}(_react.Component);
exports.default = SearchBar;
{/*<button><i id="searchIcon" className="fa fa-search" /></button>*/}
//# sourceMappingURL=searchbar-compiled.js.map<|fim▁end|> | |
<|file_name|>view.py<|end_file_name|><|fim▁begin|># Copyright (C) 2013, Walter Bender - Raul Gutierrez Segales
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from gettext import gettext as _
from gi.repository import GLib
from gi.repository import Gtk
from gi.repository import Gdk
from jarabe.webservice.accountsmanager import get_webaccount_services
from jarabe.controlpanel.sectionview import SectionView
from sugar3.graphics.icon import CanvasIcon, Icon
from sugar3.graphics import style
def get_service_name(service):
if hasattr(service, '_account'):
if hasattr(service._account, 'get_description'):
return service._account.get_description()
return ''
class WebServicesConfig(SectionView):
def __init__(self, model, alerts):
SectionView.__init__(self)
self._model = model
self.restart_alerts = alerts
services = get_webaccount_services()
grid = Gtk.Grid()
if len(services) == 0:
grid.set_row_spacing(style.DEFAULT_SPACING)
icon = Icon(pixel_size=style.LARGE_ICON_SIZE,
icon_name='module-webaccount',
stroke_color=style.COLOR_BUTTON_GREY.get_svg(),
fill_color=style.COLOR_TRANSPARENT.get_svg())
grid.attach(icon, 0, 0, 1, 1)
icon.show()
label = Gtk.Label()
label.set_justify(Gtk.Justification.CENTER)
label.set_markup(
'<span foreground="%s" size="large">%s</span>'
% (style.COLOR_BUTTON_GREY.get_html(),
GLib.markup_escape_text(
_('No web services are installed.\n'
'Please visit %s for more details.' %
'http://wiki.sugarlabs.org/go/WebServices'))))
label.show()
grid.attach(label, 0, 1, 1, 1)
alignment = Gtk.Alignment.new(0.5, 0.5, 0.1, 0.1)
alignment.add(grid)
grid.show()
self.add(alignment)
alignment.show()
return
grid.set_row_spacing(style.DEFAULT_SPACING * 4)
grid.set_column_spacing(style.DEFAULT_SPACING * 4)
grid.set_border_width(style.DEFAULT_SPACING * 2)
grid.set_column_homogeneous(True)
width = Gdk.Screen.width() - 2 * style.GRID_CELL_SIZE
nx = int(width / (style.GRID_CELL_SIZE + style.DEFAULT_SPACING * 4))
self._service_config_box = Gtk.VBox()
x = 0
y = 0<|fim▁hole|> icon.show()
service_grid.attach(icon, x, y, 1, 1)
icon.connect('activate', service.config_service_cb, None,
self._service_config_box)
label = Gtk.Label()
label.set_justify(Gtk.Justification.CENTER)
name = get_service_name(service)
label.set_markup(name)
service_grid.attach(label, x, y + 1, 1, 1)
label.show()
grid.attach(service_grid, x, y, 1, 1)
service_grid.show()
x += 1
if x == nx:
x = 0
y += 1
alignment = Gtk.Alignment.new(0.5, 0, 0, 0)
alignment.add(grid)
grid.show()
vbox = Gtk.VBox()
vbox.pack_start(alignment, False, False, 0)
alignment.show()
scrolled = Gtk.ScrolledWindow()
vbox.pack_start(scrolled, True, True, 0)
self.add(vbox)
scrolled.set_policy(Gtk.PolicyType.NEVER, Gtk.PolicyType.AUTOMATIC)
scrolled.show()
workspace = Gtk.VBox()
scrolled.add_with_viewport(workspace)
workspace.show()
workspace.add(self._service_config_box)
workspace.show_all()
vbox.show()
def undo(self):
pass<|fim▁end|> | for service in services:
service_grid = Gtk.Grid()
icon = CanvasIcon(icon_name=service.get_icon_name()) |
<|file_name|>prediction-gen.go<|end_file_name|><|fim▁begin|>// Package prediction provides access to the Prediction API.
//
// See https://developers.google.com/prediction/docs/developer-guide
//
// Usage example:
//
// import "google.golang.org/api/prediction/v1.4"
// ...
// predictionService, err := prediction.New(oauthHttpClient)
package prediction // import "google.golang.org/api/prediction/v1.4"
import (
"bytes"
"encoding/json"
"errors"
"fmt"
context "golang.org/x/net/context"
ctxhttp "golang.org/x/net/context/ctxhttp"
gensupport "google.golang.org/api/gensupport"
googleapi "google.golang.org/api/googleapi"
"io"
"net/http"
"net/url"
"strconv"
"strings"
)
// Always reference these packages, just in case the auto-generated code
// below doesn't.
var _ = bytes.NewBuffer
var _ = strconv.Itoa
var _ = fmt.Sprintf
var _ = json.NewDecoder
var _ = io.Copy
var _ = url.Parse
var _ = gensupport.MarshalJSON
var _ = googleapi.Version
var _ = errors.New
var _ = strings.Replace
var _ = context.Canceled
var _ = ctxhttp.Do
const apiId = "prediction:v1.4"
const apiName = "prediction"
const apiVersion = "v1.4"
const basePath = "https://www.googleapis.com/prediction/v1.4/"
// OAuth2 scopes used by this API.
const (
// Manage your data and permissions in Google Cloud Storage
DevstorageFullControlScope = "https://www.googleapis.com/auth/devstorage.full_control"
// View your data in Google Cloud Storage
DevstorageReadOnlyScope = "https://www.googleapis.com/auth/devstorage.read_only"
// Manage your data in Google Cloud Storage
DevstorageReadWriteScope = "https://www.googleapis.com/auth/devstorage.read_write"
// Manage your data in the Google Prediction API
PredictionScope = "https://www.googleapis.com/auth/prediction"
)
func New(client *http.Client) (*Service, error) {
if client == nil {
return nil, errors.New("client is nil")
}
s := &Service{client: client, BasePath: basePath}
s.Hostedmodels = NewHostedmodelsService(s)
s.Trainedmodels = NewTrainedmodelsService(s)
return s, nil
}
type Service struct {
client *http.Client
BasePath string // API endpoint base URL
UserAgent string // optional additional User-Agent fragment
Hostedmodels *HostedmodelsService
Trainedmodels *TrainedmodelsService
}
func (s *Service) userAgent() string {
if s.UserAgent == "" {
return googleapi.UserAgent
}
return googleapi.UserAgent + " " + s.UserAgent
}
func NewHostedmodelsService(s *Service) *HostedmodelsService {
rs := &HostedmodelsService{s: s}
return rs
}
type HostedmodelsService struct {
s *Service
}
func NewTrainedmodelsService(s *Service) *TrainedmodelsService {
rs := &TrainedmodelsService{s: s}
return rs
}
type TrainedmodelsService struct {
s *Service
}
type Input struct {
// Input: Input to the model for a prediction
Input *InputInput `json:"input,omitempty"`
// ForceSendFields is a list of field names (e.g. "Input") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
}
func (s *Input) MarshalJSON() ([]byte, error) {
type noMethod Input
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields)
}
// InputInput: Input to the model for a prediction
type InputInput struct {
// CsvInstance: A list of input features, these can be strings or
// doubles.
CsvInstance []interface{} `json:"csvInstance,omitempty"`
// ForceSendFields is a list of field names (e.g. "CsvInstance") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
}
func (s *InputInput) MarshalJSON() ([]byte, error) {
type noMethod InputInput
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields)
}
type Output struct {
// Id: The unique name for the predictive model.
Id string `json:"id,omitempty"`
// Kind: What kind of resource this is.
Kind string `json:"kind,omitempty"`
// OutputLabel: The most likely class label [Categorical models only].
OutputLabel string `json:"outputLabel,omitempty"`
// OutputMulti: A list of class labels with their estimated
// probabilities [Categorical models only].
OutputMulti []*OutputOutputMulti `json:"outputMulti,omitempty"`
// OutputValue: The estimated regression value [Regression models only].
OutputValue float64 `json:"outputValue,omitempty"`
// SelfLink: A URL to re-request this resource.
SelfLink string `json:"selfLink,omitempty"`
// ServerResponse contains the HTTP response code and headers from the
// server.
googleapi.ServerResponse `json:"-"`
// ForceSendFields is a list of field names (e.g. "Id") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
}
func (s *Output) MarshalJSON() ([]byte, error) {
type noMethod Output
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields)
}
type OutputOutputMulti struct {
// Label: The class label.
Label string `json:"label,omitempty"`
// Score: The probability of the class label.
Score float64 `json:"score,omitempty"`
// ForceSendFields is a list of field names (e.g. "Label") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
}
func (s *OutputOutputMulti) MarshalJSON() ([]byte, error) {
type noMethod OutputOutputMulti
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields)
}
type Training struct {
// DataAnalysis: Data Analysis.
DataAnalysis *TrainingDataAnalysis `json:"dataAnalysis,omitempty"`
// Id: The unique name for the predictive model.
Id string `json:"id,omitempty"`
// Kind: What kind of resource this is.
Kind string `json:"kind,omitempty"`
// ModelInfo: Model metadata.
ModelInfo *TrainingModelInfo `json:"modelInfo,omitempty"`
// SelfLink: A URL to re-request this resource.
SelfLink string `json:"selfLink,omitempty"`
// StorageDataLocation: Google storage location of the training data
// file.
StorageDataLocation string `json:"storageDataLocation,omitempty"`
// StoragePMMLLocation: Google storage location of the preprocessing
// pmml file.
StoragePMMLLocation string `json:"storagePMMLLocation,omitempty"`
// StoragePMMLModelLocation: Google storage location of the pmml model
// file.
StoragePMMLModelLocation string `json:"storagePMMLModelLocation,omitempty"`
// TrainingStatus: The current status of the training job. This can be
// one of following: RUNNING; DONE; ERROR; ERROR: TRAINING JOB NOT FOUND
TrainingStatus string `json:"trainingStatus,omitempty"`
// Utility: A class weighting function, which allows the importance
// weights for class labels to be specified [Categorical models only].
Utility []*TrainingUtility `json:"utility,omitempty"`
<|fim▁hole|> // ServerResponse contains the HTTP response code and headers from the
// server.
googleapi.ServerResponse `json:"-"`
// ForceSendFields is a list of field names (e.g. "DataAnalysis") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
}
func (s *Training) MarshalJSON() ([]byte, error) {
type noMethod Training
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields)
}
// TrainingDataAnalysis: Data Analysis.
type TrainingDataAnalysis struct {
Warnings []string `json:"warnings,omitempty"`
// ForceSendFields is a list of field names (e.g. "Warnings") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
}
func (s *TrainingDataAnalysis) MarshalJSON() ([]byte, error) {
type noMethod TrainingDataAnalysis
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields)
}
// TrainingModelInfo: Model metadata.
type TrainingModelInfo struct {
// ClassWeightedAccuracy: Estimated accuracy of model taking utility
// weights into account [Categorical models only].
ClassWeightedAccuracy float64 `json:"classWeightedAccuracy,omitempty"`
// ClassificationAccuracy: A number between 0.0 and 1.0, where 1.0 is
// 100% accurate. This is an estimate, based on the amount and quality
// of the training data, of the estimated prediction accuracy. You can
// use this is a guide to decide whether the results are accurate enough
// for your needs. This estimate will be more reliable if your real
// input data is similar to your training data [Categorical models
// only].
ClassificationAccuracy float64 `json:"classificationAccuracy,omitempty"`
// ConfusionMatrix: An output confusion matrix. This shows an estimate
// for how this model will do in predictions. This is first indexed by
// the true class label. For each true class label, this provides a pair
// {predicted_label, count}, where count is the estimated number of
// times the model will predict the predicted label given the true
// label. Will not output if more then 100 classes [Categorical models
// only].
ConfusionMatrix *TrainingModelInfoConfusionMatrix `json:"confusionMatrix,omitempty"`
// ConfusionMatrixRowTotals: A list of the confusion matrix row totals
ConfusionMatrixRowTotals *TrainingModelInfoConfusionMatrixRowTotals `json:"confusionMatrixRowTotals,omitempty"`
// MeanSquaredError: An estimated mean squared error. The can be used to
// measure the quality of the predicted model [Regression models only].
MeanSquaredError float64 `json:"meanSquaredError,omitempty"`
// ModelType: Type of predictive model (CLASSIFICATION or REGRESSION)
ModelType string `json:"modelType,omitempty"`
// NumberInstances: Number of valid data instances used in the trained
// model.
NumberInstances int64 `json:"numberInstances,omitempty,string"`
// NumberLabels: Number of class labels in the trained model
// [Categorical models only].
NumberLabels int64 `json:"numberLabels,omitempty,string"`
// ForceSendFields is a list of field names (e.g.
// "ClassWeightedAccuracy") to unconditionally include in API requests.
// By default, fields with empty values are omitted from API requests.
// However, any non-pointer, non-interface field appearing in
// ForceSendFields will be sent to the server regardless of whether the
// field is empty or not. This may be used to include empty fields in
// Patch requests.
ForceSendFields []string `json:"-"`
}
func (s *TrainingModelInfo) MarshalJSON() ([]byte, error) {
type noMethod TrainingModelInfo
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields)
}
// TrainingModelInfoConfusionMatrix: An output confusion matrix. This
// shows an estimate for how this model will do in predictions. This is
// first indexed by the true class label. For each true class label,
// this provides a pair {predicted_label, count}, where count is the
// estimated number of times the model will predict the predicted label
// given the true label. Will not output if more then 100 classes
// [Categorical models only].
type TrainingModelInfoConfusionMatrix struct {
}
// TrainingModelInfoConfusionMatrixRowTotals: A list of the confusion
// matrix row totals
type TrainingModelInfoConfusionMatrixRowTotals struct {
}
// TrainingUtility: Class label (string).
type TrainingUtility struct {
}
type Update struct {
// CsvInstance: The input features for this instance
CsvInstance []interface{} `json:"csvInstance,omitempty"`
// Label: The class label of this instance
Label string `json:"label,omitempty"`
// Output: The generic output value - could be regression value or class
// label
Output string `json:"output,omitempty"`
// ForceSendFields is a list of field names (e.g. "CsvInstance") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
}
func (s *Update) MarshalJSON() ([]byte, error) {
type noMethod Update
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields)
}
// method id "prediction.hostedmodels.predict":
type HostedmodelsPredictCall struct {
s *Service
hostedModelName string
input *Input
urlParams_ gensupport.URLParams
ctx_ context.Context
}
// Predict: Submit input and request an output against a hosted model.
func (r *HostedmodelsService) Predict(hostedModelName string, input *Input) *HostedmodelsPredictCall {
c := &HostedmodelsPredictCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.hostedModelName = hostedModelName
c.input = input
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *HostedmodelsPredictCall) Fields(s ...googleapi.Field) *HostedmodelsPredictCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *HostedmodelsPredictCall) Context(ctx context.Context) *HostedmodelsPredictCall {
c.ctx_ = ctx
return c
}
func (c *HostedmodelsPredictCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.input)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
urls := googleapi.ResolveRelative(c.s.BasePath, "hostedmodels/{hostedModelName}/predict")
urls += "?" + c.urlParams_.Encode()
req, _ := http.NewRequest("POST", urls, body)
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"hostedModelName": c.hostedModelName,
})
if c.ctx_ != nil {
return ctxhttp.Do(c.ctx_, c.s.client, req)
}
return c.s.client.Do(req)
}
// Do executes the "prediction.hostedmodels.predict" call.
// Exactly one of *Output or error will be non-nil. Any non-2xx status
// code is an error. Response headers are in either
// *Output.ServerResponse.Header or (if a response was returned at all)
// in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to
// check whether the returned error was because http.StatusNotModified
// was returned.
func (c *HostedmodelsPredictCall) Do(opts ...googleapi.CallOption) (*Output, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Output{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := json.NewDecoder(res.Body).Decode(target); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Submit input and request an output against a hosted model.",
// "httpMethod": "POST",
// "id": "prediction.hostedmodels.predict",
// "parameterOrder": [
// "hostedModelName"
// ],
// "parameters": {
// "hostedModelName": {
// "description": "The name of a hosted model.",
// "location": "path",
// "required": true,
// "type": "string"
// }
// },
// "path": "hostedmodels/{hostedModelName}/predict",
// "request": {
// "$ref": "Input"
// },
// "response": {
// "$ref": "Output"
// },
// "scopes": [
// "https://www.googleapis.com/auth/prediction"
// ]
// }
}
// method id "prediction.trainedmodels.delete":
type TrainedmodelsDeleteCall struct {
s *Service
id string
urlParams_ gensupport.URLParams
ctx_ context.Context
}
// Delete: Delete a trained model.
func (r *TrainedmodelsService) Delete(id string) *TrainedmodelsDeleteCall {
c := &TrainedmodelsDeleteCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.id = id
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *TrainedmodelsDeleteCall) Fields(s ...googleapi.Field) *TrainedmodelsDeleteCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *TrainedmodelsDeleteCall) Context(ctx context.Context) *TrainedmodelsDeleteCall {
c.ctx_ = ctx
return c
}
func (c *TrainedmodelsDeleteCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
c.urlParams_.Set("alt", alt)
urls := googleapi.ResolveRelative(c.s.BasePath, "trainedmodels/{id}")
urls += "?" + c.urlParams_.Encode()
req, _ := http.NewRequest("DELETE", urls, body)
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"id": c.id,
})
if c.ctx_ != nil {
return ctxhttp.Do(c.ctx_, c.s.client, req)
}
return c.s.client.Do(req)
}
// Do executes the "prediction.trainedmodels.delete" call.
func (c *TrainedmodelsDeleteCall) Do(opts ...googleapi.CallOption) error {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if err != nil {
return err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return err
}
return nil
// {
// "description": "Delete a trained model.",
// "httpMethod": "DELETE",
// "id": "prediction.trainedmodels.delete",
// "parameterOrder": [
// "id"
// ],
// "parameters": {
// "id": {
// "description": "The unique name for the predictive model.",
// "location": "path",
// "required": true,
// "type": "string"
// }
// },
// "path": "trainedmodels/{id}",
// "scopes": [
// "https://www.googleapis.com/auth/prediction"
// ]
// }
}
// method id "prediction.trainedmodels.get":
type TrainedmodelsGetCall struct {
s *Service
id string
urlParams_ gensupport.URLParams
ifNoneMatch_ string
ctx_ context.Context
}
// Get: Check training status of your model.
func (r *TrainedmodelsService) Get(id string) *TrainedmodelsGetCall {
c := &TrainedmodelsGetCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.id = id
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *TrainedmodelsGetCall) Fields(s ...googleapi.Field) *TrainedmodelsGetCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of In-None-Match.
func (c *TrainedmodelsGetCall) IfNoneMatch(entityTag string) *TrainedmodelsGetCall {
c.ifNoneMatch_ = entityTag
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *TrainedmodelsGetCall) Context(ctx context.Context) *TrainedmodelsGetCall {
c.ctx_ = ctx
return c
}
func (c *TrainedmodelsGetCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("User-Agent", c.s.userAgent())
if c.ifNoneMatch_ != "" {
reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
}
var body io.Reader = nil
c.urlParams_.Set("alt", alt)
urls := googleapi.ResolveRelative(c.s.BasePath, "trainedmodels/{id}")
urls += "?" + c.urlParams_.Encode()
req, _ := http.NewRequest("GET", urls, body)
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"id": c.id,
})
if c.ctx_ != nil {
return ctxhttp.Do(c.ctx_, c.s.client, req)
}
return c.s.client.Do(req)
}
// Do executes the "prediction.trainedmodels.get" call.
// Exactly one of *Training or error will be non-nil. Any non-2xx status
// code is an error. Response headers are in either
// *Training.ServerResponse.Header or (if a response was returned at
// all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified
// to check whether the returned error was because
// http.StatusNotModified was returned.
func (c *TrainedmodelsGetCall) Do(opts ...googleapi.CallOption) (*Training, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Training{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := json.NewDecoder(res.Body).Decode(target); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Check training status of your model.",
// "httpMethod": "GET",
// "id": "prediction.trainedmodels.get",
// "parameterOrder": [
// "id"
// ],
// "parameters": {
// "id": {
// "description": "The unique name for the predictive model.",
// "location": "path",
// "required": true,
// "type": "string"
// }
// },
// "path": "trainedmodels/{id}",
// "response": {
// "$ref": "Training"
// },
// "scopes": [
// "https://www.googleapis.com/auth/prediction"
// ]
// }
}
// method id "prediction.trainedmodels.insert":
type TrainedmodelsInsertCall struct {
s *Service
training *Training
urlParams_ gensupport.URLParams
ctx_ context.Context
}
// Insert: Begin training your model.
func (r *TrainedmodelsService) Insert(training *Training) *TrainedmodelsInsertCall {
c := &TrainedmodelsInsertCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.training = training
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *TrainedmodelsInsertCall) Fields(s ...googleapi.Field) *TrainedmodelsInsertCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *TrainedmodelsInsertCall) Context(ctx context.Context) *TrainedmodelsInsertCall {
c.ctx_ = ctx
return c
}
func (c *TrainedmodelsInsertCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.training)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
urls := googleapi.ResolveRelative(c.s.BasePath, "trainedmodels")
urls += "?" + c.urlParams_.Encode()
req, _ := http.NewRequest("POST", urls, body)
req.Header = reqHeaders
googleapi.SetOpaque(req.URL)
if c.ctx_ != nil {
return ctxhttp.Do(c.ctx_, c.s.client, req)
}
return c.s.client.Do(req)
}
// Do executes the "prediction.trainedmodels.insert" call.
// Exactly one of *Training or error will be non-nil. Any non-2xx status
// code is an error. Response headers are in either
// *Training.ServerResponse.Header or (if a response was returned at
// all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified
// to check whether the returned error was because
// http.StatusNotModified was returned.
func (c *TrainedmodelsInsertCall) Do(opts ...googleapi.CallOption) (*Training, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Training{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := json.NewDecoder(res.Body).Decode(target); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Begin training your model.",
// "httpMethod": "POST",
// "id": "prediction.trainedmodels.insert",
// "path": "trainedmodels",
// "request": {
// "$ref": "Training"
// },
// "response": {
// "$ref": "Training"
// },
// "scopes": [
// "https://www.googleapis.com/auth/devstorage.full_control",
// "https://www.googleapis.com/auth/devstorage.read_only",
// "https://www.googleapis.com/auth/devstorage.read_write",
// "https://www.googleapis.com/auth/prediction"
// ]
// }
}
// method id "prediction.trainedmodels.predict":
type TrainedmodelsPredictCall struct {
s *Service
id string
input *Input
urlParams_ gensupport.URLParams
ctx_ context.Context
}
// Predict: Submit model id and request a prediction
func (r *TrainedmodelsService) Predict(id string, input *Input) *TrainedmodelsPredictCall {
c := &TrainedmodelsPredictCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.id = id
c.input = input
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *TrainedmodelsPredictCall) Fields(s ...googleapi.Field) *TrainedmodelsPredictCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *TrainedmodelsPredictCall) Context(ctx context.Context) *TrainedmodelsPredictCall {
c.ctx_ = ctx
return c
}
func (c *TrainedmodelsPredictCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.input)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
urls := googleapi.ResolveRelative(c.s.BasePath, "trainedmodels/{id}/predict")
urls += "?" + c.urlParams_.Encode()
req, _ := http.NewRequest("POST", urls, body)
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"id": c.id,
})
if c.ctx_ != nil {
return ctxhttp.Do(c.ctx_, c.s.client, req)
}
return c.s.client.Do(req)
}
// Do executes the "prediction.trainedmodels.predict" call.
// Exactly one of *Output or error will be non-nil. Any non-2xx status
// code is an error. Response headers are in either
// *Output.ServerResponse.Header or (if a response was returned at all)
// in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to
// check whether the returned error was because http.StatusNotModified
// was returned.
func (c *TrainedmodelsPredictCall) Do(opts ...googleapi.CallOption) (*Output, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Output{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := json.NewDecoder(res.Body).Decode(target); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Submit model id and request a prediction",
// "httpMethod": "POST",
// "id": "prediction.trainedmodels.predict",
// "parameterOrder": [
// "id"
// ],
// "parameters": {
// "id": {
// "description": "The unique name for the predictive model.",
// "location": "path",
// "required": true,
// "type": "string"
// }
// },
// "path": "trainedmodels/{id}/predict",
// "request": {
// "$ref": "Input"
// },
// "response": {
// "$ref": "Output"
// },
// "scopes": [
// "https://www.googleapis.com/auth/prediction"
// ]
// }
}
// method id "prediction.trainedmodels.update":
type TrainedmodelsUpdateCall struct {
s *Service
id string
update *Update
urlParams_ gensupport.URLParams
ctx_ context.Context
}
// Update: Add new data to a trained model.
func (r *TrainedmodelsService) Update(id string, update *Update) *TrainedmodelsUpdateCall {
c := &TrainedmodelsUpdateCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.id = id
c.update = update
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *TrainedmodelsUpdateCall) Fields(s ...googleapi.Field) *TrainedmodelsUpdateCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *TrainedmodelsUpdateCall) Context(ctx context.Context) *TrainedmodelsUpdateCall {
c.ctx_ = ctx
return c
}
func (c *TrainedmodelsUpdateCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.update)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
urls := googleapi.ResolveRelative(c.s.BasePath, "trainedmodels/{id}")
urls += "?" + c.urlParams_.Encode()
req, _ := http.NewRequest("PUT", urls, body)
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"id": c.id,
})
if c.ctx_ != nil {
return ctxhttp.Do(c.ctx_, c.s.client, req)
}
return c.s.client.Do(req)
}
// Do executes the "prediction.trainedmodels.update" call.
// Exactly one of *Training or error will be non-nil. Any non-2xx status
// code is an error. Response headers are in either
// *Training.ServerResponse.Header or (if a response was returned at
// all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified
// to check whether the returned error was because
// http.StatusNotModified was returned.
func (c *TrainedmodelsUpdateCall) Do(opts ...googleapi.CallOption) (*Training, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Training{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := json.NewDecoder(res.Body).Decode(target); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Add new data to a trained model.",
// "httpMethod": "PUT",
// "id": "prediction.trainedmodels.update",
// "parameterOrder": [
// "id"
// ],
// "parameters": {
// "id": {
// "description": "The unique name for the predictive model.",
// "location": "path",
// "required": true,
// "type": "string"
// }
// },
// "path": "trainedmodels/{id}",
// "request": {
// "$ref": "Update"
// },
// "response": {
// "$ref": "Training"
// },
// "scopes": [
// "https://www.googleapis.com/auth/prediction"
// ]
// }
}<|fim▁end|> | |
<|file_name|>qmlengine.rs<|end_file_name|><|fim▁begin|>use qvariant::*;
use types::*;
use qurl::*;
extern "C" {
fn dos_qguiapplication_create();
fn dos_qguiapplication_exec();
fn dos_qguiapplication_quit();
fn dos_qguiapplication_delete();
fn dos_qqmlapplicationengine_create() -> DosQmlApplicationEngine;
fn dos_qqmlapplicationengine_load(vptr: DosQmlApplicationEngine, filename: DosCStr);
fn dos_qqmlapplicationengine_load_url(vptr: DosQmlApplicationEngine, url: DosQUrl);
fn dos_qqmlapplicationengine_load_data(vptr: DosQmlApplicationEngine, data: DosCStr);
fn dos_qqmlapplicationengine_add_import_path(vptr: DosQmlApplicationEngine, path: DosCStr);
fn dos_qqmlapplicationengine_context(vptr: DosQmlApplicationEngine) -> DosQQmlContext;
fn dos_qqmlapplicationengine_delete(vptr: DosQmlApplicationEngine);
fn dos_qqmlcontext_setcontextproperty(vptr: DosQQmlContext,
name: DosCStr,
value: DosQVariant);
}
/// Provides an entry point for building QML applications from Rust
pub struct QmlEngine {
ptr: DosQmlApplicationEngine,
stored: Vec<QVariant>,
}
impl QmlEngine {
/// Creates a QML context of a non-headless application
pub fn new() -> Self {
unsafe {
dos_qguiapplication_create();
QmlEngine {
ptr: dos_qqmlapplicationengine_create(),
stored: Vec::new(),
}
}
}
<|fim▁hole|> format!("file:///{}", path_raw.display())
} else {
format!("file://{}", path_raw.display())
};
unsafe { dos_qqmlapplicationengine_load_url(self.ptr, construct_qurl(&path)) }
}
/// Loads qml from a specified url (`file://`, `qrc://`, `http://`)
pub fn load_url(&mut self, uri: &str) {
unsafe { dos_qqmlapplicationengine_load_url(self.ptr, construct_qurl(uri)) }
}
/// Adds a path to the QML import path
/// On an "import ModuleName" call QML will additionally search this path for the matching module.
pub fn add_import_path(&mut self, path: &str) {
unsafe { dos_qqmlapplicationengine_add_import_path(self.ptr, stoptr(path)) }
}
/// Loads a string as a qml file
pub fn load_data(&mut self, data: &str) {
unsafe { dos_qqmlapplicationengine_load_data(self.ptr, stoptr(data)) }
}
/// Launches the application
pub fn exec(&mut self) {
unsafe {
dos_qguiapplication_exec();
}
}
/// Closes the application
pub fn quit(&mut self) {
unsafe {
dos_qguiapplication_quit();
}
}
/// Sets a property for this QML context
///
/// This variant stores qvariant, so it is removed, only when this QmlEngine is removed.
pub fn set_and_store_property<T: Into<QVariant>>(&mut self, name: &str, value: T) {
let val = value.into();
unsafe {
let context = dos_qqmlapplicationengine_context(self.ptr);
dos_qqmlcontext_setcontextproperty(context, stoptr(name), get_private_variant(&val));
}
self.stored.push(val);
}
/// Sets a property for this QML context
pub fn set_property(&mut self, name: &str, value: &QVariant) {
unsafe {
let context = dos_qqmlapplicationengine_context(self.ptr);
dos_qqmlcontext_setcontextproperty(context, stoptr(name), get_private_variant(value));
}
}
}
use utils::*;
impl Default for QmlEngine {
fn default() -> Self {
Self::new()
}
}
impl Drop for QmlEngine {
fn drop(&mut self) {
unsafe {
dos_qguiapplication_quit();
dos_qqmlapplicationengine_delete(self.ptr);
dos_qguiapplication_delete();
}
}
}<|fim▁end|> | /// Loads a file as a qml file
pub fn load_file(&mut self, path: &str) {
let path_raw = ::std::env::current_dir().unwrap().join(path);
let path = if cfg!(windows) { |
<|file_name|>Gruntfile.js<|end_file_name|><|fim▁begin|>module.exports = function(grunt) {
var commonTasks = ['jscs', 'jshint', 'concat', 'uglify'];
grunt.initConfig({
pkg: grunt.file.readJSON('package.json'),
jscs: {
src: ['Gruntfile.js', 'src/*.js', 'test/utils-test.js', 'test/basicTimerSpec', 'test/timedFuncSpec.js']
},
jshint: {
all: ['Gruntfile.js', 'src/*.js', 'test/utils-test.js', 'test/basicTimerSpec', 'test/timedFuncSpec.js']
},
concat: {
options: {
banner: [
'/*! <%= pkg.name %> <%= pkg.version %> <%=grunt.template.today("yyyy-mm-dd")%>*/\n',
'(function($) {\n'
].join(''),
footer: '} (jQuery));'
},
dist: {
src: [
'src/constants.js',
'src/utils.js',
'src/Timer.js',
'src/index.js'
],
dest: 'dist/timer.jquery.js'
}
},
<|fim▁hole|> }
},
watch: {
scripts: {
files: ['src/*.js'],
tasks: commonTasks,
options: {
nospawn: true
}
}
}
});
grunt.loadNpmTasks('grunt-jscs');
grunt.loadNpmTasks('grunt-contrib-jshint');
grunt.loadNpmTasks('grunt-contrib-concat');
grunt.loadNpmTasks('grunt-contrib-uglify');
grunt.loadNpmTasks('grunt-contrib-watch');
grunt.registerTask('default', commonTasks);
};<|fim▁end|> | uglify: {
dist: {
src: 'dist/timer.jquery.js',
dest: 'dist/timer.jquery.min.js' |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>export { default as Toolbar } from './Toolbar';<|fim▁hole|><|fim▁end|> | export { default as ToolbarSection } from './ToolbarSection';
export { default as ToolbarTitle } from './ToolbarTitle';
export { default as ToolbarRow } from './ToolbarRow';
export { default as ToolbarIcon } from './ToolbarIcon'; |
<|file_name|>vue-meta.js<|end_file_name|><|fim▁begin|>/**
* vue-meta v2.2.0
* (c) 2019
* - Declan de Wet
* - Sébastien Chopin (@Atinux)
* - All the amazing contributors
* @license MIT
*/
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
typeof define === 'function' && define.amd ? define(factory) :
(global = global || self, global.VueMeta = factory());
}(this, function () { 'use strict';
var version = "2.2.0";
// store an id to keep track of DOM updates
var batchId = null;
function triggerUpdate(vm, hookName) {
// if an update was triggered during initialization or when an update was triggered by the
// metaInfo watcher, set initialized to null
// then we keep falsy value but know we need to run a triggerUpdate after initialization
if (!vm.$root._vueMeta.initialized && (vm.$root._vueMeta.initializing || hookName === 'watcher')) {
vm.$root._vueMeta.initialized = null;
}
if (vm.$root._vueMeta.initialized && !vm.$root._vueMeta.paused) {
// batch potential DOM updates to prevent extraneous re-rendering
batchUpdate(function () {
return vm.$meta().refresh();
});
}
}
/**
* Performs a batched update.
*
* @param {(null|Number)} id - the ID of this update
* @param {Function} callback - the update to perform
* @return {Number} id - a new ID
*/
function batchUpdate(callback) {
var timeout = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 10;
clearTimeout(batchId);
batchId = setTimeout(function () {
callback();
}, timeout);
return batchId;
}
function _typeof(obj) {
if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") {
_typeof = function (obj) {
return typeof obj;
};
} else {
_typeof = function (obj) {
return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj;
};
}
return _typeof(obj);
}
function _slicedToArray(arr, i) {
return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest();
}
function _arrayWithHoles(arr) {
if (Array.isArray(arr)) return arr;
}
function _iterableToArrayLimit(arr, i) {
var _arr = [];
var _n = true;
var _d = false;
var _e = undefined;
try {
for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) {
_arr.push(_s.value);
if (i && _arr.length === i) break;
}
} catch (err) {
_d = true;
_e = err;
} finally {
try {
if (!_n && _i["return"] != null) _i["return"]();
} finally {
if (_d) throw _e;
}
}
return _arr;
}
function _nonIterableRest() {
throw new TypeError("Invalid attempt to destructure non-iterable instance");
}
/**
* checks if passed argument is an array
* @param {any} arg - the object to check
* @return {Boolean} - true if `arg` is an array
*/
function isArray(arg) {
return Array.isArray(arg);
}
function isUndefined(arg) {
return typeof arg === 'undefined';
}
function isObject(arg) {
return _typeof(arg) === 'object';
}
function isPureObject(arg) {
return _typeof(arg) === 'object' && arg !== null;
}
function isFunction(arg) {
return typeof arg === 'function';
}
function isString(arg) {
return typeof arg === 'string';
}
function ensureIsArray(arg, key) {
if (!key || !isObject(arg)) {
return isArray(arg) ? arg : [];
}
if (!isArray(arg[key])) {
arg[key] = [];
}
return arg;
}
function ensuredPush(object, key, el) {
ensureIsArray(object, key);
object[key].push(el);
}
function hasMetaInfo() {
var vm = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : this;
return vm && (vm._vueMeta === true || isObject(vm._vueMeta));
} // a component is in a metaInfo branch when itself has meta info or one of its (grand-)children has
function inMetaInfoBranch() {
var vm = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : this;
return vm && !isUndefined(vm._vueMeta);
}
function addNavGuards(vm) {
// return when nav guards already added or no router exists<|fim▁hole|> if (vm.$root._vueMeta.navGuards || !vm.$root.$router) {
/* istanbul ignore next */
return;
}
vm.$root._vueMeta.navGuards = true;
var $router = vm.$root.$router;
var $meta = vm.$root.$meta();
$router.beforeEach(function (to, from, next) {
$meta.pause();
next();
});
$router.afterEach(function () {
var _$meta$resume = $meta.resume(),
metaInfo = _$meta$resume.metaInfo;
if (metaInfo && metaInfo.afterNavigation && isFunction(metaInfo.afterNavigation)) {
metaInfo.afterNavigation(metaInfo);
}
});
}
function hasGlobalWindowFn() {
try {
return !isUndefined(window);
} catch (e) {
return false;
}
}
var hasGlobalWindow = hasGlobalWindowFn();
var _global = hasGlobalWindow ? window : global;
var console = _global.console = _global.console || {};
function warn(str) {
/* istanbul ignore next */
if (!console || !console.warn) {
return;
}
console.warn(str);
}
var showWarningNotSupported = function showWarningNotSupported() {
return warn('This vue app/component has no vue-meta configuration');
};
var appId = 1;
function createMixin(Vue, options) {
// for which Vue lifecycle hooks should the metaInfo be refreshed
var updateOnLifecycleHook = ['activated', 'deactivated', 'beforeMount']; // watch for client side component updates
return {
beforeCreate: function beforeCreate() {
var _this = this;
Object.defineProperty(this, '_hasMetaInfo', {
configurable: true,
get: function get() {
// Show deprecation warning once when devtools enabled
if (Vue.config.devtools && !this.$root._vueMeta.hasMetaInfoDeprecationWarningShown) {
warn('VueMeta DeprecationWarning: _hasMetaInfo has been deprecated and will be removed in a future version. Please use hasMetaInfo(vm) instead');
this.$root._vueMeta.hasMetaInfoDeprecationWarningShown = true;
}
return hasMetaInfo(this);
}
}); // Add a marker to know if it uses metaInfo
// _vnode is used to know that it's attached to a real component
// useful if we use some mixin to add some meta tags (like nuxt-i18n)
if (!isUndefined(this.$options[options.keyName]) && this.$options[options.keyName] !== null) {
if (!this.$root._vueMeta) {
this.$root._vueMeta = {
appId: appId
};
appId++;
} // to speed up updates we keep track of branches which have a component with vue-meta info defined
// if _vueMeta = true it has info, if _vueMeta = false a child has info
if (!this._vueMeta) {
this._vueMeta = true;
var p = this.$parent;
while (p && p !== this.$root) {
if (isUndefined(p._vueMeta)) {
p._vueMeta = false;
}
p = p.$parent;
}
} // coerce function-style metaInfo to a computed prop so we can observe
// it on creation
if (isFunction(this.$options[options.keyName])) {
if (!this.$options.computed) {
this.$options.computed = {};
}
this.$options.computed.$metaInfo = this.$options[options.keyName];
if (!this.$isServer) {
// if computed $metaInfo exists, watch it for updates & trigger a refresh
// when it changes (i.e. automatically handle async actions that affect metaInfo)
// credit for this suggestion goes to [Sébastien Chopin](https://github.com/Atinux)
ensuredPush(this.$options, 'created', function () {
_this.$watch('$metaInfo', function () {
triggerUpdate(this, 'watcher');
});
});
}
} // force an initial refresh on page load and prevent other lifecycleHooks
// to triggerUpdate until this initial refresh is finished
// this is to make sure that when a page is opened in an inactive tab which
// has throttled rAF/timers we still immediately set the page title
if (isUndefined(this.$root._vueMeta.initialized)) {
this.$root._vueMeta.initialized = this.$isServer;
if (!this.$root._vueMeta.initialized) {
ensuredPush(this.$options, 'beforeMount', function () {
// if this Vue-app was server rendered, set the appId to 'ssr'
// only one SSR app per page is supported
if (_this.$root.$el && _this.$root.$el.hasAttribute && _this.$root.$el.hasAttribute('data-server-rendered')) {
_this.$root._vueMeta.appId = options.ssrAppId;
}
}); // we use the mounted hook here as on page load
ensuredPush(this.$options, 'mounted', function () {
if (!_this.$root._vueMeta.initialized) {
// used in triggerUpdate to check if a change was triggered
// during initialization
_this.$root._vueMeta.initializing = true; // refresh meta in nextTick so all child components have loaded
_this.$nextTick(function () {
var _this2 = this;
var _this$$root$$meta$ref = this.$root.$meta().refresh(),
tags = _this$$root$$meta$ref.tags,
metaInfo = _this$$root$$meta$ref.metaInfo; // After ssr hydration (identifier by tags === false) check
// if initialized was set to null in triggerUpdate. That'd mean
// that during initilazation changes where triggered which need
// to be applied OR a metaInfo watcher was triggered before the
// current hook was called
// (during initialization all changes are blocked)
if (tags === false && this.$root._vueMeta.initialized === null) {
this.$nextTick(function () {
return triggerUpdate(_this2, 'initializing');
});
}
this.$root._vueMeta.initialized = true;
delete this.$root._vueMeta.initializing; // add the navigation guards if they havent been added yet
// they are needed for the afterNavigation callback
if (!options.refreshOnceOnNavigation && metaInfo.afterNavigation) {
addNavGuards(this);
}
});
}
}); // add the navigation guards if requested
if (options.refreshOnceOnNavigation) {
addNavGuards(this);
}
}
} // do not trigger refresh on the server side
if (!this.$isServer) {
// no need to add this hooks on server side
updateOnLifecycleHook.forEach(function (lifecycleHook) {
ensuredPush(_this.$options, lifecycleHook, function () {
return triggerUpdate(_this, lifecycleHook);
});
}); // re-render meta data when returning from a child component to parent
ensuredPush(this.$options, 'destroyed', function () {
// Wait that element is hidden before refreshing meta tags (to support animations)
var interval = setInterval(function () {
if (_this.$el && _this.$el.offsetParent !== null) {
/* istanbul ignore next line */
return;
}
clearInterval(interval);
if (!_this.$parent) {
/* istanbul ignore next line */
return;
}
triggerUpdate(_this, 'destroyed');
}, 50);
});
}
}
}
};
}
/**
* These are constant variables used throughout the application.
*/
// set some sane defaults
var defaultInfo = {
title: undefined,
titleChunk: '',
titleTemplate: '%s',
htmlAttrs: {},
bodyAttrs: {},
headAttrs: {},
base: [],
link: [],
meta: [],
style: [],
script: [],
noscript: [],
__dangerouslyDisableSanitizers: [],
__dangerouslyDisableSanitizersByTagID: {} // This is the name of the component option that contains all the information that
// gets converted to the various meta tags & attributes for the page.
};
var keyName = 'metaInfo'; // This is the attribute vue-meta arguments on elements to know which it should
// manage and which it should ignore.
var attribute = 'data-vue-meta'; // This is the attribute that goes on the `html` tag to inform `vue-meta`
// that the server has already generated the meta tags for the initial render.
var ssrAttribute = 'data-vue-meta-server-rendered'; // This is the property that tells vue-meta to overwrite (instead of append)
// an item in a tag list. For example, if you have two `meta` tag list items
// that both have `vmid` of "description", then vue-meta will overwrite the
// shallowest one with the deepest one.
var tagIDKeyName = 'vmid'; // This is the key name for possible meta templates
var metaTemplateKeyName = 'template'; // This is the key name for the content-holding property
var contentKeyName = 'content'; // The id used for the ssr app
var ssrAppId = 'ssr';
var defaultOptions = {
keyName: keyName,
attribute: attribute,
ssrAttribute: ssrAttribute,
tagIDKeyName: tagIDKeyName,
contentKeyName: contentKeyName,
metaTemplateKeyName: metaTemplateKeyName,
ssrAppId: ssrAppId // List of metaInfo property keys which are configuration options (and dont generate html)
};
var metaInfoOptionKeys = ['titleChunk', 'titleTemplate', 'changed', '__dangerouslyDisableSanitizers', '__dangerouslyDisableSanitizersByTagID']; // The metaInfo property keys which are used to disable escaping
var disableOptionKeys = ['__dangerouslyDisableSanitizers', '__dangerouslyDisableSanitizersByTagID']; // List of metaInfo property keys which only generates attributes and no tags
var metaInfoAttributeKeys = ['htmlAttrs', 'headAttrs', 'bodyAttrs']; // HTML elements which support the onload event
var tagsSupportingOnload = ['link', 'style', 'script']; // HTML elements which dont have a head tag (shortened to our needs)
var commonDataAttributes = ['body', 'pbody']; // from: https://github.com/kangax/html-minifier/blob/gh-pages/src/htmlminifier.js#L202
var booleanHtmlAttributes = ['allowfullscreen', 'amp', 'async', 'autofocus', 'autoplay', 'checked', 'compact', 'controls', 'declare', 'default', 'defaultchecked', 'defaultmuted', 'defaultselected', 'defer', 'disabled', 'enabled', 'formnovalidate', 'hidden', 'indeterminate', 'inert', 'ismap', 'itemscope', 'loop', 'multiple', 'muted', 'nohref', 'noresize', 'noshade', 'novalidate', 'nowrap', 'open', 'pauseonexit', 'readonly', 'required', 'reversed', 'scoped', 'seamless', 'selected', 'sortable', 'truespeed', 'typemustmatch', 'visible'];
function setOptions(options) {
// combine options
options = isObject(options) ? options : {};
for (var key in defaultOptions) {
if (!options[key]) {
options[key] = defaultOptions[key];
}
}
return options;
}
function getOptions(options) {
var optionsCopy = {};
for (var key in options) {
optionsCopy[key] = options[key];
}
return optionsCopy;
}
function pause() {
var refresh = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : true;
this.$root._vueMeta.paused = true;
return function () {
return resume(refresh);
};
}
function resume() {
var refresh = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : true;
this.$root._vueMeta.paused = false;
if (refresh) {
return this.$root.$meta().refresh();
}
}
/*
* To reduce build size, this file provides simple polyfills without
* overly excessive type checking and without modifying
* the global Array.prototype
* The polyfills are automatically removed in the commonjs build
* Also, only files in client/ & shared/ should use these functions
* files in server/ still use normal js function
*/
function findIndex(array, predicate) {
if ( !Array.prototype.findIndex) {
// idx needs to be a Number, for..in returns string
for (var idx = 0; idx < array.length; idx++) {
if (predicate.call(arguments[2], array[idx], idx, array)) {
return idx;
}
}
return -1;
}
return array.findIndex(predicate, arguments[2]);
}
function toArray(arg) {
if ( !Array.from) {
return Array.prototype.slice.call(arg);
}
return Array.from(arg);
}
function includes(array, value) {
if ( !Array.prototype.includes) {
for (var idx in array) {
if (array[idx] === value) {
return true;
}
}
return false;
}
return array.includes(value);
}
var clientSequences = [[/&/g, "&"], [/</g, "<"], [/>/g, ">"], [/"/g, "\""], [/'/g, "'"]]; // sanitizes potentially dangerous characters
function escape(info, options, escapeOptions, escapeKeys) {
var tagIDKeyName = options.tagIDKeyName;
var _escapeOptions$doEsca = escapeOptions.doEscape,
doEscape = _escapeOptions$doEsca === void 0 ? function (v) {
return v;
} : _escapeOptions$doEsca;
var escaped = {};
for (var key in info) {
var value = info[key]; // no need to escape configuration options
if (includes(metaInfoOptionKeys, key)) {
escaped[key] = value;
continue;
}
var _disableOptionKeys = _slicedToArray(disableOptionKeys, 1),
disableKey = _disableOptionKeys[0];
if (escapeOptions[disableKey] && includes(escapeOptions[disableKey], key)) {
// this info[key] doesnt need to escaped if the option is listed in __dangerouslyDisableSanitizers
escaped[key] = value;
continue;
}
var tagId = info[tagIDKeyName];
if (tagId) {
disableKey = disableOptionKeys[1]; // keys which are listed in __dangerouslyDisableSanitizersByTagID for the current vmid do not need to be escaped
if (escapeOptions[disableKey] && escapeOptions[disableKey][tagId] && includes(escapeOptions[disableKey][tagId], key)) {
escaped[key] = value;
continue;
}
}
if (isString(value)) {
escaped[key] = doEscape(value);
} else if (isArray(value)) {
escaped[key] = value.map(function (v) {
if (isPureObject(v)) {
return escape(v, options, escapeOptions, true);
}
return doEscape(v);
});
} else if (isPureObject(value)) {
escaped[key] = escape(value, options, escapeOptions, true);
} else {
escaped[key] = value;
}
if (escapeKeys) {
var escapedKey = doEscape(key);
if (key !== escapedKey) {
escaped[escapedKey] = escaped[key];
delete escaped[key];
}
}
}
return escaped;
}
function escapeMetaInfo(options, info) {
var escapeSequences = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : [];
var escapeOptions = {
doEscape: function doEscape(value) {
return escapeSequences.reduce(function (val, _ref) {
var _ref2 = _slicedToArray(_ref, 2),
v = _ref2[0],
r = _ref2[1];
return val.replace(v, r);
}, value);
}
};
disableOptionKeys.forEach(function (disableKey, index) {
if (index === 0) {
ensureIsArray(info, disableKey);
} else if (index === 1) {
for (var key in info[disableKey]) {
ensureIsArray(info[disableKey], key);
}
}
escapeOptions[disableKey] = info[disableKey];
}); // begin sanitization
return escape(info, options, escapeOptions);
}
var isMergeableObject = function isMergeableObject(value) {
return isNonNullObject(value) && !isSpecial(value);
};
function isNonNullObject(value) {
return !!value && _typeof(value) === 'object';
}
function isSpecial(value) {
var stringValue = Object.prototype.toString.call(value);
return stringValue === '[object RegExp]' || stringValue === '[object Date]' || isReactElement(value);
} // see https://github.com/facebook/react/blob/b5ac963fb791d1298e7f396236383bc955f916c1/src/isomorphic/classic/element/ReactElement.js#L21-L25
var canUseSymbol = typeof Symbol === 'function' && Symbol.for;
var REACT_ELEMENT_TYPE = canUseSymbol ? Symbol.for('react.element') : 0xeac7;
function isReactElement(value) {
return value.$$typeof === REACT_ELEMENT_TYPE;
}
function emptyTarget(val) {
return Array.isArray(val) ? [] : {};
}
function cloneUnlessOtherwiseSpecified(value, options) {
return options.clone !== false && options.isMergeableObject(value) ? deepmerge(emptyTarget(value), value, options) : value;
}
function defaultArrayMerge(target, source, options) {
return target.concat(source).map(function (element) {
return cloneUnlessOtherwiseSpecified(element, options);
});
}
function getMergeFunction(key, options) {
if (!options.customMerge) {
return deepmerge;
}
var customMerge = options.customMerge(key);
return typeof customMerge === 'function' ? customMerge : deepmerge;
}
function getEnumerableOwnPropertySymbols(target) {
return Object.getOwnPropertySymbols ? Object.getOwnPropertySymbols(target).filter(function (symbol) {
return target.propertyIsEnumerable(symbol);
}) : [];
}
function getKeys(target) {
return Object.keys(target).concat(getEnumerableOwnPropertySymbols(target));
}
function mergeObject(target, source, options) {
var destination = {};
if (options.isMergeableObject(target)) {
getKeys(target).forEach(function (key) {
destination[key] = cloneUnlessOtherwiseSpecified(target[key], options);
});
}
getKeys(source).forEach(function (key) {
if (!options.isMergeableObject(source[key]) || !target[key]) {
destination[key] = cloneUnlessOtherwiseSpecified(source[key], options);
} else {
destination[key] = getMergeFunction(key, options)(target[key], source[key], options);
}
});
return destination;
}
function deepmerge(target, source, options) {
options = options || {};
options.arrayMerge = options.arrayMerge || defaultArrayMerge;
options.isMergeableObject = options.isMergeableObject || isMergeableObject;
var sourceIsArray = Array.isArray(source);
var targetIsArray = Array.isArray(target);
var sourceAndTargetTypesMatch = sourceIsArray === targetIsArray;
if (!sourceAndTargetTypesMatch) {
return cloneUnlessOtherwiseSpecified(source, options);
} else if (sourceIsArray) {
return options.arrayMerge(target, source, options);
} else {
return mergeObject(target, source, options);
}
}
deepmerge.all = function deepmergeAll(array, options) {
if (!Array.isArray(array)) {
throw new Error('first argument should be an array');
}
return array.reduce(function (prev, next) {
return deepmerge(prev, next, options);
}, {});
};
var deepmerge_1 = deepmerge;
var cjs = deepmerge_1;
function applyTemplate(_ref, headObject, template, chunk) {
var component = _ref.component,
metaTemplateKeyName = _ref.metaTemplateKeyName,
contentKeyName = _ref.contentKeyName;
if (isUndefined(template)) {
template = headObject[metaTemplateKeyName];
delete headObject[metaTemplateKeyName];
} // return early if no template defined
if (!template) {
return false;
}
if (isUndefined(chunk)) {
chunk = headObject[contentKeyName];
}
headObject[contentKeyName] = isFunction(template) ? template.call(component, chunk) : template.replace(/%s/g, chunk);
return true;
}
function _arrayMerge(_ref, target, source) {
var component = _ref.component,
tagIDKeyName = _ref.tagIDKeyName,
metaTemplateKeyName = _ref.metaTemplateKeyName,
contentKeyName = _ref.contentKeyName;
// we concat the arrays without merging objects contained in,
// but we check for a `vmid` property on each object in the array
// using an O(1) lookup associative array exploit
var destination = [];
target.forEach(function (targetItem, targetIndex) {
// no tagID so no need to check for duplicity
if (!targetItem[tagIDKeyName]) {
destination.push(targetItem);
return;
}
var sourceIndex = findIndex(source, function (item) {
return item[tagIDKeyName] === targetItem[tagIDKeyName];
});
var sourceItem = source[sourceIndex]; // source doesnt contain any duplicate vmid's, we can keep targetItem
if (sourceIndex === -1) {
destination.push(targetItem);
return;
} // when sourceItem explictly defines contentKeyName or innerHTML as undefined, its
// an indication that we need to skip the default behaviour or child has preference over parent
// which means we keep the targetItem and ignore/remove the sourceItem
if (sourceItem.hasOwnProperty(contentKeyName) && sourceItem[contentKeyName] === undefined || sourceItem.hasOwnProperty('innerHTML') && sourceItem.innerHTML === undefined) {
destination.push(targetItem); // remove current index from source array so its not concatenated to destination below
source.splice(sourceIndex, 1);
return;
} // we now know that targetItem is a duplicate and we should ignore it in favor of sourceItem
// if source specifies null as content then ignore both the target as the source
if (sourceItem[contentKeyName] === null || sourceItem.innerHTML === null) {
// remove current index from source array so its not concatenated to destination below
source.splice(sourceIndex, 1);
return;
} // now we only need to check if the target has a template to combine it with the source
var targetTemplate = targetItem[metaTemplateKeyName];
if (!targetTemplate) {
return;
}
var sourceTemplate = sourceItem[metaTemplateKeyName];
if (!sourceTemplate) {
// use parent template and child content
applyTemplate({
component: component,
metaTemplateKeyName: metaTemplateKeyName,
contentKeyName: contentKeyName
}, sourceItem, targetTemplate);
} else if (!sourceItem[contentKeyName]) {
// use child template and parent content
applyTemplate({
component: component,
metaTemplateKeyName: metaTemplateKeyName,
contentKeyName: contentKeyName
}, sourceItem, undefined, targetItem[contentKeyName]);
}
});
return destination.concat(source);
}
function merge(target, source) {
var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
// remove properties explicitly set to false so child components can
// optionally _not_ overwrite the parents content
// (for array properties this is checked in arrayMerge)
if (source.hasOwnProperty('title') && source.title === undefined) {
delete source.title;
}
metaInfoAttributeKeys.forEach(function (attrKey) {
if (!source[attrKey]) {
return;
}
for (var key in source[attrKey]) {
if (source[attrKey].hasOwnProperty(key) && source[attrKey][key] === undefined) {
if (includes(booleanHtmlAttributes, key)) {
warn('VueMeta: Please note that since v2 the value undefined is not used to indicate boolean attributes anymore, see migration guide for details');
}
delete source[attrKey][key];
}
}
});
return cjs(target, source, {
arrayMerge: function arrayMerge(t, s) {
return _arrayMerge(options, t, s);
}
});
}
function getComponentMetaInfo() {
var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
var component = arguments.length > 1 ? arguments[1] : undefined;
return getComponentOption(options, component, defaultInfo);
}
/**
* Returns the `opts.option` $option value of the given `opts.component`.
* If methods are encountered, they will be bound to the component context.
* If `opts.deep` is true, will recursively merge all child component
* `opts.option` $option values into the returned result.
*
* @param {Object} opts - options
* @param {Object} opts.component - Vue component to fetch option data from
* @param {Boolean} opts.deep - look for data in child components as well?
* @param {Function} opts.arrayMerge - how should arrays be merged?
* @param {String} opts.keyName - the name of the option to look for
* @param {Object} [result={}] - result so far
* @return {Object} result - final aggregated result
*/
function getComponentOption() {
var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
var component = arguments.length > 1 ? arguments[1] : undefined;
var result = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
var keyName = options.keyName,
metaTemplateKeyName = options.metaTemplateKeyName,
tagIDKeyName = options.tagIDKeyName;
var $options = component.$options,
$children = component.$children;
if (component._inactive) {
return result;
} // only collect option data if it exists
if ($options[keyName]) {
var data = $options[keyName]; // if option is a function, replace it with it's result
if (isFunction(data)) {
data = data.call(component);
} // ignore data if its not an object, then we keep our previous result
if (!isObject(data)) {
return result;
} // merge with existing options
result = merge(result, data, options);
} // collect & aggregate child options if deep = true
if ($children.length) {
$children.forEach(function (childComponent) {
// check if the childComponent is in a branch
// return otherwise so we dont walk all component branches unnecessarily
if (!inMetaInfoBranch(childComponent)) {
return;
}
result = getComponentOption(options, childComponent, result);
});
}
if (metaTemplateKeyName && result.meta) {
// apply templates if needed
result.meta.forEach(function (metaObject) {
return applyTemplate(options, metaObject);
}); // remove meta items with duplicate vmid's
result.meta = result.meta.filter(function (metaItem, index, arr) {
return (// keep meta item if it doesnt has a vmid
!metaItem.hasOwnProperty(tagIDKeyName) || // or if it's the first item in the array with this vmid
index === findIndex(arr, function (item) {
return item[tagIDKeyName] === metaItem[tagIDKeyName];
})
);
});
}
return result;
}
/**
* Returns the correct meta info for the given component
* (child components will overwrite parent meta info)
*
* @param {Object} component - the Vue instance to get meta info from
* @return {Object} - returned meta info
*/
function getMetaInfo() {
var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
var info = arguments.length > 1 ? arguments[1] : undefined;
var escapeSequences = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : [];
var component = arguments.length > 3 ? arguments[3] : undefined;
// Remove all "template" tags from meta
// backup the title chunk in case user wants access to it
if (info.title) {
info.titleChunk = info.title;
} // replace title with populated template
if (info.titleTemplate && info.titleTemplate !== '%s') {
applyTemplate({
component: component,
contentKeyName: 'title'
}, info, info.titleTemplate, info.titleChunk || '');
} // convert base tag to an array so it can be handled the same way
// as the other tags
if (info.base) {
info.base = Object.keys(info.base).length ? [info.base] : [];
}
return escapeMetaInfo(options, info, escapeSequences);
}
function getTag(tags, tag) {
if (!tags[tag]) {
tags[tag] = document.getElementsByTagName(tag)[0];
}
return tags[tag];
}
function getElementsKey(_ref) {
var body = _ref.body,
pbody = _ref.pbody;
return body ? 'body' : pbody ? 'pbody' : 'head';
}
function queryElements(parentNode, _ref2) {
var appId = _ref2.appId,
attribute = _ref2.attribute,
type = _ref2.type,
tagIDKeyName = _ref2.tagIDKeyName;
var attributes = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
var queries = ["".concat(type, "[").concat(attribute, "=\"").concat(appId, "\"]"), "".concat(type, "[data-").concat(tagIDKeyName, "]")].map(function (query) {
for (var key in attributes) {
var val = attributes[key];
var attributeValue = val && val !== true ? "=\"".concat(val, "\"") : '';
query += "[data-".concat(key).concat(attributeValue, "]");
}
return query;
});
return toArray(parentNode.querySelectorAll(queries.join(', ')));
}
var callbacks = [];
function isDOMComplete() {
var d = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : document;
return d.readyState === 'complete';
}
function addCallback(query, callback) {
if (arguments.length === 1) {
callback = query;
query = '';
}
callbacks.push([query, callback]);
}
function addCallbacks(_ref, type, tags, autoAddListeners) {
var tagIDKeyName = _ref.tagIDKeyName;
var hasAsyncCallback = false;
tags.forEach(function (tag) {
if (!tag[tagIDKeyName] || !tag.callback) {
return;
}
hasAsyncCallback = true;
addCallback("".concat(type, "[data-").concat(tagIDKeyName, "=\"").concat(tag[tagIDKeyName], "\"]"), tag.callback);
});
if (!autoAddListeners || !hasAsyncCallback) {
return hasAsyncCallback;
}
return addListeners();
}
function addListeners() {
if (isDOMComplete()) {
applyCallbacks();
return;
} // Instead of using a MutationObserver, we just apply
/* istanbul ignore next */
document.onreadystatechange = function () {
applyCallbacks();
};
}
function applyCallbacks(matchElement) {
callbacks.forEach(function (_ref2) {
var _ref3 = _slicedToArray(_ref2, 2),
query = _ref3[0],
callback = _ref3[1];
var selector = "".concat(query, "[onload=\"this.__vm_l=1\"]");
var elements = [];
if (!matchElement) {
elements = toArray(document.querySelectorAll(selector));
}
if (matchElement && matchElement.matches(selector)) {
elements = [matchElement];
}
elements.forEach(function (element) {
/* __vm_cb: whether the load callback has been called
* __vm_l: set by onload attribute, whether the element was loaded
* __vm_ev: whether the event listener was added or not
*/
if (element.__vm_cb) {
return;
}
var onload = function onload() {
/* Mark that the callback for this element has already been called,
* this prevents the callback to run twice in some (rare) conditions
*/
element.__vm_cb = true;
/* onload needs to be removed because we only need the
* attribute after ssr and if we dont remove it the node
* will fail isEqualNode on the client
*/
element.removeAttribute('onload');
callback(element);
};
/* IE9 doesnt seem to load scripts synchronously,
* causing a script sometimes/often already to be loaded
* when we add the event listener below (thus adding an onload event
* listener has no use because it will never be triggered).
* Therefore we add the onload attribute during ssr, and
* check here if it was already loaded or not
*/
if (element.__vm_l) {
onload();
return;
}
if (!element.__vm_ev) {
element.__vm_ev = true;
element.addEventListener('load', onload);
}
});
});
}
/**
* Updates the document's html tag attributes
*
* @param {Object} attrs - the new document html attributes
* @param {HTMLElement} tag - the HTMLElement tag to update with new attrs
*/
function updateAttribute() {
var _ref = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {},
attribute = _ref.attribute;
var attrs = arguments.length > 1 ? arguments[1] : undefined;
var tag = arguments.length > 2 ? arguments[2] : undefined;
var vueMetaAttrString = tag.getAttribute(attribute);
var vueMetaAttrs = vueMetaAttrString ? vueMetaAttrString.split(',') : [];
var toRemove = toArray(vueMetaAttrs);
var keepIndexes = [];
for (var attr in attrs) {
if (attrs.hasOwnProperty(attr)) {
var value = includes(booleanHtmlAttributes, attr) ? '' : isArray(attrs[attr]) ? attrs[attr].join(' ') : attrs[attr];
tag.setAttribute(attr, value || '');
if (!includes(vueMetaAttrs, attr)) {
vueMetaAttrs.push(attr);
} // filter below wont ever check -1
keepIndexes.push(toRemove.indexOf(attr));
}
}
var removedAttributesCount = toRemove.filter(function (el, index) {
return !includes(keepIndexes, index);
}).reduce(function (acc, attr) {
tag.removeAttribute(attr);
return acc + 1;
}, 0);
if (vueMetaAttrs.length === removedAttributesCount) {
tag.removeAttribute(attribute);
} else {
tag.setAttribute(attribute, vueMetaAttrs.sort().join(','));
}
}
/**
* Updates the document title
*
* @param {String} title - the new title of the document
*/
function updateTitle(title) {
if (!title && title !== '') {
return;
}
document.title = title;
}
/**
* Updates meta tags inside <head> and <body> on the client. Borrowed from `react-helmet`:
* https://github.com/nfl/react-helmet/blob/004d448f8de5f823d10f838b02317521180f34da/src/Helmet.js#L195-L245
*
* @param {('meta'|'base'|'link'|'style'|'script'|'noscript')} type - the name of the tag
* @param {(Array<Object>|Object)} tags - an array of tag objects or a single object in case of base
* @return {Object} - a representation of what tags changed
*/
function updateTag(appId) {
var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
var type = arguments.length > 2 ? arguments[2] : undefined;
var tags = arguments.length > 3 ? arguments[3] : undefined;
var head = arguments.length > 4 ? arguments[4] : undefined;
var body = arguments.length > 5 ? arguments[5] : undefined;
var attribute = options.attribute,
tagIDKeyName = options.tagIDKeyName;
var dataAttributes = commonDataAttributes.slice();
dataAttributes.push(tagIDKeyName);
var newElements = [];
var queryOptions = {
appId: appId,
attribute: attribute,
type: type,
tagIDKeyName: tagIDKeyName
};
var currentElements = {
head: queryElements(head, queryOptions),
pbody: queryElements(body, queryOptions, {
pbody: true
}),
body: queryElements(body, queryOptions, {
body: true
})
};
if (tags.length > 1) {
// remove duplicates that could have been found by merging tags
// which include a mixin with metaInfo and that mixin is used
// by multiple components on the same page
var found = [];
tags = tags.filter(function (x) {
var k = JSON.stringify(x);
var res = !includes(found, k);
found.push(k);
return res;
});
}
tags.forEach(function (tag) {
if (tag.skip) {
return;
}
var newElement = document.createElement(type);
newElement.setAttribute(attribute, appId);
var _loop = function _loop(attr) {
/* istanbul ignore next */
if (!tag.hasOwnProperty(attr)) {
return "continue";
}
if (attr === 'innerHTML') {
newElement.innerHTML = tag.innerHTML;
return "continue";
}
if (attr === 'json') {
newElement.innerHTML = JSON.stringify(tag.json);
return "continue";
}
if (attr === 'cssText') {
if (newElement.styleSheet) {
/* istanbul ignore next */
newElement.styleSheet.cssText = tag.cssText;
} else {
newElement.appendChild(document.createTextNode(tag.cssText));
}
return "continue";
}
if (attr === 'callback') {
newElement.onload = function () {
return tag[attr](newElement);
};
return "continue";
}
var _attr = includes(dataAttributes, attr) ? "data-".concat(attr) : attr;
var isBooleanAttribute = includes(booleanHtmlAttributes, attr);
if (isBooleanAttribute && !tag[attr]) {
return "continue";
}
var value = isBooleanAttribute ? '' : tag[attr];
newElement.setAttribute(_attr, value);
};
for (var attr in tag) {
var _ret = _loop(attr);
if (_ret === "continue") continue;
}
var oldElements = currentElements[getElementsKey(tag)]; // Remove a duplicate tag from domTagstoRemove, so it isn't cleared.
var indexToDelete;
var hasEqualElement = oldElements.some(function (existingTag, index) {
indexToDelete = index;
return newElement.isEqualNode(existingTag);
});
if (hasEqualElement && (indexToDelete || indexToDelete === 0)) {
oldElements.splice(indexToDelete, 1);
} else {
newElements.push(newElement);
}
});
var oldElements = [];
for (var _type in currentElements) {
Array.prototype.push.apply(oldElements, currentElements[_type]);
} // remove old elements
oldElements.forEach(function (element) {
element.parentNode.removeChild(element);
}); // insert new elements
newElements.forEach(function (element) {
if (element.hasAttribute('data-body')) {
body.appendChild(element);
return;
}
if (element.hasAttribute('data-pbody')) {
body.insertBefore(element, body.firstChild);
return;
}
head.appendChild(element);
});
return {
oldTags: oldElements,
newTags: newElements
};
}
/**
* Performs client-side updates when new meta info is received
*
* @param {Object} newInfo - the meta info to update to
*/
function updateClientMetaInfo(appId) {
var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
var newInfo = arguments.length > 2 ? arguments[2] : undefined;
var ssrAttribute = options.ssrAttribute,
ssrAppId = options.ssrAppId; // only cache tags for current update
var tags = {};
var htmlTag = getTag(tags, 'html'); // if this is a server render, then dont update
if (appId === ssrAppId && htmlTag.hasAttribute(ssrAttribute)) {
// remove the server render attribute so we can update on (next) changes
htmlTag.removeAttribute(ssrAttribute); // add load callbacks if the
var addLoadListeners = false;
tagsSupportingOnload.forEach(function (type) {
if (newInfo[type] && addCallbacks(options, type, newInfo[type])) {
addLoadListeners = true;
}
});
if (addLoadListeners) {
addListeners();
}
return false;
} // initialize tracked changes
var addedTags = {};
var removedTags = {};
for (var type in newInfo) {
// ignore these
if (includes(metaInfoOptionKeys, type)) {
continue;
}
if (type === 'title') {
// update the title
updateTitle(newInfo.title);
continue;
}
if (includes(metaInfoAttributeKeys, type)) {
var tagName = type.substr(0, 4);
updateAttribute(options, newInfo[type], getTag(tags, tagName));
continue;
} // tags should always be an array, ignore if it isnt
if (!isArray(newInfo[type])) {
continue;
}
var _updateTag = updateTag(appId, options, type, newInfo[type], getTag(tags, 'head'), getTag(tags, 'body')),
oldTags = _updateTag.oldTags,
newTags = _updateTag.newTags;
if (newTags.length) {
addedTags[type] = newTags;
removedTags[type] = oldTags;
}
}
return {
addedTags: addedTags,
removedTags: removedTags
};
}
function _refresh() {
var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
/**
* When called, will update the current meta info with new meta info.
* Useful when updating meta info as the result of an asynchronous
* action that resolves after the initial render takes place.
*
* Credit to [Sébastien Chopin](https://github.com/Atinux) for the suggestion
* to implement this method.
*
* @return {Object} - new meta info
*/
return function refresh() {
// collect & aggregate all metaInfo $options
var rawInfo = getComponentMetaInfo(options, this.$root);
var metaInfo = getMetaInfo(options, rawInfo, clientSequences, this.$root);
var appId = this.$root._vueMeta.appId;
var tags = updateClientMetaInfo(appId, options, metaInfo); // emit "event" with new info
if (tags && isFunction(metaInfo.changed)) {
metaInfo.changed(metaInfo, tags.addedTags, tags.removedTags);
}
return {
vm: this,
metaInfo: metaInfo,
tags: tags
};
};
}
function _$meta() {
var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
var _refresh$1 = _refresh(options);
var inject = function inject() {};
/**
* Returns an injector for server-side rendering.
* @this {Object} - the Vue instance (a root component)
* @return {Object} - injector
*/
return function $meta() {
if (!this.$root._vueMeta) {
return {
getOptions: showWarningNotSupported,
refresh: showWarningNotSupported,
inject: showWarningNotSupported,
pause: showWarningNotSupported,
resume: showWarningNotSupported
};
}
return {
getOptions: function getOptions$1() {
return getOptions(options);
},
refresh: _refresh$1.bind(this),
inject: inject,
pause: pause.bind(this),
resume: resume.bind(this)
};
};
}
/**
* Plugin install function.
* @param {Function} Vue - the Vue constructor.
*/
function install(Vue) {
var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
if (Vue.__vuemeta_installed) {
return;
}
Vue.__vuemeta_installed = true;
options = setOptions(options);
Vue.prototype.$meta = _$meta(options);
Vue.mixin(createMixin(Vue, options));
} // automatic install
if (!isUndefined(window) && !isUndefined(window.Vue)) {
/* istanbul ignore next */
install(window.Vue);
}
var browser = {
version: version,
install: install,
hasMetaInfo: hasMetaInfo
};
return browser;
}));<|fim▁end|> | |
<|file_name|>euler 02.py<|end_file_name|><|fim▁begin|>def fib():
a, b = 1, 1
while True:
yield b
<|fim▁hole|> for n in seq:
if n % 2 == 0:
yield n
def menores_4M(seq):
for n in seq:
if n > 4000000:
break
yield n
print (sum(pares(menores_4M(fib()))))<|fim▁end|> | a, b = b, a + b
def pares(seq):
|
<|file_name|>e362.py<|end_file_name|><|fim▁begin|>from __future__ import print_function, division
import matplotlib
import logging
from sys import stdout
matplotlib.use('Agg') # Must be before importing matplotlib.pyplot or pylab!
from neuralnilm import (Net, RealApplianceSource,
BLSTMLayer, DimshuffleLayer,
BidirectionalRecurrentLayer)
from neuralnilm.source import standardise, discretize, fdiff, power_and_fdiff
from neuralnilm.experiment import run_experiment, init_experiment
from neuralnilm.net import TrainingError
from neuralnilm.layers import MixtureDensityLayer
from neuralnilm.objectives import (scaled_cost, mdn_nll,
scaled_cost_ignore_inactive, ignore_inactive,
scaled_cost3)
from neuralnilm.plot import MDNPlotter, CentralOutputPlotter
from lasagne.nonlinearities import sigmoid, rectify, tanh
from lasagne.objectives import mse, binary_crossentropy
from lasagne.init import Uniform, Normal
from lasagne.layers import (LSTMLayer, DenseLayer, Conv1DLayer,
ReshapeLayer, FeaturePoolLayer, RecurrentLayer)
from lasagne.updates import nesterov_momentum, momentum
from functools import partial
import os
import __main__
from copy import deepcopy
from math import sqrt
import numpy as np
import theano.tensor as T
NAME = os.path.splitext(os.path.split(__main__.__file__)[1])[0]
#PATH = "/homes/dk3810/workspace/python/neuralnilm/figures"
PATH = "/data/dk3810/figures"
SAVE_PLOT_INTERVAL = 500
GRADIENT_STEPS = 10
source_dict = dict(
filename='/data/dk3810/ukdale.h5',
appliances=[
['fridge freezer', 'fridge', 'freezer'],
'hair straighteners',
'television'
# 'dish washer',
# ['washer dryer', 'washing machine']
],
on_power_thresholds=[5] * 5,
min_on_durations=[60, 60, 60, 1800, 1800],
min_off_durations=[12, 12, 12, 1800, 600],
window=("2013-06-01", "2014-07-01"),
seq_length=1024,
# random_window=64,
output_one_appliance=False,
boolean_targets=False,
train_buildings=[1],
validation_buildings=[1],
# skip_probability=0.8,
one_target_per_seq=False,
n_seq_per_batch=16,
subsample_target=4,
include_diff=False,
include_power=True,
# clip_appliance_power=True,
target_is_prediction=False,
# independently_center_inputs = True,
standardise_input=True,
unit_variance_targets=True,
input_padding=2,
lag=0
# classification=True
# reshape_target_to_2D=True
# input_stats={'mean': np.array([ 0.05526326], dtype=np.float32),
# 'std': np.array([ 0.12636775], dtype=np.float32)},
# target_stats={
# 'mean': np.array([ 0.04066789, 0.01881946,
# 0.24639061, 0.17608672, 0.10273963],
# dtype=np.float32),
# 'std': np.array([ 0.11449792, 0.07338708,
# 0.26608968, 0.33463112, 0.21250485],
# dtype=np.float32)}
)
N = 50
net_dict = dict(
save_plot_interval=SAVE_PLOT_INTERVAL,
# loss_function=partial(ignore_inactive, loss_func=mdn_nll, seq_length=SEQ_LENGTH),
# loss_function=lambda x, t: mdn_nll(x, t).mean(),
loss_function=lambda x, t: mse(x, t).mean(),
# loss_function=lambda x, t: binary_crossentropy(x, t).mean(),
# loss_function=partial(scaled_cost, loss_func=mse),
# loss_function=ignore_inactive,
# loss_function=partial(scaled_cost3, ignore_inactive=False),
updates_func=momentum,
learning_rate=1e-1,
learning_rate_changes_by_iteration={
1000: 1e-2,
# 400: 1e-3,
# 800: 1e-4
# 500: 1e-3
# 4000: 1e-03,
# 6000: 5e-06,<|fim▁hole|> # 7000: 5e-06,
# 10000: 1e-06,
# 15000: 5e-07,
# 50000: 1e-07
},
do_save_activations=True
# auto_reshape=False,
# plotter=CentralOutputPlotter
# plotter=MDNPlotter
)
def exp_a(name):
global source
source_dict_copy = deepcopy(source_dict)
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source
))
output_shape = source.output_shape_after_processing()
net_dict_copy['layers_config'] = [
{
'type': BLSTMLayer,
'num_units': 40,
'gradient_steps': GRADIENT_STEPS,
'peepholes': False
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': Conv1DLayer,
'num_filters': 20,
'filter_length': 4,
'stride': 4,
'nonlinearity': sigmoid
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': BLSTMLayer,
'num_units': 80,
'gradient_steps': GRADIENT_STEPS,
'peepholes': False
},
{
'type': DenseLayer,
'num_units': source.n_outputs,
'nonlinearity': T.nnet.softplus
}
]
net = Net(**net_dict_copy)
return net
def main():
# EXPERIMENTS = list('abcdefghijklmnopqrstuvwxyz')
EXPERIMENTS = list('a')
for experiment in EXPERIMENTS:
full_exp_name = NAME + experiment
func_call = init_experiment(PATH, experiment, full_exp_name)
logger = logging.getLogger(full_exp_name)
try:
net = eval(func_call)
run_experiment(net, epochs=100000)
except KeyboardInterrupt:
logger.info("KeyboardInterrupt")
break
except Exception as exception:
logger.exception("Exception")
# raise
finally:
logging.shutdown()
if __name__ == "__main__":
main()<|fim▁end|> | # 7000: 1e-06
# 2000: 5e-06
# 3000: 1e-05 |
<|file_name|>dev.command.ts<|end_file_name|><|fim▁begin|>import * as yargs from "yargs";
import { getEnvironment, getSlimConfig } from "../cli-helpers";
export const devCommand: yargs.CommandModule = {
command: "dev",
describe: "Start a development server.",
builder: {
open: {
alias: "o",
type: "boolean",
description: "Automatically open the web browser."
},
"update-dlls": {
alias: "u",
type: "boolean",
description: "Create dynamically linked libraries for vendors (@angular/core, etc.) and polyfills."
},
cordova: {
type: "boolean",
description: "Output the build to the target directory."
},
aot: {
type: "boolean",
description: "Use the Angular AOT compiler."
}
},<|fim▁hole|> handler: (options: Options) => {
const dllTask = require("../tasks/dll.task");
const devTask = require("../tasks/dev.task");
const rootDir = process.cwd();
const slimConfig = getSlimConfig(rootDir);
const environmentVariables = getEnvironment(rootDir);
return dllTask(environmentVariables, slimConfig, options["update-dlls"])
.then(() => devTask(environmentVariables, slimConfig, options.open, options.aot))
.then(code => {
process.exit(code);
})
.catch(code => {
process.exit(code);
});
}
};<|fim▁end|> | |
<|file_name|>TpchIndex.java<|end_file_name|><|fim▁begin|>/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.tpch;
import com.facebook.presto.spi.Index;
import com.facebook.presto.spi.RecordSet;
import com.google.common.base.Function;
import static com.facebook.presto.tpch.TpchIndexedData.IndexedTable;<|fim▁hole|>public class TpchIndex
implements Index
{
private final Function<RecordSet, RecordSet> keyFormatter;
private final Function<RecordSet, RecordSet> outputFormatter;
private final IndexedTable indexedTable;
public TpchIndex(Function<RecordSet, RecordSet> keyFormatter, Function<RecordSet, RecordSet> outputFormatter, IndexedTable indexedTable)
{
this.keyFormatter = checkNotNull(keyFormatter, "keyFormatter is null");
this.outputFormatter = checkNotNull(outputFormatter, "outputFormatter is null");
this.indexedTable = checkNotNull(indexedTable, "indexedTable is null");
}
@Override
public RecordSet lookup(RecordSet rawInputRecordSet)
{
// convert the input record set from the column ordering in the query to
// match the column ordering of the index
RecordSet inputRecordSet = keyFormatter.apply(rawInputRecordSet);
// lookup the values in the index
RecordSet rawOutputRecordSet = indexedTable.lookupKeys(inputRecordSet);
// convert the output record set of the index into the column ordering
// expect by the query
return outputFormatter.apply(rawOutputRecordSet);
}
}<|fim▁end|> | import static com.google.common.base.Preconditions.checkNotNull;
|
<|file_name|>migrate.py<|end_file_name|><|fim▁begin|>from optparse import OptionParser
import simplejson as json
import spotify_client
import datatype
import datetime
import time
import calendar
import wiki
import omni_redis
def migrate_v1(path_in, path_out):
client = spotify_client.Client()
uris = []<|fim▁hole|> doc = json.loads(line)
uris.append(doc['u'])
tracks = client.track_data(uris)
with open(path_out, 'wb') as f:
for t in tracks:
ts = calendar.timegm(datetime.datetime.now().utctimetuple())
t.meta = datatype.Meta(date_added=ts, last_modified=ts)
f.write('%s\n' % json.dumps(t._to_dict()))
def migrate_v2(path_in, view):
with open(path_in, 'rb') as f:
tracks = [datatype.track_from_dict(json.loads(line)) for line in f]
for t in tracks:
t.meta.date_added = t.meta.date_added or int(round(time.time()))
t.meta.last_modified = t.meta.last_modified or int(round(time.time()))
print 'putting %d tracks' % len(tracks)
omni_redis.put_view('default', view, tracks)
migrate = migrate_v2
def add_countries(path_in, path_out):
tracks = []
artist_countries = {}
with open(path_in, 'rb') as f:
for line in f:
doc = json.loads(line)
tracks.append(doc)
artist_countries[doc['a']['n']] = None
for i,artist in enumerate(artist_countries.iterkeys()):
artist_countries[artist]=wiki.country_for_artist(artist)
print '%d/%d %s: %s' % (i+1, len(artist_countries), artist, artist_countries[artist])
with open(path_out, 'wb') as f:
for t in tracks:
t['a']['c'] = artist_countries[t['a']['n']]
f.write('%s\n' % json.dumps(t))
def main():
parser = OptionParser()
parser.add_option('-i', dest='input')
parser.add_option('-o', dest='output')
parser.add_option('-w', dest='wiki', action="store_true")
options, args = parser.parse_args()
if options.wiki:
add_countries(options.input, options.output)
else:
migrate(options.input, options.output)
if __name__ == '__main__':
main()<|fim▁end|> | with open(path_in, 'rb') as f:
for line in f: |
<|file_name|>dialogs_lt.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS version="2.1" language="lt">
<context>
<name>ReminderDialog</name>
<message>
<location filename="../src/reboot-reminder-dialog/reminderdialog.cpp" line="35"/>
<source>Restart the computer to use the system and the applications properly</source>
<translation>Norėdami tinkamai naudotis sistema ir programomis, paleiskite kompiuterį iš naujo</translation>
</message>
<message>
<location filename="../src/reboot-reminder-dialog/reminderdialog.cpp" line="36"/>
<source>Make sure all data had been saved first</source>
<translation>Iš pradžių, įsitikinkite, kad visi duomenys buvo įrašyti</translation>
</message>
<message>
<location filename="../src/reboot-reminder-dialog/reminderdialog.cpp" line="41"/>
<source>Cancel</source>
<translation>Atsisakyti</translation>
</message>
<message>
<location filename="../src/reboot-reminder-dialog/reminderdialog.cpp" line="41"/>
<source>Shut down</source><|fim▁hole|> <message>
<location filename="../src/reboot-reminder-dialog/reminderdialog.cpp" line="44"/>
<source>Reboot</source>
<translation>Paleisti iš naujo</translation>
</message>
</context>
</TS><|fim▁end|> | <translation>Išjungti</translation>
</message> |
<|file_name|>test_ldap.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations<|fim▁hole|>
from keystone.identity.backends import ldap
from keystone.tests.unit import core
from keystone.tests.unit.identity.backends import test_base
from keystone.tests.unit.ksfixtures import ldapdb
class TestIdentityDriver(core.BaseTestCase,
test_base.IdentityDriverTests):
allows_name_update = False
allows_self_service_change_password = False
expected_is_domain_aware = False
expected_default_assignment_driver = 'sql'
expected_is_sql = False
expected_generates_uuids = False
def setUp(self):
super(TestIdentityDriver, self).setUp()
config_fixture_ = self.useFixture(config_fixture.Config())
config_fixture_.config(
group='ldap',
url='fake://memory',
user='cn=Admin',
password='password',
suffix='cn=example,cn=com')
self.useFixture(ldapdb.LDAPDatabase())
self.driver = ldap.Identity()<|fim▁end|> | # under the License.
from oslo_config import fixture as config_fixture |
<|file_name|>Sam and sub-strings.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Problem Statement
Samantha and Sam are playing a game. They have 'N' balls in front of them, each ball numbered from 0 to 9, except the
first ball which is numbered from 1 to 9. Samantha calculates all the sub-strings of the number thus formed, one by one.
If the sub-string is S, Sam has to throw 'S' candies into an initially empty box. At the end of the game, Sam has to
find out the total number of candies in the box, T. As T can be large, Samantha asks Sam to tell T % (109+7) instead.
If Sam answers correctly, he can keep all the candies. Sam can't take all this Maths and asks for your help.
"""
__author__ = 'Danyang'
MOD = 1e9 + 7
class Solution(object):
def solve_TLE(self, cipher):
"""
O(N^2)
:param cipher: the cipher
"""
A = map(int, list(cipher))
f = A[0]
num = A[0]
sig = 1
for i in xrange(1, len(A)):
num = 10 * num + A[i]
sig *= 10
temp = num
temp_sig = sig
while temp_sig >= 1:
f += temp
f %= MOD
temp %= temp_sig
temp_sig /= 10
return int(f)
def solve(self, cipher):
"""
O(N)
example: 1234
1
12, 2
123, 23, 3
1234, 234, 34, 4
:param cipher:
:return:
"""
pre = [0 for _ in cipher]
pre[0] = int(cipher[0])
for i in xrange(1, len(cipher)):
pre[i] = (pre[i - 1] * 10 + int(cipher[i]) * (i + 1)) % MOD
s = 0
for elt in pre:
s = (s + elt) % MOD
return int(s)
if __name__ == "__main__":
import sys
f = open("0.in", "r")
# f = sys.stdin
solution = Solution()
# construct cipher
cipher = f.readline().strip()<|fim▁hole|> s = "%s\n" % (solution.solve(cipher))
print s,<|fim▁end|> |
# solve |
<|file_name|>colors.js<|end_file_name|><|fim▁begin|>({
// local representation of all CSS3 named colors, companion to dojo.colors. To be used where descriptive information
// is required for each color, such as a palette widget, and not for specifying color programatically.
//Note: due to the SVG 1.0 spec additions, some of these are alternate spellings for the same color e.g. gray vs. gray.
//TODO: should we be using unique rgb values as keys instead and avoid these duplicates, or rely on the caller to do the reverse mapping?
aliceblue: "أزرق فاتح",
antiquewhite: "أبيض عتيق",
aqua: "أزرق مائي",
aquamarine: "أزرق مائل للأخضر (زبرجد) ",
azure: "أزرق سماوي ",
beige: "بيج",
bisque: "أصفر برتقالي الى رمادي مصفر",
black: "أسود",
blanchedalmond: "أخضر مائل للبياض",
blue: "أزرق",
blueviolet: "أزرق-بنفسجي",
brown: "بني",
burlywood: "خشبي",
cadetblue: "أزرق ملون بالرمادي",
chartreuse: "أخضر مائل للصفرة",
chocolate: "بني غامق",
coral: "مرجاني",
cornflowerblue: "أزرق عنبري",
cornsilk: "حريري",
crimson: "قرمزي",
cyan: "أزرق سماوي",
darkblue: "أزرق داكن",
darkcyan: "أزرق سماوي داكن",
darkgoldenrod: "أصفر ذهبي داكن ",
darkgray: "رمادي داكن",
darkgreen: "أخضر داكن",
darkgrey: "رمادي داكن", // same as darkgray
darkkhaki: "كاكي داكن",
darkmagenta: "قرمزي داكن",
darkolivegreen: "أخضر زيتوني داكن",<|fim▁hole|>darkseagreen: "أخضر مائل للأزرق داكن",
darkslateblue: "أزرق اردوازي داكن",
darkslategray: "رمادي اردوازي داكن",
darkslategrey: "رمادي اردوازي داكن", // same as darkslategray
darkturquoise: "تركواز داكن",
darkviolet: "بنفسجي داكن",
deeppink: "أحمر وردي غامق",
deepskyblue: "أزرق سماوي غامق",
dimgray: "رمادي شاحب",
dimgrey: "رمادي شاحب", // same as dimgray
dodgerblue: "أزرق عنبري",
firebrick: "أصفر زاهي",
floralwhite: "أبيض زهري ",
forestgreen: "أخضر بلون أشجار الغابات ",
fuchsia: "فوشيا",
gainsboro: "رمادي مائل للأزرق فاتح ",
ghostwhite: "أبيض شفاف",
gold: "ذهبي",
goldenrod: "أصفر ذهبي ",
gray: "رمادي",
green: "أخضر",
greenyellow: "أخضر مائل للأصفر",
grey: "رمادي", // same as gray
honeydew: "أبيض مائل للأخضر ",
hotpink: "أحمر وردي زاهي",
indianred: "أحمر هندي",
indigo: "نيلي",
ivory: "عاجي",
khaki: "كاكي",
lavender: "أرجواني شاحب",
lavenderblush: "أحمر أرجواني",
lawngreen: "أخضر بلون العشب ",
lemonchiffon: "أصفر شفاف ",
lightblue: "أزرق فاتح",
lightcoral: "مرجاني فاتح",
lightcyan: "سماوي فاتح",
lightgoldenrodyellow: "أصفر ذهبي فاتح",
lightgray: "رمادي فاتح",
lightgreen: "أخضر فاتح",
lightgrey: "رمادي فاتح", // same as lightgray
lightpink: "وردي فاتح",
lightsalmon: "فضي فاتح",
lightseagreen: "أخضر مائل للأزرق فاتح",
lightskyblue: "أزرق سماوي فاتح",
lightslategray: "رمادي اردوازي فاتح",
lightslategrey: "رمادي اردوازي فاتح", // same as lightslategray
lightsteelblue: "أزرق معدني فاتح",
lightyellow: "أصفر فاتح ",
lime: "ليموني ",
limegreen: "أخضر ليموني ",
linen: "كتاني ",
magenta: "أحمر قرمزي ",
maroon: "أحمر داكن ",
mediumaquamarine: "أزرق مائل للأخضر (زبرجد) متوسط ",
mediumblue: "أزرق متوسط ",
mediumorchid: "أرجواني متوسط ",
mediumpurple: "قرمزي متوسط ",
mediumseagreen: "أخضر مائل للأزرق متوسط ",
mediumslateblue: "أزرق اردوازي متوسط ",
mediumspringgreen: "أخضر ربيعي متوسط ",
mediumturquoise: "تركواز متوسط ",
mediumvioletred: "أحمر-بنفسجي متوسط ",
midnightblue: "أزرق بحري ",
mintcream: "أصفر شاحب مائل للأخضر الزرعي ",
mistyrose: "وردي ",
moccasin: "نحاسي أحمر ",
navajowhite: "أبيض ملاحي ",
navy: "أزرق داكن ",
oldlace: "برتقالي مائل للأصفر شاحب ",
olive: "أخضر زيتوني داكن ",
olivedrab: "أسود فاتح ",
orange: "برتقالي ",
orangered: "أحمر مائل للبرتقالي ",
orchid: "أرجواني فاتح ",
palegoldenrod: "أصفر ذهبي شاحب ",
palegreen: "أخضر شاحب ",
paleturquoise: "تركواز شاحب ",
palevioletred: "أحمر-بنفسجي شاحب ",
papayawhip: "خوخي فاتح ",
peachpuff: "خوخي مائل للأصفر ",
peru: "بني جملي ",
pink: "وردي ",
plum: "أرجواني داكن ",
powderblue: "أزرق مائل للأصفر ",
purple: "ارجواني ",
red: "أحمر ",
rosybrown: "بني وردي ",
royalblue: "أزرق ملكي ",
saddlebrown: "بني فاتح ",
salmon: "برتقالي وردي شاحب ",
sandybrown: "بني مائل للصفرة ",
seagreen: "أخضر مائل للأزرق ",
seashell: "أبيض مائل للأصفر فاتح ",
sienna: "بني محروق ",
silver: "فضي ",
skyblue: "أزرق سماوي ",
slateblue: "أزرق اردوازي ",
slategray: "رمادي اردوازي ",
slategrey: "رمادي اردوازي ", // same as slategray
snow: "أبيض ثلجي ",
springgreen: "أخضر ربيعي ",
steelblue: "أزرق معدني ",
tan: "خمري ",
teal: "بترولي ",
thistle: "ارجواني شاحب ",
tomato: "أحمر مائل للأصفر ",
turquoise: "تركواز ",
violet: "بنفسجي ",
wheat: "أخضر قمحي ",
white: "أبيض ",
whitesmoke: "دخان أبيض ",
yellow: "أصفر ",
yellowgreen: "أخضر مائل للأصفر "
})<|fim▁end|> | darkorange: "برتقالي داكن",
darkorchid: "أرجواني داكن",
darkred: "أحمر داكن",
darksalmon: "فضي داكن", |
<|file_name|>psqt.rs<|end_file_name|><|fim▁begin|>use {Player,SQ,File,Piece};
use core::masks::*;
use core::score::*;
const BONUS: [[[Score; (FILE_CNT / 2)]; RANK_CNT]; PIECE_TYPE_CNT] = [
[ // NO PIECE
[ Score(0, 0), Score(0, 0), Score(0, 0), Score(0, 0)],
[ Score(0, 0), Score(0, 0), Score(0, 0), Score(0, 0)],
[ Score(0, 0), Score(0, 0), Score(0, 0), Score(0, 0)],
[ Score(0, 0), Score(0, 0), Score(0, 0), Score(0, 0)],
[ Score(0, 0), Score(0, 0), Score(0, 0), Score(0, 0)],
[ Score(0, 0), Score(0, 0), Score(0, 0), Score(0, 0)],
[ Score(0, 0), Score(0, 0), Score(0, 0), Score(0, 0)],
[ Score(0, 0), Score(0, 0), Score(0, 0), Score(0, 0)],
],
[ // Pawn
[ Score( 0, 0), Score( 0, 0), Score( 0, 0), Score( 0, 0) ],
[ Score(-11, 7), Score( 6,-4), Score( 7, 8), Score( 3,-2) ],
[ Score(-18,-4), Score( -2,-5), Score( 19, 5), Score(24, 4) ],
[ Score(-17, 3), Score( -9, 3), Score( 20,-8), Score(35,-3) ],
[ Score( -6, 8), Score( 5, 9), Score( 3, 7), Score(21,-6) ],
[ Score( -6, 8), Score( -8,-5), Score( -6, 2), Score(-2, 4) ],
[ Score( -4, 3), Score( 20,-9), Score( -8, 1), Score(-4,18) ],
[ Score( 0, 0), Score( 0, 0), Score( 0, 0), Score( 0, 0) ]
],
[ // Knight
[ Score(-161,-105), Score(-96,-82), Score(-80,-46), Score(-73,-14) ],
[ Score( -83, -69), Score(-43,-54), Score(-21,-17), Score(-10, 9) ],
[ Score( -71, -50), Score(-22,-39), Score( 0, -7), Score( 9, 28) ],
[ Score( -25, -41), Score( 18,-25), Score( 43, 6), Score( 47, 38) ],
[ Score( -26, -46), Score( 16,-25), Score( 38, 3), Score( 50, 40) ],
[ Score( -11, -54), Score( 37,-38), Score( 56, -7), Score( 65, 27) ],
[ Score( -63, -65), Score(-19,-50), Score( 5,-24), Score( 14, 13) ],
[ Score(-195,-109), Score(-67,-89), Score(-42,-50), Score(-29,-13) ]
],
[ // Bishop
[ Score(-44,-58), Score(-13,-31), Score(-25,-37), Score(-34,-19) ],
[ Score(-20,-34), Score( 20, -9), Score( 12,-14), Score( 1, 4) ],
[ Score( -9,-23), Score( 27, 0), Score( 21, -3), Score( 11, 16) ],
[ Score(-11,-26), Score( 28, -3), Score( 21, -5), Score( 10, 16) ],
[ Score(-11,-26), Score( 27, -4), Score( 16, -7), Score( 9, 14) ],
[ Score(-17,-24), Score( 16, -2), Score( 12, 0), Score( 2, 13) ],
[ Score(-23,-34), Score( 17,-10), Score( 6,-12), Score( -2, 6) ],
[ Score(-35,-55), Score(-11,-32), Score(-19,-36), Score(-29,-17) ]
],
[ // Rook
[ Score(-25, 0), Score(-16, 0), Score(-16, 0), Score(-9, 0) ],
[ Score(-21, 0), Score( -8, 0), Score( -3, 0), Score( 0, 0) ],
[ Score(-21, 0), Score( -9, 0), Score( -4, 0), Score( 2, 0) ],
[ Score(-22, 0), Score( -6, 0), Score( -1, 0), Score( 2, 0) ],
[ Score(-22, 0), Score( -7, 0), Score( 0, 0), Score( 1, 0) ],
[ Score(-21, 0), Score( -7, 0), Score( 0, 0), Score( 2, 0) ],
[ Score(-12, 0), Score( 4, 0), Score( 8, 0), Score(12, 0) ],
[ Score(-23, 0), Score(-15, 0), Score(-11, 0), Score(-5, 0) ]
],
[ // Queen
[ Score( 0,-71), Score(-4,-56), Score(-3,-42), Score(-1,-29) ],
[ Score(-4,-56), Score( 6,-30), Score( 9,-21), Score( 8, -5) ],
[ Score(-2,-39), Score( 6,-17), Score( 9, -8), Score( 9, 5) ],
[ Score(-1,-29), Score( 8, -5), Score(10, 9), Score( 7, 19) ],
[ Score(-3,-27), Score( 9, -5), Score( 8, 10), Score( 7, 21) ],
[ Score(-2,-40), Score( 6,-16), Score( 8,-10), Score(10, 3) ],
[ Score(-2,-55), Score( 7,-30), Score( 7,-21), Score( 6, -6) ],
[ Score(-1,-74), Score(-4,-55), Score(-1,-43), Score( 0,-30) ]
],
[ // King
[ Score(267, 0), Score(320, 48), Score(270, 75), Score(195, 84) ],
[ Score(264, 43), Score(304, 92), Score(238,143), Score(180,132) ],
[ Score(200, 83), Score(245,138), Score(176,167), Score(110,165) ],
[ Score(177,106), Score(185,169), Score(148,169), Score(110,179) ],
[ Score(149,108), Score(177,163), Score(115,200), Score( 66,203) ],
[ Score(118, 95), Score(159,155), Score( 84,176), Score( 41,174) ],
[ Score( 87, 50), Score(128, 99), Score( 63,122), Score( 20,139) ],
[ Score( 63, 9), Score( 88, 55), Score( 47, 80), Score( 0, 90) ]
],
[ // ALL PIECE
[ Score(0, 0), Score(0, 0), Score(0, 0), Score(0, 0)],
[ Score(0, 0), Score(0, 0), Score(0, 0), Score(0, 0)],
[ Score(0, 0), Score(0, 0), Score(0, 0), Score(0, 0)],
[ Score(0, 0), Score(0, 0), Score(0, 0), Score(0, 0)],
[ Score(0, 0), Score(0, 0), Score(0, 0), Score(0, 0)],
[ Score(0, 0), Score(0, 0), Score(0, 0), Score(0, 0)],
[ Score(0, 0), Score(0, 0), Score(0, 0), Score(0, 0)],
[ Score(0, 0), Score(0, 0), Score(0, 0), Score(0, 0)],
]
];
static mut PSQ: [[Score; SQ_CNT]; PIECE_CNT] =
[[Score(0,0); SQ_CNT]; PIECE_CNT];
static PIECE_VALUE: [[Value; PHASE_CNT]; PIECE_CNT] =
[[0, 0], // Empty
[ PAWN_MG, PAWN_EG], // White Pawn
[ KNIGHT_MG, KNIGHT_EG],// White Knight
[ BISHOP_MG, BISHOP_EG],// White Bishop
[ ROOK_MG, ROOK_EG], // White Rook
[ QUEEN_MG, QUEEN_MG], // White Queen
[ ZERO, ZERO], // White King
[0, 0],
[0, 0], // Empty
[ PAWN_MG, PAWN_EG], // Black Pawn
[ KNIGHT_MG, KNIGHT_EG],// Black Knight
[ BISHOP_MG, BISHOP_EG],// Black Bishop
[ ROOK_MG, ROOK_EG], // Black Rook
[ QUEEN_MG, QUEEN_MG], // Black Queen
[ ZERO, ZERO], // Black King
[0, 0],
];
#[cold]
pub fn init_psqt() {
for piece in 0..PIECE_TYPE_CNT {
let v: Score = Score(PIECE_VALUE[piece][0], PIECE_VALUE[piece][1]);
for s in 0..SQ_CNT {
let sq: SQ = SQ(s as u8);
let f: File = sq.file().min(!sq.file());
let score = v + BONUS[piece][sq.rank() as usize][f as usize];
unsafe {
PSQ[(Player::White as usize) << 3 | piece][s] = score;
PSQ[(Player::Black as usize) << 3 | piece][sq.flip().0 as usize] = -score;
}
}
}
}
/// Returns the score for a player's piece being at a particular square.
#[inline(always)]
pub fn psq(piece: Piece, sq: SQ) -> Score{
debug_assert!(sq.is_okay());
unsafe {
*(PSQ.get_unchecked(piece as usize)).get_unchecked(sq.0 as usize)
}
}
/// Returns the value of a piece for a player. If `eg` is true, it returns the end game value. Otherwise,
/// it'll return the midgame value.
#[inline(always)]
pub fn piece_value(piece: Piece, eg: bool) -> Value {
unsafe {<|fim▁hole|> }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn psq_tes() {
init_psqt();
assert_eq!(psq(Piece::WhiteQueen, SQ::A1), -psq(Piece::BlackQueen, SQ::A8));
assert_eq!(psq(Piece::WhiteRook, SQ::A1), -psq( Piece::BlackRook, SQ::A8));
assert_eq!(psq(Piece::WhitePawn, SQ::B1), -psq( Piece::BlackPawn, SQ::B8));
assert_eq!(psq(Piece::BlackKnight, SQ::B4), -psq(Piece::WhiteKnight,SQ::B5));
}
}<|fim▁end|> | (*(PIECE_VALUE.get_unchecked(piece as usize)).get_unchecked(eg as usize)) |
<|file_name|>callback.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python2.7
from django.http import HttpResponse
from AfricasTalkingGateway import AfricasTalkingGateway, AfricasTalkingGatewayException
from reminder.models import Reminder
import sys
import os
import django
sys.path.append("/home/foxtrot/Dropbox/tunza_v2/")
os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.local"
django.setup()
username = "OtisKe"
apikey = "07984423a278ead54fee35d3daf956598deb51405b27fe70f1e2dfe964be5c04"
gateway = AfricasTalkingGateway(username, apikey)
# replace this line with a list of numbers from the
# patient__patient_contact linked to reminder model
reminder_service = Reminder.objects.values_list('service_id',
'patient_id',
'service__service_name',
'service__service_url',
'patient__patient_contact', )
# replace this message with service about from service__service_about
# linked to reminder model
def voice_callback(request):
if request.method == 'POST':
is_active = request.values.get('isActive', None)
session_id = request.values.get('sessionId', None)
caller_number = request.values.get('callerNumber', None)
direction = request.values.get('direction', None)
print "is_active -> ", is_active
if is_active == str(0):
# Compose the response
duration = request.values.get('durationInSeconds', None)
currency_code = request.values.get('currencyCode', None)
amount = request.values.get('amount', None)
# update session info to Redis
print duration, currency_code, amount
respond = '<?xml version="1.0" encoding="UTF-8"?>'
respond += '<Response>'
respond += '<Say playBeep="false" >Welcome to the reminder system</Say>'
respond += '</Response>'<|fim▁hole|> resp['Cache-Control'] = 'no-cache'
return resp
if is_active == str(1):
# Compose the response
respond = '<?xml version="1.0" encoding="UTF-8"?>'
respond += '<Response>'
respond += '<Say playBeep="false" >Welcome to mTunza.org</Say>'
respond += '</Response>'
resp = HttpResponse(respond, 200, content_type='application/xml')
resp['Cache-Control'] = 'no-cache'
return resp
else:
resp = HttpResponse('Bad Request', 400, content_type='application/xml', )
resp['Cache-Control'] = 'no-cache'
return resp<|fim▁end|> |
resp = HttpResponse(respond, 200, content_type='application/xml') |
<|file_name|>metrics.go<|end_file_name|><|fim▁begin|>// Copyright 2019, OpenCensus Authors
//<|fim▁hole|>// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package tailsampling
import (
"go.opencensus.io/stats"
"go.opencensus.io/stats/view"
"go.opencensus.io/tag"
"github.com/census-instrumentation/opencensus-service/internal/collector/telemetry"
)
// Variables related to metrics specific to tail sampling.
var (
tagPolicyKey, _ = tag.NewKey("policy")
tagSampledKey, _ = tag.NewKey("sampled")
statDecisionLatencyMicroSec = stats.Int64("sampling_decision_latency", "Latency (in microseconds) of a given sampling policy", "µs")
statOverallDecisionLatencyµs = stats.Int64("sampling_decision_timer_latency", "Latency (in microseconds) of each run of the sampling decision timer", "µs")
statTraceRemovalAgeSec = stats.Int64("sampling_trace_removal_age", "Time (in seconds) from arrival of a new trace until its removal from memory", "s")
statLateSpanArrivalAfterDecision = stats.Int64("sampling_late_span_age", "Time (in seconds) from the sampling decision was taken and the arrival of a late span", "s")
statPolicyEvaluationErrorCount = stats.Int64("sampling_policy_evaluation_error", "Count of sampling policy evaluation errors", stats.UnitDimensionless)
statCountTracesSampled = stats.Int64("count_traces_sampled", "Count of traces that were sampled or not", stats.UnitDimensionless)
statDroppedTooEarlyCount = stats.Int64("sampling_trace_dropped_too_early", "Count of traces that needed to be dropped the configured wait time", stats.UnitDimensionless)
statNewTraceIDReceivedCount = stats.Int64("new_trace_id_received", "Counts the arrival of new traces", stats.UnitDimensionless)
statTracesOnMemoryGauge = stats.Int64("sampling_traces_on_memory", "Tracks the number of traces current on memory", stats.UnitDimensionless)
)
// SamplingProcessorMetricViews return the metrics views according to given telemetry level.
func SamplingProcessorMetricViews(level telemetry.Level) []*view.View {
if level == telemetry.None {
return nil
}
policyTagKeys := []tag.Key{tagPolicyKey}
latencyDistributionAggregation := view.Distribution(1, 2, 5, 10, 25, 50, 75, 100, 150, 200, 300, 400, 500, 750, 1000, 2000, 3000, 4000, 5000, 10000, 20000, 30000, 50000)
ageDistributionAggregation := view.Distribution(1, 2, 5, 10, 20, 30, 40, 50, 60, 90, 120, 180, 300, 600, 1800, 3600, 7200)
decisionLatencyView := &view.View{
Name: statDecisionLatencyMicroSec.Name(),
Measure: statDecisionLatencyMicroSec,
Description: statDecisionLatencyMicroSec.Description(),
TagKeys: policyTagKeys,
Aggregation: latencyDistributionAggregation,
}
overallDecisionLatencyView := &view.View{
Name: statOverallDecisionLatencyµs.Name(),
Measure: statOverallDecisionLatencyµs,
Description: statOverallDecisionLatencyµs.Description(),
Aggregation: latencyDistributionAggregation,
}
traceRemovalAgeView := &view.View{
Name: statTraceRemovalAgeSec.Name(),
Measure: statTraceRemovalAgeSec,
Description: statTraceRemovalAgeSec.Description(),
Aggregation: ageDistributionAggregation,
}
lateSpanArrivalView := &view.View{
Name: statLateSpanArrivalAfterDecision.Name(),
Measure: statLateSpanArrivalAfterDecision,
Description: statLateSpanArrivalAfterDecision.Description(),
Aggregation: ageDistributionAggregation,
}
countPolicyEvaluationErrorView := &view.View{
Name: statPolicyEvaluationErrorCount.Name(),
Measure: statPolicyEvaluationErrorCount,
Description: statPolicyEvaluationErrorCount.Description(),
Aggregation: view.Sum(),
}
sampledTagKeys := []tag.Key{tagPolicyKey, tagSampledKey}
countTracesSampledView := &view.View{
Name: statCountTracesSampled.Name(),
Measure: statCountTracesSampled,
Description: statCountTracesSampled.Description(),
TagKeys: sampledTagKeys,
Aggregation: view.Sum(),
}
countTraceDroppedTooEarlyView := &view.View{
Name: statDroppedTooEarlyCount.Name(),
Measure: statDroppedTooEarlyCount,
Description: statDroppedTooEarlyCount.Description(),
Aggregation: view.Sum(),
}
countTraceIDArrivalView := &view.View{
Name: statNewTraceIDReceivedCount.Name(),
Measure: statNewTraceIDReceivedCount,
Description: statNewTraceIDReceivedCount.Description(),
Aggregation: view.Sum(),
}
trackTracesOnMemorylView := &view.View{
Name: statTracesOnMemoryGauge.Name(),
Measure: statTracesOnMemoryGauge,
Description: statTracesOnMemoryGauge.Description(),
Aggregation: view.LastValue(),
}
return []*view.View{
decisionLatencyView,
overallDecisionLatencyView,
traceRemovalAgeView,
lateSpanArrivalView,
countPolicyEvaluationErrorView,
countTracesSampledView,
countTraceDroppedTooEarlyView,
countTraceIDArrivalView,
trackTracesOnMemorylView,
}
}<|fim▁end|> | // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. |
<|file_name|>DeleteTail.java<|end_file_name|><|fim▁begin|>package chapter1_3;
public class DeleteTail<Item> {
private Node<Item> first;
private int N;
public int getN(){
return N;
}
private static class Node<Item>{
private Item item;
private Node<Item> next;
}
public static void main(String[] args) {
// TODO Auto-generated method stub
DeleteTail<String> dt = new DeleteTail<String>();
Node<String> p = new Node<String>();
for(int i = 0; i < dt.getN()-1; i++){
p = p.next;<|fim▁hole|> }
Node<String> q = p.next;
p.next = null;
}
}<|fim▁end|> | |
<|file_name|>ole32.go<|end_file_name|><|fim▁begin|>// Copyright 2010-2012 The W32 Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build windows
package w32
import (
"syscall"
"unsafe"
)<|fim▁hole|> procCoInitializeEx = modole32.NewProc("CoInitializeEx")
procCoInitialize = modole32.NewProc("CoInitialize")
procCoUninitialize = modole32.NewProc("CoUninitialize")
procCreateStreamOnHGlobal = modole32.NewProc("CreateStreamOnHGlobal")
)
func CoInitializeEx(coInit uintptr) HRESULT {
ret, _, _ := procCoInitializeEx.Call(
0,
coInit)
switch uint32(ret) {
case E_INVALIDARG:
panic("CoInitializeEx failed with E_INVALIDARG")
case E_OUTOFMEMORY:
panic("CoInitializeEx failed with E_OUTOFMEMORY")
case E_UNEXPECTED:
panic("CoInitializeEx failed with E_UNEXPECTED")
}
return HRESULT(ret)
}
func CoInitialize() {
procCoInitialize.Call(0)
}
func CoUninitialize() {
procCoUninitialize.Call()
}
func CreateStreamOnHGlobal(hGlobal HGLOBAL, fDeleteOnRelease bool) *IStream {
stream := new(IStream)
ret, _, _ := procCreateStreamOnHGlobal.Call(
uintptr(hGlobal),
uintptr(BoolToBOOL(fDeleteOnRelease)),
uintptr(unsafe.Pointer(&stream)))
switch uint32(ret) {
case E_INVALIDARG:
panic("CreateStreamOnHGlobal failed with E_INVALIDARG")
case E_OUTOFMEMORY:
panic("CreateStreamOnHGlobal failed with E_OUTOFMEMORY")
case E_UNEXPECTED:
panic("CreateStreamOnHGlobal failed with E_UNEXPECTED")
}
return stream
}<|fim▁end|> |
var (
modole32 = syscall.NewLazyDLL("ole32.dll")
|
<|file_name|>neptune.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>
extern crate rusoto_core;
extern crate rusoto_neptune;
use rusoto_core::Region;
use rusoto_neptune::{DescribeDBClustersMessage, Neptune, NeptuneClient};
#[tokio::test]
async fn should_describe_db_clusters() {
let client = NeptuneClient::new(Region::UsEast1);
let request = DescribeDBClustersMessage::default();
match client.describe_db_clusters(request).await {
Err(e) => panic!("Error listing Neptune clusters: {}", e),
Ok(clusters) => println!("Found clusters: {:?}", clusters),
}
}<|fim▁end|> | #![cfg(feature = "neptune")] |
<|file_name|>scraper.py<|end_file_name|><|fim▁begin|>"""
This module contains a single class that manages the scraping of data
from one or more supermarkets on mysupermarket.co.uk
"""
from datetime import datetime
from os import remove
from os.path import isfile, getmtime
from time import time
from scrapy import signals
from scrapy.crawler import Crawler
from scrapy.utils.project import get_project_settings
from app_config import supermarket_names, supermarket_url, supermarket_filename
from .reactor_control import ReactorControl
from .spiders.mysupermarket import MySupermarketSpider
class CachingScraper():
"""
A "crawler manager" that manages scraping mysupermarket.co.uk for one or
more supermarkets. For each supermarket, it checks the cache file then
creates and starts a crawler if appropriate.
"""
def __init__(self, supermarkets=supermarket_names(), force_refresh=False):
"""Create a CachingScraper for the given supermarket(s).
Keyword arguments:
supermarkets -- a list of supermarkets to scrape
force_refresh -- if True, cachefiles will not be used
"""
self.force_refresh = force_refresh
self.supermarkets = supermarkets
self.reactor_control = ReactorControl()
def cache_exists(self, supermarket):
"""Check whether a JSON file already exists for data scraped from
the given supermarket, and if so, whether it was created today.
Note that 'created today' is not the same as 'age < 24 hours'. Prices
are assumed to change overnight so a cachefile created at 9pm
yesterday is considered out of date at 9am today (but a cachefile
created at 9am is not out of date at 9pm).
Keyword arguments:
supermarket -- the supermarket whose cachefile should be checked
"""
cachefile = supermarket_filename(supermarket)
if not isfile(cachefile):
return False
mtime = datetime.fromtimestamp(getmtime(cachefile))
now = datetime.fromtimestamp(time())
return mtime.day == now.day
def setup_crawler(self, supermarket, reactor_control):
"""Set up the Scrapy crawler.
See http://doc.scrapy.org/en/latest/topics/practices.html#run-scrapy-from-a-script.
Keyword arguments:
supermarket -- the supermarket whose crawler should be set up
"""
cachefile = supermarket_filename(supermarket)
if isfile(cachefile):
remove(cachefile)
settings = get_project_settings()
url = supermarket_url(supermarket)
settings.set('FEED_URI', supermarket_filename(supermarket))
spider = MySupermarketSpider(url)
crawler = Crawler(settings)
crawler.signals.connect(reactor_control.remove_crawler, signal=signals.spider_closed)
crawler.configure()
crawler.crawl(spider)
crawler.start()
reactor_control.add_crawler()
def get_data(self):
"""Main entry point for the scraper class. Crawl or get data from cache
for the configured supermarkets. Supermarkets are set in __init__.<|fim▁hole|> """
if self.force_refresh:
supermarkets_to_crawl = self.supermarkets
else:
supermarkets_to_crawl = [x for x in self.supermarkets if not self.cache_exists(x)]
if supermarkets_to_crawl:
reactor_control = ReactorControl()
for supermarket in supermarkets_to_crawl:
self.setup_crawler(supermarket, reactor_control)
reactor_control.start_crawling()<|fim▁end|> | |
<|file_name|>ec2_ami_find.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: ec2_ami_find
version_added: 2.0
short_description: Searches for AMIs to obtain the AMI ID and other information
description:
- Returns list of matching AMIs with AMI ID, along with other useful information
- Can search AMIs with different owners
- Can search by matching tag(s), by AMI name and/or other criteria
- Results can be sorted and sliced
author: "Tom Bamford (@tombamford)"
notes:
- This module is not backwards compatible with the previous version of the ec2_search_ami module which worked only for Ubuntu AMIs listed on cloud-images.ubuntu.com.
- See the example below for a suggestion of how to search by distro/release.
options:
region:
description:
- The AWS region to use.
required: true
aliases: [ 'aws_region', 'ec2_region' ]
owner:
description:
- Search AMIs owned by the specified owner
- Can specify an AWS account ID, or one of the special IDs 'self', 'amazon' or 'aws-marketplace'
- If not specified, all EC2 AMIs in the specified region will be searched.
- You can include wildcards in many of the search options. An asterisk (*) matches zero or more characters, and a question mark (?) matches exactly one character. You can escape special characters using a backslash (\) before the character. For example, a value of \*amazon\?\\ searches for the literal string *amazon?\.
required: false
default: null
ami_id:
description:
- An AMI ID to match.
default: null
required: false
ami_tags:
description:
- A hash/dictionary of tags to match for the AMI.
default: null
required: false
architecture:
description:
- An architecture type to match (e.g. x86_64).
default: null
required: false
hypervisor:
description:
- A hypervisor type type to match (e.g. xen).
default: null
required: false
is_public:
description:
- Whether or not the image(s) are public.
choices: ['yes', 'no']
default: null
required: false
name:
description:
- An AMI name to match.
default: null
required: false
platform:
description:
- Platform type to match.
default: null
required: false
sort:
description:
- Optional attribute which with to sort the results.
- If specifying 'tag', the 'tag_name' parameter is required.
choices: ['name', 'description', 'tag']
default: null
required: false
sort_tag:
description:
- Tag name with which to sort results.
- Required when specifying 'sort=tag'.
default: null
required: false
sort_order:
description:
- Order in which to sort results.
- Only used when the 'sort' parameter is specified.
choices: ['ascending', 'descending']
default: 'ascending'
required: false
sort_start:
description:
- Which result to start with (when sorting).
- Corresponds to Python slice notation.
default: null
required: false
sort_end:
description:
- Which result to end with (when sorting).
- Corresponds to Python slice notation.
default: null
required: false
state:
description:
- AMI state to match.
default: 'available'
required: false
virtualization_type:
description:
- Virtualization type to match (e.g. hvm).
default: null
required: false
no_result_action:
description:
- What to do when no results are found.
- "'success' reports success and returns an empty array"
- "'fail' causes the module to report failure"
choices: ['success', 'fail']
default: 'success'
required: false
requirements:
- "python >= 2.6"
- boto
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Search for the AMI tagged "project:website"
- ec2_ami_find:
owner: self
ami_tags:
project: website
no_result_action: fail
register: ami_find
# Search for the latest Ubuntu 14.04 AMI
- ec2_ami_find:
name: "ubuntu/images/ebs/ubuntu-trusty-14.04-amd64-server-*"
owner: 099720109477
sort: name
sort_order: descending
sort_end: 1
register: ami_find
# Launch an EC2 instance
- ec2:
image: "{{ ami_find.results[0].ami_id }}"
instance_type: m3.medium
key_name: mykey
wait: yes
'''
try:
import boto.ec2
HAS_BOTO=True
except ImportError:
HAS_BOTO=False
import json
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
region = dict(required=True,
aliases = ['aws_region', 'ec2_region']),
owner = dict(required=False, default=None),
ami_id = dict(required=False),
ami_tags = dict(required=False, type='dict',
aliases = ['search_tags', 'image_tags']),
architecture = dict(required=False),
hypervisor = dict(required=False),
is_public = dict(required=False),
name = dict(required=False),
platform = dict(required=False),
sort = dict(required=False, default=None,
choices=['name', 'description', 'tag']),
sort_tag = dict(required=False),
sort_order = dict(required=False, default='ascending',
choices=['ascending', 'descending']),
sort_start = dict(required=False),
sort_end = dict(required=False),
state = dict(required=False, default='available'),
virtualization_type = dict(required=False),
no_result_action = dict(required=False, default='success',
choices = ['success', 'fail']),
)
)
module = AnsibleModule(
argument_spec=argument_spec,
)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module, install via pip or your package manager')
ami_id = module.params.get('ami_id')
ami_tags = module.params.get('ami_tags')
architecture = module.params.get('architecture')
hypervisor = module.params.get('hypervisor')
is_public = module.params.get('is_public')
name = module.params.get('name')
owner = module.params.get('owner')
platform = module.params.get('platform')
sort = module.params.get('sort')
sort_tag = module.params.get('sort_tag')
sort_order = module.params.get('sort_order')
sort_start = module.params.get('sort_start')
sort_end = module.params.get('sort_end')
state = module.params.get('state')
virtualization_type = module.params.get('virtualization_type')<|fim▁hole|>
if ami_id:
filter['image_id'] = ami_id
if ami_tags:
for tag in ami_tags:
filter['tag:'+tag] = ami_tags[tag]
if architecture:
filter['architecture'] = architecture
if hypervisor:
filter['hypervisor'] = hypervisor
if is_public:
filter['is_public'] = is_public
if name:
filter['name'] = name
if platform:
filter['platform'] = platform
if virtualization_type:
filter['virtualization_type'] = virtualization_type
ec2 = ec2_connect(module)
images_result = ec2.get_all_images(owners=owner, filters=filter)
if no_result_action == 'fail' and len(images_result) == 0:
module.fail_json(msg="No AMIs matched the attributes: %s" % json.dumps(filter))
results = []
for image in images_result:
data = {
'ami_id': image.id,
'architecture': image.architecture,
'description': image.description,
'is_public': image.is_public,
'name': image.name,
'owner_id': image.owner_id,
'platform': image.platform,
'root_device_name': image.root_device_name,
'root_device_type': image.root_device_type,
'state': image.state,
'tags': image.tags,
'virtualization_type': image.virtualization_type,
}
if image.kernel_id:
data['kernel_id'] = image.kernel_id
if image.ramdisk_id:
data['ramdisk_id'] = image.ramdisk_id
results.append(data)
if sort == 'tag':
if not sort_tag:
module.fail_json(msg="'sort_tag' option must be given with 'sort=tag'")
results.sort(key=lambda e: e['tags'][sort_tag], reverse=(sort_order=='descending'))
elif sort:
results.sort(key=lambda e: e[sort], reverse=(sort_order=='descending'))
try:
if sort and sort_start and sort_end:
results = results[int(sort_start):int(sort_end)]
elif sort and sort_start:
results = results[int(sort_start):]
elif sort and sort_end:
results = results[:int(sort_end)]
except TypeError:
module.fail_json(msg="Please supply numeric values for sort_start and/or sort_end")
module.exit_json(results=results)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()<|fim▁end|> | no_result_action = module.params.get('no_result_action')
filter = {'state': state} |
<|file_name|>smart-answers.js<|end_file_name|><|fim▁begin|>function browserSupportsHtml5HistoryApi() {
return !! (history && history.replaceState && history.pushState);
}
$(document).ready(function() {
//_gaq.push(['_trackEvent', 'Citizen-Format-Smartanswer', 'Load']);
if(browserSupportsHtml5HistoryApi()) {
var formSelector = ".current form";
initializeHistory();
var getCurrentPosition = function () {
var slugArray = document.URL.split('/');
return slugArray.splice(3, slugArray.length).join('/');
};
// events
// get new questions on submit
$(formSelector).live('submit', function(event) {
$('input[type=submit]', this).attr('disabled', 'disabled');
var form = $(this);
var postData = form.serializeArray();
reloadQuestions(form.attr('action'), postData);
event.preventDefault();
return false;
});
// Track when a user clicks on 'Start again' link
$('.start-right').live('click', function() {
window._gaq && window._gaq.push(['_trackEvent', 'MS_smart_answer', getCurrentPosition(), 'Start again']);
reloadQuestions($(this).attr('href'));
return false;
});
// Track when a user clicks on a 'Change Answer' link
$('.link-right a').live('click', function() {
var href = $(this).attr('href');
window._gaq && window._gaq.push(['_trackEvent', 'MS_smart_answer', href, 'Change Answer']);
reloadQuestions(href);
return false;
});
// manage next/back by tracking popstate event
window.onpopstate = function (event) {
if(event.state !== null) {
updateContent(event.state['html_fragment']);
} else {
return false;
}
};
}
$('#current-error').focus();
// helper functions
function toJsonUrl(url) {
var parts = url.split('?');
var json_url = parts[0].replace(/\/$/, "") + ".json";
if (parts[1]) {
json_url += "?";
json_url += parts[1];
}
return window.location.protocol + "//" + window.location.host + json_url;
}
function fromJsonUrl(url) {
return url.replace(/\.json$/, "");
}
function redirectToNonAjax(url) {
window.location = url;
}
// replace all the questions currently in the page with whatever is returned for given url
function reloadQuestions(url, params) {
var url = toJsonUrl(url);
addLoading('<p class="next-step">Loading next step…</p>');
$.ajax(url, {
type: 'GET',
dataType:'json',
data: params,
timeout: 10000,
error: function(jqXHR, textStatus, errorStr) {
var paramStr = $.param(params);
redirectToNonAjax(url.replace('.json', '?' + paramStr).replace('??', '?'));
},
success: function(data, textStatus, jqXHR) {
addToHistory(data);
updateContent(data['html_fragment']);
}
});
}
// manage the URL
function addToHistory(data) {
history.pushState(data, data['title'], data['url']);
window._gaq && window._gaq.push(['_trackPageview', data['url']]);
}
// add an indicator of loading
function addLoading(fragment){
$('#content .step.current')
.addClass('loading')
.find('form .next-question')
.append(fragment);
$.event.trigger('smartanswerAnswer');
};
// update the content (i.e. plonk in the html fragment)
function updateContent(fragment){
$('.smart_answer #js-replaceable').html(fragment);
$.event.trigger('smartanswerAnswer');
if ($(".outcome").length !== 0) {
$.event.trigger('smartanswerOutcome');
}
}
function initializeHistory(data) {
if (! browserSupportsHtml5HistoryApi() && window.location.pathname.match(/\/.*\//) ) {
addToHistory({url: window.location.pathname});
}
data = {
html_fragment: $('.smart_answer #js-replaceable').html(),
title: "Question",
url: window.location.toString()
};
history.replaceState(data, data['title'], data['url']);
}
var contentPosition = {
latestQuestionTop : 0,
latestQuestionIsOffScreen: function($latestQuestion) {
var top_of_view = $(window).scrollTop();
this.latestQuestionTop = $latestQuestion.offset().top;
return (this.latestQuestionTop < top_of_view);
},
correctOffscreen: function() {
$latestQuestion = $('.smart_answer .done-questions li.done:last-child');
if (!$latestQuestion.length) {
$latestQuestion = $('body');
}<|fim▁hole|> if(this.latestQuestionIsOffScreen($latestQuestion)) {
$(window).scrollTop(this.latestQuestionTop);
}
},
init: function() {
var self = this;
$(document).bind('smartanswerAnswer', function() {
self.correctOffscreen();
$('.meta-wrapper').show();
});
// Show feedback form in outcomes
$(document).bind('smartanswerOutcome', function() {
$('.report-a-problem-container form #url').val(window.location.href);
$('.meta-wrapper').show();
});
}
};
contentPosition.init();
});<|fim▁end|> | |
<|file_name|>companies.js<|end_file_name|><|fim▁begin|>$(function(){
$("#addCompanyForm").validate({
rules: {
name : {
required : true
},
email: {
required: true,
email: true
},
url : {
required : true,
url : true
}
},
messages: {
name : {
required : "Please enter your company name"
},
url : {
required : "Please enter your company website",
url : "Please enter a valid url"
},
email: {
required: "Enter your Company email address",
email: "Please enter a valid email address",
}
}
});
$('#addCompanyDialog').on('hide.bs.modal', function (e) {
refresh();
});
$('#addCompanyDialog').on('shown.bs.modal', function (e) {
$('#name').focus();
$('#id').val('');
var id = $(e.relatedTarget).attr('id');
var isEdit = $(e.relatedTarget).hasClass('fa-edit');
console.log(isEdit);
if(isEdit){
$('#id').val(id);
$.getJSON('/account/companies/' + id, function(data){
if(data.result){
$('#name').val(data.result.name);
$('#email').val(data.result.email);
$('#url').val(data.result.url);
}
});
}
var validator = $( "#addCompanyForm" ).validate();
validator.resetForm();
});
$('#confirm-delete').on('show.bs.modal', function(e) {
var id = $(e.relatedTarget).attr('id');
$(this).find('.btn-ok').on('click', function(){
$.ajax({
url: '/account/companies/' + id,
type: 'delete',
dataType: 'json',
success: function(data) {
$('#message ').html('<div class="error" style="text-align:center;padding:5px;">' + data.message + '</div>')
$('#confirm-delete').modal('hide');
if(data.result)
getCompanies();
}
});
});
});
$("#addCompanyForm").submit(function(e) {
e.preventDefault();
if($( "#addCompanyForm" ).valid()){
var actionurl = '/account/companies';
var type = 'post';
console.log($('#id').val() != '');
if($('#id').val() != ''){
type = 'put';
actionurl = '/account/companies/' + $('#id').val();
}
//var actionurl = e.currentTarget.action;
$.ajax({
url: actionurl,
type: type,
dataType: 'json',
data: $("#addCompanyForm").serialize(),
success: function(data) {
$('#message ').html('<div class="error" style="text-align:center;padding:5px;">' + data.message + '</div>')
$('#addCompanyDialog').modal('hide');
if(data.result)
getCompanies();
}
});
}
});
var getCompanies= function(){
$('#companylist').html('<div class="loader"><i class="fa fa-spinner fa-pulse"></i></div>');
$.get('/account/companies/list', function(data){
$('#companylist').html(data);
$('#message ').html('');
});
};
$('#refresh').on('click', function () {
$('#message ').html('');
getCompanies();
});
var refresh = function () {
$('#id').val('');
$('#name').val('');
$('#url').val('');
$('#email').val('');
};
<|fim▁hole|>});<|fim▁end|> | getCompanies(); |
<|file_name|>BootTableRenderer.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (C) 2016 Kwaku Twumasi-Afriyie <[email protected]>.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Kwaku Twumasi-Afriyie <[email protected]> - initial API and implementation
******************************************************************************/
package com.quakearts.webapp.facelets.bootstrap.renderers;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.faces.component.UIColumn;
import javax.faces.component.UIComponent;
import javax.faces.component.UIData;
import javax.faces.context.FacesContext;
import javax.faces.context.ResponseWriter;
import com.quakearts.webapp.facelets.bootstrap.components.BootTable;
import com.quakearts.webapp.facelets.bootstrap.renderkit.Attribute;
import com.quakearts.webapp.facelets.bootstrap.renderkit.AttributeManager;
import com.quakearts.webapp.facelets.bootstrap.renderkit.html_basic.HtmlBasicRenderer;
import com.quakearts.webapp.facelets.util.UtilityMethods;
import static com.quakearts.webapp.facelets.bootstrap.renderkit.RenderKitUtils.*;
public class BootTableRenderer extends HtmlBasicRenderer {
private static final Attribute[] ATTRIBUTES =
AttributeManager.getAttributes(AttributeManager.Key.DATATABLE);
@Override
public void encodeBegin(FacesContext context, UIComponent component)
throws IOException {
if (!shouldEncode(component)) {
return;
}
BootTable data = (BootTable) component;
data.setRowIndex(-1);
ResponseWriter writer = context.getResponseWriter();
writer.startElement("table", component);
writer.writeAttribute("id", component.getClientId(context),
"id");
String styleClass = data.get("styleClass");
writer.writeAttribute("class","table "+(styleClass !=null?" "+styleClass:""), "styleClass");
renderHTML5DataAttributes(context, component);
renderPassThruAttributes(context, writer, component,
ATTRIBUTES);
writer.writeText("\n", component, null);
UIComponent caption = getFacet(component, "caption");
if (caption != null) {
String captionClass = data.get("captionClass");
String captionStyle = data.get("captionStyle");
writer.startElement("caption", component);
if (captionClass != null) {
writer.writeAttribute("class", captionClass, "captionClass");
}
if (captionStyle != null) {
writer.writeAttribute("style", captionStyle, "captionStyle");
}
encodeRecursive(context, caption);
writer.endElement("caption");
}
UIComponent colGroups = getFacet(component, "colgroups");
if (colGroups != null) {
encodeRecursive(context, colGroups);
}
BootMetaInfo info = getMetaInfo(context, component);
UIComponent header = getFacet(component, "header");
if (header != null || info.hasHeaderFacets) {
String headerClass = data.get("headerClass");
writer.startElement("thead", component);
writer.writeText("\n", component, null);
if (header != null) {
writer.startElement("tr", header);
writer.startElement("th", header);
if (headerClass != null) {
writer.writeAttribute("class", headerClass, "headerClass");
}
if (info.columns.size() > 1) {
writer.writeAttribute("colspan",
String.valueOf(info.columns.size()), null);
}
writer.writeAttribute("scope", "colgroup", null);
encodeRecursive(context, header);
writer.endElement("th");
writer.endElement("tr");
writer.write("\n");
}
if (info.hasHeaderFacets) {
writer.startElement("tr", component);
writer.writeText("\n", component, null);
for (UIColumn column : info.columns) {
String columnHeaderClass = info.getCurrentHeaderClass();
writer.startElement("th", column);
if (columnHeaderClass != null) {
writer.writeAttribute("class", columnHeaderClass,
"columnHeaderClass");
} else if (headerClass != null) {
writer.writeAttribute("class", headerClass, "headerClass");
}
writer.writeAttribute("scope", "col", null);
UIComponent facet = getFacet(column, "header");
if (facet != null) {
encodeRecursive(context, facet);
}
writer.endElement("th");
writer.writeText("\n", component, null);
}
writer.endElement("tr");
writer.write("\n");
}
writer.endElement("thead");
writer.writeText("\n", component, null);
}
}
@Override
public void encodeChildren(FacesContext context, UIComponent component)
throws IOException {
if (!shouldEncodeChildren(component)) {
return;
}
UIData data = (UIData) component;
ResponseWriter writer = context.getResponseWriter();
BootMetaInfo info = getMetaInfo(context, data);
if(info.columns.isEmpty()) {
writer.startElement("tbody", component);
renderEmptyTableRow(writer, component);
<|fim▁hole|>
int processed = 0;
int rowIndex = data.getFirst() - 1;
int rows = data.getRows();
List<Integer> bodyRows = getBodyRows(context.getExternalContext().getApplicationMap(), data);
boolean hasBodyRows = (bodyRows != null && !bodyRows.isEmpty());
boolean wroteTableBody = false;
if (!hasBodyRows) {
writer.startElement("tbody", component);
writer.writeText("\n", component, null);
}
boolean renderedRow = false;
while (true) {
if ((rows > 0) && (++processed > rows)) {
break;
}
data.setRowIndex(++rowIndex);
if (!data.isRowAvailable()) {
break;
}
if (hasBodyRows && bodyRows.contains(data.getRowIndex())) {
if (wroteTableBody) {
writer.endElement("tbody");
}
writer.startElement("tbody", data);
wroteTableBody = true;
}
writer.startElement("tr", component);
if (info.rowClasses.length > 0) {
writer.writeAttribute("class", info.getCurrentRowClass(),
"rowClasses");
}
writer.writeText("\n", component, null);
info.newRow();
for (UIColumn column : info.columns) {
boolean isRowHeader = Boolean.TRUE.equals(column.getAttributes()
.get("rowHeader"));
if (isRowHeader) {
writer.startElement("th", column);
writer.writeAttribute("scope", "row", null);
} else {
writer.startElement("td", column);
}
String columnClass = info.getCurrentColumnClass();
if (columnClass != null) {
writer.writeAttribute("class", columnClass, "columnClasses");
}
for (Iterator<UIComponent> gkids = getChildren(column); gkids
.hasNext();) {
encodeRecursive(context, gkids.next());
}
if (isRowHeader) {
writer.endElement("th");
} else {
writer.endElement("td");
}
writer.writeText("\n", component, null);
}
writer.endElement("tr");
writer.write("\n");
renderedRow = true;
}
if(!renderedRow) {
renderEmptyTableRow(writer, data);
}
writer.endElement("tbody");
writer.writeText("\n", component, null);
data.setRowIndex(-1);
}
@Override
public void encodeEnd(FacesContext context, UIComponent component)
throws IOException {
if (!shouldEncode(component)) {
return;
}
ResponseWriter writer = context.getResponseWriter();
BootMetaInfo info = getMetaInfo(context, component);
UIComponent footer = getFacet(component, "footer");
if (footer != null || info.hasFooterFacets) {
String footerClass = (String) component.getAttributes().get("footerClass");
writer.startElement("tfoot", component);
writer.writeText("\n", component, null);
if (info.hasFooterFacets) {
writer.startElement("tr", component);
writer.writeText("\n", component, null);
for (UIColumn column : info.columns) {
String columnFooterClass = (String) column.getAttributes().get(
"footerClass");
writer.startElement("td", column);
if (columnFooterClass != null) {
writer.writeAttribute("class", columnFooterClass,
"columnFooterClass");
} else if (footerClass != null) {
writer.writeAttribute("class", footerClass, "footerClass");
}
UIComponent facet = getFacet(column, "footer");
if (facet != null) {
encodeRecursive(context, facet);
}
writer.endElement("td");
writer.writeText("\n", component, null);
}
writer.endElement("tr");
writer.write("\n");
}
if (footer != null) {
writer.startElement("tr", footer);
writer.startElement("td", footer);
if (footerClass != null) {
writer.writeAttribute("class", footerClass, "footerClass");
}
if (info.columns.size() > 1) {
writer.writeAttribute("colspan",
String.valueOf(info.columns.size()), null);
}
encodeRecursive(context, footer);
writer.endElement("td");
writer.endElement("tr");
writer.write("\n");
}
writer.endElement("tfoot");
writer.writeText("\n", component, null);
}
clearMetaInfo(context, component);
((UIData) component).setRowIndex(-1);
writer.endElement("table");
writer.writeText("\n", component, null);
}
private List<Integer> getBodyRows(Map<String, Object> appMap, UIData data) {
List<Integer> result = null;
String bodyRows = (String) data.getAttributes().get("bodyrows");
if (bodyRows != null) {
String [] rows = UtilityMethods.split(appMap, bodyRows, ",");
if (rows != null) {
result = new ArrayList<Integer>(rows.length);
for (String curRow : rows) {
result.add(Integer.valueOf(curRow));
}
}
}
return result;
}
private void renderEmptyTableRow(final ResponseWriter writer,
final UIComponent component) throws IOException {
writer.startElement("tr", component);
writer.startElement("td", component);
writer.endElement("td");
writer.endElement("tr");
}
protected BootTableRenderer.BootMetaInfo getMetaInfo(FacesContext context,
UIComponent table) {
String key = createKey(table);
Map<Object, Object> attributes = context.getAttributes();
BootMetaInfo info = (BootMetaInfo) attributes
.get(key);
if (info == null) {
info = new BootMetaInfo(table);
attributes.put(key, info);
}
return info;
}
protected void clearMetaInfo(FacesContext context, UIComponent table) {
context.getAttributes().remove(createKey(table));
}
protected String createKey(UIComponent table) {
return BootMetaInfo.KEY + '_' + table.hashCode();
}
private static class BootMetaInfo {
private static final UIColumn PLACE_HOLDER_COLUMN = new UIColumn();
private static final String[] EMPTY_STRING_ARRAY = new String[0];
public static final String KEY = BootMetaInfo.class.getName();
public final String[] rowClasses;
public final String[] columnClasses;
public final String[] headerClasses;
public final List<UIColumn> columns;
public final boolean hasHeaderFacets;
public final boolean hasFooterFacets;
public final int columnCount;
public int columnStyleCounter;
public int headerStyleCounter;
public int rowStyleCounter;
public BootMetaInfo(UIComponent table) {
rowClasses = getRowClasses(table);
columnClasses = getColumnClasses(table);
headerClasses = getHeaderClasses(table);
columns = getColumns(table);
columnCount = columns.size();
hasHeaderFacets = hasFacet("header", columns);
hasFooterFacets = hasFacet("footer", columns);
}
public void newRow() {
columnStyleCounter = 0;
headerStyleCounter = 0;
}
public String getCurrentColumnClass() {
String style = null;
if (columnStyleCounter < columnClasses.length
&& columnStyleCounter <= columnCount) {
style = columnClasses[columnStyleCounter++];
}
return ((style != null && style.length() > 0) ? style : null);
}
public String getCurrentHeaderClass() {
String style = null;
if (headerStyleCounter < headerClasses.length
&& headerStyleCounter <= columnCount) {
style = headerClasses[headerStyleCounter++];
}
return ((style != null && style.length() > 0) ? style : null);
}
public String getCurrentRowClass() {
String style = rowClasses[rowStyleCounter++];
if (rowStyleCounter >= rowClasses.length) {
rowStyleCounter = 0;
}
return style;
}
private static String[] getColumnClasses(UIComponent table) {
String values = ((BootTable) table).get("columnClasses");
if (values == null) {
return EMPTY_STRING_ARRAY;
}
Map<String, Object> appMap = FacesContext.getCurrentInstance()
.getExternalContext().getApplicationMap();
return UtilityMethods.split(appMap, values.trim(), ",");
}
private static String[] getHeaderClasses(UIComponent table) {
String values = ((BootTable) table).get("headerClasses");
if (values == null) {
return EMPTY_STRING_ARRAY;
}
Map<String, Object> appMap = FacesContext.getCurrentInstance()
.getExternalContext().getApplicationMap();
return UtilityMethods.split(appMap, values.trim(), ",");
}
private static List<UIColumn> getColumns(UIComponent table) {
if (table instanceof UIData) {
int childCount = table.getChildCount();
if (childCount > 0) {
List<UIColumn> results = new ArrayList<UIColumn>(childCount);
for (UIComponent kid : table.getChildren()) {
if ((kid instanceof UIColumn) && kid.isRendered()) {
results.add((UIColumn) kid);
}
}
return results;
} else {
return Collections.emptyList();
}
} else {
int count;
Object value = table.getAttributes().get("columns");
if ((value != null) && (value instanceof Integer)) {
count = ((Integer) value);
} else {
count = 2;
}
if (count < 1) {
count = 1;
}
List<UIColumn> result = new ArrayList<UIColumn>(count);
for (int i = 0; i < count; i++) {
result.add(PLACE_HOLDER_COLUMN);
}
return result;
}
}
private static boolean hasFacet(String name, List<UIColumn> columns) {
if (!columns.isEmpty()) {
for (UIColumn column : columns) {
if (column.getFacetCount() > 0) {
if (column.getFacets().containsKey(name)) {
return true;
}
}
}
}
return false;
}
private static String[] getRowClasses(UIComponent table) {
String values = ((BootTable) table).get("rowClasses");
if (values == null) {
return (EMPTY_STRING_ARRAY);
}
Map<String, Object> appMap = FacesContext.getCurrentInstance()
.getExternalContext().getApplicationMap();
return UtilityMethods.split(appMap, values.trim(), ",");
}
}
}<|fim▁end|> | writer.endElement("tbody");
return;
}
|
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# SRL 5 documentation build configuration file, created by
# sphinx-quickstart on Sat Oct 16 15:51:55 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
#extensions = ['sphinx.ext.pngmath', 'sphinx.ext.jsmath']
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'SRL 5'
copyright = u'2010, SRL Developers'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '5'
# The full version, including alpha/beta/rc tags.
release = '1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:<|fim▁hole|>#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'nature'
#html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'SRL5doc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'SRL5.tex', u'SRL 5 Documentation',
u'SRL Developers', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True<|fim▁end|> | |
<|file_name|>test_permissions.py<|end_file_name|><|fim▁begin|>"""
This test will use the default permissions found in
flaskbb.utils.populate
"""
from flaskbb.utils.permissions import *
<|fim▁hole|> """
assert moderator_user in forum.moderators
assert can_post_reply(moderator_user, forum)
assert can_post_topic(moderator_user, forum)
assert can_edit_post(moderator_user, topic.user_id, forum)
assert can_moderate(moderator_user, forum)
assert can_delete_post(moderator_user, topic.user_id, forum)
assert can_delete_topic(moderator_user, topic.user_id, forum)
def test_moderator_permissions_without_forum(
forum, moderator_user, topic, topic_moderator):
"""Test the moderator permissions in a forum where the user is not a
moderator.
"""
forum.moderators.remove(moderator_user)
assert not moderator_user in forum.moderators
assert not can_moderate(moderator_user, forum)
assert can_post_reply(moderator_user, forum)
assert can_post_topic(moderator_user, forum)
assert not can_edit_post(moderator_user, topic.user_id, forum)
assert not can_delete_post(moderator_user, topic.user_id, forum)
assert not can_delete_topic(moderator_user, topic.user_id, forum)
# Test with own topic
assert can_delete_post(moderator_user, topic_moderator.user_id, forum)
assert can_delete_topic(moderator_user, topic_moderator.user_id, forum)
assert can_edit_post(moderator_user, topic_moderator.user_id, forum)
# Test moderator permissions
assert can_edit_user(moderator_user)
assert can_ban_user(moderator_user)
def test_normal_permissions(forum, user, topic):
"""Test the permissions for a normal user."""
assert not can_moderate(user, forum)
assert can_post_reply(user, forum)
assert can_post_topic(user, forum)
assert can_edit_post(user, topic.user_id, forum)
assert not can_delete_post(user, topic.user_id, forum)
assert not can_delete_topic(user, topic.user_id, forum)
assert not can_edit_user(user)
assert not can_ban_user(user)
def test_admin_permissions(forum, admin_user, topic):
"""Test the permissions for a admin user."""
assert can_moderate(admin_user, forum)
assert can_post_reply(admin_user, forum)
assert can_post_topic(admin_user, forum)
assert can_edit_post(admin_user, topic.user_id, forum)
assert can_delete_post(admin_user, topic.user_id, forum)
assert can_delete_topic(admin_user, topic.user_id, forum)
assert can_edit_user(admin_user)
assert can_ban_user(admin_user)
def test_super_moderator_permissions(forum, super_moderator_user, topic):
"""Test the permissions for a super moderator user."""
assert can_moderate(super_moderator_user, forum)
assert can_post_reply(super_moderator_user, forum)
assert can_post_topic(super_moderator_user, forum)
assert can_edit_post(super_moderator_user, topic.user_id, forum)
assert can_delete_post(super_moderator_user, topic.user_id, forum)
assert can_delete_topic(super_moderator_user, topic.user_id, forum)
assert can_edit_user(super_moderator_user)
assert can_ban_user(super_moderator_user)
def test_can_moderate_without_permission(moderator_user):
"""Test can moderate for a moderator_user without a permission."""
assert can_moderate(moderator_user) == False<|fim▁end|> | def test_moderator_permissions_in_forum(
forum, moderator_user, topic, topic_moderator):
"""Test the moderator permissions in a forum where the user is a
moderator. |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub mod multi;
pub mod threshold;
use std::cmp::min;
use crate::{Levenshtein, Metric};
impl Default for Levenshtein {
fn default() -> Self {
Levenshtein {}
}
}
impl<T: Eq, R: AsRef<[T]>> Metric<T, R> for Levenshtein {
fn distance(a: R, b: R) -> usize {
distance(a.as_ref(), b.as_ref())
}
}
fn distance<T>(a: &[T], b: &[T]) -> usize
where
T: Eq,
{
let a_length = a.len();
let b_length = b.len();
if a_length == 0 {
return b_length;
} else if b_length == 0 {
return a_length;
}
let (row_items, column_items) = if a_length < b_length { (b, a) } else { (a, b) };
let mut buffer = first_row(row_items, column_items);
other_rows(row_items, column_items, &mut buffer);
*buffer.last().unwrap()
}
fn first_row<T>(row_items: &[T], column_items: &[T]) -> Vec<usize>
where
T: Eq,
{
let columns = column_items.len();
let mut buffer = Vec::with_capacity(columns);
let mut column_iter = column_items.iter().enumerate();
let (_, column_item0) = column_iter.next().unwrap();
let row_item0 = &row_items[0];
// Row 1, Column 1
buffer.push(if row_item0 == column_item0 { 0 } else { 1 });
// Row 1, Column 2+
for (column, column_item) in column_iter {
let value = {
let diag = column;
if row_item0 == column_item {
diag
} else {
let left = buffer[column - 1];
min(diag, left) + 1<|fim▁hole|> }
buffer
}
fn other_rows<T>(row_items: &[T], column_items: &[T], buffer: &mut Vec<usize>)
where
T: Eq,
{
let row_iter = row_items.iter().enumerate();
for (row, row_item) in row_iter {
let mut last_up = buffer[0];
let mut column_iter = column_items.iter().enumerate();
// Row 2+, Column 1
let (_, column_item0) = column_iter.next().unwrap();
buffer[0] = {
let diag = row;
if row_item == column_item0 {
diag
} else {
let up = buffer[0];
min(diag, up) + 1
}
};
// Row 2+, Column 2+
for (column, column_item) in column_iter {
let value = {
let diag = last_up;
if row_item == column_item {
diag
} else {
let left = buffer[column - 1];
let up = buffer[column];
min(min(diag, left), up) + 1
}
};
last_up = buffer[column];
buffer[column] = value;
}
}
}<|fim▁end|> | }
};
buffer.push(value); |
<|file_name|>default_name_resolver.py<|end_file_name|><|fim▁begin|>import scrapy
from scrapy.crawler import CrawlerProcess
class IPv6Spider(scrapy.Spider):
name = "ipv6_spider"<|fim▁hole|>
process = CrawlerProcess(settings={"RETRY_ENABLED": False})
process.crawl(IPv6Spider)
process.start()<|fim▁end|> | start_urls = ["http://[::1]"]
|
<|file_name|>bba880ef5bbd_add_is_loud_and_pronouns_columns_to_.py<|end_file_name|><|fim▁begin|>"""Add is_loud and pronouns columns to PanelApplicant
Revision ID: bba880ef5bbd
Revises: 8f8419ebcf27
Create Date: 2019-07-20 02:57:17.794469
"""
# revision identifiers, used by Alembic.
revision = 'bba880ef5bbd'
down_revision = '8f8419ebcf27'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
try:
is_sqlite = op.get_context().dialect.name == 'sqlite'
except Exception:
is_sqlite = False
if is_sqlite:
op.get_context().connection.execute('PRAGMA foreign_keys=ON;')
utcnow_server_default = "(datetime('now', 'utc'))"
else:
utcnow_server_default = "timezone('utc', current_timestamp)"
def sqlite_column_reflect_listener(inspector, table, column_info):
"""Adds parenthesis around SQLite datetime defaults for utcnow."""
if column_info['default'] == "datetime('now', 'utc')":
column_info['default'] = utcnow_server_default
sqlite_reflect_kwargs = {
'listeners': [('column_reflect', sqlite_column_reflect_listener)]
}
# ===========================================================================
# HOWTO: Handle alter statements in SQLite
#
# def upgrade():
# if is_sqlite:
# with op.batch_alter_table('table_name', reflect_kwargs=sqlite_reflect_kwargs) as batch_op:
# batch_op.alter_column('column_name', type_=sa.Unicode(), server_default='', nullable=False)
# else:
# op.alter_column('table_name', 'column_name', type_=sa.Unicode(), server_default='', nullable=False)
#
# ===========================================================================
def upgrade():
op.add_column('panel_applicant', sa.Column('other_pronouns', sa.Unicode(), server_default='', nullable=False))<|fim▁hole|>def downgrade():
op.drop_column('panel_application', 'is_loud')
op.drop_column('panel_applicant', 'pronouns')
op.drop_column('panel_applicant', 'other_pronouns')<|fim▁end|> | op.add_column('panel_applicant', sa.Column('pronouns', sa.Unicode(), server_default='', nullable=False))
op.add_column('panel_application', sa.Column('is_loud', sa.Boolean(), server_default='False', nullable=False))
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>#[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::CCR1 {
#[doc = r" Modifies the contents of the register"]
#[inline(always)]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline(always)]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline(always)]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline(always)]
pub fn reset(&self) {
self.write(|w| w)
}
}<|fim▁hole|>#[doc = r" Value of the field"]
pub struct CCR1_HR {
bits: u16,
}
impl CCR1_HR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u16 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct CCR1_LR {
bits: u16,
}
impl CCR1_LR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u16 {
self.bits
}
}
#[doc = r" Proxy"]
pub struct _CCR1_HW<'a> {
w: &'a mut W,
}
impl<'a> _CCR1_HW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u16) -> &'a mut W {
const MASK: u16 = 65535;
const OFFSET: u8 = 16;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _CCR1_LW<'a> {
w: &'a mut W,
}
impl<'a> _CCR1_LW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u16) -> &'a mut W {
const MASK: u16 = 65535;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bits 16:31 - High Capture/Compare 1 value (TIM2 only)"]
#[inline(always)]
pub fn ccr1_h(&self) -> CCR1_HR {
let bits = {
const MASK: u16 = 65535;
const OFFSET: u8 = 16;
((self.bits >> OFFSET) & MASK as u32) as u16
};
CCR1_HR { bits }
}
#[doc = "Bits 0:15 - Low Capture/Compare 1 value"]
#[inline(always)]
pub fn ccr1_l(&self) -> CCR1_LR {
let bits = {
const MASK: u16 = 65535;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) as u16
};
CCR1_LR { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline(always)]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bits 16:31 - High Capture/Compare 1 value (TIM2 only)"]
#[inline(always)]
pub fn ccr1_h(&mut self) -> _CCR1_HW {
_CCR1_HW { w: self }
}
#[doc = "Bits 0:15 - Low Capture/Compare 1 value"]
#[inline(always)]
pub fn ccr1_l(&mut self) -> _CCR1_LW {
_CCR1_LW { w: self }
}
}<|fim▁end|> | |
<|file_name|>utilities.py<|end_file_name|><|fim▁begin|>'''Simple utility functions that should really be in a C module'''
from math import *
from OpenGLContext.arrays import *
from OpenGLContext import vectorutilities
def rotMatrix( (x,y,z,a) ):
"""Given rotation as x,y,z,a (a in radians), return rotation matrix
Returns a 4x4 rotation matrix for the given rotation,
the matrix is a Numeric Python array.
x,y,z should be a unit vector.
"""
c = cos( a )
s = sin( a )
t = 1-c
R = array( [
[ t*x*x+c, t*x*y+s*z, t*x*z-s*y, 0],
[ t*x*y-s*z, t*y*y+c, t*y*z+s*x, 0],
[ t*x*z+s*y, t*y*z-s*x, t*z*z+c, 0],
[ 0, 0, 0, 1]
] )
return R
def crossProduct( first, second ):
"""Given 2 4-item vectors, return the cross product as a 4-item vector"""
x,y,z = vectorutilities.crossProduct( first, second )[0]
return [x,y,z,0]
def magnitude( vector ):
"""Given a 3 or 4-item vector, return the vector's magnitude"""
return vectorutilities.magnitude( vector[:3] )[0]
def normalise( vector ):
"""Given a 3 or 4-item vector, return a 3-item unit vector"""
return vectorutilities.normalise( vector[:3] )[0]
def pointNormal2Plane( point, normal ):
"""Create parametric equation of plane from point and normal
"""
point = asarray(point,'f')
normal = normalise(normal)
result = zeros((4,),'f')
result[:3] = normal
result[3] = - dot(normal, point)
return result
def plane2PointNormal( (a,b,c,d) ):
"""Get a point and normal from a plane equation"""
return asarray((-d*a,-d*b,-d*c),'f'), asarray((a,b,c),'f')
def combineNormals( normals, weights=None ):
"""Given set of N normals, return (weighted) combination"""
normals = asarray( normals,'d')
if weights:
weights = reshape(asarray( weights, 'f'),(len(weights),1))
final = sum(normals*weights, 0)
else:
final = sum(normals,0)
x,y,z = final
if x == y == z == 0.0:
x,y,z = normals[0]
if x or y:
x,y,z = -x,-y,z
else:
x,y,z = -x,y,-z
return normalise( (x,y,z) )
def coplanar( points ):
"""Determine if points are coplanar
All sets of points < 4 are coplanar
Otherwise, take the first two points and create vector
for all other points, take vector to second point,
calculate cross-product where the cross-product is
non-zero (not colinear), if the normalised cross-product
is all equal, the points are collinear...
"""
points = asarray( points, 'f' )
if len(points) < 4:
return True
a,b = points[:2]<|fim▁hole|> rest,
vec1,
)
vecsNonZero = sometrue(vecs,1)
vecs = compress(vecsNonZero, vecs,0)
if not len(vecs):
return True
vecs = vectorutilities.normalise(vecs)
return allclose( vecs[0], vecs )<|fim▁end|> | vec1 = reshape(b-a,(1,3))
rest = points[2:] - b
vecs = vectorutilities.crossProduct( |
<|file_name|>entity_test.py<|end_file_name|><|fim▁begin|>import unittest
import itertools
class TestWorld(object):
def __init__(self, **kw):<|fim▁hole|> self.__dict__.update(kw)
self.components = self
self.entities = set()
self.new_entity_id = itertools.count().__next__
self.new_entity_id() # skip id 0
for comp in list(kw.values()):
comp.world = self
class TestComponent(dict):
def __init__(self):
self.entities = set()
def set(self, entity):
data = TestData()
self[entity] = data
self.entities.add(entity)
return data
def remove(self, entity):
del self[entity]
class TestData(object):
attr = 'deadbeef'
def __init__(self, **kw):
self.__dict__.update(kw)
class EntityTestCase(unittest.TestCase):
def test_repr(self):
from grease import Entity
entity = Entity(TestWorld())
self.assertTrue(repr(entity).startswith(
'<Entity id: %s of TestWorld' % entity.entity_id),
('<Entity id: %s of TestWorld' % entity.entity_id, repr(entity)))
def test_accessor_getattr_for_nonexistant_component(self):
from grease import Entity
comp = TestComponent()
world = TestWorld(test=comp)
entity = Entity(world)
self.assertTrue(entity not in comp)
self.assertRaises(AttributeError, getattr, entity, 'foo')
def test_accessor_getattr_for_non_member_entity(self):
from grease import Entity
comp = TestComponent()
world = TestWorld(test=comp)
entity = Entity(world)
accessor = entity.test
self.assertFalse(entity in comp)
self.assertRaises(AttributeError, getattr, accessor, 'attr')
def test_accessor_getattr_for_member_entity(self):
from grease import Entity
comp = TestComponent()
world = TestWorld(test=comp)
entity = Entity(world)
comp.set(entity)
self.assertTrue(entity in comp)
self.assertEqual(entity.test.attr, 'deadbeef')
def test_accessor_setattr_adds_non_member_entity(self):
from grease import Entity
comp = TestComponent()
world = TestWorld(test=comp)
entity = Entity(world)
self.assertFalse(entity in comp)
entity.test.attr = 'foobar'
self.assertEqual(entity.test.attr, 'foobar')
self.assertTrue(entity in comp)
def test_accessor_setattr_for_member_entity(self):
from grease import Entity
comp = TestComponent()
world = TestWorld(test=comp)
entity = Entity(world)
comp.set(entity)
self.assertNotEqual(entity.test.attr, 'spam')
entity.test.attr = 'spam'
self.assertTrue(entity in comp)
self.assertEqual(entity.test.attr, 'spam')
def test_eq(self):
from grease import Entity
world = TestWorld()
e1 = Entity(world)
e2 = Entity(world)
self.assertNotEqual(e1, e2)
e2.entity_id = e1.entity_id
self.assertEqual(e1, e2)
otherworld = TestWorld()
e3 = Entity(otherworld)
self.assertNotEqual(e1, e3)
self.assertNotEqual(e2, e3)
e3.entity_id = e1.entity_id
self.assertNotEqual(e1, e3)
self.assertNotEqual(e2, e3)
def test_delattr(self):
from grease import Entity
comp = TestComponent()
world = TestWorld(test=comp)
entity = Entity(world)
comp.set(entity)
self.assertTrue(entity in comp)
del entity.test
self.assertFalse(entity in comp)
def test_entity_id(self):
from grease import Entity
world = TestWorld()
entity1 = Entity(world)
entity2 = Entity(world)
self.assertTrue(entity1.entity_id > 0)
self.assertTrue(entity2.entity_id > 0)
self.assertNotEqual(entity1.entity_id, entity2.entity_id)
def test_delete_exists(self):
from grease import Entity
world = TestWorld()
self.assertEqual(world.entities, set())
entity1 = Entity(world)
entity2 = Entity(world)
self.assertEqual(world.entities, set([entity1, entity2]))
self.assertTrue(entity1.exists)
self.assertTrue(entity2.exists)
entity1.delete()
self.assertEqual(world.entities, set([entity2]))
self.assertFalse(entity1.exists)
self.assertTrue(entity2.exists)
entity2.delete()
self.assertEqual(world.entities, set())
self.assertFalse(entity1.exists)
self.assertFalse(entity2.exists)
def test_entity_subclass_slots(self):
from grease import Entity
class NewEntity(Entity):
pass
world = TestWorld()
entity = NewEntity(world)
self.assertRaises(AttributeError, setattr, entity, 'notanattr', 1234)
def test_entity_subclass_cant_have_slots(self):
from grease import Entity
self.assertRaises(TypeError,
type, 'Test', (Entity,), {'__slots__': ('foo', 'bar')})
def test_entity_subclass_init(self):
from grease import Entity
stuff = []
class TestEntity(Entity):
def __init__(self, world, other):
stuff.append(world)
stuff.append(other)
world = TestWorld()
TestEntity(world, self)
self.assertEqual(stuff, [world, self])
class EntityComponentAccessorTestCase(unittest.TestCase):
def test_getattr(self):
from grease.entity import EntityComponentAccessor
from grease import Entity
world = TestWorld()
entity = Entity(world)
component = {entity: TestData(foo=5)}
accessor = EntityComponentAccessor(component, entity)
self.assertEqual(accessor.foo, 5)
self.assertRaises(AttributeError, getattr, accessor, 'bar')
entity2 = Entity(world)
accessor = EntityComponentAccessor(component, entity2)
self.assertRaises(AttributeError, getattr, accessor, 'foo')
self.assertRaises(AttributeError, getattr, accessor, 'bar')
def test_setattr_member_entity(self):
from grease.entity import EntityComponentAccessor
from grease import Entity
world = TestWorld()
entity = Entity(world)
data = TestData(foo=5)
accessor = EntityComponentAccessor({entity: data}, entity)
self.assertEqual(data.foo, 5)
accessor.foo = 66
self.assertEqual(data.foo, 66)
accessor.bar = '!!'
self.assertEqual(data.bar, '!!')
def test_setattr_nonmember_entity(self):
from grease.entity import EntityComponentAccessor
from grease import Entity
world = TestWorld()
entity = Entity(world)
component = TestComponent()
accessor = EntityComponentAccessor(component, entity)
self.assertRaises(AttributeError, getattr, entity, 'baz')
self.assertTrue(entity not in component)
accessor.baz = 1000
self.assertTrue(entity in component)
self.assertEqual(accessor.baz, 1000)
self.assertEqual(component[entity].baz, 1000)
def test_truthiness(self):
from grease.entity import EntityComponentAccessor
from grease import Entity
world = TestWorld()
entity = Entity(world)
component = TestComponent()
accessor = EntityComponentAccessor(component, entity)
self.assertFalse(accessor)
component[entity] = 456
self.assertTrue(accessor)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>check_query_file.py<|end_file_name|><|fim▁begin|>from django.core.management.base import BaseCommand, CommandError
from django.core import management
from django.db.models import Count
from scoping.models import *
<|fim▁hole|> help = 'check a query file - how many records'
def add_arguments(self, parser):
parser.add_argument('qid',type=int)
def handle(self, *args, **options):
qid = options['qid']
q = Query.objects.get(pk=qid)
p = 'TY - '
if q.query_file.name is not '':
fpath = q.query_file.path
else:
if q.database=="scopus":
fname = 's_results.txt'
else:
fname = 'results.txt'
fpath = f'{settings.QUERY_DIR}/{qid}/{fname}'
with open(fpath, 'r') as f:
c = f.read().count(p)
print('\n{} documents in downloaded file\n'.format(c))
if q.doc_set.count() > 0:
yts = q.doc_set.values('PY').annotate(
n = Count('pk')
)
for y in yts:
print('{} documents in {}'.format(y['n'],y['PY']))<|fim▁end|> | class Command(BaseCommand): |
<|file_name|>generic.go<|end_file_name|><|fim▁begin|>/*
Copyright The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by informer-gen. DO NOT EDIT.
package informers
import (
"fmt"
v1 "k8s.io/api/admissionregistration/v1"
v1beta1 "k8s.io/api/admissionregistration/v1beta1"
apiserverinternalv1alpha1 "k8s.io/api/apiserverinternal/v1alpha1"
appsv1 "k8s.io/api/apps/v1"
appsv1beta1 "k8s.io/api/apps/v1beta1"
v1beta2 "k8s.io/api/apps/v1beta2"
autoscalingv1 "k8s.io/api/autoscaling/v1"
v2beta1 "k8s.io/api/autoscaling/v2beta1"
v2beta2 "k8s.io/api/autoscaling/v2beta2"
batchv1 "k8s.io/api/batch/v1"
batchv1beta1 "k8s.io/api/batch/v1beta1"
v2alpha1 "k8s.io/api/batch/v2alpha1"
certificatesv1 "k8s.io/api/certificates/v1"
certificatesv1beta1 "k8s.io/api/certificates/v1beta1"
coordinationv1 "k8s.io/api/coordination/v1"
coordinationv1beta1 "k8s.io/api/coordination/v1beta1"
corev1 "k8s.io/api/core/v1"
v1alpha1 "k8s.io/api/discovery/v1alpha1"
discoveryv1beta1 "k8s.io/api/discovery/v1beta1"
eventsv1 "k8s.io/api/events/v1"
eventsv1beta1 "k8s.io/api/events/v1beta1"
extensionsv1beta1 "k8s.io/api/extensions/v1beta1"
flowcontrolv1alpha1 "k8s.io/api/flowcontrol/v1alpha1"
networkingv1 "k8s.io/api/networking/v1"
networkingv1beta1 "k8s.io/api/networking/v1beta1"
nodev1alpha1 "k8s.io/api/node/v1alpha1"
nodev1beta1 "k8s.io/api/node/v1beta1"
policyv1beta1 "k8s.io/api/policy/v1beta1"
rbacv1 "k8s.io/api/rbac/v1"
rbacv1alpha1 "k8s.io/api/rbac/v1alpha1"
rbacv1beta1 "k8s.io/api/rbac/v1beta1"
schedulingv1 "k8s.io/api/scheduling/v1"
schedulingv1alpha1 "k8s.io/api/scheduling/v1alpha1"
schedulingv1beta1 "k8s.io/api/scheduling/v1beta1"
storagev1 "k8s.io/api/storage/v1"
storagev1alpha1 "k8s.io/api/storage/v1alpha1"
storagev1beta1 "k8s.io/api/storage/v1beta1"
schema "k8s.io/apimachinery/pkg/runtime/schema"
cache "k8s.io/client-go/tools/cache"
)
// GenericInformer is type of SharedIndexInformer which will locate and delegate to other
// sharedInformers based on type
type GenericInformer interface {
Informer() cache.SharedIndexInformer
Lister() cache.GenericLister
}
type genericInformer struct {
informer cache.SharedIndexInformer
resource schema.GroupResource
}
// Informer returns the SharedIndexInformer.
func (f *genericInformer) Informer() cache.SharedIndexInformer {
return f.informer
}
// Lister returns the GenericLister.
func (f *genericInformer) Lister() cache.GenericLister {
return cache.NewGenericLister(f.Informer().GetIndexer(), f.resource)
}
// ForResource gives generic access to a shared informer of the matching type
// TODO extend this to unknown resources with a client pool
func (f *sharedInformerFactory) ForResource(resource schema.GroupVersionResource) (GenericInformer, error) {<|fim▁hole|> // Group=admissionregistration.k8s.io, Version=v1
case v1.SchemeGroupVersion.WithResource("mutatingwebhookconfigurations"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Admissionregistration().V1().MutatingWebhookConfigurations().Informer()}, nil
case v1.SchemeGroupVersion.WithResource("validatingwebhookconfigurations"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Admissionregistration().V1().ValidatingWebhookConfigurations().Informer()}, nil
// Group=admissionregistration.k8s.io, Version=v1beta1
case v1beta1.SchemeGroupVersion.WithResource("mutatingwebhookconfigurations"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Admissionregistration().V1beta1().MutatingWebhookConfigurations().Informer()}, nil
case v1beta1.SchemeGroupVersion.WithResource("validatingwebhookconfigurations"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Admissionregistration().V1beta1().ValidatingWebhookConfigurations().Informer()}, nil
// Group=apps, Version=v1
case appsv1.SchemeGroupVersion.WithResource("controllerrevisions"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1().ControllerRevisions().Informer()}, nil
case appsv1.SchemeGroupVersion.WithResource("daemonsets"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1().DaemonSets().Informer()}, nil
case appsv1.SchemeGroupVersion.WithResource("deployments"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1().Deployments().Informer()}, nil
case appsv1.SchemeGroupVersion.WithResource("replicasets"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1().ReplicaSets().Informer()}, nil
case appsv1.SchemeGroupVersion.WithResource("statefulsets"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1().StatefulSets().Informer()}, nil
// Group=apps, Version=v1beta1
case appsv1beta1.SchemeGroupVersion.WithResource("controllerrevisions"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1beta1().ControllerRevisions().Informer()}, nil
case appsv1beta1.SchemeGroupVersion.WithResource("deployments"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1beta1().Deployments().Informer()}, nil
case appsv1beta1.SchemeGroupVersion.WithResource("statefulsets"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1beta1().StatefulSets().Informer()}, nil
// Group=apps, Version=v1beta2
case v1beta2.SchemeGroupVersion.WithResource("controllerrevisions"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1beta2().ControllerRevisions().Informer()}, nil
case v1beta2.SchemeGroupVersion.WithResource("daemonsets"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1beta2().DaemonSets().Informer()}, nil
case v1beta2.SchemeGroupVersion.WithResource("deployments"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1beta2().Deployments().Informer()}, nil
case v1beta2.SchemeGroupVersion.WithResource("replicasets"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1beta2().ReplicaSets().Informer()}, nil
case v1beta2.SchemeGroupVersion.WithResource("statefulsets"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1beta2().StatefulSets().Informer()}, nil
// Group=autoscaling, Version=v1
case autoscalingv1.SchemeGroupVersion.WithResource("horizontalpodautoscalers"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Autoscaling().V1().HorizontalPodAutoscalers().Informer()}, nil
// Group=autoscaling, Version=v2beta1
case v2beta1.SchemeGroupVersion.WithResource("horizontalpodautoscalers"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Autoscaling().V2beta1().HorizontalPodAutoscalers().Informer()}, nil
// Group=autoscaling, Version=v2beta2
case v2beta2.SchemeGroupVersion.WithResource("horizontalpodautoscalers"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Autoscaling().V2beta2().HorizontalPodAutoscalers().Informer()}, nil
// Group=batch, Version=v1
case batchv1.SchemeGroupVersion.WithResource("jobs"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Batch().V1().Jobs().Informer()}, nil
// Group=batch, Version=v1beta1
case batchv1beta1.SchemeGroupVersion.WithResource("cronjobs"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Batch().V1beta1().CronJobs().Informer()}, nil
// Group=batch, Version=v2alpha1
case v2alpha1.SchemeGroupVersion.WithResource("cronjobs"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Batch().V2alpha1().CronJobs().Informer()}, nil
// Group=certificates.k8s.io, Version=v1
case certificatesv1.SchemeGroupVersion.WithResource("certificatesigningrequests"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Certificates().V1().CertificateSigningRequests().Informer()}, nil
// Group=certificates.k8s.io, Version=v1beta1
case certificatesv1beta1.SchemeGroupVersion.WithResource("certificatesigningrequests"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Certificates().V1beta1().CertificateSigningRequests().Informer()}, nil
// Group=coordination.k8s.io, Version=v1
case coordinationv1.SchemeGroupVersion.WithResource("leases"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Coordination().V1().Leases().Informer()}, nil
// Group=coordination.k8s.io, Version=v1beta1
case coordinationv1beta1.SchemeGroupVersion.WithResource("leases"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Coordination().V1beta1().Leases().Informer()}, nil
// Group=core, Version=v1
case corev1.SchemeGroupVersion.WithResource("componentstatuses"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().ComponentStatuses().Informer()}, nil
case corev1.SchemeGroupVersion.WithResource("configmaps"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().ConfigMaps().Informer()}, nil
case corev1.SchemeGroupVersion.WithResource("endpoints"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().Endpoints().Informer()}, nil
case corev1.SchemeGroupVersion.WithResource("events"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().Events().Informer()}, nil
case corev1.SchemeGroupVersion.WithResource("limitranges"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().LimitRanges().Informer()}, nil
case corev1.SchemeGroupVersion.WithResource("namespaces"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().Namespaces().Informer()}, nil
case corev1.SchemeGroupVersion.WithResource("nodes"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().Nodes().Informer()}, nil
case corev1.SchemeGroupVersion.WithResource("persistentvolumes"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().PersistentVolumes().Informer()}, nil
case corev1.SchemeGroupVersion.WithResource("persistentvolumeclaims"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().PersistentVolumeClaims().Informer()}, nil
case corev1.SchemeGroupVersion.WithResource("pods"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().Pods().Informer()}, nil
case corev1.SchemeGroupVersion.WithResource("podtemplates"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().PodTemplates().Informer()}, nil
case corev1.SchemeGroupVersion.WithResource("replicationcontrollers"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().ReplicationControllers().Informer()}, nil
case corev1.SchemeGroupVersion.WithResource("resourcequotas"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().ResourceQuotas().Informer()}, nil
case corev1.SchemeGroupVersion.WithResource("secrets"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().Secrets().Informer()}, nil
case corev1.SchemeGroupVersion.WithResource("services"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().Services().Informer()}, nil
case corev1.SchemeGroupVersion.WithResource("serviceaccounts"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().ServiceAccounts().Informer()}, nil
// Group=discovery.k8s.io, Version=v1alpha1
case v1alpha1.SchemeGroupVersion.WithResource("endpointslices"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Discovery().V1alpha1().EndpointSlices().Informer()}, nil
// Group=discovery.k8s.io, Version=v1beta1
case discoveryv1beta1.SchemeGroupVersion.WithResource("endpointslices"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Discovery().V1beta1().EndpointSlices().Informer()}, nil
// Group=events.k8s.io, Version=v1
case eventsv1.SchemeGroupVersion.WithResource("events"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Events().V1().Events().Informer()}, nil
// Group=events.k8s.io, Version=v1beta1
case eventsv1beta1.SchemeGroupVersion.WithResource("events"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Events().V1beta1().Events().Informer()}, nil
// Group=extensions, Version=v1beta1
case extensionsv1beta1.SchemeGroupVersion.WithResource("daemonsets"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Extensions().V1beta1().DaemonSets().Informer()}, nil
case extensionsv1beta1.SchemeGroupVersion.WithResource("deployments"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Extensions().V1beta1().Deployments().Informer()}, nil
case extensionsv1beta1.SchemeGroupVersion.WithResource("ingresses"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Extensions().V1beta1().Ingresses().Informer()}, nil
case extensionsv1beta1.SchemeGroupVersion.WithResource("networkpolicies"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Extensions().V1beta1().NetworkPolicies().Informer()}, nil
case extensionsv1beta1.SchemeGroupVersion.WithResource("podsecuritypolicies"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Extensions().V1beta1().PodSecurityPolicies().Informer()}, nil
case extensionsv1beta1.SchemeGroupVersion.WithResource("replicasets"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Extensions().V1beta1().ReplicaSets().Informer()}, nil
// Group=flowcontrol.apiserver.k8s.io, Version=v1alpha1
case flowcontrolv1alpha1.SchemeGroupVersion.WithResource("flowschemas"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Flowcontrol().V1alpha1().FlowSchemas().Informer()}, nil
case flowcontrolv1alpha1.SchemeGroupVersion.WithResource("prioritylevelconfigurations"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Flowcontrol().V1alpha1().PriorityLevelConfigurations().Informer()}, nil
// Group=internal.apiserver.k8s.io, Version=v1alpha1
case apiserverinternalv1alpha1.SchemeGroupVersion.WithResource("storageversions"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Internal().V1alpha1().StorageVersions().Informer()}, nil
// Group=networking.k8s.io, Version=v1
case networkingv1.SchemeGroupVersion.WithResource("ingresses"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Networking().V1().Ingresses().Informer()}, nil
case networkingv1.SchemeGroupVersion.WithResource("ingressclasses"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Networking().V1().IngressClasses().Informer()}, nil
case networkingv1.SchemeGroupVersion.WithResource("networkpolicies"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Networking().V1().NetworkPolicies().Informer()}, nil
// Group=networking.k8s.io, Version=v1beta1
case networkingv1beta1.SchemeGroupVersion.WithResource("ingresses"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Networking().V1beta1().Ingresses().Informer()}, nil
case networkingv1beta1.SchemeGroupVersion.WithResource("ingressclasses"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Networking().V1beta1().IngressClasses().Informer()}, nil
// Group=node.k8s.io, Version=v1alpha1
case nodev1alpha1.SchemeGroupVersion.WithResource("runtimeclasses"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Node().V1alpha1().RuntimeClasses().Informer()}, nil
// Group=node.k8s.io, Version=v1beta1
case nodev1beta1.SchemeGroupVersion.WithResource("runtimeclasses"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Node().V1beta1().RuntimeClasses().Informer()}, nil
// Group=policy, Version=v1beta1
case policyv1beta1.SchemeGroupVersion.WithResource("poddisruptionbudgets"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Policy().V1beta1().PodDisruptionBudgets().Informer()}, nil
case policyv1beta1.SchemeGroupVersion.WithResource("podsecuritypolicies"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Policy().V1beta1().PodSecurityPolicies().Informer()}, nil
// Group=rbac.authorization.k8s.io, Version=v1
case rbacv1.SchemeGroupVersion.WithResource("clusterroles"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Rbac().V1().ClusterRoles().Informer()}, nil
case rbacv1.SchemeGroupVersion.WithResource("clusterrolebindings"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Rbac().V1().ClusterRoleBindings().Informer()}, nil
case rbacv1.SchemeGroupVersion.WithResource("roles"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Rbac().V1().Roles().Informer()}, nil
case rbacv1.SchemeGroupVersion.WithResource("rolebindings"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Rbac().V1().RoleBindings().Informer()}, nil
// Group=rbac.authorization.k8s.io, Version=v1alpha1
case rbacv1alpha1.SchemeGroupVersion.WithResource("clusterroles"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Rbac().V1alpha1().ClusterRoles().Informer()}, nil
case rbacv1alpha1.SchemeGroupVersion.WithResource("clusterrolebindings"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Rbac().V1alpha1().ClusterRoleBindings().Informer()}, nil
case rbacv1alpha1.SchemeGroupVersion.WithResource("roles"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Rbac().V1alpha1().Roles().Informer()}, nil
case rbacv1alpha1.SchemeGroupVersion.WithResource("rolebindings"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Rbac().V1alpha1().RoleBindings().Informer()}, nil
// Group=rbac.authorization.k8s.io, Version=v1beta1
case rbacv1beta1.SchemeGroupVersion.WithResource("clusterroles"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Rbac().V1beta1().ClusterRoles().Informer()}, nil
case rbacv1beta1.SchemeGroupVersion.WithResource("clusterrolebindings"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Rbac().V1beta1().ClusterRoleBindings().Informer()}, nil
case rbacv1beta1.SchemeGroupVersion.WithResource("roles"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Rbac().V1beta1().Roles().Informer()}, nil
case rbacv1beta1.SchemeGroupVersion.WithResource("rolebindings"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Rbac().V1beta1().RoleBindings().Informer()}, nil
// Group=scheduling.k8s.io, Version=v1
case schedulingv1.SchemeGroupVersion.WithResource("priorityclasses"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Scheduling().V1().PriorityClasses().Informer()}, nil
// Group=scheduling.k8s.io, Version=v1alpha1
case schedulingv1alpha1.SchemeGroupVersion.WithResource("priorityclasses"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Scheduling().V1alpha1().PriorityClasses().Informer()}, nil
// Group=scheduling.k8s.io, Version=v1beta1
case schedulingv1beta1.SchemeGroupVersion.WithResource("priorityclasses"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Scheduling().V1beta1().PriorityClasses().Informer()}, nil
// Group=storage.k8s.io, Version=v1
case storagev1.SchemeGroupVersion.WithResource("csidrivers"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Storage().V1().CSIDrivers().Informer()}, nil
case storagev1.SchemeGroupVersion.WithResource("csinodes"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Storage().V1().CSINodes().Informer()}, nil
case storagev1.SchemeGroupVersion.WithResource("storageclasses"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Storage().V1().StorageClasses().Informer()}, nil
case storagev1.SchemeGroupVersion.WithResource("volumeattachments"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Storage().V1().VolumeAttachments().Informer()}, nil
// Group=storage.k8s.io, Version=v1alpha1
case storagev1alpha1.SchemeGroupVersion.WithResource("csistoragecapacities"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Storage().V1alpha1().CSIStorageCapacities().Informer()}, nil
case storagev1alpha1.SchemeGroupVersion.WithResource("volumeattachments"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Storage().V1alpha1().VolumeAttachments().Informer()}, nil
// Group=storage.k8s.io, Version=v1beta1
case storagev1beta1.SchemeGroupVersion.WithResource("csidrivers"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Storage().V1beta1().CSIDrivers().Informer()}, nil
case storagev1beta1.SchemeGroupVersion.WithResource("csinodes"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Storage().V1beta1().CSINodes().Informer()}, nil
case storagev1beta1.SchemeGroupVersion.WithResource("storageclasses"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Storage().V1beta1().StorageClasses().Informer()}, nil
case storagev1beta1.SchemeGroupVersion.WithResource("volumeattachments"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Storage().V1beta1().VolumeAttachments().Informer()}, nil
}
return nil, fmt.Errorf("no informer found for %v", resource)
}<|fim▁end|> | switch resource { |
<|file_name|>tty.d.ts<|end_file_name|><|fim▁begin|>declare module "tty" {
import * as net from "net";
function isatty(fd: number): boolean;
class ReadStream extends net.Socket {
isRaw: boolean;
setRawMode(mode: boolean): void;
isTTY: boolean;
}
/**
* -1 - to the left from cursor
* 0 - the entire line
* 1 - to the right from cursor
*/
type Direction = -1 | 0 | 1;
class WriteStream extends net.Socket {
addListener(event: string, listener: (...args: any[]) => void): this;
addListener(event: "resize", listener: () => void): this;
emit(event: string | symbol, ...args: any[]): boolean;
emit(event: "resize"): boolean;
on(event: string, listener: (...args: any[]) => void): this;
on(event: "resize", listener: () => void): this;
once(event: string, listener: (...args: any[]) => void): this;
once(event: "resize", listener: () => void): this;
prependListener(event: string, listener: (...args: any[]) => void): this;
prependListener(event: "resize", listener: () => void): this;
prependOnceListener(event: string, listener: (...args: any[]) => void): this;
prependOnceListener(event: "resize", listener: () => void): this;
clearLine(dir: Direction): void;
clearScreenDown(): void;
cursorTo(x: number, y: number): void;
/**
* @default `process.env`<|fim▁hole|> getColorDepth(env?: {}): number;
getWindowSize(): [number, number];
columns: number;
rows: number;
isTTY: boolean;
}
}<|fim▁end|> | */ |
<|file_name|>pipes.py<|end_file_name|><|fim▁begin|>from cgi import escape
import gzip as gzip_module
import re
import time
import types
import uuid
from cStringIO import StringIO
def resolve_content(response):
rv = "".join(item for item in response.iter_content())
if type(rv) == unicode:
rv = rv.encode(response.encoding)
return rv
class Pipeline(object):
pipes = {}
def __init__(self, pipe_string):
self.pipe_functions = self.parse(pipe_string)
def parse(self, pipe_string):
functions = []
for item in PipeTokenizer().tokenize(pipe_string):
if not item:
break
if item[0] == "function":
functions.append((self.pipes[item[1]], []))
elif item[0] == "argument":
functions[-1][1].append(item[1])
return functions
def __call__(self, request, response):
for func, args in self.pipe_functions:
response = func(request, response, *args)
return response
class PipeTokenizer(object):
def __init__(self):
#This whole class can likely be replaced by some regexps
self.state = None
def tokenize(self, string):
self.string = string
self.state = self.func_name_state
self._index = 0
while self.state:
yield self.state()
yield None
<|fim▁hole|> return None
rv = self.string[self._index]
self._index += 1
return rv
def func_name_state(self):
rv = ""
while True:
char = self.get_char()
if char is None:
self.state = None
if rv:
return ("function", rv)
else:
return None
elif char == "(":
self.state = self.argument_state
return ("function", rv)
elif char == "|":
if rv:
return ("function", rv)
else:
rv += char
def argument_state(self):
rv = ""
while True:
char = self.get_char()
if char is None:
self.state = None
return ("argument", rv)
elif char == "\\":
rv += self.get_escape()
if rv is None:
#This should perhaps be an error instead
return ("argument", rv)
elif char == ",":
return ("argument", rv)
elif char == ")":
self.state = self.func_name_state
return ("argument", rv)
else:
rv += char
def get_escape(self):
char = self.get_char()
escapes = {"n": "\n",
"r": "\r",
"t": "\t"}
return escapes.get(char, char)
class pipe(object):
def __init__(self, *arg_converters):
self.arg_converters = arg_converters
self.max_args = len(self.arg_converters)
self.min_args = 0
opt_seen = False
for item in self.arg_converters:
if not opt_seen:
if isinstance(item, opt):
opt_seen = True
else:
self.min_args += 1
else:
if not isinstance(item, opt):
raise ValueError("Non-optional argument cannot follow optional argument")
def __call__(self, f):
def inner(request, response, *args):
if not (self.min_args <= len(args) <= self.max_args):
raise ValueError("Expected between %d and %d args, got %d" %
(self.min_args, self.max_args, len(args)))
arg_values = tuple(f(x) for f, x in zip(self.arg_converters, args))
return f(request, response, *arg_values)
Pipeline.pipes[f.__name__] = inner
#We actually want the undecorated function in the main namespace
return f
class opt(object):
def __init__(self, f):
self.f = f
def __call__(self, arg):
return self.f(arg)
def nullable(func):
def inner(arg):
if arg.lower() == "null":
return None
else:
return func(arg)
return inner
def boolean(arg):
if arg.lower() in ("true", "1"):
return True
elif arg.lower() in ("false", "0"):
return False
raise ValueError
@pipe(int)
def status(request, response, code):
"""Alter the status code.
:param code: Status code to use for the response."""
response.status = code
return response
@pipe(str, str, opt(boolean))
def header(request, response, name, value, append=False):
"""Set a HTTP header.
Replaces any existing HTTP header of the same name unless
append is set, in which case the header is appended without
replacement.
:param name: Name of the header to set.
:param value: Value to use for the header.
:param append: True if existing headers should not be replaced
"""
if not append:
response.headers.set(name, value)
else:
response.headers.append(name, value)
return response
@pipe(str)
def trickle(request, response, delays):
"""Send the response in parts, with time delays.
:param delays: A string of delays and amounts, in bytes, of the
response to send. Each component is separated by
a colon. Amounts in bytes are plain integers, whilst
delays are floats prefixed with a single d e.g.
d1:100:d2
Would cause a 1 second delay, would then send 100 bytes
of the file, and then cause a 2 second delay, before sending
the remainder of the file.
If the last token is of the form rN, instead of sending the
remainder of the file, the previous N instructions will be
repeated until the whole file has been sent e.g.
d1:100:d2:r2
Causes a delay of 1s, then 100 bytes to be sent, then a 2s delay
and then a further 100 bytes followed by a two second delay
until the response has been fully sent.
"""
def parse_delays():
parts = delays.split(":")
rv = []
for item in parts:
if item.startswith("d"):
item_type = "delay"
item = item[1:]
value = float(item)
elif item.startswith("r"):
item_type = "repeat"
value = int(item[1:])
if not value % 2 == 0:
raise ValueError
else:
item_type = "bytes"
value = int(item)
if len(rv) and rv[-1][0] == item_type:
rv[-1][1] += value
else:
rv.append((item_type, value))
return rv
delays = parse_delays()
if not delays:
return response
content = resolve_content(response)
modified_content = []
offset = [0]
def sleep(seconds):
def inner():
time.sleep(seconds)
return ""
return inner
def add_content(delays, repeat=False):
for i, (item_type, value) in enumerate(delays):
if item_type == "bytes":
modified_content.append(content[offset[0]:offset[0] + value])
offset[0] += value
elif item_type == "delay":
modified_content.append(sleep(value))
elif item_type == "repeat":
assert i == len(delays) - 1
while offset[0] < len(content):
add_content(delays[-(value + 1):-1], True)
if not repeat and offset[0] < len(content):
modified_content.append(content[offset[0]:])
add_content(delays)
response.content = modified_content
return response
@pipe(nullable(int), opt(nullable(int)))
def slice(request, response, start, end=None):
"""Send a byte range of the response body
:param start: The starting offset. Follows python semantics including
negative numbers.
:param end: The ending offset, again with python semantics and None
(spelled "null" in a query string) to indicate the end of
the file.
"""
content = resolve_content(response)
response.content = content[start:end]
return response
class ReplacementTokenizer(object):
def ident(scanner, token):
return ("ident", token)
def index(scanner, token):
token = token[1:-1]
try:
token = int(token)
except ValueError:
token = unicode(token, "utf8")
return ("index", token)
def var(scanner, token):
token = token[:-1]
return ("var", token)
def tokenize(self, string):
return self.scanner.scan(string)[0]
scanner = re.Scanner([(r"\$\w+:", var),
(r"\$?\w+(?:\(\))?", ident),
(r"\[[^\]]*\]", index)])
class FirstWrapper(object):
def __init__(self, params):
self.params = params
def __getitem__(self, key):
try:
return self.params.first(key)
except KeyError:
return ""
@pipe()
def sub(request, response):
"""Substitute environment information about the server and request into the script.
The format is a very limited template language. Substitutions are
enclosed by {{ and }}. There are several avaliable substitutions:
host
A simple string value and represents the primary host from which the
tests are being run.
domains
A dictionary of available domains indexed by subdomain name.
ports
A dictionary of lists of ports indexed by protocol.
location
A dictionary of parts of the request URL. Valid keys are
'server, 'scheme', 'host', 'hostname', 'port', 'path' and 'query'.
'server' is scheme://host:port, 'host' is hostname:port, and query
includes the leading '?', but other delimiters are omitted.
headers
A dictionary of HTTP headers in the request.
GET
A dictionary of query parameters supplied with the request.
uuid()
A pesudo-random UUID suitable for usage with stash
So for example in a setup running on localhost with a www
subdomain and a http server on ports 80 and 81::
{{host}} => localhost
{{domains[www]}} => www.localhost
{{ports[http][1]}} => 81
It is also possible to assign a value to a variable name, which must start with
the $ character, using the ":" syntax e.g.
{{$id:uuid()}
Later substitutions in the same file may then refer to the variable
by name e.g.
{{$id}}
"""
content = resolve_content(response)
new_content = template(request, content)
response.content = new_content
return response
def template(request, content):
#TODO: There basically isn't any error handling here
tokenizer = ReplacementTokenizer()
variables = {}
def config_replacement(match):
content, = match.groups()
tokens = tokenizer.tokenize(content)
if tokens[0][0] == "var":
variable = tokens[0][1]
tokens = tokens[1:]
else:
variable = None
assert tokens[0][0] == "ident" and all(item[0] == "index" for item in tokens[1:]), tokens
field = tokens[0][1]
if field in variables:
value = variables[field]
elif field == "headers":
value = request.headers
elif field == "GET":
value = FirstWrapper(request.GET)
elif field in request.server.config:
value = request.server.config[tokens[0][1]]
elif field == "location":
value = {"server": "%s://%s:%s" % (request.url_parts.scheme,
request.url_parts.hostname,
request.url_parts.port),
"scheme": request.url_parts.scheme,
"host": "%s:%s" % (request.url_parts.hostname,
request.url_parts.port),
"hostname": request.url_parts.hostname,
"port": request.url_parts.port,
"path": request.url_parts.path,
"query": "?%s" % request.url_parts.query}
elif field == "uuid()":
value = str(uuid.uuid4())
elif field == "url_base":
value = request.url_base
else:
raise Exception("Undefined template variable %s" % field)
for item in tokens[1:]:
value = value[item[1]]
assert isinstance(value, (int,) + types.StringTypes), tokens
if variable is not None:
variables[variable] = value
#Should possibly support escaping for other contexts e.g. script
#TODO: read the encoding of the response
return escape(unicode(value), quote=True).encode("utf-8")
template_regexp = re.compile(r"{{([^}]*)}}")
new_content, count = template_regexp.subn(config_replacement, content)
return new_content
@pipe()
def gzip(request, response):
"""This pipe gzip-encodes response data.
It sets (or overwrites) these HTTP headers:
Content-Encoding is set to gzip
Content-Length is set to the length of the compressed content
"""
content = resolve_content(response)
response.headers.set("Content-Encoding", "gzip")
out = StringIO()
with gzip_module.GzipFile(fileobj=out, mode="w") as f:
f.write(content)
response.content = out.getvalue()
response.headers.set("Content-Length", len(response.content))
return response<|fim▁end|> | def get_char(self):
if self._index >= len(self.string): |
<|file_name|>iban.js<|end_file_name|><|fim▁begin|>QUnit.test( "testGetIbanCheckDigits", function( assert ) {
assert.equal(getIBANCheckDigits( 'GB00WEST12345698765432' ), '82', 'Get check digits of an IBAN' );
assert.equal(getIBANCheckDigits( '1234567890' ), '', 'If string isn\'t an IBAN, returns empty' );
assert.equal(getIBANCheckDigits( '' ), '', 'If string is empty, returns empty' );
} );
QUnit.test( "testGetGlobalIdentifier", function( assert ) {
assert.equal(getGlobalIdentifier( 'G28667152', 'ES', '' ), 'ES55000G28667152', 'Obtain a global Id' );
} );
QUnit.test( "testReplaceCharactersNotInPattern", function( assert ) {
assert.equal(replaceCharactersNotInPattern(
'ABC123-?:',
'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789',
'0' ), 'ABC123000', 'Remove unwanted characters' );
assert.equal(replaceCharactersNotInPattern(
'12345',
'0123456789',
'0' ), '12345', 'If the string didn\'t have unwanted characters, returns it' );
} );
QUnit.test( "testReplaceLetterWithDigits", function( assert ) {
assert.equal(replaceLetterWithDigits( '510007547061BE00' ), '510007547061111400', 'Replaces letters with digits' );
assert.equal(replaceLetterWithDigits( '1234567890' ), '1234567890', 'If we only receive digits, we return them' );
assert.equal(replaceLetterWithDigits( '' ), '', 'If we receive empty, we return empty' );
} );
QUnit.test( "testGetAccountLength", function( assert ) {
assert.equal(getAccountLength( 'GB' ), 22, 'Returns tohe string th a SEPA country' );
assert.equal(getAccountLength( 'US' ), 0, 'If string isn\'t a SEPA country code, returns empty' );
assert.equal(getAccountLength( '' ), 0, 'If string is empty, returns empty' );
} );
QUnit.test( "testIsSepaCountry", function( assert ) {
assert.equal(isSepaCountry( 'ES' ) , 1, 'Detects SEPA countries' );<|fim▁hole|>
QUnit.test( "testIsValidIban", function( assert ) {
assert.equal(isValidIBAN( 'GB82WEST12345698765432' ), 1, 'Accepts a good IBAN' );
assert.equal(isValidIBAN( 'GB00WEST12345698765432' ) , 0, 'Rejects a wrong IBAN' );
assert.equal(isValidIBAN( '' ), 0, 'Rejects empty strings' );
} );<|fim▁end|> | assert.equal(isSepaCountry( 'US' ), 0, 'Rejects non SEPA countries' );
assert.equal(isSepaCountry( '' ) , 0, 'If string is empty, returns empty' );
} ); |
<|file_name|>path.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2014 Datacratic. All rights reserved.
package path
import (
"bytes"
"fmt"
"strings"<|fim▁hole|>// slice indexes should be specified using non-negative numbers. Only map keyed
// with string are currently supported. Channels can be read by providing either
// a number of values to read or a wildcard character to read all values until
// the channel is closed. To call through a function, specify the '()'.
type P []string
// New returns a new P object from a given path string.
func New(path string) P {
return strings.Split(path, ".")
}
// Newf returns a new P object from the given format strings applied to
// args. Formatting is done using fmt.Sprintf.
func Newf(path string, args ...interface{}) P {
return New(fmt.Sprintf(path, args...))
}
// String returns a simple string representation of the path.
func (path P) String() string {
buffer := new(bytes.Buffer)
for i, item := range path {
buffer.WriteString(item)
if i < len(path)-1 {
buffer.WriteString(".")
}
}
return buffer.String()
}
// Last returns the last component of the path.
func (path P) Last() string { return path[len(path)-1] }<|fim▁end|> | )
// P represents a path through an object seperated by '.' characters. A path can
// also contain wildcard components indicated by a '*' character. Arrays and |
<|file_name|>CronetUrlRequestContext.java<|end_file_name|><|fim▁begin|>// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.net.impl;
import android.content.Context;
import android.os.Build;
import android.os.ConditionVariable;
import android.os.Handler;
import android.os.Looper;
import android.os.Process;
import android.util.Log;
import org.chromium.base.ObserverList;
import org.chromium.base.VisibleForTesting;
import org.chromium.base.annotations.CalledByNative;
import org.chromium.base.annotations.JNINamespace;
import org.chromium.base.annotations.NativeClassQualifiedName;
import org.chromium.base.annotations.UsedByReflection;
import org.chromium.net.BidirectionalStream;
import org.chromium.net.CronetEngine;
import org.chromium.net.NetworkQualityRttListener;
import org.chromium.net.NetworkQualityThroughputListener;
import org.chromium.net.RequestFinishedInfo;
import org.chromium.net.UrlRequest;
import org.chromium.net.urlconnection.CronetHttpURLConnection;
import org.chromium.net.urlconnection.CronetURLStreamHandlerFactory;
import java.net.Proxy;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLStreamHandlerFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.atomic.AtomicInteger;
import javax.annotation.concurrent.GuardedBy;
/**
* CronetEngine using Chromium HTTP stack implementation.
*/
@JNINamespace("cronet")
@UsedByReflection("CronetEngine.java")
@VisibleForTesting
public class CronetUrlRequestContext extends CronetEngine {
private static final int LOG_NONE = 3; // LOG(FATAL), no VLOG.
private static final int LOG_DEBUG = -1; // LOG(FATAL...INFO), VLOG(1)
private static final int LOG_VERBOSE = -2; // LOG(FATAL...INFO), VLOG(2)
static final String LOG_TAG = "ChromiumNetwork";
/**
* Synchronize access to mUrlRequestContextAdapter and shutdown routine.
*/
private final Object mLock = new Object();
private final ConditionVariable mInitCompleted = new ConditionVariable(false);
private final AtomicInteger mActiveRequestCount = new AtomicInteger(0);
private long mUrlRequestContextAdapter = 0;
private Thread mNetworkThread;
private boolean mNetworkQualityEstimatorEnabled;
/**
* Locks operations on network quality listeners, because listener
* addition and removal may occur on a different thread from notification.
*/
private final Object mNetworkQualityLock = new Object();
/**
* Locks operations on the list of RequestFinishedInfo.Listeners, because operations can happen
* on any thread.
*/
private final Object mFinishedListenerLock = new Object();
@GuardedBy("mNetworkQualityLock")
private final ObserverList<NetworkQualityRttListener> mRttListenerList =
new ObserverList<NetworkQualityRttListener>();
@GuardedBy("mNetworkQualityLock")
private final ObserverList<NetworkQualityThroughputListener> mThroughputListenerList =
new ObserverList<NetworkQualityThroughputListener>();
@GuardedBy("mFinishedListenerLock")
private final List<RequestFinishedInfo.Listener> mFinishedListenerList =
new ArrayList<RequestFinishedInfo.Listener>();
/**
* Synchronize access to mCertVerifierData.
*/
private ConditionVariable mWaitGetCertVerifierDataComplete = new ConditionVariable();
/** Holds CertVerifier data. */
private String mCertVerifierData;
@UsedByReflection("CronetEngine.java")
public CronetUrlRequestContext(final CronetEngine.Builder builder) {
CronetLibraryLoader.ensureInitialized(builder.getContext(), builder);
nativeSetMinLogLevel(getLoggingLevel());
synchronized (mLock) {
mUrlRequestContextAdapter = nativeCreateRequestContextAdapter(
createNativeUrlRequestContextConfig(builder.getContext(), builder));
if (mUrlRequestContextAdapter == 0) {
throw new NullPointerException("Context Adapter creation failed.");
}
mNetworkQualityEstimatorEnabled = builder.networkQualityEstimatorEnabled();
}
// Init native Chromium URLRequestContext on main UI thread.
Runnable task = new Runnable() {
@Override
public void run() {
CronetLibraryLoader.ensureInitializedOnMainThread(builder.getContext());
synchronized (mLock) {
// mUrlRequestContextAdapter is guaranteed to exist until
// initialization on main and network threads completes and
// initNetworkThread is called back on network thread.
nativeInitRequestContextOnMainThread(mUrlRequestContextAdapter);
}
}
};
// Run task immediately or post it to the UI thread.
if (Looper.getMainLooper() == Looper.myLooper()) {
task.run();
} else {
new Handler(Looper.getMainLooper()).post(task);
}
}
@VisibleForTesting
public static long createNativeUrlRequestContextConfig(
final Context context, CronetEngine.Builder builder) {
final long urlRequestContextConfig = nativeCreateRequestContextConfig(
builder.getUserAgent(), builder.storagePath(), builder.quicEnabled(),
builder.getDefaultQuicUserAgentId(context), builder.http2Enabled(),
builder.sdchEnabled(), builder.dataReductionProxyKey(),
builder.dataReductionProxyPrimaryProxy(), builder.dataReductionProxyFallbackProxy(),
builder.dataReductionProxySecureProxyCheckUrl(), builder.cacheDisabled(),
builder.httpCacheMode(), builder.httpCacheMaxSize(), builder.experimentalOptions(),
builder.mockCertVerifier(), builder.networkQualityEstimatorEnabled(),
builder.publicKeyPinningBypassForLocalTrustAnchorsEnabled(),
builder.certVerifierData());
for (Builder.QuicHint quicHint : builder.quicHints()) {
nativeAddQuicHint(urlRequestContextConfig, quicHint.mHost, quicHint.mPort,
quicHint.mAlternatePort);
}
for (Builder.Pkp pkp : builder.publicKeyPins()) {
nativeAddPkp(urlRequestContextConfig, pkp.mHost, pkp.mHashes, pkp.mIncludeSubdomains,
pkp.mExpirationDate.getTime());
}
return urlRequestContextConfig;
}
@Override
public UrlRequest createRequest(String url, UrlRequest.Callback callback, Executor executor,
int priority, Collection<Object> requestAnnotations, boolean disableCache,
boolean disableConnectionMigration) {
synchronized (mLock) {
checkHaveAdapter();
boolean metricsCollectionEnabled = false;
synchronized (mFinishedListenerLock) {
metricsCollectionEnabled = !mFinishedListenerList.isEmpty();
}
return new CronetUrlRequest(this, url, priority, callback, executor, requestAnnotations,
metricsCollectionEnabled, disableCache, disableConnectionMigration);
}
}
@Override
public BidirectionalStream createBidirectionalStream(String url,
BidirectionalStream.Callback callback, Executor executor, String httpMethod,
List<Map.Entry<String, String>> requestHeaders,
@BidirectionalStream.Builder.StreamPriority int priority, boolean disableAutoFlush,
boolean delayRequestHeadersUntilFirstFlush) {
synchronized (mLock) {
checkHaveAdapter();
return new CronetBidirectionalStream(this, url, priority, callback, executor,
httpMethod, requestHeaders, disableAutoFlush,
delayRequestHeadersUntilFirstFlush);
}
}
@Override
public boolean isEnabled() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH;
}
@Override
public String getVersionString() {
return "Cronet/" + ImplVersion.getVersion();
}
@Override
public void shutdown() {
synchronized (mLock) {
checkHaveAdapter();
if (mActiveRequestCount.get() != 0) {
throw new IllegalStateException("Cannot shutdown with active requests.");
}
// Destroying adapter stops the network thread, so it cannot be
// called on network thread.
if (Thread.currentThread() == mNetworkThread) {
throw new IllegalThreadStateException("Cannot shutdown from network thread.");
}
}
// Wait for init to complete on main and network thread (without lock,
// so other thread could access it).
mInitCompleted.block();
synchronized (mLock) {
// It is possible that adapter is already destroyed on another thread.
if (!haveRequestContextAdapter()) {
return;
}
nativeDestroy(mUrlRequestContextAdapter);
mUrlRequestContextAdapter = 0;
}
}
@Override
public void startNetLogToFile(String fileName, boolean logAll) {
synchronized (mLock) {
checkHaveAdapter();
nativeStartNetLogToFile(mUrlRequestContextAdapter, fileName, logAll);
}
}
@Override
public void stopNetLog() {
synchronized (mLock) {
checkHaveAdapter();
nativeStopNetLog(mUrlRequestContextAdapter);
}
}
@Override
public String getCertVerifierData(long timeout) {
if (timeout < 0) {
throw new IllegalArgumentException("timeout must be a positive value");
} else if (timeout == 0) {
timeout = 100;
}
mWaitGetCertVerifierDataComplete.close();
synchronized (mLock) {
checkHaveAdapter();
nativeGetCertVerifierData(mUrlRequestContextAdapter);
}
mWaitGetCertVerifierDataComplete.block(timeout);
return mCertVerifierData;
}
// This method is intentionally non-static to ensure Cronet native library
// is loaded by class constructor.
@Override
public byte[] getGlobalMetricsDeltas() {
return nativeGetHistogramDeltas();
}
@VisibleForTesting
@Override
public void configureNetworkQualityEstimatorForTesting(
boolean useLocalHostRequests, boolean useSmallerResponses) {
if (!mNetworkQualityEstimatorEnabled) {
throw new IllegalStateException("Network quality estimator must be enabled");
}
synchronized (mLock) {
checkHaveAdapter();
nativeConfigureNetworkQualityEstimatorForTesting(
mUrlRequestContextAdapter, useLocalHostRequests, useSmallerResponses);
}
}
@Override
public void addRttListener(NetworkQualityRttListener listener) {
if (!mNetworkQualityEstimatorEnabled) {
throw new IllegalStateException("Network quality estimator must be enabled");
}
synchronized (mNetworkQualityLock) {
if (mRttListenerList.isEmpty()) {
synchronized (mLock) {
checkHaveAdapter();
nativeProvideRTTObservations(mUrlRequestContextAdapter, true);
}
}
mRttListenerList.addObserver(listener);
}
}
@Override
public void removeRttListener(NetworkQualityRttListener listener) {
if (!mNetworkQualityEstimatorEnabled) {
throw new IllegalStateException("Network quality estimator must be enabled");
}
synchronized (mNetworkQualityLock) {
mRttListenerList.removeObserver(listener);
if (mRttListenerList.isEmpty()) {
synchronized (mLock) {
checkHaveAdapter();
nativeProvideRTTObservations(mUrlRequestContextAdapter, false);
}
}
}
}
@Override
public void addThroughputListener(NetworkQualityThroughputListener listener) {
if (!mNetworkQualityEstimatorEnabled) {
throw new IllegalStateException("Network quality estimator must be enabled");
}
synchronized (mNetworkQualityLock) {
if (mThroughputListenerList.isEmpty()) {
synchronized (mLock) {
checkHaveAdapter();
nativeProvideThroughputObservations(mUrlRequestContextAdapter, true);
}
}
mThroughputListenerList.addObserver(listener);
}
}
@Override
public void removeThroughputListener(NetworkQualityThroughputListener listener) {
if (!mNetworkQualityEstimatorEnabled) {
throw new IllegalStateException("Network quality estimator must be enabled");
}
synchronized (mNetworkQualityLock) {
mThroughputListenerList.removeObserver(listener);
if (mThroughputListenerList.isEmpty()) {
synchronized (mLock) {
checkHaveAdapter();
nativeProvideThroughputObservations(mUrlRequestContextAdapter, false);
}
}
}
}
@Override
public void addRequestFinishedListener(RequestFinishedInfo.Listener listener) {
synchronized (mFinishedListenerLock) {
mFinishedListenerList.add(listener);
}
}
@Override
public void removeRequestFinishedListener(RequestFinishedInfo.Listener listener) {
synchronized (mFinishedListenerLock) {
mFinishedListenerList.remove(listener);
}
}
@Override
public URLConnection openConnection(URL url) {
return openConnection(url, Proxy.NO_PROXY);
}
@Override
public URLConnection openConnection(URL url, Proxy proxy) {
if (proxy.type() != Proxy.Type.DIRECT) {
throw new UnsupportedOperationException();
}
String protocol = url.getProtocol();
if ("http".equals(protocol) || "https".equals(protocol)) {
return new CronetHttpURLConnection(url, this);
}
throw new UnsupportedOperationException("Unexpected protocol:" + protocol);
}
@Override
public URLStreamHandlerFactory createURLStreamHandlerFactory() {
return new CronetURLStreamHandlerFactory(this);
}
/**
* Mark request as started to prevent shutdown when there are active
* requests.
*/
void onRequestStarted() {
mActiveRequestCount.incrementAndGet();
}
/**
* Mark request as finished to allow shutdown when there are no active
* requests.
*/
void onRequestDestroyed() {
mActiveRequestCount.decrementAndGet();
}
@VisibleForTesting
public long getUrlRequestContextAdapter() {
synchronized (mLock) {
checkHaveAdapter();
return mUrlRequestContextAdapter;
}
}
private void checkHaveAdapter() throws IllegalStateException {
if (!haveRequestContextAdapter()) {
throw new IllegalStateException("Engine is shut down.");
}
}
private boolean haveRequestContextAdapter() {
return mUrlRequestContextAdapter != 0;
}
/**
* @return loggingLevel see {@link #LOG_NONE}, {@link #LOG_DEBUG} and
* {@link #LOG_VERBOSE}.
*/
private int getLoggingLevel() {
int loggingLevel;
if (Log.isLoggable(LOG_TAG, Log.VERBOSE)) {
loggingLevel = LOG_VERBOSE;
} else if (Log.isLoggable(LOG_TAG, Log.DEBUG)) {
loggingLevel = LOG_DEBUG;
} else {
loggingLevel = LOG_NONE;
}
return loggingLevel;
}
@SuppressWarnings("unused")
@CalledByNative
private void initNetworkThread() {
synchronized (mLock) {
mNetworkThread = Thread.currentThread();
mInitCompleted.open();
}
Thread.currentThread().setName("ChromiumNet");
Process.setThreadPriority(Process.THREAD_PRIORITY_BACKGROUND);
}
@SuppressWarnings("unused")
@CalledByNative
private void onRttObservation(final int rttMs, final long whenMs, final int source) {
synchronized (mNetworkQualityLock) {
for (final NetworkQualityRttListener listener : mRttListenerList) {
Runnable task = new Runnable() {
@Override
public void run() {
listener.onRttObservation(rttMs, whenMs, source);
}
};
postObservationTaskToExecutor(listener.getExecutor(), task);
}
}
}
@SuppressWarnings("unused")
@CalledByNative
private void onThroughputObservation(
final int throughputKbps, final long whenMs, final int source) {
synchronized (mNetworkQualityLock) {
for (final NetworkQualityThroughputListener listener : mThroughputListenerList) {
Runnable task = new Runnable() {
@Override
public void run() {
listener.onThroughputObservation(throughputKbps, whenMs, source);
}
};
postObservationTaskToExecutor(listener.getExecutor(), task);
}
}
}
@SuppressWarnings("unused")
@CalledByNative
private void onGetCertVerifierData(String certVerifierData) {
mCertVerifierData = certVerifierData;
mWaitGetCertVerifierDataComplete.open();
}
void reportFinished(final CronetUrlRequest request) {
final RequestFinishedInfo requestInfo = request.getRequestFinishedInfo();
ArrayList<RequestFinishedInfo.Listener> currentListeners;
synchronized (mFinishedListenerLock) {
currentListeners = new ArrayList<RequestFinishedInfo.Listener>(mFinishedListenerList);
}
for (final RequestFinishedInfo.Listener listener : currentListeners) {
Runnable task = new Runnable() {
@Override
public void run() {
listener.onRequestFinished(requestInfo);
}
};
postObservationTaskToExecutor(listener.getExecutor(), task);
}
}
private static void postObservationTaskToExecutor(Executor executor, Runnable task) {
try {
executor.execute(task);
} catch (RejectedExecutionException failException) {
Log.e(CronetUrlRequestContext.LOG_TAG, "Exception posting task to executor",
failException);
}
}
// Native methods are implemented in cronet_url_request_context_adapter.cc.
private static native long nativeCreateRequestContextConfig(String userAgent,
String storagePath, boolean quicEnabled, String quicUserAgentId, boolean http2Enabled,
boolean sdchEnabled, String dataReductionProxyKey,
String dataReductionProxyPrimaryProxy, String dataReductionProxyFallbackProxy,
String dataReductionProxySecureProxyCheckUrl, boolean disableCache, int httpCacheMode,
long httpCacheMaxSize, String experimentalOptions, long mockCertVerifier,
boolean enableNetworkQualityEstimator,
boolean bypassPublicKeyPinningForLocalTrustAnchors, String certVerifierData);
private static native void nativeAddQuicHint(
long urlRequestContextConfig, String host, int port, int alternatePort);
private static native void nativeAddPkp(long urlRequestContextConfig, String host,
byte[][] hashes, boolean includeSubdomains, long expirationTime);
private static native long nativeCreateRequestContextAdapter(long urlRequestContextConfig);
private static native int nativeSetMinLogLevel(int loggingLevel);
private static native byte[] nativeGetHistogramDeltas();
@NativeClassQualifiedName("CronetURLRequestContextAdapter")
private native void nativeDestroy(long nativePtr);
@NativeClassQualifiedName("CronetURLRequestContextAdapter")<|fim▁hole|>
@NativeClassQualifiedName("CronetURLRequestContextAdapter")
private native void nativeGetCertVerifierData(long nativePtr);
@NativeClassQualifiedName("CronetURLRequestContextAdapter")
private native void nativeInitRequestContextOnMainThread(long nativePtr);
@NativeClassQualifiedName("CronetURLRequestContextAdapter")
private native void nativeConfigureNetworkQualityEstimatorForTesting(
long nativePtr, boolean useLocalHostRequests, boolean useSmallerResponses);
@NativeClassQualifiedName("CronetURLRequestContextAdapter")
private native void nativeProvideRTTObservations(long nativePtr, boolean should);
@NativeClassQualifiedName("CronetURLRequestContextAdapter")
private native void nativeProvideThroughputObservations(long nativePtr, boolean should);
}<|fim▁end|> | private native void nativeStartNetLogToFile(long nativePtr, String fileName, boolean logAll);
@NativeClassQualifiedName("CronetURLRequestContextAdapter")
private native void nativeStopNetLog(long nativePtr); |
<|file_name|>iothub.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>
export import Client = require('./lib/client');
export import ConnectionString = require('./lib/connection_string');
export import Registry = require('./lib/registry');
export import SharedAccessSignature = require('./lib/shared_access_signature');
export import Amqp = require('./lib/amqp');
export import AmqpWs = require('./lib/amqp_ws');
export import DeviceMethodParams = require('./lib/device_method_params');
export import JobClient = require('./lib/job_client');
export import Device = require('./lib/device');<|fim▁end|> | // Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information. |
<|file_name|>authentication.rs<|end_file_name|><|fim▁begin|>// Authentication
use hyper::client::{Client};
use hyper::server::{Server, Request, Response, Listening};
use hyper::uri::RequestUri::AbsolutePath;
use regex::Regex;
use rustc_serialize::json::Json;
use std::fs;
use std::io::prelude::*;
use std::process::Command;
use std::sync::Mutex;
use std::sync::mpsc::channel;
pub type AuthToken = String;
type TempCode = String;
#[allow(dead_code)]
const AUTH_TOKEN_FILE: &'static str = "auth_token";
#[allow(dead_code)]
const SLACK_CLIENT_ID: &'static str = "2334733471.3592055147";
#[allow(dead_code)]
const SLACK_CLIENT_SECRET: &'static str = "37721a57d17018b206fb1264caa7d707";
pub fn get_oauth_token_or_panic() -> (AuthToken, Option<Listening>) {
match maybe_existing_token(AUTH_TOKEN_FILE) {
Some(token) => (token, None),
None => {
let (token, listener) = arrange_new_token();
store_token(&token);
(token, Some(listener))
}
}
}
fn maybe_existing_token(token_file: &str) -> Option<AuthToken> {
match fs::File::open(token_file) {
Ok(mut file) => {
let mut s = String::new();
file.read_to_string(&mut s).unwrap();
if !s.is_empty() {
Some(s)
} else {
None }
},
Err(_) => None
}
}
fn arrange_new_token() -> (AuthToken, Listening) {
let (temp_code, listener) = request_temp_code();
let token = request_token(&temp_code);
(token, listener)
}
// TODO Test
fn store_token(token: &AuthToken) {
let mut f = fs::File::create(AUTH_TOKEN_FILE).unwrap();
f.write_all(token.as_bytes()).unwrap();
}
#[allow(dead_code)] // Hard to test
fn request_temp_code() -> (TempCode, Listening) {
Command::new("xdg-open").arg(format!("https://slack.com/oauth/authorize?scope=client&client_id={}", SLACK_CLIENT_ID)).output().unwrap();
let (tx, rx) = channel();
let mtx = Mutex::new(tx);
let mut guard = Server::http("127.0.0.1:9999").unwrap().handle(move |req: Request, res: Response| {
match req.uri {
AbsolutePath(ref path) => {
match extract_temp_code(&path) {
Some(tempcode) => {
mtx.lock().unwrap().send(tempcode).unwrap();
},
None => ()
}
},
_ => ()
}
<|fim▁hole|> res.write_all(b"Thanks! Please return to Lax").unwrap();
res.end().unwrap();
}).unwrap();
let tempcode = rx.recv().unwrap();
guard.close().unwrap();
(tempcode, guard)
}
fn request_token(temp_code: &TempCode) -> AuthToken {
let mut client = Client::new();
// I thought & is sufficient to make it a slice
let mut res = client.get(format_access_uri(temp_code).as_str()).send().unwrap();
let mut body = String::new();
res.read_to_string(&mut body).unwrap();
match Json::from_str(&body) {
Ok(json) => {
match json.find("access_token") {
Some(j) => j.as_string().unwrap().to_string(),
_ => panic!("Unexpected json in slack response\n{}", json.pretty())
}
},
_ => panic!("Reponse not json")
}
}
// TODO Needs test
fn format_access_uri(temp_code: &TempCode) -> String {
let base = "https://slack.com/api/oauth.access";
let query = format!("?client_id={}&client_secret={}&code={}", SLACK_CLIENT_ID, SLACK_CLIENT_SECRET, temp_code);
format!("{}{}", base, query)
}
fn extract_temp_code(path: &str) -> Option<TempCode> {
let re = Regex::new(r"code=(.+?)(&|$)").unwrap();
re.captures(path).map(|cap| cap.at(1).unwrap().to_string())
}
#[cfg(test)]
mod tests {
use super::extract_temp_code;
use super::maybe_existing_token;
use std::fs;
use std::io::Write;
const AUTH_TOKEN_FILE: &'static str = "auth_token_test";
#[test]
fn test_extract_temp_code() {
let path = "www.timonv.nl?code=blablabla";
assert_eq!(extract_temp_code(path).unwrap(), "blablabla".to_string())
}
#[test]
fn test_maybe_existing_token() {
// None if file doesn't exist
fs::remove_file(AUTH_TOKEN_FILE).unwrap_or(());
assert_eq!(maybe_existing_token(AUTH_TOKEN_FILE), None);
// Some if file exists
let mut f = fs::File::create(AUTH_TOKEN_FILE).unwrap();
f.write_all(b"123").unwrap();
assert_eq!(maybe_existing_token(AUTH_TOKEN_FILE), Some("123".to_string()));
// None if file exists but empty
let mut f = fs::File::create(AUTH_TOKEN_FILE).unwrap();
f.write_all(b"").unwrap();
assert_eq!(maybe_existing_token(AUTH_TOKEN_FILE), None);
// Cleanup
fs::remove_file(AUTH_TOKEN_FILE).unwrap_or(());
}
}<|fim▁end|> | let mut res = res.start().unwrap(); |
<|file_name|>coroutine.py<|end_file_name|><|fim▁begin|>import asyncio
from unittest.mock import MagicMock
def SimpleCoroutineMock(f=lambda *args, **kwargs: None):
builder = CoroutineMockBuilder()
return builder.addDelegate(f).build().mock()
class CoroutineMock(object):
# Handy for debugging failing tests in the debugger.
__blocking_dict = {}
def __init__(self, returnSequence, block:asyncio.Event):
self.__startingEvent = asyncio.Event()
self.__endingEvent = asyncio.Event()
self.__returnSequence = tuple(returnSequence)
if (len(self.__returnSequence) < 1):
self.__returnSequence = (lambda *args, **kwargs: None, )
self.__returnSequenceLen = len(self.__returnSequence)
self.__block = block
self.__mock = self.__createMock()
# It's easier to find a dictionary that is an instance variable than
# one that is a class static, so just make an instance variable that
# references the shared dictionary.
self.__blocking_dict = CoroutineMock.__blocking_dict
def __createMock(self):
returnIndex = 0
async def cr(*args, **kwargs):
nonlocal returnIndex
try:
self.__endingEvent.clear()
self.__startingEvent.set()
if (self.__block is not None):
self.__blocking_dict[id(self)] = self
try:
await self.__block.wait()
finally:
del self.__blocking_dict[id(self)]
self.__block.clear()
returnFunc = self.__returnSequence[returnIndex % self.__returnSequenceLen]
returnIndex += 1
return returnFunc(*args, **kwargs)
finally:
self.__startingEvent.clear()
self.__endingEvent.set()
return MagicMock(wraps=cr)
def start(self):
return self.__startingEvent
def end(self):
return self.__endingEvent
def unblock(self):
self.__block.set()
def mock(self):
return self.__mock
async def waitForSingleCall(self):
await self.start().wait()
self.unblock()
await self.end().wait()
class CoroutineMockBuilder(object):
def __init__(self):
self.__block = None
self.__returnSequence = []
def blocks(self):
return self.blocksOn(asyncio.Event())
<|fim▁hole|> return self
def exception(self, e, repeats=1):
def r(*args, **kwargs):
raise e
self.__returnSequence.extend([r] * repeats)
return self
def returns(self, v, repeats=1):
def r(*args, **kwargs):
return v
self.__returnSequence.extend([r] * repeats)
return self
def addDelegate(self, f, repeats=1):
self.__returnSequence.extend([f] * repeats)
return self
def build(self):
return CoroutineMock(self.__returnSequence, self.__block)<|fim▁end|> | def blocksOn(self, event:asyncio.Event):
self.__block = event |
<|file_name|>raw.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Raw concurrency primitives you know and love.
//!
//! These primitives are not recommended for general use, but are provided for
//! flavorful use-cases. It is recommended to use the types at the top of the
//! `sync` crate which wrap values directly and provide safer abstractions for
//! containing data.
use core::prelude::*;
use core::atomic;
use core::finally::Finally;
use core::kinds::marker;
use core::mem;
use core::cell::UnsafeCell;
use collections::{Vec, MutableSeq};
use mutex;
use comm::{Receiver, Sender, channel};
/****************************************************************************
* Internals
****************************************************************************/
// Each waiting task receives on one of these.
type WaitEnd = Receiver<()>;
type SignalEnd = Sender<()>;
// A doubly-ended queue of waiting tasks.
struct WaitQueue {
head: Receiver<SignalEnd>,
tail: Sender<SignalEnd>,
}
impl WaitQueue {
fn new() -> WaitQueue {
let (block_tail, block_head) = channel();
WaitQueue { head: block_head, tail: block_tail }
}
// Signals one live task from the queue.
fn signal(&self) -> bool {
match self.head.try_recv() {
Ok(ch) => {
// Send a wakeup signal. If the waiter was killed, its port will
// have closed. Keep trying until we get a live task.
if ch.send_opt(()).is_ok() {
true
} else {
self.signal()
}
}
_ => false
}
}
fn broadcast(&self) -> uint {
let mut count = 0;
loop {
match self.head.try_recv() {
Ok(ch) => {
if ch.send_opt(()).is_ok() {
count += 1;
}
}
_ => break
}
}
count
}
fn wait_end(&self) -> WaitEnd {
let (signal_end, wait_end) = channel();
self.tail.send(signal_end);
wait_end
}
}
// The building-block used to make semaphores, mutexes, and rwlocks.
struct Sem<Q> {
lock: mutex::Mutex,
// n.b, we need Sem to be `Sync`, but the WaitQueue type is not send/share
// (for good reason). We have an internal invariant on this semaphore,
// however, that the queue is never accessed outside of a locked
// context.
inner: UnsafeCell<SemInner<Q>>
}
struct SemInner<Q> {
count: int,
waiters: WaitQueue,
// Can be either unit or another waitqueue. Some sems shouldn't come with
// a condition variable attached, others should.
blocked: Q,
}
#[must_use]
struct SemGuard<'a, Q> {
sem: &'a Sem<Q>,
}
impl<Q: Send> Sem<Q> {
fn new(count: int, q: Q) -> Sem<Q> {
assert!(count >= 0,
"semaphores cannot be initialized with negative values");
Sem {
lock: mutex::Mutex::new(),
inner: UnsafeCell::new(SemInner {
waiters: WaitQueue::new(),
count: count,
blocked: q,
})
}
}
unsafe fn with(&self, f: |&mut SemInner<Q>|) {
let _g = self.lock.lock();
// This &mut is safe because, due to the lock, we are the only one who can touch the data<|fim▁hole|> pub fn acquire(&self) {
unsafe {
let mut waiter_nobe = None;
self.with(|state| {
state.count -= 1;
if state.count < 0 {
// Create waiter nobe, enqueue ourself, and tell
// outer scope we need to block.
waiter_nobe = Some(state.waiters.wait_end());
}
});
// Uncomment if you wish to test for sem races. Not
// valgrind-friendly.
/* for _ in range(0u, 1000) { task::deschedule(); } */
// Need to wait outside the exclusive.
if waiter_nobe.is_some() {
let _ = waiter_nobe.unwrap().recv();
}
}
}
pub fn release(&self) {
unsafe {
self.with(|state| {
state.count += 1;
if state.count <= 0 {
state.waiters.signal();
}
})
}
}
pub fn access<'a>(&'a self) -> SemGuard<'a, Q> {
self.acquire();
SemGuard { sem: self }
}
}
#[unsafe_destructor]
impl<'a, Q: Send> Drop for SemGuard<'a, Q> {
fn drop(&mut self) {
self.sem.release();
}
}
impl Sem<Vec<WaitQueue>> {
fn new_and_signal(count: int, num_condvars: uint) -> Sem<Vec<WaitQueue>> {
let mut queues = Vec::new();
for _ in range(0, num_condvars) { queues.push(WaitQueue::new()); }
Sem::new(count, queues)
}
// The only other places that condvars get built are rwlock.write_cond()
// and rwlock_write_mode.
pub fn access_cond<'a>(&'a self) -> SemCondGuard<'a> {
SemCondGuard {
guard: self.access(),
cvar: Condvar { sem: self, order: Nothing, nocopy: marker::NoCopy },
}
}
}
// FIXME(#3598): Want to use an Option down below, but we need a custom enum
// that's not polymorphic to get around the fact that lifetimes are invariant
// inside of type parameters.
enum ReacquireOrderLock<'a> {
Nothing, // c.c
Just(&'a Semaphore),
}
/// A mechanism for atomic-unlock-and-deschedule blocking and signalling.
pub struct Condvar<'a> {
// The 'Sem' object associated with this condvar. This is the one that's
// atomically-unlocked-and-descheduled upon and reacquired during wakeup.
sem: &'a Sem<Vec<WaitQueue> >,
// This is (can be) an extra semaphore which is held around the reacquire
// operation on the first one. This is only used in cvars associated with
// rwlocks, and is needed to ensure that, when a downgrader is trying to
// hand off the access lock (which would be the first field, here), a 2nd
// writer waking up from a cvar wait can't race with a reader to steal it,
// See the comment in write_cond for more detail.
order: ReacquireOrderLock<'a>,
// Make sure condvars are non-copyable.
nocopy: marker::NoCopy,
}
impl<'a> Condvar<'a> {
/// Atomically drop the associated lock, and block until a signal is sent.
///
/// # Failure
///
/// A task which is killed while waiting on a condition variable will wake
/// up, fail, and unlock the associated lock as it unwinds.
pub fn wait(&self) { self.wait_on(0) }
/// As wait(), but can specify which of multiple condition variables to
/// wait on. Only a signal_on() or broadcast_on() with the same condvar_id
/// will wake this thread.
///
/// The associated lock must have been initialised with an appropriate
/// number of condvars. The condvar_id must be between 0 and num_condvars-1
/// or else this call will fail.
///
/// wait() is equivalent to wait_on(0).
pub fn wait_on(&self, condvar_id: uint) {
let mut wait_end = None;
let mut out_of_bounds = None;
// Release lock, 'atomically' enqueuing ourselves in so doing.
unsafe {
self.sem.with(|state| {
if condvar_id < state.blocked.len() {
// Drop the lock.
state.count += 1;
if state.count <= 0 {
state.waiters.signal();
}
// Create waiter nobe, and enqueue ourself to
// be woken up by a signaller.
wait_end = Some(state.blocked[condvar_id].wait_end());
} else {
out_of_bounds = Some(state.blocked.len());
}
})
}
// If deschedule checks start getting inserted anywhere, we can be
// killed before or after enqueueing.
check_cvar_bounds(out_of_bounds, condvar_id, "cond.wait_on()", || {
// Unconditionally "block". (Might not actually block if a
// signaller already sent -- I mean 'unconditionally' in contrast
// with acquire().)
(|| {
let _ = wait_end.take_unwrap().recv();
}).finally(|| {
// Reacquire the condvar.
match self.order {
Just(lock) => {
let _g = lock.access();
self.sem.acquire();
}
Nothing => self.sem.acquire(),
}
})
})
}
/// Wake up a blocked task. Returns false if there was no blocked task.
pub fn signal(&self) -> bool { self.signal_on(0) }
/// As signal, but with a specified condvar_id. See wait_on.
pub fn signal_on(&self, condvar_id: uint) -> bool {
unsafe {
let mut out_of_bounds = None;
let mut result = false;
self.sem.with(|state| {
if condvar_id < state.blocked.len() {
result = state.blocked[condvar_id].signal();
} else {
out_of_bounds = Some(state.blocked.len());
}
});
check_cvar_bounds(out_of_bounds,
condvar_id,
"cond.signal_on()",
|| result)
}
}
/// Wake up all blocked tasks. Returns the number of tasks woken.
pub fn broadcast(&self) -> uint { self.broadcast_on(0) }
/// As broadcast, but with a specified condvar_id. See wait_on.
pub fn broadcast_on(&self, condvar_id: uint) -> uint {
let mut out_of_bounds = None;
let mut queue = None;
unsafe {
self.sem.with(|state| {
if condvar_id < state.blocked.len() {
// To avoid :broadcast_heavy, we make a new waitqueue,
// swap it out with the old one, and broadcast on the
// old one outside of the little-lock.
queue = Some(mem::replace(state.blocked.get_mut(condvar_id),
WaitQueue::new()));
} else {
out_of_bounds = Some(state.blocked.len());
}
});
check_cvar_bounds(out_of_bounds,
condvar_id,
"cond.signal_on()",
|| {
queue.take_unwrap().broadcast()
})
}
}
}
// Checks whether a condvar ID was out of bounds, and fails if so, or does
// something else next on success.
#[inline]
fn check_cvar_bounds<U>(
out_of_bounds: Option<uint>,
id: uint,
act: &str,
blk: || -> U)
-> U {
match out_of_bounds {
Some(0) =>
fail!("{} with illegal ID {} - this lock has no condvars!", act, id),
Some(length) =>
fail!("{} with illegal ID {} - ID must be less than {}", act, id, length),
None => blk()
}
}
#[must_use]
struct SemCondGuard<'a> {
guard: SemGuard<'a, Vec<WaitQueue>>,
cvar: Condvar<'a>,
}
/****************************************************************************
* Semaphores
****************************************************************************/
/// A counting, blocking, bounded-waiting semaphore.
pub struct Semaphore {
sem: Sem<()>,
}
/// An RAII guard used to represent an acquired resource to a semaphore. When
/// dropped, this value will release the resource back to the semaphore.
#[must_use]
pub struct SemaphoreGuard<'a> {
_guard: SemGuard<'a, ()>,
}
impl Semaphore {
/// Create a new semaphore with the specified count.
///
/// # Failure
///
/// This function will fail if `count` is negative.
pub fn new(count: int) -> Semaphore {
Semaphore { sem: Sem::new(count, ()) }
}
/// Acquire a resource represented by the semaphore. Blocks if necessary
/// until resource(s) become available.
pub fn acquire(&self) { self.sem.acquire() }
/// Release a held resource represented by the semaphore. Wakes a blocked
/// contending task, if any exist. Won't block the caller.
pub fn release(&self) { self.sem.release() }
/// Acquire a resource of this semaphore, returning an RAII guard which will
/// release the resource when dropped.
pub fn access<'a>(&'a self) -> SemaphoreGuard<'a> {
SemaphoreGuard { _guard: self.sem.access() }
}
}
/****************************************************************************
* Mutexes
****************************************************************************/
/// A blocking, bounded-waiting, mutual exclusion lock with an associated
/// FIFO condition variable.
///
/// # Failure
/// A task which fails while holding a mutex will unlock the mutex as it
/// unwinds.
pub struct Mutex {
sem: Sem<Vec<WaitQueue>>,
}
/// An RAII structure which is used to gain access to a mutex's condition
/// variable. Additionally, when a value of this type is dropped, the
/// corresponding mutex is also unlocked.
#[must_use]
pub struct MutexGuard<'a> {
_guard: SemGuard<'a, Vec<WaitQueue>>,
/// Inner condition variable which is connected to the outer mutex, and can
/// be used for atomic-unlock-and-deschedule.
pub cond: Condvar<'a>,
}
impl Mutex {
/// Create a new mutex, with one associated condvar.
pub fn new() -> Mutex { Mutex::new_with_condvars(1) }
/// Create a new mutex, with a specified number of associated condvars. This
/// will allow calling wait_on/signal_on/broadcast_on with condvar IDs
/// between 0 and num_condvars-1. (If num_condvars is 0, lock_cond will be
/// allowed but any operations on the condvar will fail.)
pub fn new_with_condvars(num_condvars: uint) -> Mutex {
Mutex { sem: Sem::new_and_signal(1, num_condvars) }
}
/// Acquires ownership of this mutex, returning an RAII guard which will
/// unlock the mutex when dropped. The associated condition variable can
/// also be accessed through the returned guard.
pub fn lock<'a>(&'a self) -> MutexGuard<'a> {
let SemCondGuard { guard, cvar } = self.sem.access_cond();
MutexGuard { _guard: guard, cond: cvar }
}
}
/****************************************************************************
* Reader-writer locks
****************************************************************************/
// NB: Wikipedia - Readers-writers_problem#The_third_readers-writers_problem
/// A blocking, no-starvation, reader-writer lock with an associated condvar.
///
/// # Failure
///
/// A task which fails while holding an rwlock will unlock the rwlock as it
/// unwinds.
pub struct RWLock {
order_lock: Semaphore,
access_lock: Sem<Vec<WaitQueue>>,
// The only way the count flag is ever accessed is with xadd. Since it is
// a read-modify-write operation, multiple xadds on different cores will
// always be consistent with respect to each other, so a monotonic/relaxed
// consistency ordering suffices (i.e., no extra barriers are needed).
//
// FIXME(#6598): The atomics module has no relaxed ordering flag, so I use
// acquire/release orderings superfluously. Change these someday.
read_count: atomic::AtomicUint,
}
/// An RAII helper which is created by acquiring a read lock on an RWLock. When
/// dropped, this will unlock the RWLock.
#[must_use]
pub struct RWLockReadGuard<'a> {
lock: &'a RWLock,
}
/// An RAII helper which is created by acquiring a write lock on an RWLock. When
/// dropped, this will unlock the RWLock.
///
/// A value of this type can also be consumed to downgrade to a read-only lock.
#[must_use]
pub struct RWLockWriteGuard<'a> {
lock: &'a RWLock,
/// Inner condition variable that is connected to the write-mode of the
/// outer rwlock.
pub cond: Condvar<'a>,
}
impl RWLock {
/// Create a new rwlock, with one associated condvar.
pub fn new() -> RWLock { RWLock::new_with_condvars(1) }
/// Create a new rwlock, with a specified number of associated condvars.
/// Similar to mutex_with_condvars.
pub fn new_with_condvars(num_condvars: uint) -> RWLock {
RWLock {
order_lock: Semaphore::new(1),
access_lock: Sem::new_and_signal(1, num_condvars),
read_count: atomic::AtomicUint::new(0),
}
}
/// Acquires a read-lock, returning an RAII guard that will unlock the lock
/// when dropped. Calls to 'read' from other tasks may run concurrently with
/// this one.
pub fn read<'a>(&'a self) -> RWLockReadGuard<'a> {
let _guard = self.order_lock.access();
let old_count = self.read_count.fetch_add(1, atomic::Acquire);
if old_count == 0 {
self.access_lock.acquire();
}
RWLockReadGuard { lock: self }
}
/// Acquire a write-lock, returning an RAII guard that will unlock the lock
/// when dropped. No calls to 'read' or 'write' from other tasks will run
/// concurrently with this one.
///
/// You can also downgrade a write to a read by calling the `downgrade`
/// method on the returned guard. Additionally, the guard will contain a
/// `Condvar` attached to this lock.
///
/// # Example
///
/// ```rust
/// use sync::raw::RWLock;
///
/// let lock = RWLock::new();
/// let write = lock.write();
/// // ... exclusive access ...
/// let read = write.downgrade();
/// // ... shared access ...
/// drop(read);
/// ```
pub fn write<'a>(&'a self) -> RWLockWriteGuard<'a> {
let _g = self.order_lock.access();
self.access_lock.acquire();
// It's important to thread our order lock into the condvar, so that
// when a cond.wait() wakes up, it uses it while reacquiring the
// access lock. If we permitted a waking-up writer to "cut in line",
// there could arise a subtle race when a downgrader attempts to hand
// off the reader cloud lock to a waiting reader. This race is tested
// in arc.rs (test_rw_write_cond_downgrade_read_race) and looks like:
// T1 (writer) T2 (downgrader) T3 (reader)
// [in cond.wait()]
// [locks for writing]
// [holds access_lock]
// [is signalled, perhaps by
// downgrader or a 4th thread]
// tries to lock access(!)
// lock order_lock
// xadd read_count[0->1]
// tries to lock access
// [downgrade]
// xadd read_count[1->2]
// unlock access
// Since T1 contended on the access lock before T3 did, it will steal
// the lock handoff. Adding order_lock in the condvar reacquire path
// solves this because T1 will hold order_lock while waiting on access,
// which will cause T3 to have to wait until T1 finishes its write,
// which can't happen until T2 finishes the downgrade-read entirely.
// The astute reader will also note that making waking writers use the
// order_lock is better for not starving readers.
RWLockWriteGuard {
lock: self,
cond: Condvar {
sem: &self.access_lock,
order: Just(&self.order_lock),
nocopy: marker::NoCopy,
}
}
}
}
impl<'a> RWLockWriteGuard<'a> {
/// Consumes this write lock and converts it into a read lock.
pub fn downgrade(self) -> RWLockReadGuard<'a> {
let lock = self.lock;
// Don't run the destructor of the write guard, we're in charge of
// things from now on
unsafe { mem::forget(self) }
let old_count = lock.read_count.fetch_add(1, atomic::Release);
// If another reader was already blocking, we need to hand-off
// the "reader cloud" access lock to them.
if old_count != 0 {
// Guaranteed not to let another writer in, because
// another reader was holding the order_lock. Hence they
// must be the one to get the access_lock (because all
// access_locks are acquired with order_lock held). See
// the comment in write_cond for more justification.
lock.access_lock.release();
}
RWLockReadGuard { lock: lock }
}
}
#[unsafe_destructor]
impl<'a> Drop for RWLockWriteGuard<'a> {
fn drop(&mut self) {
self.lock.access_lock.release();
}
}
#[unsafe_destructor]
impl<'a> Drop for RWLockReadGuard<'a> {
fn drop(&mut self) {
let old_count = self.lock.read_count.fetch_sub(1, atomic::Release);
assert!(old_count > 0);
if old_count == 1 {
// Note: this release used to be outside of a locked access
// to exclusive-protected state. If this code is ever
// converted back to such (instead of using atomic ops),
// this access MUST NOT go inside the exclusive access.
self.lock.access_lock.release();
}
}
}
/****************************************************************************
* Tests
****************************************************************************/
#[cfg(test)]
mod tests {
use std::prelude::*;
use Arc;
use super::{Semaphore, Mutex, RWLock, Condvar};
use std::mem;
use std::result;
use std::task;
/************************************************************************
* Semaphore tests
************************************************************************/
#[test]
fn test_sem_acquire_release() {
let s = Semaphore::new(1);
s.acquire();
s.release();
s.acquire();
}
#[test]
fn test_sem_basic() {
let s = Semaphore::new(1);
let _g = s.access();
}
#[test]
#[should_fail]
fn test_sem_basic2() {
Semaphore::new(-1);
}
#[test]
fn test_sem_as_mutex() {
let s = Arc::new(Semaphore::new(1));
let s2 = s.clone();
task::spawn(proc() {
let _g = s2.access();
for _ in range(0u, 5) { task::deschedule(); }
});
let _g = s.access();
for _ in range(0u, 5) { task::deschedule(); }
}
#[test]
fn test_sem_as_cvar() {
/* Child waits and parent signals */
let (tx, rx) = channel();
let s = Arc::new(Semaphore::new(0));
let s2 = s.clone();
task::spawn(proc() {
s2.acquire();
tx.send(());
});
for _ in range(0u, 5) { task::deschedule(); }
s.release();
let _ = rx.recv();
/* Parent waits and child signals */
let (tx, rx) = channel();
let s = Arc::new(Semaphore::new(0));
let s2 = s.clone();
task::spawn(proc() {
for _ in range(0u, 5) { task::deschedule(); }
s2.release();
let _ = rx.recv();
});
s.acquire();
tx.send(());
}
#[test]
fn test_sem_multi_resource() {
// Parent and child both get in the critical section at the same
// time, and shake hands.
let s = Arc::new(Semaphore::new(2));
let s2 = s.clone();
let (tx1, rx1) = channel();
let (tx2, rx2) = channel();
task::spawn(proc() {
let _g = s2.access();
let _ = rx2.recv();
tx1.send(());
});
let _g = s.access();
tx2.send(());
let _ = rx1.recv();
}
#[test]
fn test_sem_runtime_friendly_blocking() {
// Force the runtime to schedule two threads on the same sched_loop.
// When one blocks, it should schedule the other one.
let s = Arc::new(Semaphore::new(1));
let s2 = s.clone();
let (tx, rx) = channel();
{
let _g = s.access();
task::spawn(proc() {
tx.send(());
drop(s2.access());
tx.send(());
});
rx.recv(); // wait for child to come alive
for _ in range(0u, 5) { task::deschedule(); } // let the child contend
}
rx.recv(); // wait for child to be done
}
/************************************************************************
* Mutex tests
************************************************************************/
#[test]
fn test_mutex_lock() {
// Unsafely achieve shared state, and do the textbook
// "load tmp = move ptr; inc tmp; store ptr <- tmp" dance.
let (tx, rx) = channel();
let m = Arc::new(Mutex::new());
let m2 = m.clone();
let mut sharedstate = box 0;
{
let ptr: *mut int = &mut *sharedstate;
task::spawn(proc() {
access_shared(ptr, &m2, 10);
tx.send(());
});
}
{
access_shared(&mut *sharedstate, &m, 10);
let _ = rx.recv();
assert_eq!(*sharedstate, 20);
}
fn access_shared(sharedstate: *mut int, m: &Arc<Mutex>, n: uint) {
for _ in range(0u, n) {
let _g = m.lock();
let oldval = unsafe { *sharedstate };
task::deschedule();
unsafe { *sharedstate = oldval + 1; }
}
}
}
#[test]
fn test_mutex_cond_wait() {
let m = Arc::new(Mutex::new());
// Child wakes up parent
{
let lock = m.lock();
let m2 = m.clone();
task::spawn(proc() {
let lock = m2.lock();
let woken = lock.cond.signal();
assert!(woken);
});
lock.cond.wait();
}
// Parent wakes up child
let (tx, rx) = channel();
let m3 = m.clone();
task::spawn(proc() {
let lock = m3.lock();
tx.send(());
lock.cond.wait();
tx.send(());
});
rx.recv(); // Wait until child gets in the mutex
{
let lock = m.lock();
let woken = lock.cond.signal();
assert!(woken);
}
rx.recv(); // Wait until child wakes up
}
fn test_mutex_cond_broadcast_helper(num_waiters: uint) {
let m = Arc::new(Mutex::new());
let mut rxs = Vec::new();
for _ in range(0u, num_waiters) {
let mi = m.clone();
let (tx, rx) = channel();
rxs.push(rx);
task::spawn(proc() {
let lock = mi.lock();
tx.send(());
lock.cond.wait();
tx.send(());
});
}
// wait until all children get in the mutex
for rx in rxs.mut_iter() { rx.recv(); }
{
let lock = m.lock();
let num_woken = lock.cond.broadcast();
assert_eq!(num_woken, num_waiters);
}
// wait until all children wake up
for rx in rxs.mut_iter() { rx.recv(); }
}
#[test]
fn test_mutex_cond_broadcast() {
test_mutex_cond_broadcast_helper(12);
}
#[test]
fn test_mutex_cond_broadcast_none() {
test_mutex_cond_broadcast_helper(0);
}
#[test]
fn test_mutex_cond_no_waiter() {
let m = Arc::new(Mutex::new());
let m2 = m.clone();
let _ = task::try(proc() {
drop(m.lock());
});
let lock = m2.lock();
assert!(!lock.cond.signal());
}
#[test]
fn test_mutex_killed_simple() {
use std::any::Any;
// Mutex must get automatically unlocked if failed/killed within.
let m = Arc::new(Mutex::new());
let m2 = m.clone();
let result: result::Result<(), Box<Any + Send>> = task::try(proc() {
let _lock = m2.lock();
fail!();
});
assert!(result.is_err());
// child task must have finished by the time try returns
drop(m.lock());
}
#[test]
fn test_mutex_cond_signal_on_0() {
// Tests that signal_on(0) is equivalent to signal().
let m = Arc::new(Mutex::new());
let lock = m.lock();
let m2 = m.clone();
task::spawn(proc() {
let lock = m2.lock();
lock.cond.signal_on(0);
});
lock.cond.wait();
}
#[test]
fn test_mutex_no_condvars() {
let result = task::try(proc() {
let m = Mutex::new_with_condvars(0);
m.lock().cond.wait();
});
assert!(result.is_err());
let result = task::try(proc() {
let m = Mutex::new_with_condvars(0);
m.lock().cond.signal();
});
assert!(result.is_err());
let result = task::try(proc() {
let m = Mutex::new_with_condvars(0);
m.lock().cond.broadcast();
});
assert!(result.is_err());
}
/************************************************************************
* Reader/writer lock tests
************************************************************************/
#[cfg(test)]
pub enum RWLockMode { Read, Write, Downgrade, DowngradeRead }
#[cfg(test)]
fn lock_rwlock_in_mode(x: &Arc<RWLock>, mode: RWLockMode, blk: ||) {
match mode {
Read => { let _g = x.read(); blk() }
Write => { let _g = x.write(); blk() }
Downgrade => { let _g = x.write(); blk() }
DowngradeRead => { let _g = x.write().downgrade(); blk() }
}
}
#[cfg(test)]
fn test_rwlock_exclusion(x: Arc<RWLock>,
mode1: RWLockMode,
mode2: RWLockMode) {
// Test mutual exclusion between readers and writers. Just like the
// mutex mutual exclusion test, a ways above.
let (tx, rx) = channel();
let x2 = x.clone();
let mut sharedstate = box 0;
{
let ptr: *const int = &*sharedstate;
task::spawn(proc() {
let sharedstate: &mut int =
unsafe { mem::transmute(ptr) };
access_shared(sharedstate, &x2, mode1, 10);
tx.send(());
});
}
{
access_shared(&mut *sharedstate, &x, mode2, 10);
let _ = rx.recv();
assert_eq!(*sharedstate, 20);
}
fn access_shared(sharedstate: &mut int, x: &Arc<RWLock>,
mode: RWLockMode, n: uint) {
for _ in range(0u, n) {
lock_rwlock_in_mode(x, mode, || {
let oldval = *sharedstate;
task::deschedule();
*sharedstate = oldval + 1;
})
}
}
}
#[test]
fn test_rwlock_readers_wont_modify_the_data() {
test_rwlock_exclusion(Arc::new(RWLock::new()), Read, Write);
test_rwlock_exclusion(Arc::new(RWLock::new()), Write, Read);
test_rwlock_exclusion(Arc::new(RWLock::new()), Read, Downgrade);
test_rwlock_exclusion(Arc::new(RWLock::new()), Downgrade, Read);
test_rwlock_exclusion(Arc::new(RWLock::new()), Write, DowngradeRead);
test_rwlock_exclusion(Arc::new(RWLock::new()), DowngradeRead, Write);
}
#[test]
fn test_rwlock_writers_and_writers() {
test_rwlock_exclusion(Arc::new(RWLock::new()), Write, Write);
test_rwlock_exclusion(Arc::new(RWLock::new()), Write, Downgrade);
test_rwlock_exclusion(Arc::new(RWLock::new()), Downgrade, Write);
test_rwlock_exclusion(Arc::new(RWLock::new()), Downgrade, Downgrade);
}
#[cfg(test)]
fn test_rwlock_handshake(x: Arc<RWLock>,
mode1: RWLockMode,
mode2: RWLockMode,
make_mode2_go_first: bool) {
// Much like sem_multi_resource.
let x2 = x.clone();
let (tx1, rx1) = channel();
let (tx2, rx2) = channel();
task::spawn(proc() {
if !make_mode2_go_first {
rx2.recv(); // parent sends to us once it locks, or ...
}
lock_rwlock_in_mode(&x2, mode2, || {
if make_mode2_go_first {
tx1.send(()); // ... we send to it once we lock
}
rx2.recv();
tx1.send(());
})
});
if make_mode2_go_first {
rx1.recv(); // child sends to us once it locks, or ...
}
lock_rwlock_in_mode(&x, mode1, || {
if !make_mode2_go_first {
tx2.send(()); // ... we send to it once we lock
}
tx2.send(());
rx1.recv();
})
}
#[test]
fn test_rwlock_readers_and_readers() {
test_rwlock_handshake(Arc::new(RWLock::new()), Read, Read, false);
// The downgrader needs to get in before the reader gets in, otherwise
// they cannot end up reading at the same time.
test_rwlock_handshake(Arc::new(RWLock::new()), DowngradeRead, Read, false);
test_rwlock_handshake(Arc::new(RWLock::new()), Read, DowngradeRead, true);
// Two downgrade_reads can never both end up reading at the same time.
}
#[test]
fn test_rwlock_downgrade_unlock() {
// Tests that downgrade can unlock the lock in both modes
let x = Arc::new(RWLock::new());
lock_rwlock_in_mode(&x, Downgrade, || { });
test_rwlock_handshake(x, Read, Read, false);
let y = Arc::new(RWLock::new());
lock_rwlock_in_mode(&y, DowngradeRead, || { });
test_rwlock_exclusion(y, Write, Write);
}
#[test]
fn test_rwlock_read_recursive() {
let x = RWLock::new();
let _g1 = x.read();
let _g2 = x.read();
}
#[test]
fn test_rwlock_cond_wait() {
// As test_mutex_cond_wait above.
let x = Arc::new(RWLock::new());
// Child wakes up parent
{
let lock = x.write();
let x2 = x.clone();
task::spawn(proc() {
let lock = x2.write();
assert!(lock.cond.signal());
});
lock.cond.wait();
}
// Parent wakes up child
let (tx, rx) = channel();
let x3 = x.clone();
task::spawn(proc() {
let lock = x3.write();
tx.send(());
lock.cond.wait();
tx.send(());
});
rx.recv(); // Wait until child gets in the rwlock
drop(x.read()); // Must be able to get in as a reader
{
let x = x.write();
assert!(x.cond.signal());
}
rx.recv(); // Wait until child wakes up
drop(x.read()); // Just for good measure
}
#[cfg(test)]
fn test_rwlock_cond_broadcast_helper(num_waiters: uint) {
// Much like the mutex broadcast test. Downgrade-enabled.
fn lock_cond(x: &Arc<RWLock>, blk: |c: &Condvar|) {
let lock = x.write();
blk(&lock.cond);
}
let x = Arc::new(RWLock::new());
let mut rxs = Vec::new();
for _ in range(0u, num_waiters) {
let xi = x.clone();
let (tx, rx) = channel();
rxs.push(rx);
task::spawn(proc() {
lock_cond(&xi, |cond| {
tx.send(());
cond.wait();
tx.send(());
})
});
}
// wait until all children get in the mutex
for rx in rxs.mut_iter() { let _ = rx.recv(); }
lock_cond(&x, |cond| {
let num_woken = cond.broadcast();
assert_eq!(num_woken, num_waiters);
});
// wait until all children wake up
for rx in rxs.mut_iter() { let _ = rx.recv(); }
}
#[test]
fn test_rwlock_cond_broadcast() {
test_rwlock_cond_broadcast_helper(0);
test_rwlock_cond_broadcast_helper(12);
}
#[cfg(test)]
fn rwlock_kill_helper(mode1: RWLockMode, mode2: RWLockMode) {
use std::any::Any;
// Mutex must get automatically unlocked if failed/killed within.
let x = Arc::new(RWLock::new());
let x2 = x.clone();
let result: result::Result<(), Box<Any + Send>> = task::try(proc() {
lock_rwlock_in_mode(&x2, mode1, || {
fail!();
})
});
assert!(result.is_err());
// child task must have finished by the time try returns
lock_rwlock_in_mode(&x, mode2, || { })
}
#[test]
fn test_rwlock_reader_killed_writer() {
rwlock_kill_helper(Read, Write);
}
#[test]
fn test_rwlock_writer_killed_reader() {
rwlock_kill_helper(Write, Read);
}
#[test]
fn test_rwlock_reader_killed_reader() {
rwlock_kill_helper(Read, Read);
}
#[test]
fn test_rwlock_writer_killed_writer() {
rwlock_kill_helper(Write, Write);
}
#[test]
fn test_rwlock_kill_downgrader() {
rwlock_kill_helper(Downgrade, Read);
rwlock_kill_helper(Read, Downgrade);
rwlock_kill_helper(Downgrade, Write);
rwlock_kill_helper(Write, Downgrade);
rwlock_kill_helper(DowngradeRead, Read);
rwlock_kill_helper(Read, DowngradeRead);
rwlock_kill_helper(DowngradeRead, Write);
rwlock_kill_helper(Write, DowngradeRead);
rwlock_kill_helper(DowngradeRead, Downgrade);
rwlock_kill_helper(DowngradeRead, Downgrade);
rwlock_kill_helper(Downgrade, DowngradeRead);
rwlock_kill_helper(Downgrade, DowngradeRead);
}
}<|fim▁end|> | f(&mut *self.inner.get())
}
|
<|file_name|>leads_dry_run.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from openerp import fields, models, api
class crm_leads_dry_run(models.TransientModel):
_name = "crm.leads.dry.run"
lead_id = fields.Many2one('crm.lead', string='Lead', required=True)
team_id = fields.Many2one('crm.team', string='SaleTeam', required=True, oldname='section_id')
user_id = fields.Many2one('res.users', string='Saleman')
@api.model
def assign_leads(self, ids=[]):
# Allow to assign the result from a previous dry run.
# Once the user agrees with the result shown by a dry run
# It differs from launching the assignement process again,
# because salemen would be selected at random again
all_dry_run = self.search([('user_id', '!=', False)])
for dry_run in all_dry_run:
lead_record = dry_run.lead_id
values = {
'team_id': dry_run.team_id.id,
'user_id': dry_run.user_id.id,
'assign_date': fields.Datetime.now()
}
lead_record.write(values)
lead_record.convert_opportunity(partner_id=None)
# Avoid to re-assign the same leads for nothing
self._cr.execute("""
TRUNCATE TABLE crm_leads_dry_run;<|fim▁hole|> """)<|fim▁end|> | |
<|file_name|>asm.py<|end_file_name|><|fim▁begin|># coding=utf8
"""
asm.py - (dis)assembly features.
(c) 2014 Samuel Groß
"""
from willie import web
from willie.module import commands, nickname_commands, example
from random import choice
from binascii import hexlify, unhexlify
import string
import re
import os
from subprocess import Popen, PIPE
@commands('disas', 'disas64', 'disassemble', 'disassemble64')
@example('.disas 66556689e590c9c3')
def disassemble(bot, trigger):
"""Disassemble x86 machine code."""
if not trigger.group(2):
return bot.reply('Nothing to disassemble')
try:
arg = trigger.group(2)
# remove all 0x
while "0x" in arg:
arg = arg.replace("0x","")
# remove everything except hex<|fim▁hole|> except Exception:
return bot.say('Invalid hex sequence')
bits = 64 if '64' in trigger.group(1) else 32
filename = '/tmp/' + ''.join( choice(string.ascii_lowercase) for i in range(10)) + '.bin'
with open(filename, 'wb') as f:
f.write(code)
result = Popen(['ndisasm', '-b', str(bits), '-o', '0x1000', filename], stdout=PIPE).stdout.read()
os.remove(filename)
for line in result.split('\n'):
bot.say(line)
@commands('as', 'as64', 'assemble', 'assemble64')
@example('.as push ebp; mov ebp, esp; jmp 0x14')
def assemble(bot, trigger):
"""Assemble x86 instructions."""
code = trigger.group(2)
if not code:
return bot.reply('Nothing to assemble')
bits = 64 if '64' in trigger.group(1) else 32
filename = '/tmp/' + ''.join(choice(string.ascii_lowercase) for i in range(10)) + '.asm'
with open(filename, 'w') as f:
f.write('BITS %i\n' % bits + re.sub(r';\s*', ';\n', code))
p = Popen(['nasm', '-f', 'bin', '-o', filename[:-4], filename], stderr=PIPE)
p.wait()
os.remove(filename)
for line in p.stderr.read().split('\n'):
bot.say(line)
if p.returncode == 0:
with open(filename[:-4], 'rb') as f:
raw = f.read()
hex = hexlify(raw)
if hex:
bot.say(hex)
os.remove(filename[:-4])
def x86jmp(bot, instr):
"""Display information about a x86 conditional jump."""
if instr not in jxx:
return bot.say('I can\'t find anything about that instruction, sorry')
bot.say('%s : %s' % (instr, jxx[instr]))
def x86instr(bot, instr):
"""Display information about any x86 instruction thats no a conditional jump."""
raw = web.get('http://www.felixcloutier.com/x86/')
match = re.search('<tr><td><a href="./(?P<page>[A-Z:]*).html">%s</a></td><td>(?P<desc>[^<]*)</td></tr>' % instr, raw)
if not match:
return bot.say('I can\'t find anything about that instruction, sorry')
bot.say('%s : %s -- %s' % (instr, match.group('desc'), 'http://www.felixcloutier.com/x86/%s' % match.group('page')))
@commands('x86', 'instr', 'instruction')
def instruction(bot, trigger):
"""Display information about an x86 instruction."""
instr = trigger.group(2)
if not instr:
return bot.reply('Give me an instruction')
instr = instr.strip().upper()
if 'J' == instr[0] and not instr == 'JMP':
return x86jmp(bot, instr)
x86instr(bot, instr)
jxx = {
'JA' : 'Jump if above (CF=0 and ZF=0)',
'JAE' : 'Jump if above or equal (CF=0)',
'JB' : 'Jump if below (CF=1)',
'JBE' : 'Jump if below or equal (CF=1 or ZF=1)',
'JC' : 'Jump if carry (CF=1)',
'JCXZ' : 'Jump if CX register is 0',
'JECXZ': 'Jump if ECX register is 0',
'JRCXZ': 'Jump if RCX register is 0',
'JE' : 'Jump if equal (ZF=1)',
'JG' : 'Jump if greater (ZF=0 and SF=OF)',
'JGE' : 'Jump if greater or equal (SF=OF)',
'JL' : 'Jump if less (SF!=OF)',
'JLE' : 'Jump if less or equal (ZF=1 or SF!=OF)',
'JNA' : 'Jump if not above (CF=1 or ZF=1)',
'JNAE' : 'Jump if not above or equal (CF=1)',
'JNB' : 'Jump if not below (CF=0)',
'JNBE' : 'Jump if not below or equal (CF=0 and ZF=0)',
'JNC' : 'Jump if not carry (CF=0)',
'JNE' : 'Jump if not equal (ZF=0)',
'JNG' : 'Jump if not greater (ZF=1 or SF!=OF)',
'JNGE' : 'Jump if not greater or equal (SF!=OF)',
'JNL' : 'Jump if not less (SF=OF)',
'JNLE' : 'Jump if not less or equal (ZF=0 and SF=OF)',
'JNO' : 'Jump if not overflow (OF=0)',
'JNP' : 'Jump if not parity (PF=0)',
'JNS' : 'Jump if not sign (SF=0)',
'JNZ' : 'Jump if not zero (ZF=0)',
'JO' : 'Jump if overflow (OF=1)',
'JP' : 'Jump if parity (PF=1)',
'JPE' : 'Jump if parity even (PF=1)',
'JPO' : 'Jump if parity odd (PF=0)',
'JS' : 'Jump if sign (SF=1)'
}<|fim▁end|> | arg = re.sub(r"[^a-fA-F0-9]", r"", arg)
code = unhexlify(arg) |
<|file_name|>rpc_test.go<|end_file_name|><|fim▁begin|>// Copyright 2013 <chaishushan{AT}gmail.com>. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package codec
import (
"errors"
"net"
"net/rpc"
"testing"
// can not import xxx.pb with rpc stub here,
// because it will cause import cycle.
msg "github.com/cockroachdb/cockroach/rpc/codec/message.pb"
"github.com/cockroachdb/cockroach/util/log"
)
type Arith int
func (t *Arith) Add(args *msg.ArithRequest, reply *msg.ArithResponse) error {
reply.C = args.GetA() + args.GetB()
log.Infof("Arith.Add(%v, %v): %v", args.GetA(), args.GetB(), reply.GetC())
return nil
}
func (t *Arith) Mul(args *msg.ArithRequest, reply *msg.ArithResponse) error {
reply.C = args.GetA() * args.GetB()<|fim▁hole|>func (t *Arith) Div(args *msg.ArithRequest, reply *msg.ArithResponse) error {
if args.GetB() == 0 {
return errors.New("divide by zero")
}
reply.C = args.GetA() / args.GetB()
return nil
}
func (t *Arith) Error(args *msg.ArithRequest, reply *msg.ArithResponse) error {
return errors.New("ArithError")
}
type Echo int
func (t *Echo) Echo(args *msg.EchoRequest, reply *msg.EchoResponse) error {
reply.Msg = args.Msg
return nil
}
func TestAll(t *testing.T) {
srvAddr, err := listenAndServeArithAndEchoService("tcp", "127.0.0.1:0")
if err != nil {
t.Fatal("could not start server")
}
conn, err := net.Dial(srvAddr.Network(), srvAddr.String())
if err != nil {
t.Fatalf("could not dial client to %s: %s", srvAddr, err)
}
client := rpc.NewClientWithCodec(NewClientCodec(conn))
defer client.Close()
testArithClient(t, client)
testEchoClient(t, client)
testArithClientAsync(t, client)
testEchoClientAsync(t, client)
}
func listenAndServeArithAndEchoService(network, addr string) (net.Addr, error) {
clients, err := net.Listen(network, addr)
if err != nil {
return nil, err
}
srv := rpc.NewServer()
if err := srv.RegisterName("ArithService", new(Arith)); err != nil {
return nil, err
}
if err := srv.RegisterName("EchoService", new(Echo)); err != nil {
return nil, err
}
go func() {
for {
conn, err := clients.Accept()
if err != nil {
log.Infof("clients.Accept(): %v\n", err)
continue
}
go srv.ServeCodec(NewServerCodec(conn))
}
}()
return clients.Addr(), nil
}
func testArithClient(t *testing.T, client *rpc.Client) {
var args msg.ArithRequest
var reply msg.ArithResponse
var err error
// Add
args.A = 1
args.B = 2
if err = client.Call("ArithService.Add", &args, &reply); err != nil {
t.Fatalf(`arith.Add: %v`, err)
}
if reply.GetC() != 3 {
t.Fatalf(`arith.Add: expected = %d, got = %d`, 3, reply.GetC())
}
// Mul
args.A = 2
args.B = 3
if err = client.Call("ArithService.Mul", &args, &reply); err != nil {
t.Fatalf(`arith.Mul: %v`, err)
}
if reply.GetC() != 6 {
t.Fatalf(`arith.Mul: expected = %d, got = %d`, 6, reply.GetC())
}
// Div
args.A = 13
args.B = 5
if err = client.Call("ArithService.Div", &args, &reply); err != nil {
t.Fatalf(`arith.Div: %v`, err)
}
if reply.GetC() != 2 {
t.Fatalf(`arith.Div: expected = %d, got = %d`, 2, reply.GetC())
}
// Div zero
args.A = 1
args.B = 0
if err = client.Call("ArithService.Div", &args, &reply); err.Error() != "divide by zero" {
t.Fatalf(`arith.Error: expected = "%s", got = "%s"`, "divide by zero", err.Error())
}
// Error
args.A = 1
args.B = 2
if err = client.Call("ArithService.Error", &args, &reply); err.Error() != "ArithError" {
t.Fatalf(`arith.Error: expected = "%s", got = "%s"`, "ArithError", err.Error())
}
}
func testArithClientAsync(t *testing.T, client *rpc.Client) {
done := make(chan *rpc.Call, 16)
callInfoList := []struct {
method string
args *msg.ArithRequest
reply *msg.ArithResponse
err error
}{
{
"ArithService.Add",
&msg.ArithRequest{A: 1, B: 2},
&msg.ArithResponse{C: 3},
nil,
},
{
"ArithService.Mul",
&msg.ArithRequest{A: 2, B: 3},
&msg.ArithResponse{C: 6},
nil,
},
{
"ArithService.Div",
&msg.ArithRequest{A: 13, B: 5},
&msg.ArithResponse{C: 2},
nil,
},
{
"ArithService.Div",
&msg.ArithRequest{A: 1, B: 0},
&msg.ArithResponse{},
errors.New("divide by zero"),
},
{
"ArithService.Error",
&msg.ArithRequest{A: 1, B: 2},
&msg.ArithResponse{},
errors.New("ArithError"),
},
}
// GoCall list
calls := make([]*rpc.Call, len(callInfoList))
for i := 0; i < len(calls); i++ {
calls[i] = client.Go(callInfoList[i].method,
callInfoList[i].args, callInfoList[i].reply,
done,
)
}
for i := 0; i < len(calls); i++ {
<-calls[i].Done
}
// check result
for i := 0; i < len(calls); i++ {
if callInfoList[i].err != nil {
if calls[i].Error.Error() != callInfoList[i].err.Error() {
t.Fatalf(`%s: expected %v, Got = %v`,
callInfoList[i].method,
callInfoList[i].err,
calls[i].Error,
)
}
continue
}
got := calls[i].Reply.(*msg.ArithResponse).GetC()
expected := callInfoList[i].reply.GetC()
if got != expected {
t.Fatalf(`%s: expected %v, Got = %v`,
callInfoList[i].method, got, expected,
)
}
}
}
func testEchoClient(t *testing.T, client *rpc.Client) {
var args msg.EchoRequest
var reply msg.EchoResponse
var err error
// EchoService.Echo
args.Msg = "Hello, Protobuf-RPC"
if err = client.Call("EchoService.Echo", &args, &reply); err != nil {
t.Fatalf(`EchoService.Echo: %v`, err)
}
if reply.GetMsg() != args.GetMsg() {
t.Fatalf(`EchoService.Echo: expected = "%s", got = "%s"`, args.GetMsg(), reply.GetMsg())
}
}
func testEchoClientAsync(t *testing.T, client *rpc.Client) {
// EchoService.Echo
args := &msg.EchoRequest{Msg: "Hello, Protobuf-RPC"}
reply := &msg.EchoResponse{}
echoCall := client.Go("EchoService.Echo", args, reply, nil)
// EchoService.Echo reply
echoCall = <-echoCall.Done
if echoCall.Error != nil {
t.Fatalf(`EchoService.Echo: %v`, echoCall.Error)
}
if echoCall.Reply.(*msg.EchoResponse).GetMsg() != args.GetMsg() {
t.Fatalf(`EchoService.Echo: expected = "%s", got = "%s"`,
args.GetMsg(),
echoCall.Reply.(*msg.EchoResponse).GetMsg(),
)
}
}<|fim▁end|> | return nil
}
|
<|file_name|>filer.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import absolute_import
import hashlib
import six
from django.core.files.base import ContentFile
from django.forms.models import modelform_factory
from filer.models import File, Folder, Image
def filer_folder_from_path(path):
"""
Split `path` by slashes and create a hierarchy of Filer Folder objects accordingly.
Blank path components are ignored, so "/////foo//////bar///" is the same as "foo/bar".
The empty string (and `None`) are handled as "no folder", i.e. root folder.
:param path: Pathname or None
:type path: str|None
:return: Folder
:rtype: filer.models.Folder
"""
if path is None:
return None
folder = None
for component in six.text_type(path).split("/"):
if component:
folder = Folder.objects.get_or_create(name=component, parent=folder)[0]
return folder
def _filer_file_from_upload(model, request, path, upload_data, sha1=None):
"""
Create some sort of Filer file (either File or Image, really) from the given upload data (ContentFile or UploadFile)
:param model: Model class
:param request: Request, to figure out the owner for this file
:type request: django.http.request.HttpRequest|None
:param path: Pathname string (see `filer_folder_from_path`) or a Filer Folder.
:type path: basestring|filer.models.Folder
:param upload_data: Upload data
:type upload_data: django.core.files.base.File
:param sha1: SHA1 checksum. If given and a matching `model` with the SHA1 is found, it is returned instead.
:type sha1: basestring
:return: Filer file
"""
if sha1:
upload = model.objects.filter(sha1=sha1).first()
if upload:
return upload
file_form_cls = modelform_factory(
model=model, fields=('original_filename', 'owner', 'file'))
upload_form = file_form_cls(
data={
'original_filename': upload_data.name,
'owner': (request.user.pk if (request and not request.user.is_anonymous()) else None)
},
files={
'file': upload_data
}
)<|fim▁hole|> else:
upload.folder = filer_folder_from_path(path)
upload.save()
return upload
def filer_file_from_upload(request, path, upload_data, sha1=None):
"""
Create a filer.models.filemodels.File from an upload (UploadedFile or such).
If the `sha1` parameter is passed and a file with said SHA1 is found, it will be returned instead.
:param request: Request, to figure out the owner for this file
:type request: django.http.request.HttpRequest|None
:param path: Pathname string (see `filer_folder_from_path`) or a Filer Folder.
:type path: basestring|filer.models.Folder
:param upload_data: Upload data
:type upload_data: django.core.files.base.File
:param sha1: SHA1 checksum. If given and a matching `model` with the SHA1 is found, it is returned instead.
:type sha1: basestring
:rtype: filer.models.filemodels.File
"""
return _filer_file_from_upload(model=File, request=request, path=path, upload_data=upload_data, sha1=sha1)
def filer_image_from_upload(request, path, upload_data, sha1=None):
"""
Create a Filer Image from an upload (UploadedFile or such).
If the `sha1` parameter is passed and an Image with said SHA1 is found, it will be returned instead.
:param request: Request, to figure out the owner for this file
:type request: django.http.request.HttpRequest|None
:param path: Pathname string (see `filer_folder_from_path`) or a Filer Folder.
:type path: basestring|filer.models.Folder
:param upload_data: Upload data
:type upload_data: django.core.files.base.File
:param sha1: SHA-1 checksum of the data, if available, to do deduplication
:type sha1: basestring
:rtype: filer.models.imagemodels.Image
"""
return _filer_file_from_upload(model=Image, request=request, path=path, upload_data=upload_data, sha1=sha1)
def filer_image_from_data(request, path, file_name, file_data, sha1=None):
"""
Create a Filer Image from the given data string.
If the `sha1` parameter is passed and True (the value True, not a truey value), the SHA-1 of the data string
is calculated and passed to the underlying creation function.
If the `sha1` parameter is truthy (generally the SHA-1 hex string), it's passed directly to the creation function.
:param request: Request, to figure out the owner for this file
:type request: django.http.request.HttpRequest|None
:param path: Pathname string (see `filer_folder_from_path`) or a Filer Folder.
:type path: basestring|filer.models.Folder
:param file_name: File name
:type file_data: basestring
:param file_data: Upload data
:type file_data: bytes
:param sha1: SHA-1 checksum of the data, if available, to do deduplication.
May also be `True` to calculate the SHA-1 first.
:type sha1: basestring|bool
:rtype: filer.models.imagemodels.Image
"""
if sha1 is True:
sha1 = hashlib.sha1(file_data).hexdigest()
upload_data = ContentFile(file_data, file_name)
return _filer_file_from_upload(model=Image, request=request, path=path, upload_data=upload_data, sha1=sha1)<|fim▁end|> | upload = upload_form.save(commit=False)
upload.is_public = True
if isinstance(path, Folder):
upload.folder = path |
<|file_name|>idleoutd.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
""" Marcos Moyano - [email protected]
Logout users of a specified period of idle time.
Copyright (c) 2006 Marcos Moyano
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License version 2 as published by
the Free Software Foundation.
"""
__revision__ = "$Id: idleoutd 2007-6-11 $"
import os, sys, smtplib
from time import sleep
from re import compile as comp
from re import match
from logging import fatal, info, warning, DEBUG, getLogger, Formatter
from logging.handlers import RotatingFileHandler
G_FILE = "/etc/group"
P_FILE = "/etc/passwd"
### Necesary data ###
USR_BY_NAME = {}
GROUP_BY_NAME = {}
PROCS = {}
NO_BANN = []
BANN = {}
PRINTINFO = 0
PRINTVERSION = "0.8.1"
LOG_FLAG = 0
####################
# Manage arguments #
####################
if len(sys.argv[1:]) == 1:
DEBUGG = sys.argv[1]
if DEBUGG == "-D" or DEBUGG == "-d" or DEBUGG == "--debug":
PRINTINFO = 1
elif DEBUGG == "-h" or DEBUGG == "--help":
printhelp()
sys.exit(0)
elif DEBUGG == "-v" or DEBUGG == "-V" or DEBUGG == "--version":
print ("idleoutd version is: %s \n" % PRINTVERSION)
sys.exit(0)
else:
print ("idleoutd: Invalid argument -- %s\n\
Try 'idleoutd -h' or 'idleoutd --help' for more information." % DEBUGG)
sys.exit(1)
elif len(sys.argv[1:]) > 1:
print ("To many arguments: %d recieved, 1 expected.\n\
Try 'idleoutd -h' or 'idleoutd --help'" % len(sys.argv[1:]))
sys.exit(1)
#### End of manage arguments ####
##################
# Print Help Msg #
##################
def printhelp():
"""
Print help information.
"""
print """Logout users of a specified period of idle time.
Usage: idleoutd [OPTION]
-D, -d, --debug Print debug information to the screen every 60 seconds.
-V, -v, --version Print version information and exit.
-h, --help Print this help and exit.
Report bugs to <[email protected]>."""
return
#### End of print help ####
######################
# Define logging way #
######################
def logg(LOG_FILE, LOG_SIZE):
"""
Configuration of the log file.
"""
RLOG = getLogger('')
handler = RotatingFileHandler(LOG_FILE, 'a', LOG_SIZE * 1024 * 1024, 10)
RLOG.addHandler(handler)
RLOG.setLevel(DEBUG)
formatter = Formatter('%(asctime)s: %(levelname)-8s %(message)s','%b %d %H:%M:%S')
handler.setFormatter(formatter)
return
#### End of define logging ####
##################
# Get group info #
##################
def fetch_group(group, param):
'''
Fetch all the users in /etc/passwd with the same group id as "group".
'''
tmp = []
gid = ""
mygfile = open(G_FILE,'r')
for lines in mygfile.readlines():
line = lines.strip()
name = line.split(':')[0]
if group == name:
gid = line.split(':')[2]
break
mygfile.close()
mypfile = open(P_FILE,'r')
for lines in mypfile.readlines():
line = lines.strip()
guid = line.split(':')[3]
if gid == guid:
tmp.append(line.split(":")[0])
mypfile.close()
GROUP_BY_NAME[group] = (tmp, param)
return (GROUP_BY_NAME)
#### End of get group info ####
#################
# Group defined #
#################
def group_define(spar, param):
"""
Fetch users from the specified group.
"""
idle_time = param[0].split("=")[1]
GROUP_BY_NAME = fetch_group(spar, param)
try:
filed = open(G_FILE,'r')
for lines in filed.readlines():
line = lines.strip()
if spar == str(line.split(':')[0]):
tmp = line.split(':')
groups = tmp[len(tmp)-1]
filed.close()
lofusr = GROUP_BY_NAME[spar][0]
groups = groups.split(',')
for x in lofusr:
if x not in groups:
groups.append(x)
if int(idle_time) == 0:
for x in groups:
if x not in NO_BANN:
NO_BANN.append(x)
for y in GROUP_BY_NAME.keys():
if x in GROUP_BY_NAME[y][0]:
GROUP_BY_NAME[y] = (GROUP_BY_NAME[y][0][1:], param)
if GROUP_BY_NAME[y][0] == []:
del GROUP_BY_NAME[y]
else:
for usr in groups:
if usr not in NO_BANN:
GROUP_BY_NAME[spar] = (groups, param)
except Exception, err:
warning("%s -> %s " % (err.__class__ , err))
warning("I was unable to open file %s." % G_FILE)
#### end of group definded ####
################
# User defined #
################
def usr_define(spar, param):
"""
Fetch the specified user.
"""
try:
filed = open(P_FILE,'r')
for lines in filed.readlines():
line = lines.strip()
user = str(line.split(':')[0])
if spar == user:
itime = int(param[0].split('=')[1])
if itime == 0:
if spar not in NO_BANN:
NO_BANN.append(spar)
else:
if spar in NO_BANN:
NO_BANN.remove(spar)
USR_BY_NAME[spar] = param
filed.close()
if spar not in USR_BY_NAME.keys() and spar not in NO_BANN:
info("Config file --> User %s is not defined in system." % spar)
except Exception, err:
warning("%s -> %s " % (err.__class__ , err))
warning("I was unable to open file %s." % P_FILE)
#### end of user definded ####
##################
# Get info #
##################
def get_info(LOG_FLAG):
"""
Parse the configuration file.
"""
try:
from idleoutconf import log, logsize, pid, host, port, domain
from idleoutconf import group, name
if LOG_FLAG != 1:
logg(log, int(logsize))
# Don't open another logging instance!
LOG_FLAG = 1
global smtp
smtp = [host, int(port), domain]
reg1 = comp('(\s+)\=(\s+)')
for users in name:
users = reg1.sub("=", users.strip())
usrtmp = users.split()
usrname = usrtmp[0]
rest = usrtmp[1:]
usr_define(usrname, rest)
for groups in group:
groups = reg1.sub("=", groups.strip())
grtmp = groups.split()
groupname = grtmp[0]
rest = grtmp[1:]
group_define(groupname, rest)
return(pid)
except Exception, err:
print >> sys.stderr, "Error: %d: %s" % (err.errno, err.strerror)
sys.exit(err.errno)
#### end get info ####
##################<|fim▁hole|>def compute(process):
"""
Manage all the information and call the require events.
"""
tmp = [x for x, y in BANN.iteritems() if x not in process.keys()]
for x in tmp:
del BANN[x] # Clean people who got back
for x, y in process.iteritems():
user = x.split(',')[0]
dev = x.split(',')[1]
time = int(y[0])
# Search in user define dictionary
if USR_BY_NAME.has_key(user):
idtm = int(USR_BY_NAME[user][0].split('=')[1])
if time >= idtm:
grace = int(USR_BY_NAME[user][1].split('=')[1])
silent = USR_BY_NAME[user][3].split('=')[1]
if x in BANN.keys():
if BANN[x] >= grace:
del BANN[x]
if silent == "no":
bann_usr(x, y[1], grace, 0) # Bann the user
else:
# Bann the user with silent
bann_usr(x, y[1], grace, 1)
mail = USR_BY_NAME[user][2].split('=')[1]
if mail == "yes":
send_mail(user, dev)
else:
BANN["%s" % x] = int(BANN[x]) + 1
else:
ret = checkcon(x)
if ret == 0:
BANN["%s" % x] = 1
if silent == "no":
notify(user, dev, grace) # Notify the user
else:
# No ssh session - Banning with silent
bann_usr(x, y[1], grace, 1)
else:
if x in BANN.keys():
del BANN[x]
else:
"""
Group search:
We'll grab the lowest idle configuration available. In addition we'll grab the
corresponding grace and mail configuration for that particular user.
By default we set the mail configuration to "no". If it needs to change it will do so.
"""
# Big number just to make sure idle time is lower in the first run
loweridt = 1000
lowgrace = 0
lowmail = "no"
silent = "no"
# Search in group define dictionary for the lowest idle time.
for j, k in GROUP_BY_NAME.iteritems():
if user in k[0]:
idtm = int(GROUP_BY_NAME[j][1][0].split('=')[1])
if idtm < loweridt:
loweridt = idtm
lowgrace = int(GROUP_BY_NAME[j][1][1].split('=')[1])
lowmail = GROUP_BY_NAME[j][1][2].split('=')[1]
silent = GROUP_BY_NAME[j][1][3].split('=')[1]
if time >= loweridt:
if x in BANN.keys():
if BANN[x] >= lowgrace:
del BANN[x]
if silent == "no":
bann_usr(x, y[1], lowgrace, 0) # Bann the user
else:
# Bann the user with silent
bann_usr(x, y[1], lowgrace, 1)
if lowmail == "yes":
send_mail(user, dev)
else:
BANN["%s" % x] = int(BANN[x]) + 1
else:
ret = checkcon(x)
if ret == 0:
BANN["%s" % x] = 1
if silent == "no":
notify(user, dev, lowgrace) # Notify the user
else:
bann_usr(x, y[1], lowgrace, 1)
else:
if x in BANN.keys():
del BANN[x]
#### End of compute ####
##################
# Notify user #
##################
def notify(user, dev, grace):
"""
Notify the user that he is going to be kicked out.
"""
fdr = "/dev/"+dev
seconds = grace*60
try:
tonot = open(fdr,'a')
tonot.write("\n\r\n<<< MESSAGE FROM IDLEOUT >>>\n\n\
\r\tYou have been idle for too long.\n\
\r\tIf you don't send an alive signal in the next %d seconds you will be kicked out!\n\n\
\r<<< END OF MESSAGE >>>\n\n" % seconds)
tonot.close()
warning("USER %s idle on DEVICE %s --> NOTIFYING!" % (user, dev))
except Exception, err:
warning("%s -> %s " % (err.__class__ , err))
warning("I was unable to open device %s." % fdr)
#### end of notify user ####
##########################
# check ssh connection #
##########################
def checkcon(info):
"""
Look for the sshd process of the specified user in the specified device.
"""
user = info.split(',')[0]
device = info.split(',')[1]
sshd = os.popen("ps -ef | grep %s | grep %s | grep sshd | grep -v \"grep\" | head -n 1" % (device, user), 'r')
sshd = sshd.read()
if sshd:
sshd = sshd.strip().split()
else:
warning("USER %s not on DEVICE %s --> KICKING OUT!" % (user, device))
return (1)
if sshd[5] == "?" and sshd[7] == "sshd:":
if sshd[8].strip() == "%s@%s" % (user.strip(), device.strip()):
return (0) # Found ssh session
else:
return (1) # There is no ssh session for the user in the device.
#### End of checkcon ####
###############
# Bann user #
###############
def bann_usr(user, pids, seconds, silent):
"""
Kick out the specified user.
"""
usr = user.split(',')[0]
device = user.split(',')[1]
seconds = int(seconds)*60
fdr = "/dev/"+device
warning("USER %s --> timeout on DEVICE %s --> KICKING OUT!" % (usr, device))
if int(silent) == 0:
try:
tonot = open(fdr,'a')
tonot.write("\n\r\n<<< MESSAGE FROM IDLEOUT >>> \n\n\
\r\tYour %s seconds has expired.\n\
\r\tKicking out user: %s\n\n\
\r<<< END OF MESSAGE >>>\n\n" % (seconds, usr))
tonot.close()
except Exception, err:
warning("%s -> %s " % (err.__class__ , err))
warning("I was unable to open device %s." % fdr)
for process in pids.split():
process = int(process)
try:
os.kill(process, 9)
except Exception, e:
warning("%s -> %s " % (e.__class__ , e))
warning("Process don't exist or error killing it (%d)" % process)
#### End of bann user ####
#############
# Get pids #
#############
def get_pids(idle_pos, name_pos, dev_pos):
"""
Find the idle info and processes of the users currently logged in.
"""
PROCS = {}
info1 = os.popen("finger | cut -c %s,%s,%s | sed 1d | egrep -v \"\*:0\" | sort | uniq" % (name_pos, dev_pos, idle_pos), "r")
for line in info1:
c = line.split()
# Added to check differences between distros. Distros like SuSE use this.
if "*" == c[1][0]:
c[1] = c[1][1:]
if c[0] not in NO_BANN:
if len(c) == 3:
try:
t = int(c[2])
except ValueError:
if ":" in c[2]:
t = c[2].strip()
t = int(t.split(':')[0])*60 + int(t.split(':')[1])
elif "d" in c[2]:
t = c[2].strip()
t = int(t)*60*24
lo = os.popen("ps -eo \"%s\" | awk '{print $3 \" \" $1 \" \" $2}' | grep %s | grep %s | egrep -v \"grep\" | awk '{print $2}' | xargs" % ("%p %y %U", c[0], c[1]), "r")
for li in lo.readlines():
li = li.strip()
info("USER: %s --> DEVICE: %s --> IDLE TIME: %s --> PROCESSES: %s" % (c[0], c[1], str(t), li))
PROCS["%s,%s" % (c[0], c[1])] = (t, li)
return(PROCS)
#### end of get_pids ####
##########################
# Check for SMTP service #
##########################
def check_smtp():
"""
Check for the SMTP service.
"""
try:
server = smtplib.SMTP(smtp[0], smtp[1])
except Exception, err:
warning("%s -> Exit code %s -> Message: %s" % (err.__class__ , err[0], err[1]))
return(1)
server.quit()
return(0)
#### end of check SMTP ####
#############
# Send mail #
#############
def send_mail(user, dev):
"""
Send an email to the specified user explaining the situation.
"""
ecode = check_smtp()
if ecode != 0:
warning("An SMTP error ocurred. NOT sending email.")
return
pid = os.fork()
if pid > 0:
sys.exit(0)
domain = smtp[2]
if domain.lower() != "none":
toaddrs = "%s@%s" % (user, domain)
fromaddr = "%s@%s" % ("idleout", domain)
else:
toaddrs = user
fromaddr = "idleout"
line = "You have been idle for too long.\n\
Idleout has decided to terminate your conection on device %s.\n" % dev
msg = ("From: %s\r\nTo: %s\r\n\r\n%s" % (fromaddr, toaddrs, line))
try:
server = smtplib.SMTP(smtp[0], smtp[1])
server.set_debuglevel(0)
server.sendmail(fromaddr, toaddrs, msg)
server.quit()
info("Email sent to user %s" % user )
except Exception, err:
warning("%s -> Exit code %s -> Message: %s" % (err.__class__ , err[0], err[1]))
warning("An SMTP error ocurred. NOT sending email.")
#### end of send_mail ####
#####################
# Get Idle position #
#####################
def get_pos():
'''
Function to find the locations of "Name", "Tty" and "Idle" from the finger command.
'''
idle = os.popen("finger | head -n 1", "r")
line = idle.readline().strip()
tmp = line.find("Idle") + 1
idle_pos = str("%d-%d" % (tmp - 1, tmp + 4))
tmp = line.find("Name")
name_pos = str("%d-%d" % (1, tmp))
tmp = line.find("Tty")
dev_pos = str("%d-%d" % (tmp, tmp + 7))
return(idle_pos, name_pos, dev_pos)
#### End of get_pos ####
####################
# Print debug info #
####################
def prinfo(PROCS, usr_name, group_name, nobann, ybann, smtp):
"""
Print the DEBUG information.
"""
print " <<<<< DEBUG MODE >>>>> "
print "---------------------------------------------------------"
print " <<< SMTP DIRECTIVES FROM CONFIG FILE >>>\n"
host = smtp[0]
port = smtp[1]
domain = smtp[2]
print ("HOST: %s --> PORT: %d --> DOMAIN: %s" % (host, port, domain))
print "---------------------------------------------------------"
print " <<< USER DIRECTIVES FROM CONFIG FILE >>>"
for name in usr_name.keys():
print ("USER: %s " % name)
tmp = " ".join(usr for usr in usr_name[name])
print ("CONFIGURATION: %s" % tmp)
print "---------------------------------------------------------"
print " <<< GROUP DIRECTIVES FROM CONFIG FILE >>>"
for group in group_name.keys():
print ("GROUP: %s" % group)
tmp = " ".join(usr for usr in group_name[group][0])
tmp1 = " ".join(conf for conf in group_name[group][1])
print ("USERS IN GROUP: %s" % tmp)
print ("CONFIGURATION: %s" % tmp1)
print "---------------------------------------"
tmp = " ".join(usr for usr in nobann)
print "---------------------------------------------------------"
print ("USERS THAT WILL NEVER BE KICKED OUT: %s" % tmp)
print "---------------------------------------------------------"
print "IDLE USERS: "
for info in PROCS.keys():
user = info.split(',')[0]
dev = info.split(',')[1]
time = PROCS[info][0]
print ("USER: %s --> DEVICE: %s --> IDLE TIME: %s" % (user, dev, time))
print "---------------------------------------------------------"
print " <<< PROCESSES OF IDLE USERS: >>>\n"
for info in PROCS.keys():
user = info.split(',')[0]
dev = info.split(',')[1]
pro = PROCS[info][1]
print ("USER: %s --> DEVICE: %s --> PROCESSES: %s" % (user , dev, pro))
print "---------------------------------------------------------"
print "<<< GRACE: USERS THAT WILL (eventually) BE KICKED OUT >>>\n"
for info in ybann.keys():
user = info.split(',')[0]
dev = info.split(',')[1]
gra = ybann[info]
print ("USER: %s --> DEVICE: %s --> GRACE MINUTE: %s" % (user, dev, gra))
print "\n#########################################################"
print " <<< Sleeping for 60 seconds >>> "
print "#########################################################\n"
#### End of prinfo ####
###########
# MAIN #
###########
def main():
"""
Main function.
"""
try:
count = 1
# Just at the beginning to get positions in finger.
#These positions changes between distros.
(id_pos, name_pos, dev_pos) = get_pos()
while True:
if count == 30:
count = 1
# Read conf file at start and every 30 minutes
get_info(LOG_FLAG)
else:
count = count + 1
PROCS = get_pids(id_pos, name_pos, dev_pos)
try:
compute(PROCS)
except Exception, err:
warning("%s -> %s " % (err.__class__ , err))
if PRINTINFO == 1:
prinfo(PROCS, USR_BY_NAME, GROUP_BY_NAME, NO_BANN, BANN, smtp)
sleep(60) # Sleep for 60 seconds
except:
print "Signal caught. Exiting!"
sys.exit(1)
#### End of MAIN :) ####
if __name__ == "__main__":
try:
sys.path.append('/etc/idleout')
LOG_FLAG = 0
pidfile = get_info(LOG_FLAG)
except Exception, err:
print ("%s -> %s " % (err.__class__ , err))
sys.exit(1)
info("<<< Starting Idleout daemon >>>")
try:
import psyco # try to speed up :)
psyco.full()
except ImportError:
info("Psyco is not installed, the program will just run a bit slower")
pass
if PRINTINFO == 1:
info("<<< Idleout daemon started in debug mode >>>")
main()
else:
try:
pid = os.fork()
if pid > 0:
sys.exit(0) # exit first parent
except OSError, e:
print >> sys.stderr, "fork 1 failed: %d (%s)" % (e.errno, e.strerror)
fatal("I was unable to fork into a deamon")
sys.exit(1)
try:
os.chdir("/")
except Exception, err:
info("%s -> %s " % (err.__class__ , err))
pass
try:
os.setsid()
except Exception, err:
info("%s -> %s " % (err.__class__ , err))
pass
try:
os.umask(0)
except Exception, err:
info("%s -> %s " % (err.__class__ , err))
pass
try:
pid = os.fork()
if pid > 0:
myfile = open(pidfile, 'w')
myfile.write(str(pid) + '\n')
myfile.close()
info("<<< Idleout daemon started - Pid: %s >>>" % str(pid))
sys.exit(0)
except OSError, err:
print >> sys.stderr, "fork 2 failed: %d: %s" % (err.errno, err.strerror)
fatal("I was unable to fork into a deamon")
sys.exit(1)
# Start the daemon
main()<|fim▁end|> | # Compute info #
################## |
<|file_name|>Overfit_underfit.py<|end_file_name|><|fim▁begin|>import numpy as np
import matplotlib.pyplot as plt
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import cross_val_score
np.random.seed(0)
n_samples = 30
degrees = [1, 4, 15]
true_fun = lambda X: np.cos(1.5 * np.pi * X)
X = np.sort(np.random.rand(n_samples))
y = true_fun(X) + np.random.randn(n_samples) * 0.1
plot_titles = ['Underfit', 'Just right', 'Overfit']
plt.figure(figsize=(14, 5))
for i in range(len(degrees)):
ax = plt.subplot(1, len(degrees), i + 1)
plt.setp(ax, xticks=(), yticks=())
polynomial_features = PolynomialFeatures(degree=degrees[i],
include_bias=False)
linear_regression = LinearRegression()
pipeline = Pipeline([("polynomial_features", polynomial_features),
("linear_regression", linear_regression)])
pipeline.fit(X[:, np.newaxis], y)
# Evaluate the models using crossvalidation
scores = cross_val_score(pipeline, X[:, np.newaxis], y,<|fim▁hole|> scoring="neg_mean_squared_error", cv=10)
X_test = np.linspace(0, 1, 100)
plt.plot(X_test, pipeline.predict(X_test[:, np.newaxis]), label="Model")
#plt.plot(X_test, true_fun(X_test), label="True function")
plt.scatter(X, y, c='k', label="Samples")
plt.xlabel("x with degree: {}".format(degrees[i]))
plt.ylabel("y")
plt.xlim((0, 1))
plt.ylim((-2, 2))
plt.legend(loc="best")
plt.title(plot_titles[i])
#plt.show()
plt.savefig('images/overfit_underfit.png')<|fim▁end|> | |
<|file_name|>xml.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: ISO-8859-15 -*-
import rlib
myreport = rlib.Rlib()
print rlib.version<|fim▁hole|>myreport.add_report("array.xml")
myreport.set_output_format_from_text("pdf")
myreport.execute()
print myreport.get_content_type_as_text()
open('xml.pdf','wb').write(myreport.get_output())<|fim▁end|> | myreport.add_datasource_xml("local_xml")
myreport.add_query_as("local_xml", "data.xml", "data") |
<|file_name|>process.py<|end_file_name|><|fim▁begin|>import os
sys = os.system
CC = 'g++ {} -std=gnu++0x -Wall'
FLAG_clear = ['/c', '-c']
FLAG_window = ['/w', '-w']
FLAG_exit = ['/e', '-e']
def main():
print('List of existing <*.cpp> files:')<|fim▁hole|> counter = 0
for file in os.listdir():
if file[-4:] == '.cpp':
counter += 1
files.append(file)
print('{:->3d}) {}'.format(counter, file[:-4]))
name = ''
flags = []
command, *ex = input('Enter your <command> [<name>] [<*flags>]: ').split()
if len(ex):
name = ex[0]
flags = list(ex[1:])
try:
name = files[int(name) - 1]
except:
if name[0] == '#':
try:
fileid = int(name[1:])
name = files[fileid - 1]
except:
pass
else:
flags = list(ex)
if command == 'open':
if len(list(set(FLAG_clear).intersection(set(flags)))) > 0:
sys('cls')
if len(list(set(FLAG_window).intersection(set(flags)))) > 0:
sys('start {}'.format(name))
else:
sys('{}'.format(name))
elif command == 'compile':
if len(list(set(FLAG_clear).intersection(set(flags)))) > 0:
sys('cls')
print('Compiling...')
err = sys((CC+' -o {}.exe').format(name, name[:-4]))
if err:
print('Error during compiling. <{}>'.format(err))
else:
print('Compiled succesfully.')
elif command == 'run':
if len(list(set(FLAG_clear).intersection(set(flags)))) > 0:
sys('cls')
print('Compiling...')
err = sys((CC+' -o {}.exe').format(name, name[:-4]))
if err:
print('Error during compiling. <{}>'.format(err))
else:
print('Compiled succesfully. Starting:\n' + '-' * 31)
if len(list(set(FLAG_window).intersection(set(flags)))) > 0:
err2 = sys('start {}.exe'.format(name[:-4]))
else:
err2 = sys('{}.exe'.format(name[:-4]))
if err2:
print('-' * 30 + '\nError during execution. <{}>'.format(err2))
else:
print('-' * 17 + '\nDone succesfully.')
elif command == 'list':
if name != '':
if len(list(set(FLAG_clear).intersection(set(flags)))) > 0:
sys('cls')
print('List of existing <*.{}> files:'.format(name))
l = len(name)
for file in os.listdir():
if file[-l:] == name:
print('{:>20}'.format(file[:-l - 1]))
else:
print('List of all existing files:')
for file in os.listdir():
print('{:>20}'.format(file))
if len(list(set(FLAG_exit).intersection(set(flags)))) == 0:
input('-' * 25 + '\nEnd. Press enter to exit: ')
main()<|fim▁end|> | files = [] |
<|file_name|>overflowing_sub.rs<|end_file_name|><|fim▁begin|>#![feature(core, wrapping)]
extern crate core;
#[cfg(test)]
mod tests {
use core::num::wrapping::OverflowingOps;
// mod shift_max {
// #![allow(non_upper_case_globals)]
//
// pub const i8: u32 = (1 << 3) - 1;
// pub const i16: u32 = (1 << 4) - 1;
// pub const i32: u32 = (1 << 5) - 1;
// pub const i64: u32 = (1 << 6) - 1;
//
// pub const u8: u32 = i8;
// pub const u16: u32 = i16;<|fim▁hole|> // pub const u64: u32 = i64;
// }
// macro_rules! unsigned_overflowing_impl {
// ($($t:ident)*) => ($(
// impl OverflowingOps for $t {
// #[inline(always)]
// fn overflowing_add(self, rhs: $t) -> ($t, bool) {
// unsafe {
// concat_idents!($t, _add_with_overflow)(self, rhs)
// }
// }
// #[inline(always)]
// fn overflowing_sub(self, rhs: $t) -> ($t, bool) {
// unsafe {
// concat_idents!($t, _sub_with_overflow)(self, rhs)
// }
// }
// #[inline(always)]
// fn overflowing_mul(self, rhs: $t) -> ($t, bool) {
// unsafe {
// concat_idents!($t, _mul_with_overflow)(self, rhs)
// }
// }
//
// #[inline(always)]
// fn overflowing_div(self, rhs: $t) -> ($t, bool) {
// (self/rhs, false)
// }
// #[inline(always)]
// fn overflowing_rem(self, rhs: $t) -> ($t, bool) {
// (self % rhs, false)
// }
//
// #[inline(always)]
// fn overflowing_shl(self, rhs: u32) -> ($t, bool) {
// (self << (rhs & self::shift_max::$t),
// (rhs > self::shift_max::$t))
// }
// #[inline(always)]
// fn overflowing_shr(self, rhs: u32) -> ($t, bool) {
// (self >> (rhs & self::shift_max::$t),
// (rhs > self::shift_max::$t))
// }
//
// #[inline(always)]
// fn overflowing_neg(self) -> ($t, bool) {
// ((!self).wrapping_add(1), true)
// }
// }
// )*)
// }
// unsigned_overflowing_impl! { u8 u16 u32 u64 }
macro_rules! overflowing_sub_test {
($T:ty, $value:expr, $rhs:expr, $result:expr) => ({
let value: $T = $value;
let rhs: $T = $rhs;
let result: ($T, bool) = value.overflowing_sub(rhs);
assert_eq!(result, $result);
})
}
#[test]
fn overflowing_sub_test1() {
overflowing_sub_test!( u32, 0xffff0000, 0x0000ffff, (0xfffe0001, false) );
overflowing_sub_test!( u32, 0x00000000, 0x00000001, (0xffffffff, true) );
}
}<|fim▁end|> | // pub const u32: u32 = i32; |
<|file_name|>buildbot_spec.py<|end_file_name|><|fim▁begin|><|fim▁hole|># Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
#!/usr/bin/env python
usage = '''
Write buildbot spec to outfile based on the bot name:
$ python buildbot_spec.py outfile Test-Ubuntu-GCC-GCE-CPU-AVX2-x86-Debug
Or run self-tests:
$ python buildbot_spec.py test
'''
import inspect
import json
import os
import sys
import builder_name_schema
import dm_flags
import nanobench_flags
CONFIG_COVERAGE = 'Coverage'
CONFIG_DEBUG = 'Debug'
CONFIG_RELEASE = 'Release'
def lineno():
caller = inspect.stack()[1] # Up one level to our caller.
return inspect.getframeinfo(caller[0]).lineno
# Since we don't actually start coverage until we're in the self-test,
# some function def lines aren't reported as covered. Add them to this
# list so that we can ignore them.
cov_skip = []
cov_start = lineno()+1 # We care about coverage starting just past this def.
def gyp_defines(builder_dict):
gyp_defs = {}
# skia_arch_type.
if builder_dict['role'] == builder_name_schema.BUILDER_ROLE_BUILD:
arch = builder_dict['target_arch']
elif builder_dict['role'] == builder_name_schema.BUILDER_ROLE_HOUSEKEEPER:
arch = None
else:
arch = builder_dict['arch']
arch_types = {
'x86': 'x86',
'x86_64': 'x86_64',
'Arm7': 'arm',
'Arm64': 'arm64',
'Mips': 'mips32',
'Mips64': 'mips64',
'MipsDSP2': 'mips32',
}
if arch in arch_types:
gyp_defs['skia_arch_type'] = arch_types[arch]
# housekeeper: build shared lib.
if builder_dict['role'] == builder_name_schema.BUILDER_ROLE_HOUSEKEEPER:
gyp_defs['skia_shared_lib'] = '1'
# skia_gpu.
if builder_dict.get('cpu_or_gpu') == 'CPU':
gyp_defs['skia_gpu'] = '0'
# skia_warnings_as_errors.
werr = False
if builder_dict['role'] == builder_name_schema.BUILDER_ROLE_BUILD:
if 'Win' in builder_dict.get('os', ''):
if not ('GDI' in builder_dict.get('extra_config', '') or
'Exceptions' in builder_dict.get('extra_config', '')):
werr = True
elif ('Mac' in builder_dict.get('os', '') and
'Android' in builder_dict.get('extra_config', '')):
werr = False
else:
werr = True
gyp_defs['skia_warnings_as_errors'] = str(int(werr)) # True/False -> '1'/'0'
# Win debugger.
if 'Win' in builder_dict.get('os', ''):
gyp_defs['skia_win_debuggers_path'] = 'c:/DbgHelp'
# Qt SDK (Win).
if 'Win' in builder_dict.get('os', ''):
if builder_dict.get('os') == 'Win8':
gyp_defs['qt_sdk'] = 'C:/Qt/Qt5.1.0/5.1.0/msvc2012_64/'
else:
gyp_defs['qt_sdk'] = 'C:/Qt/4.8.5/'
# ANGLE.
if builder_dict.get('extra_config') == 'ANGLE':
gyp_defs['skia_angle'] = '1'
if builder_dict.get('os', '') in ('Ubuntu', 'Linux'):
gyp_defs['use_x11'] = '1'
gyp_defs['chromeos'] = '0'
# GDI.
if builder_dict.get('extra_config') == 'GDI':
gyp_defs['skia_gdi'] = '1'
# Build with Exceptions on Windows.
if ('Win' in builder_dict.get('os', '') and
builder_dict.get('extra_config') == 'Exceptions'):
gyp_defs['skia_win_exceptions'] = '1'
# iOS.
if (builder_dict.get('os') == 'iOS' or
builder_dict.get('extra_config') == 'iOS'):
gyp_defs['skia_os'] = 'ios'
# Shared library build.
if builder_dict.get('extra_config') == 'Shared':
gyp_defs['skia_shared_lib'] = '1'
# Build fastest Skia possible.
if builder_dict.get('extra_config') == 'Fast':
gyp_defs['skia_fast'] = '1'
# PDF viewer in GM.
if (builder_dict.get('os') == 'Mac10.8' and
builder_dict.get('arch') == 'x86_64' and
builder_dict.get('configuration') == 'Release'):
gyp_defs['skia_run_pdfviewer_in_gm'] = '1'
# Clang.
if builder_dict.get('compiler') == 'Clang':
gyp_defs['skia_clang_build'] = '1'
# Valgrind.
if 'Valgrind' in builder_dict.get('extra_config', ''):
gyp_defs['skia_release_optimization_level'] = '1'
# Link-time code generation just wastes time on compile-only bots.
if (builder_dict.get('role') == builder_name_schema.BUILDER_ROLE_BUILD and
builder_dict.get('compiler') == 'MSVC'):
gyp_defs['skia_win_ltcg'] = '0'
# Mesa.
if (builder_dict.get('extra_config') == 'Mesa' or
builder_dict.get('cpu_or_gpu_value') == 'Mesa'):
gyp_defs['skia_mesa'] = '1'
# VisualBench
if builder_dict.get('extra_config') == 'VisualBench':
gyp_defs['skia_use_sdl'] = '1'
# skia_use_android_framework_defines.
if builder_dict.get('extra_config') == 'Android_FrameworkDefs':
gyp_defs['skia_use_android_framework_defines'] = '1'
# Skia dump stats for perf tests and gpu
if (builder_dict.get('cpu_or_gpu') == 'GPU' and
builder_dict.get('role') == 'Perf'):
gyp_defs['skia_dump_stats'] = '1'
# CommandBuffer.
if builder_dict.get('extra_config') == 'CommandBuffer':
gyp_defs['skia_command_buffer'] = '1'
# Vulkan.
if builder_dict.get('extra_config') == 'Vulkan':
gyp_defs['skia_vulkan'] = '1'
return gyp_defs
cov_skip.extend([lineno(), lineno() + 1])
def get_extra_env_vars(builder_dict):
env = {}
if builder_dict.get('configuration') == CONFIG_COVERAGE:
# We have to use Clang 3.6 because earlier versions do not support the
# compile flags we use and 3.7 and 3.8 hit asserts during compilation.
env['CC'] = '/usr/bin/clang-3.6'
env['CXX'] = '/usr/bin/clang++-3.6'
elif builder_dict.get('compiler') == 'Clang':
env['CC'] = '/usr/bin/clang'
env['CXX'] = '/usr/bin/clang++'
# SKNX_NO_SIMD, SK_USE_DISCARDABLE_SCALEDIMAGECACHE, etc.
extra_config = builder_dict.get('extra_config', '')
if extra_config.startswith('SK') and extra_config.isupper():
env['CPPFLAGS'] = '-D' + extra_config
return env
cov_skip.extend([lineno(), lineno() + 1])
def build_targets_from_builder_dict(builder_dict, do_test_steps, do_perf_steps):
"""Return a list of targets to build, depending on the builder type."""
if builder_dict['role'] in ('Test', 'Perf') and builder_dict['os'] == 'iOS':
return ['iOSShell']
if builder_dict.get('extra_config') == 'Appurify':
return ['VisualBenchTest_APK']
t = []
if do_test_steps:
t.append('dm')
if do_perf_steps and builder_dict.get('extra_config') == 'VisualBench':
t.append('visualbench')
elif do_perf_steps:
t.append('nanobench')
if t:
return t
else:
return ['most']
cov_skip.extend([lineno(), lineno() + 1])
def device_cfg(builder_dict):
# Android.
if 'Android' in builder_dict.get('extra_config', ''):
if 'NoNeon' in builder_dict['extra_config']:
return 'arm_v7'
return {
'Arm64': 'arm64',
'x86': 'x86',
'x86_64': 'x86_64',
'Mips': 'mips',
'Mips64': 'mips64',
'MipsDSP2': 'mips_dsp2',
}.get(builder_dict['target_arch'], 'arm_v7_neon')
elif builder_dict.get('os') == 'Android':
return {
'AndroidOne': 'arm_v7_neon',
'GalaxyS3': 'arm_v7_neon',
'GalaxyS4': 'arm_v7_neon',
'NVIDIA_Shield': 'arm64',
'Nexus10': 'arm_v7_neon',
'Nexus5': 'arm_v7_neon',
'Nexus6': 'arm_v7_neon',
'Nexus7': 'arm_v7_neon',
'Nexus7v2': 'arm_v7_neon',
'Nexus9': 'arm64',
'NexusPlayer': 'x86',
}[builder_dict['model']]
# ChromeOS.
if 'CrOS' in builder_dict.get('extra_config', ''):
if 'Link' in builder_dict['extra_config']:
return 'link'
if 'Daisy' in builder_dict['extra_config']:
return 'daisy'
elif builder_dict.get('os') == 'ChromeOS':
return {
'Link': 'link',
'Daisy': 'daisy',
}[builder_dict['model']]
# iOS.
if 'iOS' in builder_dict.get('os', ''):
return {
'iPad4': 'iPad4,1',
}[builder_dict['model']]
return None
cov_skip.extend([lineno(), lineno() + 1])
def product_board(builder_dict):
if 'Android' in builder_dict.get('os', ''):
return {
'AndroidOne': None, # TODO(borenet,kjlubick)
'GalaxyS3': 'smdk4x12',
'GalaxyS4': None, # TODO(borenet,kjlubick)
'NVIDIA_Shield': None, # TODO(borenet,kjlubick)
'Nexus10': 'manta',
'Nexus5': 'hammerhead',
'Nexus6': 'shamu',
'Nexus7': 'grouper',
'Nexus7v2': 'flo',
'Nexus9': 'flounder',
'NexusPlayer': 'fugu',
}[builder_dict['model']]
return None
cov_skip.extend([lineno(), lineno() + 1])
def get_builder_spec(builder_name):
builder_dict = builder_name_schema.DictForBuilderName(builder_name)
env = get_extra_env_vars(builder_dict)
gyp_defs = gyp_defines(builder_dict)
gyp_defs_list = ['%s=%s' % (k, v) for k, v in gyp_defs.iteritems()]
gyp_defs_list.sort()
env['GYP_DEFINES'] = ' '.join(gyp_defs_list)
rv = {
'builder_cfg': builder_dict,
'dm_flags': dm_flags.get_args(builder_name),
'env': env,
'nanobench_flags': nanobench_flags.get_args(builder_name),
}
device = device_cfg(builder_dict)
if device:
rv['device_cfg'] = device
board = product_board(builder_dict)
if board:
rv['product.board'] = board
role = builder_dict['role']
if role == builder_name_schema.BUILDER_ROLE_HOUSEKEEPER:
configuration = CONFIG_RELEASE
else:
configuration = builder_dict.get(
'configuration', CONFIG_DEBUG)
arch = (builder_dict.get('arch') or builder_dict.get('target_arch'))
if ('Win' in builder_dict.get('os', '') and arch == 'x86_64'):
configuration += '_x64'
rv['configuration'] = configuration
if configuration == CONFIG_COVERAGE:
rv['do_compile_steps'] = False
rv['do_test_steps'] = role == builder_name_schema.BUILDER_ROLE_TEST
rv['do_perf_steps'] = (role == builder_name_schema.BUILDER_ROLE_PERF or
(role == builder_name_schema.BUILDER_ROLE_TEST and
configuration == CONFIG_DEBUG))
if rv['do_test_steps'] and 'Valgrind' in builder_name:
rv['do_perf_steps'] = True
if 'GalaxyS4' in builder_name:
rv['do_perf_steps'] = False
rv['build_targets'] = build_targets_from_builder_dict(
builder_dict, rv['do_test_steps'], rv['do_perf_steps'])
# Do we upload perf results?
upload_perf_results = False
if role == builder_name_schema.BUILDER_ROLE_PERF:
upload_perf_results = True
rv['upload_perf_results'] = upload_perf_results
# Do we upload correctness results?
skip_upload_bots = [
'ASAN',
'Coverage',
'MSAN',
'TSAN',
'UBSAN',
'Valgrind',
]
upload_dm_results = True
for s in skip_upload_bots:
if s in builder_name:
upload_dm_results = False
break
rv['upload_dm_results'] = upload_dm_results
return rv
cov_end = lineno() # Don't care about code coverage past here.
def self_test():
import coverage # This way the bots don't need coverage.py to be installed.
args = {}
cases = [
'Build-Mac10.8-Clang-Arm7-Debug-Android',
'Build-Win-MSVC-x86-Debug',
'Build-Win-MSVC-x86-Debug-GDI',
'Build-Win-MSVC-x86-Debug-Exceptions',
'Build-Ubuntu-GCC-Arm7-Debug-Android_FrameworkDefs',
'Build-Ubuntu-GCC-Arm7-Debug-Android_NoNeon',
'Build-Ubuntu-GCC-Arm7-Debug-CrOS_Daisy',
'Build-Ubuntu-GCC-x86_64-Debug-CrOS_Link',
'Build-Ubuntu-GCC-x86_64-Release-Mesa',
'Build-Ubuntu-GCC-x86_64-Release-ANGLE',
'Housekeeper-PerCommit',
'Perf-Win8-MSVC-ShuttleB-GPU-HD4600-x86_64-Release-Trybot',
'Perf-Ubuntu-GCC-ShuttleA-GPU-GTX660-x86_64-Release-VisualBench',
'Test-Android-GCC-GalaxyS4-GPU-SGX544-Arm7-Debug',
'Perf-Android-GCC-Nexus5-GPU-Adreno330-Arm7-Release-Appurify',
'Test-Android-GCC-Nexus6-GPU-Adreno420-Arm7-Debug',
'Test-ChromeOS-GCC-Link-CPU-AVX-x86_64-Debug',
'Test-iOS-Clang-iPad4-GPU-SGX554-Arm7-Debug',
'Test-Mac-Clang-MacMini6.2-GPU-HD4000-x86_64-Debug-CommandBuffer',
'Test-Mac10.8-Clang-MacMini4.1-GPU-GeForce320M-x86_64-Release',
'Test-Ubuntu-Clang-GCE-CPU-AVX2-x86_64-Coverage',
('Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Release-'
'SK_USE_DISCARDABLE_SCALEDIMAGECACHE'),
'Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Release-SKNX_NO_SIMD',
'Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Release-Fast',
'Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Release-Shared',
'Test-Ubuntu-GCC-ShuttleA-GPU-GTX550Ti-x86_64-Release-Valgrind',
'Test-Win10-MSVC-ShuttleA-GPU-GTX660-x86_64-Debug-Vulkan',
'Test-Win8-MSVC-ShuttleB-GPU-HD4600-x86-Release-ANGLE',
'Test-Win8-MSVC-ShuttleA-CPU-AVX-x86_64-Debug',
]
cov = coverage.coverage()
cov.start()
for case in cases:
args[case] = get_builder_spec(case)
cov.stop()
this_file = os.path.basename(__file__)
_, _, not_run, _ = cov.analysis(this_file)
filtered = [line for line in not_run if
line > cov_start and line < cov_end and line not in cov_skip]
if filtered:
print 'Lines not covered by test cases: ', filtered
sys.exit(1)
golden = this_file.replace('.py', '.json')
with open(os.path.join(os.path.dirname(__file__), golden), 'w') as f:
json.dump(args, f, indent=2, sort_keys=True)
if __name__ == '__main__':
if len(sys.argv) == 2 and sys.argv[1] == 'test':
self_test()
sys.exit(0)
if len(sys.argv) != 3:
print usage
sys.exit(1)
with open(sys.argv[1], 'w') as out:
json.dump(get_builder_spec(sys.argv[2]), out)<|fim▁end|> | #
# Copyright 2015 Google Inc.
# |
<|file_name|>subst.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Type substitutions.
pub use self::ParamSpace::*;
pub use self::RegionSubsts::*;
use middle::ty::{self, Ty};
use middle::ty_fold::{self, TypeFoldable, TypeFolder};
use util::ppaux::Repr;
use std::fmt;
use std::iter::IntoIterator;
use std::slice::Iter;
use std::vec::{Vec, IntoIter};
use syntax::codemap::{Span, DUMMY_SP};
///////////////////////////////////////////////////////////////////////////
/// A substitution mapping type/region parameters to new values. We
/// identify each in-scope parameter by an *index* and a *parameter
/// space* (which indices where the parameter is defined; see
/// `ParamSpace`).
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct Substs<'tcx> {
pub types: VecPerParamSpace<Ty<'tcx>>,
pub regions: RegionSubsts,
}
/// Represents the values to use when substituting lifetime parameters.
/// If the value is `ErasedRegions`, then this subst is occurring during
/// trans, and all region parameters will be replaced with `ty::ReStatic`.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub enum RegionSubsts {
ErasedRegions,
NonerasedRegions(VecPerParamSpace<ty::Region>)
}
impl<'tcx> Substs<'tcx> {
pub fn new(t: VecPerParamSpace<Ty<'tcx>>,
r: VecPerParamSpace<ty::Region>)
-> Substs<'tcx>
{
Substs { types: t, regions: NonerasedRegions(r) }
}
pub fn new_type(t: Vec<Ty<'tcx>>,
r: Vec<ty::Region>)
-> Substs<'tcx>
{
Substs::new(VecPerParamSpace::new(t, Vec::new(), Vec::new()),
VecPerParamSpace::new(r, Vec::new(), Vec::new()))
}
pub fn new_trait(t: Vec<Ty<'tcx>>,
r: Vec<ty::Region>,
s: Ty<'tcx>)
-> Substs<'tcx>
{
Substs::new(VecPerParamSpace::new(t, vec!(s), Vec::new()),
VecPerParamSpace::new(r, Vec::new(), Vec::new()))
}
pub fn erased(t: VecPerParamSpace<Ty<'tcx>>) -> Substs<'tcx>
{
Substs { types: t, regions: ErasedRegions }
}
pub fn empty() -> Substs<'tcx> {
Substs {
types: VecPerParamSpace::empty(),
regions: NonerasedRegions(VecPerParamSpace::empty()),
}
}
pub fn trans_empty() -> Substs<'tcx> {
Substs {
types: VecPerParamSpace::empty(),
regions: ErasedRegions
}
}
pub fn is_noop(&self) -> bool {
let regions_is_noop = match self.regions {
ErasedRegions => false, // may be used to canonicalize
NonerasedRegions(ref regions) => regions.is_empty(),
};
regions_is_noop && self.types.is_empty()
}
pub fn type_for_def(&self, ty_param_def: &ty::TypeParameterDef) -> Ty<'tcx> {
*self.types.get(ty_param_def.space, ty_param_def.index as usize)
}
pub fn has_regions_escaping_depth(&self, depth: u32) -> bool {
self.types.iter().any(|&t| ty::type_escapes_depth(t, depth)) || {
match self.regions {
ErasedRegions =>
false,
NonerasedRegions(ref regions) =>
regions.iter().any(|r| r.escapes_depth(depth)),
}
}
}
pub fn self_ty(&self) -> Option<Ty<'tcx>> {
self.types.get_self().cloned()
}
pub fn with_self_ty(&self, self_ty: Ty<'tcx>) -> Substs<'tcx> {
assert!(self.self_ty().is_none());
let mut s = (*self).clone();
s.types.push(SelfSpace, self_ty);
s
}
pub fn erase_regions(self) -> Substs<'tcx> {
let Substs { types, regions: _ } = self;
Substs { types: types, regions: ErasedRegions }
}
/// Since ErasedRegions are only to be used in trans, most of the compiler can use this method
/// to easily access the set of region substitutions.
pub fn regions<'a>(&'a self) -> &'a VecPerParamSpace<ty::Region> {
match self.regions {
ErasedRegions => panic!("Erased regions only expected in trans"),
NonerasedRegions(ref r) => r
}
}
/// Since ErasedRegions are only to be used in trans, most of the compiler can use this method
/// to easily access the set of region substitutions.
pub fn mut_regions<'a>(&'a mut self) -> &'a mut VecPerParamSpace<ty::Region> {
match self.regions {
ErasedRegions => panic!("Erased regions only expected in trans"),
NonerasedRegions(ref mut r) => r
}
}
pub fn with_method(self,
m_types: Vec<Ty<'tcx>>,
m_regions: Vec<ty::Region>)
-> Substs<'tcx>
{
let Substs { types, regions } = self;
let types = types.with_vec(FnSpace, m_types);
let regions = regions.map(m_regions,
|r, m_regions| r.with_vec(FnSpace, m_regions));
Substs { types: types, regions: regions }
}
}
impl RegionSubsts {
fn map<A, F>(self, a: A, op: F) -> RegionSubsts where
F: FnOnce(VecPerParamSpace<ty::Region>, A) -> VecPerParamSpace<ty::Region>,
{
match self {
ErasedRegions => ErasedRegions,
NonerasedRegions(r) => NonerasedRegions(op(r, a))
}
}
pub fn is_erased(&self) -> bool {
match *self {
ErasedRegions => true,
NonerasedRegions(_) => false,
}
}
}
///////////////////////////////////////////////////////////////////////////
// ParamSpace
#[derive(PartialOrd, Ord, PartialEq, Eq, Copy,
Clone, Hash, RustcEncodable, RustcDecodable, Debug)]
pub enum ParamSpace {
TypeSpace, // Type parameters attached to a type definition, trait, or impl
SelfSpace, // Self parameter on a trait
FnSpace, // Type parameters attached to a method or fn
}
<|fim▁hole|> pub fn all() -> [ParamSpace; 3] {
[TypeSpace, SelfSpace, FnSpace]
}
pub fn to_uint(self) -> usize {
match self {
TypeSpace => 0,
SelfSpace => 1,
FnSpace => 2,
}
}
pub fn from_uint(u: usize) -> ParamSpace {
match u {
0 => TypeSpace,
1 => SelfSpace,
2 => FnSpace,
_ => panic!("Invalid ParamSpace: {}", u)
}
}
}
/// Vector of things sorted by param space. Used to keep
/// the set of things declared on the type, self, or method
/// distinct.
#[derive(PartialEq, Eq, Clone, Hash, RustcEncodable, RustcDecodable)]
pub struct VecPerParamSpace<T> {
// This was originally represented as a tuple with one Vec<T> for
// each variant of ParamSpace, and that remains the abstraction
// that it provides to its clients.
//
// Here is how the representation corresponds to the abstraction
// i.e. the "abstraction function" AF:
//
// AF(self) = (self.content[..self.type_limit],
// self.content[self.type_limit..self.self_limit],
// self.content[self.self_limit..])
type_limit: usize,
self_limit: usize,
content: Vec<T>,
}
/// The `split` function converts one `VecPerParamSpace` into this
/// `SeparateVecsPerParamSpace` structure.
pub struct SeparateVecsPerParamSpace<T> {
pub types: Vec<T>,
pub selfs: Vec<T>,
pub fns: Vec<T>,
}
impl<T: fmt::Debug> fmt::Debug for VecPerParamSpace<T> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
try!(write!(fmt, "VecPerParamSpace {{"));
for space in &ParamSpace::all() {
try!(write!(fmt, "{:?}: {:?}, ", *space, self.get_slice(*space)));
}
try!(write!(fmt, "}}"));
Ok(())
}
}
impl<T> VecPerParamSpace<T> {
fn limits(&self, space: ParamSpace) -> (usize, usize) {
match space {
TypeSpace => (0, self.type_limit),
SelfSpace => (self.type_limit, self.self_limit),
FnSpace => (self.self_limit, self.content.len()),
}
}
pub fn empty() -> VecPerParamSpace<T> {
VecPerParamSpace {
type_limit: 0,
self_limit: 0,
content: Vec::new()
}
}
pub fn params_from_type(types: Vec<T>) -> VecPerParamSpace<T> {
VecPerParamSpace::empty().with_vec(TypeSpace, types)
}
/// `t` is the type space.
/// `s` is the self space.
/// `f` is the fn space.
pub fn new(t: Vec<T>, s: Vec<T>, f: Vec<T>) -> VecPerParamSpace<T> {
let type_limit = t.len();
let self_limit = type_limit + s.len();
let mut content = t;
content.extend(s);
content.extend(f);
VecPerParamSpace {
type_limit: type_limit,
self_limit: self_limit,
content: content,
}
}
fn new_internal(content: Vec<T>, type_limit: usize, self_limit: usize)
-> VecPerParamSpace<T>
{
VecPerParamSpace {
type_limit: type_limit,
self_limit: self_limit,
content: content,
}
}
/// Appends `value` to the vector associated with `space`.
///
/// Unlike the `push` method in `Vec`, this should not be assumed
/// to be a cheap operation (even when amortized over many calls).
pub fn push(&mut self, space: ParamSpace, value: T) {
let (_, limit) = self.limits(space);
match space {
TypeSpace => { self.type_limit += 1; self.self_limit += 1; }
SelfSpace => { self.self_limit += 1; }
FnSpace => { }
}
self.content.insert(limit, value);
}
/// Appends `values` to the vector associated with `space`.
///
/// Unlike the `extend` method in `Vec`, this should not be assumed
/// to be a cheap operation (even when amortized over many calls).
pub fn extend<I:Iterator<Item=T>>(&mut self, space: ParamSpace, values: I) {
// This could be made more efficient, obviously.
for item in values {
self.push(space, item);
}
}
pub fn pop(&mut self, space: ParamSpace) -> Option<T> {
let (start, limit) = self.limits(space);
if start == limit {
None
} else {
match space {
TypeSpace => { self.type_limit -= 1; self.self_limit -= 1; }
SelfSpace => { self.self_limit -= 1; }
FnSpace => {}
}
if self.content.is_empty() {
None
} else {
Some(self.content.remove(limit - 1))
}
}
}
pub fn truncate(&mut self, space: ParamSpace, len: usize) {
// FIXME (#15435): slow; O(n^2); could enhance vec to make it O(n).
while self.len(space) > len {
self.pop(space);
}
}
pub fn replace(&mut self, space: ParamSpace, elems: Vec<T>) {
// FIXME (#15435): slow; O(n^2); could enhance vec to make it O(n).
self.truncate(space, 0);
for t in elems {
self.push(space, t);
}
}
pub fn get_self<'a>(&'a self) -> Option<&'a T> {
let v = self.get_slice(SelfSpace);
assert!(v.len() <= 1);
if v.is_empty() { None } else { Some(&v[0]) }
}
pub fn len(&self, space: ParamSpace) -> usize {
self.get_slice(space).len()
}
pub fn is_empty_in(&self, space: ParamSpace) -> bool {
self.len(space) == 0
}
pub fn get_slice<'a>(&'a self, space: ParamSpace) -> &'a [T] {
let (start, limit) = self.limits(space);
&self.content[start.. limit]
}
pub fn get_mut_slice<'a>(&'a mut self, space: ParamSpace) -> &'a mut [T] {
let (start, limit) = self.limits(space);
&mut self.content[start.. limit]
}
pub fn opt_get<'a>(&'a self,
space: ParamSpace,
index: usize)
-> Option<&'a T> {
let v = self.get_slice(space);
if index < v.len() { Some(&v[index]) } else { None }
}
pub fn get<'a>(&'a self, space: ParamSpace, index: usize) -> &'a T {
&self.get_slice(space)[index]
}
pub fn iter<'a>(&'a self) -> Iter<'a,T> {
self.content.iter()
}
pub fn into_iter(self) -> IntoIter<T> {
self.content.into_iter()
}
pub fn iter_enumerated<'a>(&'a self) -> EnumeratedItems<'a,T> {
EnumeratedItems::new(self)
}
pub fn as_slice(&self) -> &[T] {
&self.content
}
pub fn into_vec(self) -> Vec<T> {
self.content
}
pub fn all_vecs<P>(&self, mut pred: P) -> bool where
P: FnMut(&[T]) -> bool,
{
let spaces = [TypeSpace, SelfSpace, FnSpace];
spaces.iter().all(|&space| { pred(self.get_slice(space)) })
}
pub fn all<P>(&self, pred: P) -> bool where P: FnMut(&T) -> bool {
self.iter().all(pred)
}
pub fn any<P>(&self, pred: P) -> bool where P: FnMut(&T) -> bool {
self.iter().any(pred)
}
pub fn is_empty(&self) -> bool {
self.all_vecs(|v| v.is_empty())
}
pub fn map<U, P>(&self, pred: P) -> VecPerParamSpace<U> where P: FnMut(&T) -> U {
let result = self.iter().map(pred).collect();
VecPerParamSpace::new_internal(result,
self.type_limit,
self.self_limit)
}
pub fn map_enumerated<U, P>(&self, pred: P) -> VecPerParamSpace<U> where
P: FnMut((ParamSpace, usize, &T)) -> U,
{
let result = self.iter_enumerated().map(pred).collect();
VecPerParamSpace::new_internal(result,
self.type_limit,
self.self_limit)
}
pub fn split(self) -> SeparateVecsPerParamSpace<T> {
let VecPerParamSpace { type_limit, self_limit, content } = self;
let mut content_iter = content.into_iter();
SeparateVecsPerParamSpace {
types: content_iter.by_ref().take(type_limit).collect(),
selfs: content_iter.by_ref().take(self_limit - type_limit).collect(),
fns: content_iter.collect()
}
}
pub fn with_vec(mut self, space: ParamSpace, vec: Vec<T>)
-> VecPerParamSpace<T>
{
assert!(self.is_empty_in(space));
self.replace(space, vec);
self
}
}
#[derive(Clone)]
pub struct EnumeratedItems<'a,T:'a> {
vec: &'a VecPerParamSpace<T>,
space_index: usize,
elem_index: usize
}
impl<'a,T> EnumeratedItems<'a,T> {
fn new(v: &'a VecPerParamSpace<T>) -> EnumeratedItems<'a,T> {
let mut result = EnumeratedItems { vec: v, space_index: 0, elem_index: 0 };
result.adjust_space();
result
}
fn adjust_space(&mut self) {
let spaces = ParamSpace::all();
while
self.space_index < spaces.len() &&
self.elem_index >= self.vec.len(spaces[self.space_index])
{
self.space_index += 1;
self.elem_index = 0;
}
}
}
impl<'a,T> Iterator for EnumeratedItems<'a,T> {
type Item = (ParamSpace, usize, &'a T);
fn next(&mut self) -> Option<(ParamSpace, usize, &'a T)> {
let spaces = ParamSpace::all();
if self.space_index < spaces.len() {
let space = spaces[self.space_index];
let index = self.elem_index;
let item = self.vec.get(space, index);
self.elem_index += 1;
self.adjust_space();
Some((space, index, item))
} else {
None
}
}
}
impl<T> IntoIterator for VecPerParamSpace<T> {
type Item = T;
type IntoIter = IntoIter<T>;
fn into_iter(self) -> IntoIter<T> {
self.into_vec().into_iter()
}
}
impl<'a,T> IntoIterator for &'a VecPerParamSpace<T> {
type Item = &'a T;
type IntoIter = Iter<'a, T>;
fn into_iter(self) -> Iter<'a, T> {
self.as_slice().into_iter()
}
}
///////////////////////////////////////////////////////////////////////////
// Public trait `Subst`
//
// Just call `foo.subst(tcx, substs)` to perform a substitution across
// `foo`. Or use `foo.subst_spanned(tcx, substs, Some(span))` when
// there is more information available (for better errors).
pub trait Subst<'tcx> : Sized {
fn subst(&self, tcx: &ty::ctxt<'tcx>, substs: &Substs<'tcx>) -> Self {
self.subst_spanned(tcx, substs, None)
}
fn subst_spanned(&self, tcx: &ty::ctxt<'tcx>,
substs: &Substs<'tcx>,
span: Option<Span>)
-> Self;
}
impl<'tcx, T:TypeFoldable<'tcx>> Subst<'tcx> for T {
fn subst_spanned(&self,
tcx: &ty::ctxt<'tcx>,
substs: &Substs<'tcx>,
span: Option<Span>)
-> T
{
let mut folder = SubstFolder { tcx: tcx,
substs: substs,
span: span,
root_ty: None,
ty_stack_depth: 0,
region_binders_passed: 0 };
(*self).fold_with(&mut folder)
}
}
///////////////////////////////////////////////////////////////////////////
// The actual substitution engine itself is a type folder.
struct SubstFolder<'a, 'tcx: 'a> {
tcx: &'a ty::ctxt<'tcx>,
substs: &'a Substs<'tcx>,
// The location for which the substitution is performed, if available.
span: Option<Span>,
// The root type that is being substituted, if available.
root_ty: Option<Ty<'tcx>>,
// Depth of type stack
ty_stack_depth: usize,
// Number of region binders we have passed through while doing the substitution
region_binders_passed: u32,
}
impl<'a, 'tcx> TypeFolder<'tcx> for SubstFolder<'a, 'tcx> {
fn tcx(&self) -> &ty::ctxt<'tcx> { self.tcx }
fn enter_region_binder(&mut self) {
self.region_binders_passed += 1;
}
fn exit_region_binder(&mut self) {
self.region_binders_passed -= 1;
}
fn fold_region(&mut self, r: ty::Region) -> ty::Region {
// Note: This routine only handles regions that are bound on
// type declarations and other outer declarations, not those
// bound in *fn types*. Region substitution of the bound
// regions that appear in a function signature is done using
// the specialized routine `ty::replace_late_regions()`.
match r {
ty::ReEarlyBound(data) => {
match self.substs.regions {
ErasedRegions => ty::ReStatic,
NonerasedRegions(ref regions) =>
match regions.opt_get(data.space, data.index as usize) {
Some(&r) => {
self.shift_region_through_binders(r)
}
None => {
let span = self.span.unwrap_or(DUMMY_SP);
self.tcx().sess.span_bug(
span,
&format!("Type parameter out of range \
when substituting in region {} (root type={}) \
(space={:?}, index={})",
data.name.as_str(),
self.root_ty.repr(self.tcx()),
data.space,
data.index));
}
}
}
}
_ => r
}
}
fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
if !ty::type_needs_subst(t) {
return t;
}
// track the root type we were asked to substitute
let depth = self.ty_stack_depth;
if depth == 0 {
self.root_ty = Some(t);
}
self.ty_stack_depth += 1;
let t1 = match t.sty {
ty::TyParam(p) => {
self.ty_for_param(p, t)
}
_ => {
ty_fold::super_fold_ty(self, t)
}
};
assert_eq!(depth + 1, self.ty_stack_depth);
self.ty_stack_depth -= 1;
if depth == 0 {
self.root_ty = None;
}
return t1;
}
}
impl<'a,'tcx> SubstFolder<'a,'tcx> {
fn ty_for_param(&self, p: ty::ParamTy, source_ty: Ty<'tcx>) -> Ty<'tcx> {
// Look up the type in the substitutions. It really should be in there.
let opt_ty = self.substs.types.opt_get(p.space, p.idx as usize);
let ty = match opt_ty {
Some(t) => *t,
None => {
let span = self.span.unwrap_or(DUMMY_SP);
self.tcx().sess.span_bug(
span,
&format!("Type parameter `{}` ({}/{:?}/{}) out of range \
when substituting (root type={}) substs={}",
p.repr(self.tcx()),
source_ty.repr(self.tcx()),
p.space,
p.idx,
self.root_ty.repr(self.tcx()),
self.substs.repr(self.tcx())));
}
};
self.shift_regions_through_binders(ty)
}
/// It is sometimes necessary to adjust the debruijn indices during substitution. This occurs
/// when we are substituting a type with escaping regions into a context where we have passed
/// through region binders. That's quite a mouthful. Let's see an example:
///
/// ```
/// type Func<A> = fn(A);
/// type MetaFunc = for<'a> fn(Func<&'a int>)
/// ```
///
/// The type `MetaFunc`, when fully expanded, will be
///
/// for<'a> fn(fn(&'a int))
/// ^~ ^~ ^~~
/// | | |
/// | | DebruijnIndex of 2
/// Binders
///
/// Here the `'a` lifetime is bound in the outer function, but appears as an argument of the
/// inner one. Therefore, that appearance will have a DebruijnIndex of 2, because we must skip
/// over the inner binder (remember that we count Debruijn indices from 1). However, in the
/// definition of `MetaFunc`, the binder is not visible, so the type `&'a int` will have a
/// debruijn index of 1. It's only during the substitution that we can see we must increase the
/// depth by 1 to account for the binder that we passed through.
///
/// As a second example, consider this twist:
///
/// ```
/// type FuncTuple<A> = (A,fn(A));
/// type MetaFuncTuple = for<'a> fn(FuncTuple<&'a int>)
/// ```
///
/// Here the final type will be:
///
/// for<'a> fn((&'a int, fn(&'a int)))
/// ^~~ ^~~
/// | |
/// DebruijnIndex of 1 |
/// DebruijnIndex of 2
///
/// As indicated in the diagram, here the same type `&'a int` is substituted once, but in the
/// first case we do not increase the Debruijn index and in the second case we do. The reason
/// is that only in the second case have we passed through a fn binder.
fn shift_regions_through_binders(&self, ty: Ty<'tcx>) -> Ty<'tcx> {
debug!("shift_regions(ty={:?}, region_binders_passed={:?}, type_has_escaping_regions={:?})",
ty.repr(self.tcx()), self.region_binders_passed, ty::type_has_escaping_regions(ty));
if self.region_binders_passed == 0 || !ty::type_has_escaping_regions(ty) {
return ty;
}
let result = ty_fold::shift_regions(self.tcx(), self.region_binders_passed, &ty);
debug!("shift_regions: shifted result = {:?}", result.repr(self.tcx()));
result
}
fn shift_region_through_binders(&self, region: ty::Region) -> ty::Region {
ty_fold::shift_region(region, self.region_binders_passed)
}
}<|fim▁end|> | impl ParamSpace { |
<|file_name|>password-reset-init.component.spec.ts<|end_file_name|><|fim▁begin|>import { ComponentFixture, TestBed, inject } from '@angular/core/testing';
import { Renderer, ElementRef } from '@angular/core';
import { Observable } from 'rxjs/Rx';
import { JhipsterappTestModule } from '../../../../test.module';
import { PasswordResetInitComponent } from '../../../../../../../main/webapp/app/account/password-reset/init/password-reset-init.component';
import { PasswordResetInit } from '../../../../../../../main/webapp/app/account/password-reset/init/password-reset-init.service';
describe('Component Tests', () => {
describe('PasswordResetInitComponent', function () {
let fixture: ComponentFixture<PasswordResetInitComponent>;
let comp: PasswordResetInitComponent;
beforeEach(() => {
fixture = TestBed.configureTestingModule({
imports: [JhipsterappTestModule],
declarations: [PasswordResetInitComponent],
providers: [
PasswordResetInit,
{
provide: Renderer,
useValue: {
invokeElementMethod(renderElement: any, methodName: string, args?: any[]) {}
}
},
{
provide: ElementRef,
useValue: new ElementRef(null)
}
]
}).overrideComponent(PasswordResetInitComponent, {
set: {
template: ''
}
}).createComponent(PasswordResetInitComponent);
comp = fixture.componentInstance;
comp.ngOnInit();
});
it('should define its initial state', function () {
expect(comp.success).toBeUndefined();
expect(comp.error).toBeUndefined();
expect(comp.errorEmailNotExists).toBeUndefined();
expect(comp.resetAccount).toEqual({});
});
it('sets focus after the view has been initialized',
inject([ElementRef], (elementRef: ElementRef) => {
let element = fixture.nativeElement;
let node = {
focus() {}
};
elementRef.nativeElement = element;
spyOn(element, 'querySelector').and.returnValue(node);
spyOn(node, 'focus');
comp.ngAfterViewInit();
expect(element.querySelector).toHaveBeenCalledWith('#email');
expect(node.focus).toHaveBeenCalled();
})
);
it('notifies of success upon successful requestReset',
inject([PasswordResetInit], (service: PasswordResetInit) => {
spyOn(service, 'save').and.returnValue(Observable.of({}));
comp.resetAccount.email = '[email protected]';
comp.requestReset();
expect(service.save).toHaveBeenCalledWith('[email protected]');
expect(comp.success).toEqual('OK');
expect(comp.error).toBeNull();
expect(comp.errorEmailNotExists).toBeNull();
})
);
it('notifies of unknown email upon email address not registered/400',
inject([PasswordResetInit], (service: PasswordResetInit) => {
spyOn(service, 'save').and.returnValue(Observable.throw({
status: 400,
data: 'email address not registered'
}));
comp.resetAccount.email = '[email protected]';
<|fim▁hole|> comp.requestReset();
expect(service.save).toHaveBeenCalledWith('[email protected]');
expect(comp.success).toBeNull();
expect(comp.error).toBeNull();
expect(comp.errorEmailNotExists).toEqual('ERROR');
})
);
it('notifies of error upon error response',
inject([PasswordResetInit], (service: PasswordResetInit) => {
spyOn(service, 'save').and.returnValue(Observable.throw({
status: 503,
data: 'something else'
}));
comp.resetAccount.email = '[email protected]';
comp.requestReset();
expect(service.save).toHaveBeenCalledWith('[email protected]');
expect(comp.success).toBeNull();
expect(comp.errorEmailNotExists).toBeNull();
expect(comp.error).toEqual('ERROR');
})
);
});
});<|fim▁end|> | |
<|file_name|>test_codec.py<|end_file_name|><|fim▁begin|>import unittest
from elasticmagic.types import Integer, Float, Boolean
from elasticmagic.ext.queryfilter.codec import SimpleCodec
class SimpleCodecTest(unittest.TestCase):
def test_decode(self):
codec = SimpleCodec()
self.assertEqual(
codec.decode({'country': ['ru', 'ua', 'null']}),
{
'country': {
'exact': [['ru'], ['ua'], [None]],
}
}
)
self.assertEqual(
codec.decode({'category': ['5', '6:a', 'b:c', 'null']}, {'category': [Integer]}),
{
'category': {
'exact': [[5], [6, 'a'], [None]]
}
}
)
self.assertEqual(<|fim▁hole|> {
'manu': {
'exact': [[1, 'nokia', True], [2, 'samsung', False]],
}
}
)
self.assertEqual(
codec.decode({'is_active': ['true']}, {'is_active': Boolean}),
{
'is_active': {
'exact': [[True]],
}
}
)
self.assertEqual(
codec.decode([('price__gte', ['100.1', '101.0']), ('price__lte', ['200'])], {'price': Float}),
{
'price': {
'gte': [[100.1], [101.0]],
'lte': [[200.0]],
}
}
)
self.assertEqual(
codec.decode({'price__lte': '123a:bc'}, {'price': [Float]}),
{}
)
self.assertRaises(TypeError, lambda: codec.decode(''))<|fim▁end|> | codec.decode({'manu': ['1:nokia:true', '2:samsung:false']}, {'manu': [Integer, None, Boolean]}), |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# This file is part of the osmxapi Python module.
#
# osmxapi is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# osmxapi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with osmxapi. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright: © 2009-2010 Etienne Chové <[email protected]>
# Copyright: © 2012 Morten Kjeldgaard <[email protected]>
# License: GPL-3+
__version__ = '0.1'
import xml.dom.minidom
import dom, http
import os.path
class OsmXapi:
def __init__(self, api = "www.overpass-api.de", base="api", debug = False):
self.debug = debug
self.base = os.path.join('/', base, 'xapi')
self.http = http.Http(api, debug)
#.
def nodeGet(self, query=None, raw=None):
""" Returns NodeData for query """
if not query:
return None
#.
uri = self.base+"?node"+repr(query)
data = self.http.get(uri)
if raw: return data
if not data: return data
data = xml.dom.minidom.parseString(data)
data = data.getElementsByTagName("osm")[0].getElementsByTagName("node")
nodelist = []
for n in data:
nodelist.append(dom.parseNode(n))
#.
return nodelist
#.
def wayGet(self, query=None, raw=None):
"""Returns way data for query"""
if not query:
return None
#.
uri = self.base+"?way"+repr(query)
data = self.http.get(uri)
if raw: return data
if not data: return data
data = xml.dom.minidom.parseString(data)
data = data.getElementsByTagName("osm")[0].getElementsByTagName("way")
waylist = []
for w in data:
waylist.append(dom.parseWay(w))
#.
return waylist
#.
def relationGet(self, query=None, raw=None):
"""Return relation data for query"""
uri = self.base+"?relation"+repr(query)
data = self.http.get(uri)
if raw: return data
data = xml.dom.minidom.parseString(data)
data = data.getElementsByTagName("osm")[0].getElementsByTagName("relation")
relationlist = []
for r in data:
relationlist.append(dom.parseRelation(r))
#.
return relationlist
#.
def anyGet(self, query=None, raw=None):
"""Return any data for query"""<|fim▁hole|> data = self.http.get(uri)
if raw: return data
data = xml.dom.minidom.parseString(data)
anydict = {}
for e in "node", "way", "relation":
d = data.getElementsByTagName("osm")[0].getElementsByTagName(e)
anylist = []
for a in d:
if e == "node":
anylist.append(dom.parseNode(a))
#.
if e == "way":
anylist.append(dom.parseWay(a))
#.
if e == "relation":
anylist.append(dom.parseRelation(a))
#.
#.
anydict[e] = anylist
#.
return anydict
#.
#.
if __name__ == '__main__':
from xapiquery import XapiQuery
xapi = OsmXapi(debug = True)
uniparken = XapiQuery (lats=56.1618032,
lonw=10.1891327,
latn=56.1719343,
lone=10.212822)
uniparken[u'amenity'] = u'parking'
N = xapi.nodeGet(uniparken)
print N
W = xapi.wayGet(uniparken)
print W
A = xapi.anyGet(uniparken)
print A
#.<|fim▁end|> | uri = self.base+"?*"+repr(query) |
<|file_name|>PromptsActivity.java<|end_file_name|><|fim▁begin|>package biz.golek.whattodofordinner.view.activities;
import android.support.annotation.NonNull;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.ContextMenu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import org.greenrobot.eventbus.Subscribe;
import java.util.List;
import biz.golek.whattodofordinner.R;
import biz.golek.whattodofordinner.business.contract.request_data.GeneratePromptsRequestData;
import biz.golek.whattodofordinner.business.contract.response_data.DinnerListItem;
import biz.golek.whattodofordinner.view.ActivityDependencyProvider;
import biz.golek.whattodofordinner.view.adapters.DinnerListItemArrayAdapter;
import biz.golek.whattodofordinner.view.awareness.IActivityDependencyProviderAware;
import biz.golek.whattodofordinner.view.messages.DinnerDeletedMessage;
import biz.golek.whattodofordinner.view.messages.DinnerUpdatedMessage;
import biz.golek.whattodofordinner.view.view_models.PromptsActivityViewModel;
public class PromptsActivity extends AppCompatActivity implements IActivityDependencyProviderAware {
private String PROMPTS_LIST_VIEW_MODEL = "promptsListViewModel";
private PromptsActivityViewModel viewModel;
private ActivityDependencyProvider activityDependencyProvider;
private ArrayAdapter adapter;
private ListView listView;
private DinnerListItem nonOfThisListItem;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_prompts);
setupActionBar();
if (savedInstanceState != null)
viewModel = (PromptsActivityViewModel) savedInstanceState.getSerializable(PROMPTS_LIST_VIEW_MODEL);
else
viewModel = (PromptsActivityViewModel)getIntent().getSerializableExtra("VIEW_MODEL");
listView = (ListView) findViewById(R.id.prompts_list);
List<DinnerListItem> prompts = viewModel.prompts;
nonOfThisListItem = new DinnerListItem();
nonOfThisListItem.id = -1L;
nonOfThisListItem.name = getResources().getString(R.string.non_of_this);
prompts.add(nonOfThisListItem);
adapter = new DinnerListItemArrayAdapter(this, prompts);
listView.setAdapter(adapter);
listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
DinnerListItem item = (DinnerListItem)listView.getItemAtPosition(position);
if (item == nonOfThisListItem)
activityDependencyProvider.getGeneratePromptsController().Run(getDenyRequestData());
else
activityDependencyProvider.getDinnerChosenController().Run(item.id, item.name);
}
});
registerForContextMenu(listView);
activityDependencyProvider.getEventBusProvider().get().register(this);
}
@Subscribe
public void onDinnerDeleteMessage(DinnerDeletedMessage event) {
DinnerListItem dinner = null;
for (DinnerListItem d : viewModel.prompts)
if (d.id.equals(event.getId()))
dinner = d;
if (dinner != null)
viewModel.prompts.remove(dinner);
adapter.notifyDataSetChanged();
}
@Subscribe
public void onDinnerUpdatedMessage(DinnerUpdatedMessage event) {
boolean updated = false;
for (DinnerListItem dinner : viewModel.prompts) {
if (dinner.id.equals(event.getId())) {
dinner.id = event.getId();
dinner.name = event.getName();
updated = true;
}
}
if (updated)
adapter.notifyDataSetChanged();
}
@Override
protected void onDestroy() {
super.onDestroy();
activityDependencyProvider.getEventBusProvider().get().unregister(this);
}
@NonNull
private GeneratePromptsRequestData getDenyRequestData() {
GeneratePromptsRequestData rd = new GeneratePromptsRequestData();
rd.soup_profile = viewModel.getSoupProfile();
rd.vegetarian_profile = viewModel.getVegetarianProfile();
rd.maximum_duration = viewModel.getMaximumDuration();
Long[] oldExcludes = viewModel.getExcludes();
List<DinnerListItem> prompts = viewModel.prompts;
int oldExcludesLength = oldExcludes!= null ? oldExcludes.length : 0;
int promptsSize = prompts != null ? prompts.size() : 0;
if (oldExcludesLength + promptsSize > 0)
{
Long[] excludes = new Long[oldExcludesLength + promptsSize];
Integer index = 0;
if (oldExcludes != null)
{
for (Long id: oldExcludes) {
excludes[index++] = id;
}
}
if (prompts != null)
{
for (DinnerListItem dli: prompts) {
excludes[index++] = dli.id;
}
}
rd.excludes = excludes;
}
return rd;
}
/**
* Set up the {@link android.app.ActionBar}, if the API is available.
*/
private void setupActionBar() {
ActionBar actionBar = getSupportActionBar();
if (actionBar != null) {
// Show the Up button in the action bar.
actionBar.setDisplayHomeAsUpEnabled(true);
}
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
if (id == android.R.id.home) {
finish();
return true;
}
return super.onOptionsItemSelected(item);
}
@Override
public void onCreateContextMenu(ContextMenu menu, View v, ContextMenu.ContextMenuInfo menuInfo) {
super.onCreateContextMenu(menu, v, menuInfo);
if (v.getId()==R.id.prompts_list) {
AdapterView.AdapterContextMenuInfo info = (AdapterView.AdapterContextMenuInfo)menuInfo;
DinnerListItem dinnerListItem = (DinnerListItem)listView.getItemAtPosition(info.position);
if (dinnerListItem != nonOfThisListItem)
{
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.dinner_list_item_menu, menu);
}
}
}
@Override
public boolean onContextItemSelected(MenuItem item) {
AdapterView.AdapterContextMenuInfo info = (AdapterView.AdapterContextMenuInfo) item.getMenuInfo();
DinnerListItem dinnerListItem = (DinnerListItem)listView.getItemAtPosition(info.position);<|fim▁hole|> switch(item.getItemId()) {
case R.id.dinner_list_item_menu_edit:
activityDependencyProvider.getEditDinnerController().Run(dinnerListItem.id);
return true;
case R.id.dinner_list_item_menu_delete:
activityDependencyProvider.getDeleteDinnerController().Run(dinnerListItem.id);
return true;
default:
return super.onContextItemSelected(item);
}
}
@Override
protected void onSaveInstanceState(Bundle outState) {
outState.putSerializable(PROMPTS_LIST_VIEW_MODEL, viewModel);
super.onSaveInstanceState(outState);
}
@Override
public void Set(ActivityDependencyProvider item) {
activityDependencyProvider = item;
}
}<|fim▁end|> | |
<|file_name|>DriveToPointExample.java<|end_file_name|><|fim▁begin|>import pl.edu.agh.amber.common.AmberClient;
import pl.edu.agh.amber.drivetopoint.DriveToPointProxy;
import pl.edu.agh.amber.drivetopoint.Location;
import pl.edu.agh.amber.drivetopoint.Point;
import pl.edu.agh.amber.drivetopoint.Result;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Scanner;
/**
* Drive to point proxy example.
*
* @author Pawel Suder <[email protected]>
*/
public class DriveToPointExample {
public static void main(String[] args) {
(new DriveToPointExample()).runDemo();
}
public void runDemo() {
Scanner keyboard = new Scanner(System.in);
System.out.print("IP (default: 127.0.0.1): ");
String hostname = keyboard.nextLine();
if ("".equals(hostname)) {
hostname = "127.0.0.1";
}
AmberClient client;
try {
client = new AmberClient(hostname, 26233);
} catch (IOException e) {
System.out.println("Unable to connect to robot: " + e);
return;
}
DriveToPointProxy driveToPointProxy = new DriveToPointProxy(client, 0);
try {
List<Point> targets = Arrays.asList(<|fim▁hole|> new Point(2.44725, 4.22125, 0.25),
new Point(1.46706, 4.14285, 0.25),
new Point(0.67389, 3.76964, 0.25),
new Point(0.47339, 2.96781, 0.25));
driveToPointProxy.setTargets(targets);
while (true) {
Result<List<Point>> resultNextTargets = driveToPointProxy.getNextTargets();
Result<List<Point>> resultVisitedTargets = driveToPointProxy.getVisitedTargets();
List<Point> nextTargets = resultNextTargets.getResult();
List<Point> visitedTargets = resultVisitedTargets.getResult();
System.out.println(String.format("next targets: %s, visited targets: %s", nextTargets.toString(), visitedTargets.toString()));
Thread.sleep(1000);
}
} catch (IOException e) {
System.out.println("Error in sending a command: " + e);
} catch (Exception e) {
e.printStackTrace();
} finally {
client.terminate();
}
}
}<|fim▁end|> | |
<|file_name|>transport.py<|end_file_name|><|fim▁begin|># Copyright 2019, OpenCensus Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|># See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import os
import threading
import time
import requests
from azure.core.exceptions import ClientAuthenticationError
from azure.identity._exceptions import CredentialUnavailableError
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
logger = logging.getLogger(__name__)
_MAX_CONSECUTIVE_REDIRECTS = 10
_MONITOR_OAUTH_SCOPE = "https://monitor.azure.com//.default"
_requests_lock = threading.Lock()
_requests_map = {}
class TransportMixin(object):
def _check_stats_collection(self):
return not os.environ.get("APPLICATIONINSIGHTS_STATSBEAT_DISABLED_ALL") and (not hasattr(self, '_is_stats') or not self._is_stats) # noqa: E501
def _transmit_from_storage(self):
if self.storage:
for blob in self.storage.gets():
# give a few more seconds for blob lease operation
# to reduce the chance of race (for perf consideration)
if blob.lease(self.options.timeout + 5):
envelopes = blob.get()
result = self._transmit(envelopes)
if result > 0:
blob.lease(result)
else:
blob.delete()
def _transmit(self, envelopes):
"""
Transmit the data envelopes to the ingestion service.
Return a negative value for partial success or non-retryable failure.
Return 0 if all envelopes have been successfully ingested.
Return the next retry time in seconds for retryable failure.
This function should never throw exception.
"""
if not envelopes:
return 0
exception = None
try:
start_time = time.time()
headers = {
'Accept': 'application/json',
'Content-Type': 'application/json; charset=utf-8',
}
endpoint = self.options.endpoint
if self.options.credential:
token = self.options.credential.get_token(_MONITOR_OAUTH_SCOPE)
headers["Authorization"] = "Bearer {}".format(token.token)
endpoint += '/v2.1/track'
if self._check_stats_collection():
with _requests_lock:
_requests_map['count'] = _requests_map.get('count', 0) + 1 # noqa: E501
response = requests.post(
url=endpoint,
data=json.dumps(envelopes),
headers=headers,
timeout=self.options.timeout,
proxies=json.loads(self.options.proxies),
allow_redirects=False,
)
except requests.Timeout:
logger.warning(
'Request time out. Ingestion may be backed up. Retrying.')
exception = self.options.minimum_retry_interval
except requests.RequestException as ex:
logger.warning(
'Retrying due to transient client side error %s.', ex)
if self._check_stats_collection():
with _requests_lock:
_requests_map['retry'] = _requests_map.get('retry', 0) + 1 # noqa: E501
# client side error (retryable)
exception = self.options.minimum_retry_interval
except CredentialUnavailableError as ex:
logger.warning('Credential error. %s. Dropping telemetry.', ex)
exception = -1
except ClientAuthenticationError as ex:
logger.warning('Authentication error %s', ex)
exception = self.options.minimum_retry_interval
except Exception as ex:
logger.warning(
'Error when sending request %s. Dropping telemetry.', ex)
if self._check_stats_collection():
with _requests_lock:
_requests_map['exception'] = _requests_map.get('exception', 0) + 1 # noqa: E501
# Extraneous error (non-retryable)
exception = -1
finally:
end_time = time.time()
if self._check_stats_collection():
with _requests_lock:
duration = _requests_map.get('duration', 0)
_requests_map['duration'] = duration + (end_time - start_time) # noqa: E501
if exception is not None:
return exception
text = 'N/A'
data = None
try:
text = response.text
except Exception as ex:
logger.warning('Error while reading response body %s.', ex)
else:
try:
data = json.loads(text)
except Exception:
pass
if response.status_code == 200:
self._consecutive_redirects = 0
if self._check_stats_collection():
with _requests_lock:
_requests_map['success'] = _requests_map.get('success', 0) + 1 # noqa: E501
return 0
# Status code not 200 counts as failure
if self._check_stats_collection():
with _requests_lock:
_requests_map['failure'] = _requests_map.get('failure', 0) + 1 # noqa: E501
if response.status_code == 206: # Partial Content
if data:
try:
resend_envelopes = []
for error in data['errors']:
if error['statusCode'] in (
429, # Too Many Requests
500, # Internal Server Error
503, # Service Unavailable
):
resend_envelopes.append(envelopes[error['index']])
else:
logger.error(
'Data drop %s: %s %s.',
error['statusCode'],
error['message'],
envelopes[error['index']],
)
if resend_envelopes:
self.storage.put(resend_envelopes)
except Exception as ex:
logger.error(
'Error while processing %s: %s %s.',
response.status_code,
text,
ex,
)
if self._check_stats_collection():
with _requests_lock:
_requests_map['retry'] = _requests_map.get('retry', 0) + 1 # noqa: E501
return -response.status_code
# cannot parse response body, fallback to retry
if response.status_code in (
206, # Partial Content
429, # Too Many Requests
500, # Internal Server Error
503, # Service Unavailable
):
logger.warning(
'Transient server side error %s: %s.',
response.status_code,
text,
)
# server side error (retryable)
if self._check_stats_collection():
with _requests_lock:
# 429 counts as throttle instead of retry
if response.status_code == 429:
_requests_map['throttle'] = _requests_map.get('throttle', 0) + 1 # noqa: E501
else:
_requests_map['retry'] = _requests_map.get('retry', 0) + 1 # noqa: E501
return self.options.minimum_retry_interval
# Authentication error
if response.status_code == 401:
logger.warning(
'Authentication error %s: %s.',
response.status_code,
text,
)
if self._check_stats_collection():
with _requests_lock:
_requests_map['retry'] = _requests_map.get('retry', 0) + 1 # noqa: E501
return self.options.minimum_retry_interval
# Forbidden error
# Can occur when v2 endpoint is used while AI resource is configured
# with disableLocalAuth
if response.status_code == 403:
logger.warning(
'Forbidden error %s: %s.',
response.status_code,
text,
)
if self._check_stats_collection():
with _requests_lock:
_requests_map['retry'] = _requests_map.get('retry', 0) + 1 # noqa: E501
return self.options.minimum_retry_interval
# Redirect
if response.status_code in (307, 308):
self._consecutive_redirects += 1
if self._consecutive_redirects < _MAX_CONSECUTIVE_REDIRECTS:
if response.headers:
location = response.headers.get("location")
if location:
url = urlparse(location)
if url.scheme and url.netloc:
# Change the host to the new redirected host
self.options.endpoint = "{}://{}".format(url.scheme, url.netloc) # noqa: E501
# Attempt to export again
return self._transmit(envelopes)
logger.error(
"Error parsing redirect information."
)
logger.error(
"Error sending telemetry because of circular redirects."
" Please check the integrity of your connection string."
)
logger.error(
'Non-retryable server side error %s: %s.',
response.status_code,
text,
)
# server side error (non-retryable)
return -response.status_code<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use super::core::{
af_array, af_window, AfError, Array, ColorMap, HasAfEnum, MarkerType, HANDLE_ERROR,
};
use libc::{c_char, c_double, c_float, c_int, c_uint};
use std::ffi::CString;
use std::ptr;
/// Represents a sub-view of Window
///
/// This struct is used in conjunction with [Window](./struct.Window.html) in multiview
/// mode to render multiple targets to sub-regions of a given window.
///
#[repr(C)]
struct af_cell {
pub row: c_int,
pub col: c_int,
pub title: *const c_char,
pub cmap: c_uint,
}
extern "C" {
fn af_create_window(out: *mut af_window, w: c_int, h: c_int, title: *const c_char) -> c_int;
fn af_set_position(wnd: af_window, x: c_uint, y: c_uint) -> c_int;
fn af_set_title(wnd: af_window, title: *const c_char) -> c_int;
fn af_set_size(wnd: af_window, w: c_uint, h: c_uint) -> c_int;
fn af_set_visibility(wnd: af_window, is_visible: bool) -> c_int;
fn af_set_axes_titles(
wnd: af_window,
xtitle: *const c_char,
ytitle: *const c_char,
ztitle: *const c_char,
props: *const af_cell,
) -> c_int;
fn af_set_axes_label_format(
wnd: af_window,
xformat: *const c_char,
yformat: *const c_char,
zformat: *const c_char,
props: *const af_cell,
) -> c_int;
fn af_set_axes_limits_compute(
wnd: af_window,
x: af_array,
y: af_array,
z: af_array,
exact: bool,
props: *const af_cell,
) -> c_int;
fn af_set_axes_limits_2d(
wnd: af_window,
xmin: c_float,
xmax: c_float,
ymin: c_float,
ymax: c_float,
exact: bool,
props: *const af_cell,
) -> c_int;
fn af_set_axes_limits_3d(
wnd: af_window,
xmin: c_float,
xmax: c_float,
ymin: c_float,
ymax: c_float,
zmin: c_float,
zmax: c_float,
exact: bool,
props: *const af_cell,
) -> c_int;
fn af_draw_image(wnd: af_window, arr: af_array, props: *const af_cell) -> c_int;
fn af_draw_hist(
wnd: af_window,
x: af_array,
minval: c_double,
maxval: c_double,
props: *const af_cell,
) -> c_int;
fn af_draw_surface(
wnd: af_window,
xvals: af_array,
yvals: af_array,
S: af_array,
props: *const af_cell,
) -> c_int;
fn af_draw_plot_2d(wnd: af_window, x: af_array, y: af_array, props: *const af_cell) -> c_int;
fn af_draw_plot_3d(
wnd: af_window,
x: af_array,
y: af_array,
z: af_array,
props: *const af_cell,
) -> c_int;
fn af_draw_plot_nd(wnd: af_window, P: af_array, props: *const af_cell) -> c_int;
fn af_draw_scatter_2d(
wnd: af_window,
x: af_array,
y: af_array,
marker: c_uint,
props: *const af_cell,
) -> c_int;
fn af_draw_scatter_3d(
wnd: af_window,
x: af_array,
y: af_array,
z: af_array,
marker: c_uint,
props: *const af_cell,
) -> c_int;
fn af_draw_scatter_nd(
wnd: af_window,
P: af_array,
marker: c_uint,
props: *const af_cell,
) -> c_int;
fn af_draw_vector_field_2d(
wnd: af_window,
xpnts: af_array,
ypnts: af_array,
xdirs: af_array,
ydirs: af_array,
props: *const af_cell,
) -> c_int;
fn af_draw_vector_field_3d(
wnd: af_window,
xpnts: af_array,
ypnts: af_array,
xdirs: af_array,
ydirs: af_array,
zdirs: af_array,
zdirs: af_array,
props: *const af_cell,
) -> c_int;
fn af_draw_vector_field_nd(
wnd: af_window,
pnts: af_array,
dirs: af_array,
props: *const af_cell,
) -> c_int;
fn af_grid(wnd: af_window, rows: c_int, cols: c_int) -> c_int;
fn af_show(wnd: af_window) -> c_int;
fn af_is_window_closed(out: *mut bool, wnd: af_window) -> c_int;
fn af_destroy_window(wnd: af_window) -> c_int;
}
/// Used to render [Array](./struct.Array.html) objects
///
/// The renderings can be either plots, histograms or simply just image displays.
/// A single window can also display multiple of the above renderings at the same time, which
/// is known as multiview mode. An example of that is given below.
///
/// # Examples
///
/// ```rust,no_run
/// use arrayfire::{histogram, load_image, Window};
/// let mut wnd = Window::new(1280, 720, String::from("Image Histogram"));
/// let img = load_image::<f32>("Path to image".to_string(), true/*If color image, 'false' otherwise*/);
/// let hst = histogram(&img, 256, 0 as f64, 255 as f64);
///
/// loop {
/// wnd.grid(2, 1);
///
/// wnd.set_view(0, 0);
/// wnd.draw_image(&img, Some("Input Image".to_string()));
///
/// wnd.set_view(1, 0);
/// wnd.draw_hist(&hst, 0.0, 255.0, Some("Input Image Histogram".to_string()));
///
/// wnd.show();
///
/// if wnd.is_closed() == true { break; }
/// }
/// ```
#[derive(Clone)]
pub struct Window {
handle: af_window,
row: i32,
col: i32,
cmap: ColorMap,
}
impl Drop for Window {
fn drop(&mut self) {
unsafe {
let err_val = af_destroy_window(self.handle);
match err_val {
0 => (),
_ => panic!(
"Window object destruction failed with error code: {}",
err_val
),
}
}
}
}
impl Window {
/// Creates new Window object
///
/// # Parameters
///
/// - `width` is width of the window
/// - `height` is the height of window
/// - `title` is the string displayed on window title bar
///
/// # Return Values
///
/// Window Object
#[allow(clippy::match_wild_err_arm)]
pub fn new(width: i32, height: i32, title: String) -> Self {
unsafe {
let cstr_ret = CString::new(title);
match cstr_ret {
Ok(cstr) => {
let mut temp: af_window = std::ptr::null_mut();
let err_val =
af_create_window(&mut temp as *mut af_window, width, height, cstr.as_ptr());
HANDLE_ERROR(AfError::from(err_val));
Window {
handle: temp,
row: -1,
col: -1,
cmap: ColorMap::DEFAULT,
}
}
Err(_) => {
panic!("String creation failed while prepping params for window creation.")
}
}
}
}
/// Set window starting position on the screen
///
/// # Parameters
///
/// - `x` is the horiontal coordinate where window is to be placed
/// - `y` is the vertical coordinate where window is to be placed
pub fn set_position(&self, x: u32, y: u32) {
unsafe {
let err_val = af_set_position(self.handle, x, y);
HANDLE_ERROR(AfError::from(err_val));
}
}
/// Set window title
///
/// # Parameters
///
/// - `title` is the string to be displayed on window title bar
pub fn set_title(&self, title: String) {
unsafe {
let cstr_ret = CString::new(title);
match cstr_ret {
Ok(cstr) => {
let err_val = af_set_title(self.handle, cstr.as_ptr());
HANDLE_ERROR(AfError::from(err_val));
}
Err(_) => HANDLE_ERROR(AfError::ERR_INTERNAL),
}
}
}
/// Set window visibility
///
/// # Parameters
///
/// - `is_visible` is a boolean indicating whether window is to be hidden or brought into focus
///
/// # Return Values
///
/// None
pub fn set_visibility(&self, is_visible: bool) {
unsafe {
let err_val = af_set_visibility(self.handle, is_visible);
HANDLE_ERROR(AfError::from(err_val));
}
}
/// Set window size
///
/// # Parameters
///
/// - `w` is the target width of window
/// - `h` is the target height of window
pub fn set_size(&self, w: u32, h: u32) {
unsafe {
let err_val = af_set_size(self.handle, w, h);
HANDLE_ERROR(AfError::from(err_val));
}
}
/// Set color map to be used for rendering image, it can take one of the values of enum
/// [ColorMap](./enum.ColorMap.html)
pub fn set_colormap(&mut self, cmap: ColorMap) {
self.cmap = cmap;
}
/// Returns true if the window close is triggered by the user
pub fn is_closed(&self) -> bool {
unsafe {
let mut temp: bool = true;
let err_val = af_is_window_closed(&mut temp as *mut bool, self.handle);
HANDLE_ERROR(AfError::from(err_val));
temp
}
}
/// Setup display layout in multiview mode
///
/// # Parameters
///
/// - `rows` is the number of rows into which whole window is split into in multiple view mode
/// - `cols` is the number of cols into which whole window is split into in multiple view mode
pub fn grid(&self, rows: i32, cols: i32) {
unsafe {
let err_val = af_grid(self.handle, rows, cols);
HANDLE_ERROR(AfError::from(err_val));
}
}
/// Used in multiview mode to swap back buffer with front buffer to show the recently rendered
/// frame
pub fn show(&mut self) {
unsafe {
let err_val = af_show(self.handle);
HANDLE_ERROR(AfError::from(err_val));
self.row = -1;
self.col = -1;
}
}
/// Set the current sub-region to render
///
/// This function is only to be used into multiview mode
///
/// # Parameters
///
/// - `r` is the target row id
/// - `c` is the target row id
pub fn set_view(&mut self, r: i32, c: i32) {
self.row = r;
self.col = c;
}
/// Set chart axes titles
///
/// # Parameters
///
/// - `xlabel` is x axis title
/// - `ylabel` is y axis title
/// - `zlabel` is z axis title
pub fn set_axes_titles(&mut self, xlabel: String, ylabel: String, zlabel: String) {
let cprops = af_cell {
row: self.row,
col: self.col,
title: ptr::null(),
cmap: self.cmap as u32,
};
let xstr = CString::new(xlabel).unwrap();
let ystr = CString::new(ylabel).unwrap();
let zstr = CString::new(zlabel).unwrap();
unsafe {
let err_val = af_set_axes_titles(
self.handle,
xstr.as_ptr(),
ystr.as_ptr(),
zstr.as_ptr(),
&cprops as *const af_cell,
);
HANDLE_ERROR(AfError::from(err_val));
}
}
/// Set chart axes labels format
///
/// # Parameters
///
/// - `xlabel_format` is x axis label format. format specific is identical to C's printf format
/// - `ylabel_format` is y axis label format. format specific is identical to C's printf format
/// - `zlabel_format` is z axis label format. format specific is identical to C's printf format
pub fn set_axes_label_format(
&mut self,
xlabel_format: String,
ylabel_format: String,
zlabel_format: String,
) {
let cprops = af_cell {
row: self.row,
col: self.col,
title: ptr::null(),
cmap: self.cmap as u32,
};
let xstr = CString::new(xlabel_format).unwrap();
let ystr = CString::new(ylabel_format).unwrap();
let zstr = CString::new(zlabel_format).unwrap();
unsafe {
let err_val = af_set_axes_label_format(
self.handle,
xstr.as_ptr(),
ystr.as_ptr(),
zstr.as_ptr(),
&cprops as *const af_cell,
);
HANDLE_ERROR(AfError::from(err_val));
}
}
/// Set chart axes labels formats
///
/// Axes labels use printf style format specifiers. Default specifier for the data displayed
/// as labels is %4.1f. This function lets the user change this label formatting to whichever
/// format that fits their data range and precision.
///
/// # Parameters
///
/// - `xlabel` is printf style format specifier for x axis
/// - `ylabel` is printf style format specifier for y axis
/// - `zlabel` is printf style format specifier for z axis
pub fn set_axes_label_formats(&mut self, xformat: String, yformat: String, zformat: String) {
let cprops = af_cell {
row: self.row,
col: self.col,
title: ptr::null(),
cmap: self.cmap as u32,
};
let xstr = CString::new(xformat).unwrap();
let ystr = CString::new(yformat).unwrap();
let zstr = CString::new(zformat).unwrap();
unsafe {
let err_val = af_set_axes_titles(
self.handle,
xstr.as_ptr(),
ystr.as_ptr(),
zstr.as_ptr(),
&cprops as *const af_cell,
);
HANDLE_ERROR(AfError::from(err_val));
}
}
/// Set chart axes limits by computing limits from data
///
/// In multiple view (grid) mode, setting limits will effect the chart that is currently
/// active via set_view call
///
/// # Parameters
///
/// - `xrange` is set of all x values to compute min/max for x axis
/// - `yrange` is set of all y values to compute min/max for y axis
/// - `zrange` is set of all z values to compute min/max for z axis. If None is passed to
/// this paramter, 2d chart limits are set.
/// - `exact` indicates if the exact min/max values from `xrange`, `yrange` and `zrange`
/// are to extracted. If exact is false then the most significant digit is rounded up
/// to next power of 2 and the magnitude remains the same.
pub fn set_axes_limits_compute<T>(
&mut self,
xrange: &Array<T>,
yrange: &Array<T>,
zrange: Option<&Array<T>>,
exact: bool,
) where
T: HasAfEnum,
{
let cprops = af_cell {
row: self.row,
col: self.col,
title: ptr::null(),
cmap: self.cmap as u32,
};
unsafe {
let err_val = af_set_axes_limits_compute(
self.handle,
xrange.get(),
yrange.get(),
match zrange {
Some(z) => z.get(),
None => std::ptr::null_mut(),
},
exact,
&cprops as *const af_cell,
);
HANDLE_ERROR(AfError::from(err_val));
}
}
/// Set 2d chart axes limits
///
/// In multiple view (grid) mode, setting limits will effect the chart that is currently
/// active via set_view call
///
/// # Parameters
///
/// - `xmin` is minimum value on x axis
/// - `xmax` is maximum value on x axis
/// - `ymin` is minimum value on y axis
/// - `ymax` is maximum value on y axis
/// - `exact` indicates if the exact min/max values from `xrange`, `yrange` and `zrange`
/// are to extracted. If exact is false then the most significant digit is rounded up
/// to next power of 2 and the magnitude remains the same.
pub fn set_axes_limits_2d(&mut self, xmin: f32, xmax: f32, ymin: f32, ymax: f32, exact: bool) {
let cprops = af_cell {
row: self.row,
col: self.col,
title: ptr::null(),
cmap: self.cmap as u32,
};
unsafe {
let err_val = af_set_axes_limits_2d(
self.handle,
xmin,
xmax,
ymin,
ymax,
exact,
&cprops as *const af_cell,
);
HANDLE_ERROR(AfError::from(err_val));
}
}
/// Set 3d chart axes limits
///
/// In multiple view (grid) mode, setting limits will effect the chart that is currently
/// active via set_view call
///
/// # Parameters
///
/// - `xmin` is minimum value on x axis
/// - `xmax` is maximum value on x axis
/// - `ymin` is minimum value on y axis
/// - `ymax` is maximum value on y axis
/// - `zmin` is minimum value on z axis
/// - `zmax` is maximum value on z axis
/// - `exact` indicates if the exact min/max values from `xrange`, `yrange` and `zrange`
/// are to extracted. If exact is false then the most significant digit is rounded up
/// to next power of 2 and the magnitude remains the same.
#[allow(clippy::too_many_arguments)]
pub fn set_axes_limits_3d(
&mut self,
xmin: f32,
xmax: f32,
ymin: f32,
ymax: f32,
zmin: f32,
zmax: f32,
exact: bool,
) {
let cprops = af_cell {
row: self.row,
col: self.col,
title: ptr::null(),
cmap: self.cmap as u32,
};
unsafe {
let err_val = af_set_axes_limits_3d(
self.handle,
xmin,
xmax,
ymin,
ymax,
zmin,
zmax,
exact,
&cprops as *const af_cell,
);
HANDLE_ERROR(AfError::from(err_val));
}
}
/// Render given Array as an image
///
/// # Parameters
///
/// - `input` image
/// - `title` parameter has effect only in multiview mode, where this string
/// is displayed as the respective cell/view title.
pub fn draw_image<T>(&self, input: &Array<T>, title: Option<String>)
where
T: HasAfEnum,
{
let tstr = match title {
Some(s) => s,
None => format!("Cell({},{}))", self.col, self.row),
};
let tstr = CString::new(tstr).unwrap();
let cprops = af_cell {
row: self.row,
col: self.col,
title: tstr.as_ptr(),
cmap: self.cmap as u32,
};
unsafe {
let err_val = af_draw_image(self.handle, input.get(), &cprops as *const af_cell);
HANDLE_ERROR(AfError::from(err_val));
}
}
/// Render given two Array's `x` and `y` as a 2d line plot
///
/// # Parameters
///
/// - `x` is the x coordinates of the plot
/// - `y` is the y coordinates of the plot
/// - `title` parameter has effect only in multiview mode, where this string
/// is displayed as the respective cell/view title.
pub fn draw_plot2<T>(&self, x: &Array<T>, y: &Array<T>, title: Option<String>)
where
T: HasAfEnum,
{
let tstr = match title {
Some(s) => s,
None => format!("Cell({},{}))", self.col, self.row),
};
let tstr = CString::new(tstr).unwrap();
let cprops = af_cell {
row: self.row,
col: self.col,
title: tstr.as_ptr(),
cmap: self.cmap as u32,
};
unsafe {
let err_val = af_draw_plot_2d(self.handle, x.get(), y.get(), &cprops as *const af_cell);
HANDLE_ERROR(AfError::from(err_val));
}
}
/// Render given Array's `x`, `y` and `z` as a 3d line plot
///
/// # Parameters
///
/// - `x` is the x coordinates of the plot
/// - `y` is the y coordinates of the plot
/// - `z` is the z coordinates of the plot
/// - `title` parameter has effect only in multiview mode, where this string
/// is displayed as the respective cell/view title.
pub fn draw_plot3<T>(&self, x: &Array<T>, y: &Array<T>, z: &Array<T>, title: Option<String>)
where
T: HasAfEnum,
{
let tstr = match title {
Some(s) => s,
None => format!("Cell({},{}))", self.col, self.row),
};
let tstr = CString::new(tstr).unwrap();
let cprops = af_cell {
row: self.row,
col: self.col,
title: tstr.as_ptr(),
cmap: self.cmap as u32,
};
unsafe {
let err_val = af_draw_plot_3d(
self.handle,
x.get(),
y.get(),
z.get(),
&cprops as *const af_cell,
);
HANDLE_ERROR(AfError::from(err_val));
}
}
/// Render give Arrays of points as a 3d line plot
///
/// # Parameters
///
/// - `points` is an Array containing list of points of plot
/// - `title` parameter has effect only in multiview mode, where this string
/// is displayed as the respective cell/view title.
pub fn draw_plot<T>(&self, points: &Array<T>, title: Option<String>)
where
T: HasAfEnum,
{
let tstr = match title {
Some(s) => s,
None => format!("Cell({},{}))", self.col, self.row),
};
let tstr = CString::new(tstr).unwrap();
let cprops = af_cell {
row: self.row,
col: self.col,
title: tstr.as_ptr(),
cmap: self.cmap as u32,
};
unsafe {
let err_val = af_draw_plot_nd(self.handle, points.get(), &cprops as *const af_cell);
HANDLE_ERROR(AfError::from(err_val));
}
}
/// Render given Array as a histogram
///
/// # Parameters
///
/// - `hst` is an Array containing histogram data
/// - `minval` is the minimum bin value of histogram
/// - `maxval` is the maximum bin value of histogram
/// - `title` parameter has effect only in multiview mode, where this string
/// is displayed as the respective cell/view title.
pub fn draw_hist<T>(&self, hst: &Array<T>, minval: f64, maxval: f64, title: Option<String>)
where
T: HasAfEnum,
{
let tstr = match title {
Some(s) => s,
None => format!("Cell({},{}))", self.col, self.row),
};
let tstr = CString::new(tstr).unwrap();
let cprops = af_cell {
row: self.row,
col: self.col,
title: tstr.as_ptr(),
cmap: self.cmap as u32,
};
unsafe {
let err_val = af_draw_hist(
self.handle,
hst.get(),
minval,
maxval,
&cprops as *const af_cell,
);
HANDLE_ERROR(AfError::from(err_val));
}
}
/// Render give Arrays as 3d surface
///
/// # Parameters
///
/// - `x` is the x coordinates of the surface plot
/// - `y` is the y coordinates of the surface plot
/// - `z` is the z coordinates of the surface plot
/// - `title` parameter has effect only in multiview mode, where this string
/// is displayed as the respective cell/view title.
pub fn draw_surface<T>(
&self,
xvals: &Array<T>,
yvals: &Array<T>,
zvals: &Array<T>,
title: Option<String>,
) where
T: HasAfEnum,
{
let tstr = match title {
Some(s) => s,
None => format!("Cell({},{}))", self.col, self.row),
};
let tstr = CString::new(tstr).unwrap();
let cprops = af_cell {
row: self.row,
col: self.col,
title: tstr.as_ptr(),
cmap: self.cmap as u32,
};
unsafe {
let err_val = af_draw_surface(
self.handle,
xvals.get(),
yvals.get(),
zvals.get(),
&cprops as *const af_cell,
);
HANDLE_ERROR(AfError::from(err_val));
}
}
/// Render given Arrays as 2d scatter plot
///
/// # Parameters
///
/// - `xvals` is the x coordinates of the scatter plot
/// - `yvals` is the y coordinates of the scatter plot
/// - `marker` is of enum type [MarkerType](./enum.MarkerType.html)
/// - `title` parameter has effect only in multiview mode, where this string
/// is displayed as the respective cell/view title.
pub fn draw_scatter2<T>(
&self,
xvals: &Array<T>,
yvals: &Array<T>,
marker: MarkerType,
title: Option<String>,
) where
T: HasAfEnum,
{
let tstr = match title {
Some(s) => s,
None => format!("Cell({},{}))", self.col, self.row),
};
let tstr = CString::new(tstr).unwrap();
let cprops = af_cell {
row: self.row,
col: self.col,
title: tstr.as_ptr(),
cmap: self.cmap as u32,
};
unsafe {
let err_val = af_draw_scatter_2d(
self.handle,
xvals.get(),
yvals.get(),
marker as c_uint,
&cprops as *const af_cell,
);
HANDLE_ERROR(AfError::from(err_val));
}
}
/// Render given Arrays as 3d scatter plot
///
/// # Parameters
///
/// - `xvals` is the x coordinates of the scatter plot
/// - `yvals` is the y coordinates of the scatter plot
/// - `zvals` is the z coordinates of the scatter plot
/// - `marker` is of enum type [MarkerType](./enum.MarkerType.html)
/// - `title` parameter has effect only in multiview mode, where this string
/// is displayed as the respective cell/view title.
pub fn draw_scatter3<T>(
&self,
xvals: &Array<T>,
yvals: &Array<T>,
zvals: &Array<T>,
marker: MarkerType,
title: Option<String>,
) where
T: HasAfEnum,
{
let tstr = match title {
Some(s) => s,
None => format!("Cell({},{}))", self.col, self.row),
};
let tstr = CString::new(tstr).unwrap();
let cprops = af_cell {
row: self.row,
col: self.col,
title: tstr.as_ptr(),
cmap: self.cmap as u32,
};
unsafe {
let err_val = af_draw_scatter_3d(
self.handle,
xvals.get(),
yvals.get(),
zvals.get(),
marker as c_uint,
&cprops as *const af_cell,
);
HANDLE_ERROR(AfError::from(err_val));
}
}
/// Render give Array as 3d scatter plot
///
/// # Parameters
///
/// - `points` is an Array containing list of points of plot
/// - `marker` is of enum type [MarkerType](./enum.MarkerType.html)
/// - `title` parameter has effect only in multiview mode, where this string
/// is displayed as the respective cell/view title.
pub fn draw_scatter<T>(&self, vals: &Array<T>, marker: MarkerType, title: Option<String>)
where
T: HasAfEnum,
{
let tstr = match title {
Some(s) => s,
None => format!("Cell({},{}))", self.col, self.row),
};
let tstr = CString::new(tstr).unwrap();
let cprops = af_cell {
row: self.row,
col: self.col,
title: tstr.as_ptr(),
cmap: self.cmap as u32,
};
unsafe {
let err_val = af_draw_scatter_nd(
self.handle,
vals.get(),
marker as c_uint,
&cprops as *const af_cell,
);
HANDLE_ERROR(AfError::from(err_val));
}
}
/// Render given Arrays as 2d vector field
///
/// # Parameters
///
/// - `xpnts` is an Array containing list of x coordinates
/// - `xdirs` is an Array containing direction component of x coord
/// - `ypnts` is an Array containing list of y coordinates
/// - `ydirs` is an Array containing direction component of y coord
/// - `title` parameter has effect only in multiview mode, where this string
/// is displayed as the respective cell/view title.
pub fn draw_vector_field2<T>(
&self,
xpnts: &Array<T>,
ypnts: &Array<T>,
xdirs: &Array<T>,
ydirs: &Array<T>,
title: Option<String>,<|fim▁hole|> ) where
T: HasAfEnum,
{
let tstr = match title {
Some(s) => s,
None => format!("Cell({},{}))", self.col, self.row),
};
let tstr = CString::new(tstr).unwrap();
let cprops = af_cell {
row: self.row,
col: self.col,
title: tstr.as_ptr(),
cmap: self.cmap as u32,
};
unsafe {
let err_val = af_draw_vector_field_2d(
self.handle,
xpnts.get(),
ypnts.get(),
xdirs.get(),
ydirs.get(),
&cprops as *const af_cell,
);
HANDLE_ERROR(AfError::from(err_val));
}
}
/// Render given Arrays as 3d vector field
///
/// # Parameters
///
/// - `xpnts` is an Array containing list of x coordinates
/// - `xdirs` is an Array containing direction component of x coord
/// - `ypnts` is an Array containing list of y coordinates
/// - `ydirs` is an Array containing direction component of y coord
/// - `zpnts` is an Array containing list of z coordinates
/// - `zdirs` is an Array containing direction component of z coord
/// - `title` parameter has effect only in multiview mode, where this string
/// is displayed as the respective cell/view title.
#[allow(clippy::too_many_arguments)]
pub fn draw_vector_field3<T>(
&self,
xpnts: &Array<T>,
ypnts: &Array<T>,
zpnts: &Array<T>,
xdirs: &Array<T>,
ydirs: &Array<T>,
zdirs: &Array<T>,
title: Option<String>,
) where
T: HasAfEnum,
{
let tstr = match title {
Some(s) => s,
None => format!("Cell({},{}))", self.col, self.row),
};
let tstr = CString::new(tstr).unwrap();
let cprops = af_cell {
row: self.row,
col: self.col,
title: tstr.as_ptr(),
cmap: self.cmap as u32,
};
unsafe {
let err_val = af_draw_vector_field_3d(
self.handle,
xpnts.get(),
ypnts.get(),
zpnts.get(),
xdirs.get(),
ydirs.get(),
zdirs.get(),
&cprops as *const af_cell,
);
HANDLE_ERROR(AfError::from(err_val));
}
}
/// Render given Array as vector field
///
/// # Parameters
///
/// - `points` is an Array containing list of coordinates of vector field
/// - `directions` is an Array containing directions at the coordinates specified in `points`
/// Array.
/// - `title` parameter has effect only in multiview mode, where this string
/// is displayed as the respective cell/view title.
pub fn draw_vector_field<T>(
&self,
points: &Array<T>,
directions: &Array<T>,
title: Option<String>,
) where
T: HasAfEnum,
{
let tstr = match title {
Some(s) => s,
None => format!("Cell({},{}))", self.col, self.row),
};
let tstr = CString::new(tstr).unwrap();
let cprops = af_cell {
row: self.row,
col: self.col,
title: tstr.as_ptr(),
cmap: self.cmap as u32,
};
unsafe {
let err_val = af_draw_vector_field_nd(
self.handle,
points.get(),
directions.get(),
&cprops as *const af_cell,
);
HANDLE_ERROR(AfError::from(err_val));
}
}
}<|fim▁end|> | |
<|file_name|>interproscanner.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
class OutputParser(object):
def __init__(self, fmt, file_to_parse):
self.fmt = fmt.upper()
self.file_to_parse = file_to_parse
def parse(self):
""" Wrapper function in case I want to be able to parse other formats at some time """
if self.fmt == "GFF3":
return self.parse_gff3()
def parse_gff3(self):
""" Function to handle parsing of the GFF3 file format, returns a list of Query objects """
queries = {}
with open(self.file_to_parse, 'r') as IN:
for line in IN:
if line.startswith("##"):
if line.startswith("##FASTA"):
break
continue
#elements = line[:-1].split("\t")
# make a dictionary of the elements in the line
hit_dict = {key: value for key, value in zip(["query", "source", "type", "start", "end", "score", "strand", "phase", "attributes"], line[:-1].split("\t"))}
if hit_dict["type"] == "polypeptide":
queries[hit_dict["query"]] = Query(hit_dict)
if hit_dict["score"] == ".":
continue
if hit_dict["query"] in queries:
queries[hit_dict["query"]].add_hit(hit_dict)
else:
print("WARNING! Not found in dict")
sys.exit()
return queries.values()
class Query(object):
def __init__(self, hit_dict):
self.length = int(hit_dict["end"]) - int(hit_dict["start"])
self.name = hit_dict["query"]
self.hits = []
def add_hit(self, hit_dict):
""" Need to move most of this processing to the GFF stuff """
elements = hit_dict["attributes"].split(";")
## Need to make sure all the params I want here are defined so I don't run into issues later
hit_dict["go_terms"] = []
for element in elements:
if element.startswith("Name="):
hit_dict["subject"] = element[5:]
if element.startswith("signature_desc="):
hit_dict["desc"] = element[15:]
if element.startswith("Ontology_term="):
element = element.replace("\"", "")
terms = element[14].split(",")
hit_dict["go_terms"] = element[14:].split(",")
# convert all number like things to numbers
for key in hit_dict:
if not isinstance(hit_dict[key], list):
try:
hit_dict[key] = float(hit_dict[key])
except ValueError:
continue
self.hits.append(hit_dict)
def remove_bad_hits(self, max_e=.05, min_cov=.5):
hits_to_keep = []
for hit in self.hits:
if hit["score"] > max_e:
continue
if (hit["end"] - hit["start"]) < min_cov * self.length:
continue
hits_to_keep.append(hit)
self.hits = hits_to_keep
def get_all_GO_terms(self):
""" Returns a list with all the GO terms for the hits. Do this after remove_bad_hits """
terms = {}
for hit in self.hits:
terms[hit["go_terms"]] == 1
return list(terms.keys())
def get_all_pfams(self):
""" Returns a list with all the pfams the query has hits to """
terms = {}
for hit in self.hits:
if hit["source"] == "Pfam":
terms[hit["subject"]] = 1
return list(terms.keys())
class InterproScanner(object):
def __init__(self,
# interproscan vars
fasta_in,
out_path="interproscan_results.txt",
fmt="GFF3",
bin="/proj/dangl_lab/apps/my_interproscan/interproscan-5.3-46.0/interproscan.sh",
othr_args="",
# bsub vars
threads=8,
queue="week",
stdout="interproscanner_bsub.out",
stderr="interproscanner_bsub.err",
# batch vars
seqs_per_file=25
):
self.ips_vars = {
'fasta_in': [fasta_in],
'out_path': out_path,
'fmt': fmt,
'bin': bin,
'other_args': othr_args
}
self.bsub_vars = {
'threads': threads,
'queue': queue,
'stdout': stdout,
'stderr': stderr
}
self.batch_vars = {
'seqs_per_file'
}
def run_interproscan(self):
# count sequences
seqs = 0
with open(self.ips_vars["fasta_in"][0], 'r') as IN:
for line in IN:
if line.startswith(">"):
seqs += 1
# split fasta if needed
fas_files = []
if seqs > self.batch_vars["seqs_per_file"]:
fas_files = self._split_fasta()
# run command on each fasta<|fim▁hole|> command = self._build_command(fas_file)
print("Executing: " + command)
def _split_fasta(self):
""" Splits a the fasta into multiple parts and changes the fasta in to the parts """
counter = 1
file_indx = 1
fastas = []
with open(self.ips_vars["fasta_in"][0], 'r') as IN:
OUT = open("tmp_iproscanner_{}.fasta".format(file_indx), 'w')
fastas.append("tmp_iproscanner_{}.fasta".format(file_indx))
for line in IN:
# increment counter for each header
if line.startswith(">"):
counter += 1
# reset and open new file if counter is enough
if counter > self.batch_vars["seqs_per_file"]:
counter = 1
file_indx += 1
OUT.close()
OUT = open("tmp_iproscanner_{}.fasta".format(file_indx), 'w')
fastas.append("tmp_iporscanner_{}.fasta".format(file_indx))
OUT.write(line)
self.fasta_in = fastas
def _build_command(self, fasta_file):
""" Builds a command to run interproscan for a given fasta file """
# shell with out and err
command = "bsub -o {} -e {}".format(self.bsub_params["stdout"], self.bsub_params["stderr"])
# add threading
hosts = self.bsub_params["threads"] // 8 # use as few hosts as possible
command += "-n {} -R 'span[hosts={}']".format(self.bsub_params["threads"], hosts)
# add interpro with base options and file
command += "{} --goterms -dp -i {}".format(self.ips_params["bin"], fasta_file)
# add output options
command += "-o {} -f {}".format(self.ips_params["out_path"]. self.ips_params["fmt"])
# add any other options
command += self.ips_params["other_args"]
return command
def go_term_enrichment(queries):
term_counts = {}
for qry in queries:
qry.remove_bad_hits(max_e=.05, min_cov=.01)
terms = qry.get_all_GO_terms()
for term in terms:
term_counts[term] = term_counts.get(term, 0) + 1
for term in sorted(term_counts, key=lambda key: term_counts[key], reverse=True):
print(term + "\t" + str(term_counts[term]))
def pfam_dump(queries):
for qry in queries:
qry.remove_bad_hits(max_e=.05, min_cov=.01)
#print("Getting pfams for " + qry.name)
pfams = qry.get_all_pfams()
for pfam in pfams:
print(pfam + "\t" + qry.name)
if __name__ == "__main__":
import sys
my_file = sys.argv[1]
outp = OutputParser(fmt="GFF3", file_to_parse=my_file)
queries = outp.parse()
pfam_dump(queries)<|fim▁end|> | for fas_file in fas_files: |
<|file_name|>CompositeAction.java<|end_file_name|><|fim▁begin|>package com.integpg.synapse.actions;
import com.integpg.logger.FileLogger;
import java.io.IOException;
import java.util.Json;
public class CompositeAction extends Action {
private String[] _actions;
public CompositeAction(Json json) {
_actions = (String[]) json.get("Actions");
ActionHash.put((String) json.get("ID"), this);
}
public void execute() throws IOException {
Thread thd = new Thread(new Runnable() {
public void run() {
FileLogger.debug("Executing Composite Action in " + Thread.currentThread().getName());
for (int i = 0; i < _actions.length; i++) {
try {
Action.execute(_actions[i]);
} catch (Exception ex) {
FileLogger.error("Error executing action: " + ex.getMessage());
}
}
}
});
thd.setDaemon(true);
thd.start();
}<|fim▁hole|><|fim▁end|> |
} |
<|file_name|>FlowLayout.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""PyQt4 port of the layouts/flowlayout example from Qt v4.x"""
from PyQt4 import QtCore, QtGui
# ------------------------------------------------------------------------
class FlowLayout(QtGui.QLayout):
"""
Standard PyQt examples FlowLayout modified to work with a scollable parent
"""
def __init__(self, parent=None, margin=0, spacing=-1):
super(FlowLayout, self).__init__(parent)
if parent is not None:
self.setMargin(margin)
self.setSpacing(spacing)
self.itemList = []
def __del__(self):
item = self.takeAt(0)
while item:
item = self.takeAt(0)
def addItem(self, item):
self.itemList.append(item)
def count(self):
return len(self.itemList)
def itemAt(self, index):
if index >= 0 and index < len(self.itemList):
return self.itemList[index]
return None
def takeAt(self, index):
if index >= 0 and index < len(self.itemList):
return self.itemList.pop(index)
return None
def expandingDirections(self):
return QtCore.Qt.Orientations(QtCore.Qt.Orientation(0))
def hasHeightForWidth(self):
return True
def heightForWidth(self, width):
height = self.doLayout(QtCore.QRect(0, 0, width, 0), True)
return height
def setGeometry(self, rect):
super(FlowLayout, self).setGeometry(rect)
self.doLayout(rect, False)
def sizeHint(self):
return self.minimumSize()
def minimumSize(self):
size = QtCore.QSize()
for item in self.itemList:
size = size.expandedTo(item.minimumSize())
size += QtCore.QSize(2 * self.margin(), 2 * self.margin())
return size
def minimumSize(self):
w = self.geometry().width()
h = self.doLayout(QtCore.QRect(0, 0, w, 0), True)
return QtCore.QSize(w + 2 * self.margin(), h + 2 * self.margin())
def doLayout(self, rect, testOnly=False):
"""
"""
x = rect.x()
y = rect.y()
lineHeight = 0
for item in self.itemList:
wid = item.widget()
spaceX = self.spacing()# + wid.style().layoutSpacing(QtGui.QSizePolicy.PushButton, QtGui.QSizePolicy.PushButton, QtCore.Qt.Horizontal)
spaceY = self.spacing()# + wid.style().layoutSpacing(QtGui.QSizePolicy.PushButton, QtGui.QSizePolicy.PushButton, QtCore.Qt.Vertical)
nextX = x + item.sizeHint().width() + spaceX
if nextX - spaceX > rect.right() and lineHeight > 0:
x = rect.x()
y = y + lineHeight + spaceY
nextX = x + item.sizeHint().width() + spaceX
lineHeight = 0
if not testOnly:
item.setGeometry(QtCore.QRect(QtCore.QPoint(x, y), item.sizeHint()))
x = nextX
lineHeight = max(lineHeight, item.sizeHint().height())
return y + lineHeight - rect.y()
# ------------------------------------------------------------------------
class ResizeScrollArea(QtGui.QScrollArea):
"""
A QScrollArea that propagates the resizing to any FlowLayout children.
"""
def __init(self, parent=None):
QtGui.QScrollArea.__init__(self, parent)
def resizeEvent(self, event):
wrapper = self.findChild(QtGui.QWidget)
flow = wrapper.findChild(FlowLayout)
if wrapper and flow:
width = self.viewport().width()
height = flow.heightForWidth(width)
size = QtCore.QSize(width, height)
point = self.viewport().rect().topLeft()
flow.setGeometry(QtCore.QRect(point, size))
self.viewport().update()
super(ResizeScrollArea, self).resizeEvent(event)
# ------------------------------------------------------------------------
class ScrollingFlowWidget(QtGui.QWidget):
"""
A resizable and scrollable widget that uses a flow layout.
Use its addWidget() method to flow children into it.
"""
def __init__(self,parent=None):
super(ScrollingFlowWidget,self).__init__(parent)
grid = QtGui.QGridLayout(self)
scroll = ResizeScrollArea()
self._wrapper = QtGui.QWidget(scroll)
self.flowLayout = FlowLayout(self._wrapper)
self._wrapper.setLayout(self.flowLayout)
scroll.setWidget(self._wrapper)
scroll.setWidgetResizable(True)
grid.addWidget(scroll)
def addWidget(self, widget):
self.flowLayout.addWidget(widget)
widget.setParent(self._wrapper)
# ------------------------------------------------------------------------
if __name__ == '__main__':
import sys
import random
class ExampleScroller(ScrollingFlowWidget):
def sizeHint(self):
return QtCore.QSize(500,300)
class ExampleWindow(QtGui.QWidget):
def __init__(self):
super(ExampleWindow, self).__init__()
self.scroller = ExampleScroller(self)
self.setLayout( QtGui.QVBoxLayout(self) )
self.layout().addWidget(self.scroller)
for w in range( random.randint(25,50)):
words = " ".join([ "".join([ chr(random.choice(range(ord('a'),ord('z'))))
for x in range( random.randint(2,9) ) ])
for n in range(random.randint(1,5)) ]).title()
widget = QtGui.QPushButton(words)
widget.setFixedHeight( 20 )
widget.setCursor( QtCore.Qt.PointingHandCursor )
widget.setCheckable( True )
self.scroller.addWidget(widget)
self.setWindowTitle("Scrolling Flow Layout")
app = QtGui.QApplication(sys.argv)
mainWin = ExampleWindow()<|fim▁hole|> mainWin.show()
sys.exit(app.exec_())<|fim▁end|> |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.