| prompt (large_string, lengths 70 to 991k) | completion (large_string, lengths 0 to 1.02k) |
|---|---|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.<|fim▁end|> | # -*- coding: utf-8 -*-
# Copyright(C) 2014 Romain Bignon |
<|file_name|>test_0120_simple_repository_dependency_multiple_owners.py<|end_file_name|><|fim▁begin|>from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
datatypes_repository_name = 'blast_datatypes_0120'
datatypes_repository_description = 'Galaxy applicable datatypes for BLAST'
datatypes_repository_long_description = 'Galaxy datatypes for the BLAST top hit descriptions tool'
tool_repository_name = 'blastxml_to_top_descr_0120'
tool_repository_description = 'BLAST top hit descriptions'
tool_repository_long_description = 'Make a table from BLAST XML'
'''
Tool shed side:
1) Create and populate blast_datatypes_0120.
1a) Check for appropriate strings.
2) Create and populate blastxml_to_top_descr_0120.
2a) Check for appropriate strings.
3) Upload repository_dependencies.xml to blastxml_to_top_descr_0120 that defines a relationship to blast_datatypes_0120.
3a) Check for appropriate strings.
'''
base_datatypes_count = 0
repository_datatypes_count = 0
class TestRepositoryMultipleOwners( ShedTwillTestCase ):
def test_0000_initiate_users( self ):
"""Create necessary user accounts and login as an admin user."""
"""
Create all the user accounts that are needed for this test script to run independently of other tests.
Previously created accounts will not be re-created.
"""
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
test_user_1_private_role = self.test_db_util.get_private_role( test_user_1 )
self.logout()
self.login( email=common.test_user_2_email, username=common.test_user_2_name )
test_user_2 = self.test_db_util.get_user( common.test_user_2_email )
assert test_user_2 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_2_email
test_user_2_private_role = self.test_db_util.get_private_role( test_user_2 )
self.logout()
self.login( email=common.admin_email, username=common.admin_username )
admin_user = self.test_db_util.get_user( common.admin_email )
assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
admin_user_private_role = self.test_db_util.get_private_role( admin_user )
def test_0005_create_datatypes_repository( self ):
"""Create and populate the blast_datatypes_0120 repository"""
"""
We are at step 1.
Create and populate blast_datatypes.<|fim▁hole|> category = self.create_category( name='Test 0120', description='Description of test 0120' )
self.logout()
self.login( email=common.test_user_2_email, username=common.test_user_2_name )
strings_displayed = [ 'Repository %s' % "'%s'" % datatypes_repository_name,
'Repository %s has been created' % "<b>%s</b>" % datatypes_repository_name ]
repository = self.get_or_create_repository( name=datatypes_repository_name,
description=datatypes_repository_description,
long_description=datatypes_repository_long_description,
owner=common.test_user_2_name,
category_id=self.security.encode_id( category.id ),
strings_displayed=strings_displayed )
self.upload_file( repository,
filename='blast/blast_datatypes.tar',
filepath=None,
valid_tools_only=True,
uncompress_file=True,
remove_repo_files_not_in_tar=False,
commit_message='Uploaded blast_datatypes tarball.',
strings_displayed=[],
strings_not_displayed=[] )
def test_0010_verify_datatypes_repository( self ):
'''Verify the blast_datatypes_0120 repository.'''
'''
We are at step 1a.
Check for appropriate strings, most importantly BlastXml, BlastNucDb, and BlastProtDb,
the datatypes that are defined in datatypes_conf.xml.
'''
global repository_datatypes_count
repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_2_name )
strings_displayed = [ 'BlastXml', 'BlastNucDb', 'BlastProtDb', 'application/xml', 'text/html', 'blastxml', 'blastdbn', 'blastdbp']
self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
repository_datatypes_count = int( self.get_repository_datatypes_count( repository ) )
def test_0015_create_tool_repository( self ):
"""Create and populate the blastxml_to_top_descr_0120 repository"""
"""
We are at step 2.
Create and populate blastxml_to_top_descr_0120.
"""
category = self.create_category( name='Test 0120', description='Description of test 0120' )
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
strings_displayed = [ 'Repository %s' % "'%s'" % tool_repository_name,
'Repository %s has been created' % "<b>%s</b>" % tool_repository_name ]
repository = self.get_or_create_repository( name=tool_repository_name,
description=tool_repository_description,
long_description=tool_repository_long_description,
owner=common.test_user_1_name,
category_id=self.security.encode_id( category.id ),
strings_displayed=strings_displayed )
self.upload_file( repository,
filename='blast/blastxml_to_top_descr.tar',
filepath=None,
valid_tools_only=True,
uncompress_file=True,
remove_repo_files_not_in_tar=False,
commit_message='Uploaded blastxml_to_top_descr tarball.',
strings_displayed=[],
strings_not_displayed=[] )
def test_0020_verify_tool_repository( self ):
'''Verify the blastxml_to_top_descr_0120 repository.'''
'''
We are at step 2a.
Check for appropriate strings, such as tool name, description, and version.
'''
repository = self.test_db_util.get_repository_by_name_and_owner( tool_repository_name, common.test_user_1_name )
strings_displayed = [ 'blastxml_to_top_descr_0120', 'BLAST top hit descriptions', 'Make a table from BLAST XML' ]
strings_displayed.extend( [ '0.0.1', 'Valid tools'] )
self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
def test_0025_create_repository_dependency( self ):
'''Create a repository dependency on blast_datatypes_0120.'''
'''
We are at step 3.
Create a simple repository dependency for blastxml_to_top_descr_0120 that defines a dependency on blast_datatypes_0120.
'''
datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_2_name )
tool_repository = self.test_db_util.get_repository_by_name_and_owner( tool_repository_name, common.test_user_1_name )
dependency_xml_path = self.generate_temp_path( 'test_0120', additional_paths=[ 'dependencies' ] )
datatypes_tuple = ( self.url, datatypes_repository.name, datatypes_repository.user.username, self.get_repository_tip( datatypes_repository ) )
self.create_repository_dependency( repository=tool_repository, repository_tuples=[ datatypes_tuple ], filepath=dependency_xml_path )
def test_0040_verify_repository_dependency( self ):
'''Verify the created repository dependency.'''
'''
We are at step 3a.
Check the newly created repository dependency to ensure that it was defined and displays correctly.
'''
datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_2_name )
tool_repository = self.test_db_util.get_repository_by_name_and_owner( tool_repository_name, common.test_user_1_name )
self.check_repository_dependency( tool_repository, datatypes_repository )<|fim▁end|> | """ |
<|file_name|>test_affinity_filters.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
from nova import objects
from nova.scheduler.filters import affinity_filter
from nova import test
from nova.tests.unit.scheduler import fakes
CONF = cfg.CONF
CONF.import_opt('my_ip', 'nova.netconf')
class TestDifferentHostFilter(test.NoDBTestCase):
def setUp(self):
super(TestDifferentHostFilter, self).setUp()
self.filt_cls = affinity_filter.DifferentHostFilter()
def test_affinity_different_filter_passes(self):
host = fakes.FakeHostState('host1', 'node1', {})
inst1 = objects.Instance(uuid='different')
host.instances = {inst1.uuid: inst1}
filter_properties = {'context': mock.sentinel.ctx,
'scheduler_hints': {
'different_host': ['same'], }}
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
def test_affinity_different_filter_no_list_passes(self):
host = fakes.FakeHostState('host1', 'node1', {})
host.instances = {}
filter_properties = {'context': mock.sentinel.ctx,
'scheduler_hints': {
'different_host': 'same'}}
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
def test_affinity_different_filter_fails(self):
inst1 = objects.Instance(uuid='same')
host = fakes.FakeHostState('host1', 'node1', {})
host.instances = {inst1.uuid: inst1}
filter_properties = {'context': mock.sentinel.ctx,
'scheduler_hints': {
'different_host': ['same'], }}
self.assertFalse(self.filt_cls.host_passes(host, filter_properties))
def test_affinity_different_filter_handles_none(self):
inst1 = objects.Instance(uuid='same')
host = fakes.FakeHostState('host1', 'node1', {})
host.instances = {inst1.uuid: inst1}
filter_properties = {'context': mock.sentinel.ctx,
'scheduler_hints': None}
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
class TestSameHostFilter(test.NoDBTestCase):
def setUp(self):
super(TestSameHostFilter, self).setUp()
self.filt_cls = affinity_filter.SameHostFilter()
def test_affinity_same_filter_passes(self):
inst1 = objects.Instance(uuid='same')
host = fakes.FakeHostState('host1', 'node1', {})
host.instances = {inst1.uuid: inst1}
filter_properties = {'context': mock.sentinel.ctx,
'scheduler_hints': {
'same_host': ['same'], }}
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
def test_affinity_same_filter_no_list_passes(self):
host = fakes.FakeHostState('host1', 'node1', {})
host.instances = {}
filter_properties = {'context': mock.sentinel.ctx,<|fim▁hole|>
def test_affinity_same_filter_fails(self):
inst1 = objects.Instance(uuid='different')
host = fakes.FakeHostState('host1', 'node1', {})
host.instances = {inst1.uuid: inst1}
filter_properties = {'context': mock.sentinel.ctx,
'scheduler_hints': {
'same_host': ['same'], }}
self.assertFalse(self.filt_cls.host_passes(host, filter_properties))
def test_affinity_same_filter_handles_none(self):
inst1 = objects.Instance(uuid='different')
host = fakes.FakeHostState('host1', 'node1', {})
host.instances = {inst1.uuid: inst1}
filter_properties = {'context': mock.sentinel.ctx,
'scheduler_hints': None}
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
class TestSimpleCIDRAffinityFilter(test.NoDBTestCase):
def setUp(self):
super(TestSimpleCIDRAffinityFilter, self).setUp()
self.filt_cls = affinity_filter.SimpleCIDRAffinityFilter()
def test_affinity_simple_cidr_filter_passes(self):
host = fakes.FakeHostState('host1', 'node1', {})
host.host_ip = '10.8.1.1'
affinity_ip = "10.8.1.100"
filter_properties = {'context': mock.sentinel.ctx,
'scheduler_hints': {
'cidr': '/24',
'build_near_host_ip': affinity_ip}}
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
def test_affinity_simple_cidr_filter_fails(self):
host = fakes.FakeHostState('host1', 'node1', {})
host.host_ip = '10.8.1.1'
affinity_ip = "10.8.1.100"
filter_properties = {'context': mock.sentinel.ctx,
'scheduler_hints': {
'cidr': '/32',
'build_near_host_ip': affinity_ip}}
self.assertFalse(self.filt_cls.host_passes(host, filter_properties))
def test_affinity_simple_cidr_filter_handles_none(self):
host = fakes.FakeHostState('host1', 'node1', {})
affinity_ip = CONF.my_ip.split('.')[0:3]
affinity_ip.append('100')
affinity_ip = str.join('.', affinity_ip)
filter_properties = {'context': mock.sentinel.ctx,
'scheduler_hints': None}
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
class TestGroupAffinityFilter(test.NoDBTestCase):
def _test_group_anti_affinity_filter_passes(self, filt_cls, policy):
host = fakes.FakeHostState('host1', 'node1', {})
filter_properties = {}
self.assertTrue(filt_cls.host_passes(host, filter_properties))
filter_properties = {'group_policies': ['affinity']}
self.assertTrue(filt_cls.host_passes(host, filter_properties))
filter_properties = {'group_policies': [policy]}
filter_properties['group_hosts'] = []
self.assertTrue(filt_cls.host_passes(host, filter_properties))
filter_properties['group_hosts'] = ['host2']
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_group_anti_affinity_filter_passes(self):
self._test_group_anti_affinity_filter_passes(
affinity_filter.ServerGroupAntiAffinityFilter(),
'anti-affinity')
def _test_group_anti_affinity_filter_fails(self, filt_cls, policy):
host = fakes.FakeHostState('host1', 'node1', {})
filter_properties = {'group_policies': [policy],
'group_hosts': ['host1']}
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_group_anti_affinity_filter_fails(self):
self._test_group_anti_affinity_filter_fails(
affinity_filter.ServerGroupAntiAffinityFilter(),
'anti-affinity')
def _test_group_affinity_filter_passes(self, filt_cls, policy):
host = fakes.FakeHostState('host1', 'node1', {})
filter_properties = {}
self.assertTrue(filt_cls.host_passes(host, filter_properties))
filter_properties = {'group_policies': ['anti-affinity']}
self.assertTrue(filt_cls.host_passes(host, filter_properties))
filter_properties = {'group_policies': ['affinity'],
'group_hosts': ['host1']}
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_group_affinity_filter_passes(self):
self._test_group_affinity_filter_passes(
affinity_filter.ServerGroupAffinityFilter(), 'affinity')
def _test_group_affinity_filter_fails(self, filt_cls, policy):
host = fakes.FakeHostState('host1', 'node1', {})
filter_properties = {'group_policies': [policy],
'group_hosts': ['host2']}
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_group_affinity_filter_fails(self):
self._test_group_affinity_filter_fails(
affinity_filter.ServerGroupAffinityFilter(), 'affinity')<|fim▁end|> | 'scheduler_hints': {
'same_host': 'same'}}
self.assertTrue(self.filt_cls.host_passes(host, filter_properties)) |
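These test groups all drive the same basic membership check, and they pin down one subtlety: a hint only constrains placement when it arrives as a list. A minimal sketch of the logic under test (an illustration, not nova's actual implementation):

```python
# Hypothetical distillation of DifferentHostFilter / SameHostFilter.
# The *_no_list_passes tests above encode that a bare-string hint is ignored.
def different_host_passes(host_instance_uuids, scheduler_hints):
    uuids = (scheduler_hints or {}).get('different_host')
    if not isinstance(uuids, list):
        return True                                   # absent or malformed hint
    return not set(host_instance_uuids) & set(uuids)

def same_host_passes(host_instance_uuids, scheduler_hints):
    uuids = (scheduler_hints or {}).get('same_host')
    if not isinstance(uuids, list):
        return True
    return bool(set(host_instance_uuids) & set(uuids))
```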
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var compare = require('typewiselite')
var search = require('binary-search')
function compareKeys (a, b) {
return compare(a.key, b.key)
}
module.exports = function (_compare) {
var ary = [], kv
_compare = _compare || compare
function cmp (a, b) {
return _compare(a.key, b.key)
}
return kv = {
getIndex: function (key) {
return search(ary, {key: key}, cmp, 0, ary.length - 1)
},
get: function (key) {
var i = this.getIndex(key)
return i >= 0 ? ary[i].value : undefined
},
has: function (key) {
return this.getIndex(key) >= 0
},
//update a key
set: function (key, value) {
return kv.add({key: key, value: value})
},
add: function (o) {
var i = search(ary, o, cmp)
<|fim▁hole|> return i
},
toJSON: function () {
return ary.slice()
},
store: ary
}
}
module.exports.search = search
module.exports.compareKeys = compareKeys<|fim▁end|> | //overwrite a key, or insert a key
if(i < 0) ary.splice(~i, 0, o)
else ary[i] = o |
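The `~i` in `add` relies on `binary-search` returning the bitwise complement of the insertion point on a miss, so a negative result both signals "not found" and encodes where to insert. A rough Python equivalent of `add` using `bisect`, assuming entries are `(key, value)` tuples kept sorted by key:

```python
import bisect

def add(store, key, value):
    # store mirrors `ary`: a list of (key, value) tuples sorted by key
    keys = [k for k, _ in store]
    i = bisect.bisect_left(keys, key)
    if i < len(store) and store[i][0] == key:
        store[i] = (key, value)        # overwrite the existing key
    else:
        store.insert(i, (key, value))  # splice in at the insertion point
    return i
```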
<|file_name|>skills-list.js<|end_file_name|><|fim▁begin|>import {<|fim▁hole|>
export default {
scope: '.user__skills-list',
emptyState: {
scope: '[data-test-user-skills-list-empty-state]'
},
skills: collection('[data-test-user-skills-list-item]')
};<|fim▁end|> | collection
} from 'ember-cli-page-object'; |
<|file_name|>akaze_features.cpp<|end_file_name|><|fim▁begin|>//=============================================================================
//
// akaze_features.cpp
// Authors: Pablo F. Alcantarilla (1), Jesus Nuevo (2)
// Institutions: Georgia Institute of Technology (1)
// TrueVision Solutions (2)
// Date: 16/09/2013
// Email: [email protected]
//
// AKAZE Features Copyright 2013, Pablo F. Alcantarilla, Jesus Nuevo
// All Rights Reserved
// See LICENSE for the license information<|fim▁hole|> * @brief Main program for detecting and computing binary descriptors in an
* accelerated nonlinear scale space
* @date Sep 16, 2013
* @author Pablo F. Alcantarilla, Jesus Nuevo
*/
#include "AKAZE.h"
using namespace std;
/* ************************************************************************* */
/**
* @brief This function parses the command line arguments for setting A-KAZE parameters
* @param options Structure that contains A-KAZE settings
* @param img_path Path for the input image
* @param kpts_path Path for the file where the keypoints where be stored
*/
int parse_input_options(AKAZEOptions& options, std::string& img_path,
std::string& kpts_path, int argc, char *argv[]);
/* ************************************************************************* */
int main(int argc, char *argv[]) {
// Variables
AKAZEOptions options;
string img_path, kpts_path;
// Variable for computation times.
double t1 = 0.0, t2 = 0.0, tdet = 0.0, tdesc = 0.0;
// Parse the input command line options
if (parse_input_options(options,img_path,kpts_path,argc,argv)) {
return -1;
}
if (options.verbosity) {
cout << "Check AKAZE options:" << endl;
cout << options << endl;
}
// Try to read the image and if necessary convert to grayscale.
cv::Mat img = cv::imread(img_path,0);
if (img.data == NULL) {
cerr << "Error: cannot load image from file:" << endl << img_path << endl;
return -1;
}
// Convert the image to float to extract features.
cv::Mat img_32;
img.convertTo(img_32, CV_32F, 1.0/255.0,0);
// Don't forget to specify image dimensions in AKAZE's options.
options.img_width = img.cols;
options.img_height = img.rows;
// Extract features.
vector<cv::KeyPoint> kpts;
t1 = cv::getTickCount();
AKAZE evolution(options);
evolution.Create_Nonlinear_Scale_Space(img_32);
evolution.Feature_Detection(kpts);
t2 = cv::getTickCount();
tdet = 1000.0*(t2-t1) / cv::getTickFrequency();
// Compute descriptors.
cv::Mat desc;
t1 = cv::getTickCount();
evolution.Compute_Descriptors(kpts,desc);
t2 = cv::getTickCount();
tdesc = 1000.0*(t2-t1) / cv::getTickFrequency();
// Summarize the computation times.
evolution.Show_Computation_Times();
evolution.Save_Scale_Space();
cout << "Number of points: " << kpts.size() << endl;
cout << "Time Detector: " << tdet << " ms" << endl;
cout << "Time Descriptor: " << tdesc << " ms" << endl;
// Save keypoints in ASCII format.
if (!kpts_path.empty())
save_keypoints(kpts_path,kpts,desc,true);
// Check out the result visually.
cv::Mat img_rgb = cv::Mat(cv::Size(img.cols, img.rows), CV_8UC3);
cvtColor(img,img_rgb,CV_GRAY2BGR);
draw_keypoints(img_rgb,kpts);
cv::imshow(img_path,img_rgb);
cv::waitKey(0);
}
/* ************************************************************************* */
int parse_input_options(AKAZEOptions& options, std::string& img_path,
std::string& kpts_path, int argc, char *argv[]) {
// If there is only one argument return
if (argc == 1) {
show_input_options_help(0);
return -1;
}
// Set the options from the command line
else if (argc >= 2) {
options = AKAZEOptions();
kpts_path = "./keypoints.txt";
if (!strcmp(argv[1],"--help")) {
show_input_options_help(0);
return -1;
}
img_path = argv[1];
for (int i = 2; i < argc; i++) {
if (!strcmp(argv[i],"--soffset")) {
i = i+1;
if (i >= argc) {
cerr << "Error introducing input options!!" << endl;
return -1;
}
else {
options.soffset = atof(argv[i]);
}
}
else if (!strcmp(argv[i],"--omax")) {
i = i+1;
if ( i >= argc ) {
cerr << "Error introducing input options!!" << endl;
return -1;
}
else {
options.omax = atof(argv[i]);
}
}
else if (!strcmp(argv[i],"--dthreshold")) {
i = i+1;
if (i >= argc) {
cerr << "Error introducing input options!!" << endl;
return -1;
}
else {
options.dthreshold = atof(argv[i]);
}
}
else if (!strcmp(argv[i],"--sderivatives")) {
i = i+1;
if (i >= argc) {
cerr << "Error introducing input options!!" << endl;
return -1;
}
else {
options.sderivatives = atof(argv[i]);
}
}
else if (!strcmp(argv[i],"--nsublevels")) {
i = i+1;
if (i >= argc) {
cerr << "Error introducing input options!!" << endl;
return -1;
}
else
options.nsublevels = atoi(argv[i]);
}
else if (!strcmp(argv[i],"--diffusivity")) {
i = i+1;
if (i >= argc) {
cerr << "Error introducing input options!!" << endl;
return -1;
}
else
options.diffusivity = DIFFUSIVITY_TYPE(atoi(argv[i]));
}
else if (!strcmp(argv[i],"--descriptor")) {
i = i+1;
if (i >= argc) {
cerr << "Error introducing input options!!" << endl;
return -1;
}
else {
options.descriptor = DESCRIPTOR_TYPE(atoi(argv[i]));
if (options.descriptor < 0 || options.descriptor > MLDB) {
options.descriptor = MLDB;
}
}
}
else if (!strcmp(argv[i],"--descriptor_channels")) {
i = i+1;
if (i >= argc) {
cerr << "Error introducing input options!!" << endl;
return -1;
}
else {
options.descriptor_channels = atoi(argv[i]);
if (options.descriptor_channels <= 0 || options.descriptor_channels > 3) {
options.descriptor_channels = 3;
}
}
}
else if (!strcmp(argv[i],"--descriptor_size")) {
i = i+1;
if (i >= argc) {
cerr << "Error introducing input options!!" << endl;
return -1;
}
else {
options.descriptor_size = atoi(argv[i]);
if (options.descriptor_size < 0) {
options.descriptor_size = 0;
}
}
}
else if (!strcmp(argv[i],"--save_scale_space")) {
i = i+1;
if (i >= argc) {
cerr << "Error introducing input options!!" << endl;
return -1;
}
else {
options.save_scale_space = (bool)atoi(argv[i]);
}
}
else if (!strcmp(argv[i],"--verbose")) {
options.verbosity = true;
}
else if (!strcmp(argv[i],"--output")) {
options.save_keypoints = true;
i = i+1;
if (i >= argc) {
cerr << "Error introducing input options!!" << endl;
return -1;
}
else
kpts_path = argv[i];
}
}
}
return 0;
}<|fim▁end|> | //=============================================================================
/**
* @file akaze_features.cpp |
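AKAZE was later merged into OpenCV itself; for orientation, roughly the same create / detect / describe flow in a few lines of Python (a sketch assuming `opencv-python` is installed; the file names are placeholders, and this is not part of the program above):

```python
import cv2

img = cv2.imread("input.png", cv2.IMREAD_GRAYSCALE)  # placeholder input path
akaze = cv2.AKAZE_create()          # its defaults stand in for AKAZEOptions
kpts, desc = akaze.detectAndCompute(img, None)
print("Number of points:", len(kpts))
out = cv2.drawKeypoints(cv2.cvtColor(img, cv2.COLOR_GRAY2BGR), kpts, None)
cv2.imwrite("keypoints.png", out)   # placeholder output path
```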
<|file_name|>transform.rs<|end_file_name|><|fim▁begin|>use na::{RealField, Rotation2, Rotation3, Unit};
use crate::aliases::{TMat3, TMat4, TVec2, TVec3};
use crate::traits::Number;
/// A rotation 4 * 4 matrix created from an axis of 3 scalars and an angle expressed in radians.
///
/// # See also:
///
/// * [`scaling`](fn.scaling.html)
/// * [`translation`](fn.translation.html)
/// * [`rotation2d`](fn.rotation2d.html)
/// * [`scaling2d`](fn.scaling2d.html)
/// * [`translation2d`](fn.translation2d.html)
pub fn rotation<N: RealField>(angle: N, v: &TVec3<N>) -> TMat4<N> {
Rotation3::from_axis_angle(&Unit::new_normalize(*v), angle).to_homogeneous()
}
/// A 4 * 4 scale matrix created from a vector of 3 components.
///
/// # See also:
///
/// * [`rotation`](fn.rotation.html)
/// * [`translation`](fn.translation.html)
/// * [`rotation2d`](fn.rotation2d.html)
/// * [`scaling2d`](fn.scaling2d.html)
/// * [`translation2d`](fn.translation2d.html)
pub fn scaling<N: Number>(v: &TVec3<N>) -> TMat4<N> {
TMat4::new_nonuniform_scaling(v)
}
/// A 4 * 4 translation matrix created from a vector of 3 components.
///
/// # See also:
///
/// * [`rotation`](fn.rotation.html)
/// * [`scaling`](fn.scaling.html)
/// * [`rotation2d`](fn.rotation2d.html)
/// * [`scaling2d`](fn.scaling2d.html)
/// * [`translation2d`](fn.translation2d.html)
pub fn translation<N: Number>(v: &TVec3<N>) -> TMat4<N> {
TMat4::new_translation(v)
}
/// A rotation 3 * 3 matrix created from an angle expressed in radians.
///
/// # See also:
///<|fim▁hole|>/// * [`scaling`](fn.scaling.html)
/// * [`translation`](fn.translation.html)
/// * [`scaling2d`](fn.scaling2d.html)
/// * [`translation2d`](fn.translation2d.html)
pub fn rotation2d<N: RealField>(angle: N) -> TMat3<N> {
Rotation2::new(angle).to_homogeneous()
}
/// A 3 * 3 scale matrix created from a vector of 2 components.
///
/// # See also:
///
/// * [`rotation`](fn.rotation.html)
/// * [`scaling`](fn.scaling.html)
/// * [`translation`](fn.translation.html)
/// * [`rotation2d`](fn.rotation2d.html)
/// * [`translation2d`](fn.translation2d.html)
pub fn scaling2d<N: Number>(v: &TVec2<N>) -> TMat3<N> {
TMat3::new_nonuniform_scaling(v)
}
/// A 3 * 3 translation matrix created from a vector of 2 components.
///
/// # See also:
///
/// * [`rotation`](fn.rotation.html)
/// * [`scaling`](fn.scaling.html)
/// * [`translation`](fn.translation.html)
/// * [`rotation2d`](fn.rotation2d.html)
/// * [`scaling2d`](fn.scaling2d.html)
pub fn translation2d<N: Number>(v: &TVec2<N>) -> TMat3<N> {
TMat3::new_translation(v)
}<|fim▁end|> | /// * [`rotation`](fn.rotation.html) |
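For reference, the homogeneous matrices the 2-D helpers above build are the standard ones (written out here for clarity rather than taken from the crate's docs):

```latex
R(\theta)=\begin{pmatrix}\cos\theta & -\sin\theta & 0\\ \sin\theta & \cos\theta & 0\\ 0 & 0 & 1\end{pmatrix},\qquad
S(v)=\begin{pmatrix}v_x & 0 & 0\\ 0 & v_y & 0\\ 0 & 0 & 1\end{pmatrix},\qquad
T(v)=\begin{pmatrix}1 & 0 & v_x\\ 0 & 1 & v_y\\ 0 & 0 & 1\end{pmatrix}
```

The 3-D variants follow the same pattern one dimension up, with the rotation built from an axis-angle pair.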
<|file_name|>ex2a_telnet.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import telnetlib
import time
import socket
import sys
import getpass
TELNET_PORT = 23
TELNET_TIMEOUT = 6
def send_command(remote_conn, cmd):
'''
Send a command to the remote device and return its output
'''
cmd = cmd.rstrip()
remote_conn.write(cmd + '\n')
time.sleep(1)
return remote_conn.read_very_eager()
def login(remote_conn, username, password):
'''
Login to pynet-rtr1
'''
output = remote_conn.read_until("sername:", TELNET_TIMEOUT)
remote_conn.write(username + '\n')<|fim▁hole|> remote_conn.write(password + '\n')
return output
def no_more(remote_conn, paging_cmd='terminal length 0'):
'''
No paging of Output
'''
return send_command(remote_conn, paging_cmd)
def telnet_connect(ip_addr):
'''
Establish the Telnet Connection
'''
try:
return telnetlib.Telnet(ip_addr, TELNET_PORT, TELNET_TIMEOUT)
except socket.timeout:
sys.exit("Connection timed-out")
def main():
'''
Connect to pynet-rtr1, login, and issue 'show ip int brief'
'''
ip_addr = raw_input("IP address: ")
ip_addr = ip_addr.strip()
username = 'pyclass'
password = getpass.getpass()
remote_conn = telnet_connect(ip_addr)
output = login(remote_conn, username, password)
time.sleep(1)
remote_conn.read_very_eager()
no_more(remote_conn)
output = send_command(remote_conn, 'show ip int brief')
print "\n\n"
print output
print "\n\n"
remote_conn.close()
if __name__ == "__main__":
main()<|fim▁end|> | output += remote_conn.read_until("ssword:", TELNET_TIMEOUT) |
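The script above is Python 2 (`print` statements, `raw_input`). Under Python 3, `telnetlib` deals in bytes rather than str; a minimal sketch of the same login-and-disable-paging cycle, with a placeholder host and credentials:

```python
import telnetlib
import time

conn = telnetlib.Telnet("192.0.2.1", 23, 6)  # placeholder address
conn.read_until(b"sername:", 6)
conn.write(b"pyclass\n")
conn.read_until(b"ssword:", 6)
conn.write(b"secret\n")                      # placeholder password
conn.write(b"terminal length 0\n")           # disable paging of output
time.sleep(1)
print(conn.read_very_eager().decode())
conn.close()
```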
<|file_name|>TestApp.cpp<|end_file_name|><|fim▁begin|>// TestApp.cpp : Defines the entry point for the console application.
//
#include "stdafx.h"
#include <stdio.h>
#include <iostream>
#include <algorithm>
#include "sqlite3.h"
#include <time.h>
#include <sstream>
using namespace std;
int _tmain(int argc, _TCHAR* argv[])
{
sqlite3* db;
sqlite3_open("testdb.db", &db);
const char* evaluated;
std::string sql = "CREATE TABLE IF NOT EXISTS Files (FileName varchar(256) NOT NULL PRIMARY KEY, FileContents varbinary(1048576))";
sqlite3_stmt* command;
sqlite3_prepare_v2(db,sql.data(),sql.size(),&command,&evaluated);
int val;
while ((val = sqlite3_step(command)) != SQLITE_DONE) {
}
sqlite3_reset(command);
sql = "INSERT INTO Files VALUES (?, ?)";
sqlite3_prepare_v2(db, sql.data(), sql.size(), &command, &evaluated);
for (int i = 200; i < 300; i++) {
unsigned char* data = new unsigned char[1024 * 1024];
// Insert 100 one-megabyte files (keys 200 through 299)
std::stringstream mval;
mval << i;
std::string mstr = mval.str();
sqlite3_bind_text(command, 1, mstr.data(), mstr.size(),0);
sqlite3_bind_blob(command, 2, data, 1024 * 1024, 0);<|fim▁hole|> }
sqlite3_reset(command);
sqlite3_clear_bindings(command);
delete[] data; // release the 1 MiB buffer allocated for this row
}
sqlite3_finalize(command);
sqlite3_close(db);
return 0;
}<|fim▁end|> | while ((val = sqlite3_step(command)) != SQLITE_DONE) {
|
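The C++ above walks the raw prepare / bind / step / reset cycle by hand. Python's built-in `sqlite3` module wraps the same engine and hides statement reuse behind `execute`; a compact sketch of the equivalent insert loop:

```python
import sqlite3

con = sqlite3.connect("testdb.db")
con.execute("CREATE TABLE IF NOT EXISTS Files ("
            "FileName varchar(256) NOT NULL PRIMARY KEY, "
            "FileContents varbinary(1048576))")
for i in range(200, 300):
    # positional parameters, like sqlite3_bind_text / sqlite3_bind_blob above
    con.execute("INSERT INTO Files VALUES (?, ?)", (str(i), bytes(1024 * 1024)))
con.commit()
con.close()
```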
<|file_name|>hierarchy.js<|end_file_name|><|fim▁begin|>import "../arrays/merge";
import "../core/rebind";
import "layout";
d3.layout.hierarchy = function() {
var sort = d3_layout_hierarchySort,
children = d3_layout_hierarchyChildren,
value = d3_layout_hierarchyValue;
// Recursively compute the node depth and value.
// Also converts to a standard hierarchy structure.
function recurse(node, depth, nodes) {
var childs = children.call(hierarchy, node, depth);
node.depth = depth;
nodes.push(node);
if (childs && (n = childs.length)) {
var i = -1,
n,
c = node.children = [],
v = 0,
j = depth + 1,
d;
while (++i < n) {
d = recurse(childs[i], j, nodes);
d.parent = node;
c.push(d);
v += d.value;
}
if (sort) c.sort(sort);
if (value) node.value = v;
} else if (value) {
node.value = +value.call(hierarchy, node, depth) || 0;
}
return node;
}
// Recursively re-evaluates the node value.
function revalue(node, depth) {
var children = node.children,
v = 0;
if (children && (n = children.length)) {
var i = -1,
n,
j = depth + 1;
while (++i < n) v += revalue(children[i], j);
} else if (value) {
v = +value.call(hierarchy, node, depth) || 0;
}
if (value) node.value = v;
return v;
}
function hierarchy(d) {
var nodes = [];
recurse(d, 0, nodes);
return nodes;
}
hierarchy.sort = function(x) {
if (!arguments.length) return sort;
sort = x;
return hierarchy;
};
hierarchy.children = function(x) {
if (!arguments.length) return children;
children = x;
return hierarchy;
};
hierarchy.value = function(x) {
if (!arguments.length) return value;
value = x;
return hierarchy;
};
// Re-evaluates the `value` property for the specified hierarchy.
hierarchy.revalue = function(root) {
revalue(root, 0);
return root;
};
return hierarchy;
};
// A method assignment helper for hierarchy subclasses.
function d3_layout_hierarchyRebind(object, hierarchy) {
d3.rebind(object, hierarchy, "sort", "children", "value");
// Add an alias for nodes and links, for convenience.
object.nodes = object;
object.links = d3_layout_hierarchyLinks;
return object;
}
function d3_layout_hierarchyChildren(d) {
return d.children;
}
function d3_layout_hierarchyValue(d) {
return d.value;
}<|fim▁hole|> return b.value - a.value;
}
// Returns an array source+target objects for the specified nodes.
function d3_layout_hierarchyLinks(nodes) {
return d3.merge(nodes.map(function(parent) {
return (parent.children || []).map(function(child) {
return {source: parent, target: child};
});
}));
}<|fim▁end|> |
function d3_layout_hierarchySort(a, b) { |
<|file_name|>test_relator.py<|end_file_name|><|fim▁begin|>from flask_simple_alchemy import Relator
from testers import db, app, FakeTable, OtherTable
this_table = Relator(db)
this_table.add('FakeTable')
this_table.add('OtherTable', foreign_key='uuid')
class ThirdTable(db.Model, this_table.HasOneToOneWith.FakeTable):
__tablename__ = 'thirdtable'
id = db.Column(db.Integer, primary_key=True)
elf = db.Column(db.Boolean(False))
monkey = db.Column(db.String, default='yep')
def test_Relator_setattrs():
this_table = Relator(db)
this_table.add('FakeTable')
this_table.add('OtherTable', foreign_key='uuid')
assert this_table.HasForeignKeyOf
assert this_table.HasOneToOneWith
assert this_table.HasManyToOneWith
assert this_table.HasForeignKeyOf.FakeTable
assert this_table.HasOneToOneWith.FakeTable
assert this_table.HasManyToOneWith.FakeTable
assert this_table.HasForeignKeyOf.OtherTable
assert this_table.HasOneToOneWith.OtherTable
assert this_table.HasManyToOneWith.OtherTable
def test_Relator_relationship():
<|fim▁hole|> assert ThirdTable.faketable
with app.app_context():
fk = FakeTable()
fk.unique_name = 'gggg'
db.session.add(fk)
db.session.commit()
saved = FakeTable.query.filter_by(unique_name='gggg').first()
tt = ThirdTable()
tt.faketable_id = saved.id
db.session.add(tt)
db.session.commit()
saved2 = ThirdTable.query.filter_by(monkey='yep').first()
assert saved
assert tt
assert saved2
def test_Relator_relationship_again():
this_table = Relator(db)
this_table.add('FakeTable')
this_table.add('OtherTable', foreign_key='uuid', relation_name='OtherTableUUID1')
class FourthTable(db.Model, this_table.HasManyToOneWith.OtherTableUUID1):
__tablename__ = 'fourthtable'
id = db.Column(db.Integer, primary_key=True)
assert FourthTable.othertable_uuid
assert FourthTable.othertable
def test_Relator_relation_name():
this_table = Relator(db)
this_table.add('FakeTable')
this_table.add('OtherTable')
this_table.add('OtherTable', foreign_key='uuid', relation_name="OtherTableUUID")
class SixthTable(db.Model, this_table.HasManyToOneWith.OtherTable):
__tablename__ = 'sixthtable'
id = db.Column(db.Integer, primary_key=True)
class FifthTable(db.Model, this_table.HasManyToOneWith.OtherTableUUID):
__tablename__ = 'fifthtable'
id = db.Column(db.Integer, primary_key=True)
assert SixthTable.othertable_id
assert SixthTable.othertable
assert FifthTable.othertable_uuid
assert FifthTable.othertable
def test_database_creation():
this_table = Relator(db)
this_table.add('FakeTable')
this_table.add('OtherTable', foreign_key='uuid')
#class ThirdTable(db.Model, this_table.HasOneToOneWith.FakeTable):
# __tablename__ = 'thirdtable'
# id = db.Column(db.Integer, primary_key=True)
db.drop_all()
db.create_all()
db.drop_all()<|fim▁end|> | assert ThirdTable.faketable_id |
<|file_name|>0003_enable_forums.py<|end_file_name|><|fim▁begin|>from django.db import migrations, models
def add_default_enable(apps, schema_editor):
ForumsConfig = apps.get_model("django_comment_common", "ForumsConfig")
settings_count = ForumsConfig.objects.count()
if settings_count == 0:
# By default we want the comment client enabled, but this is *not* enabling
# discussions themselves by default, as in showing the Discussions tab, or
# inline discussions, etc. It just allows the underlying service client to work.
settings = ForumsConfig(enabled=True)
settings.save()
<|fim▁hole|>class Migration(migrations.Migration):
dependencies = [
('django_comment_common', '0002_forumsconfig'),
]
operations = [
migrations.RunPython(add_default_enable, reverse_code=reverse_noop),
]<|fim▁end|> | def reverse_noop(apps, schema_editor):
return
|
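The count-then-create guard above works; Django's `get_or_create` states the same "insert the default row exactly once" intent more directly. A hypothetical variant (not the project's actual migration):

```python
def add_default_enable(apps, schema_editor):
    # get_or_create is idempotent, so re-running the migration
    # cannot create a duplicate settings row
    ForumsConfig = apps.get_model("django_comment_common", "ForumsConfig")
    ForumsConfig.objects.get_or_create(defaults={"enabled": True})
```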
<|file_name|>profitability_analysis.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import flt, getdate, formatdate, cstr
from erpnext.accounts.report.financial_statements import filter_accounts, filter_out_zero_value_rows
from erpnext.accounts.report.trial_balance.trial_balance import validate_filters
value_fields = ("income", "expense", "gross_profit_loss")
def execute(filters=None):
if not filters.get('based_on'): filters["based_on"] = 'Cost Center'
based_on = filters.based_on.replace(' ', '_').lower()
validate_filters(filters)
accounts = get_accounts_data(based_on, filters.get("company"))
data = get_data(accounts, filters, based_on)
columns = get_columns(filters)
return columns, data
def get_accounts_data(based_on, company):
if based_on == 'cost_center':
return frappe.db.sql("""select name, parent_cost_center as parent_account, cost_center_name as account_name, lft, rgt
from `tabCost Center` where company=%s order by name""", company, as_dict=True)
else:
return frappe.get_all('Project', fields = ["name"], filters = {'company': company}, order_by = 'name')
def get_data(accounts, filters, based_on):
if not accounts:
return []
accounts, accounts_by_name, parent_children_map = filter_accounts(accounts)
gl_entries_by_account = {}
set_gl_entries_by_account(filters.get("company"), filters.get("from_date"),
filters.get("to_date"), based_on, gl_entries_by_account, ignore_closing_entries=not flt(filters.get("with_period_closing_entry")))
total_row = calculate_values(accounts, gl_entries_by_account, filters)
accumulate_values_into_parents(accounts, accounts_by_name)
data = prepare_data(accounts, filters, total_row, parent_children_map, based_on)
data = filter_out_zero_value_rows(data, parent_children_map,
show_zero_values=filters.get("show_zero_values"))
return data
def calculate_values(accounts, gl_entries_by_account, filters):
init = {
"income": 0.0,
"expense": 0.0,
"gross_profit_loss": 0.0
}
total_row = {
"cost_center": None,
"account_name": "'" + _("Total") + "'",
"warn_if_negative": True,
"income": 0.0,
"expense": 0.0,
"gross_profit_loss": 0.0,
"account": "'" + _("Total") + "'",
"parent_account": None,
"indent": 0,
"has_value": True
}
for d in accounts:
d.update(init.copy())
# add opening
for entry in gl_entries_by_account.get(d.name, []):
if cstr(entry.is_opening) != "Yes":
if entry.type == 'Income':
d["income"] += flt(entry.credit) - flt(entry.debit)
if entry.type == 'Expense':
d["expense"] += flt(entry.debit) - flt(entry.credit)
d["gross_profit_loss"] = d.get("income") - d.get("expense")
total_row["income"] += d["income"]
total_row["expense"] += d["expense"]
total_row["gross_profit_loss"] = total_row.get("income") - total_row.get("expense")
return total_row
def accumulate_values_into_parents(accounts, accounts_by_name):
for d in reversed(accounts):
if d.parent_account:
for key in value_fields:
accounts_by_name[d.parent_account][key] += d[key]
def prepare_data(accounts, filters, total_row, parent_children_map, based_on):
data = []
company_currency = frappe.get_cached_value('Company', filters.get("company"), "default_currency")
for d in accounts:
has_value = False
row = {
"account_name": d.account_name or d.name,
"account": d.name,
"parent_account": d.parent_account,
"indent": d.indent,
"fiscal_year": filters.get("fiscal_year"),
"currency": company_currency,
"based_on": based_on
}
for key in value_fields:
row[key] = flt(d.get(key, 0.0), 3)
if abs(row[key]) >= 0.005:
# ignore zero values
has_value = True
<|fim▁hole|> data.append(row)
data.extend([{},total_row])
return data
def get_columns(filters):
return [
{
"fieldname": "account",
"label": _(filters.get("based_on")),
"fieldtype": "Link",
"options": filters.get("based_on"),
"width": 300
},
{
"fieldname": "income",
"label": _("Income"),
"fieldtype": "Currency",
"options": "currency",
"width": 120
},
{
"fieldname": "expense",
"label": _("Expense"),
"fieldtype": "Currency",
"options": "currency",
"width": 120
},
{
"fieldname": "gross_profit_loss",
"label": _("Gross Profit / Loss"),
"fieldtype": "Currency",
"options": "currency",
"width": 120
},
{
"fieldname": "currency",
"label": _("Currency"),
"fieldtype": "Link",
"options": "Currency",
"hidden": 1
}
]
def set_gl_entries_by_account(company, from_date, to_date, based_on, gl_entries_by_account,
ignore_closing_entries=False):
"""Returns a dict like { "account": [gl entries], ... }"""
additional_conditions = []
if ignore_closing_entries:
additional_conditions.append("and ifnull(voucher_type, '')!='Period Closing Voucher'")
if from_date:
additional_conditions.append("and posting_date >= %(from_date)s")
gl_entries = frappe.db.sql("""select posting_date, {based_on} as based_on, debit, credit,
is_opening, (select root_type from `tabAccount` where name = account) as type
from `tabGL Entry` where company=%(company)s
{additional_conditions}
and posting_date <= %(to_date)s
and {based_on} is not null
order by {based_on}, posting_date""".format(additional_conditions="\n".join(additional_conditions), based_on= based_on),
{
"company": company,
"from_date": from_date,
"to_date": to_date
},
as_dict=True)
for entry in gl_entries:
gl_entries_by_account.setdefault(entry.based_on, []).append(entry)
return gl_entries_by_account<|fim▁end|> | row["has_value"] = has_value |
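A note on `accumulate_values_into_parents`: it relies on the account list arriving ordered parents-first (the `lft`/`rgt` nested-set ordering), so a single reverse pass folds every node into its parent before that parent is itself read. The trick in isolation:

```python
# Sketch of the reverse-pass rollup used above: because children appear
# after their parents in the list, reversed() visits leaves first.
def accumulate_into_parents(accounts, accounts_by_name, value_fields):
    for d in reversed(accounts):
        if d["parent_account"]:
            for key in value_fields:
                accounts_by_name[d["parent_account"]][key] += d[key]
```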
<|file_name|>table_test.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2014, Suryandaru Triandana <[email protected]>
// All rights reserved.
//
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package table
import (
"bytes"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"gx/ipfs/QmbBhyDKsY4mbY6xsKt3qu9Y7FPvMJ6qbD8AMjYYvPRw1g/goleveldb/leveldb/iterator"
"gx/ipfs/QmbBhyDKsY4mbY6xsKt3qu9Y7FPvMJ6qbD8AMjYYvPRw1g/goleveldb/leveldb/opt"
"gx/ipfs/QmbBhyDKsY4mbY6xsKt3qu9Y7FPvMJ6qbD8AMjYYvPRw1g/goleveldb/leveldb/storage"
"gx/ipfs/QmbBhyDKsY4mbY6xsKt3qu9Y7FPvMJ6qbD8AMjYYvPRw1g/goleveldb/leveldb/testutil"
"gx/ipfs/QmbBhyDKsY4mbY6xsKt3qu9Y7FPvMJ6qbD8AMjYYvPRw1g/goleveldb/leveldb/util"
)
type tableWrapper struct {
*Reader
}
func (t tableWrapper) TestFind(key []byte) (rkey, rvalue []byte, err error) {
return t.Reader.Find(key, false, nil)
}
func (t tableWrapper) TestGet(key []byte) (value []byte, err error) {
return t.Reader.Get(key, nil)
}
func (t tableWrapper) TestNewIterator(slice *util.Range) iterator.Iterator {
return t.Reader.NewIterator(slice, nil)
}
var _ = testutil.Defer(func() {
Describe("Table", func() {
Describe("approximate offset test", func() {
var (
buf = &bytes.Buffer{}
o = &opt.Options{
BlockSize: 1024,
Compression: opt.NoCompression,
}
)
// Building the table.
tw := NewWriter(buf, o)
tw.Append([]byte("k01"), []byte("hello"))
tw.Append([]byte("k02"), []byte("hello2"))
tw.Append([]byte("k03"), bytes.Repeat([]byte{'x'}, 10000))
tw.Append([]byte("k04"), bytes.Repeat([]byte{'x'}, 200000))
tw.Append([]byte("k05"), bytes.Repeat([]byte{'x'}, 300000))
tw.Append([]byte("k06"), []byte("hello3"))
tw.Append([]byte("k07"), bytes.Repeat([]byte{'x'}, 100000))
err := tw.Close()
It("Should be able to approximate offset of a key correctly", func() {
Expect(err).ShouldNot(HaveOccurred())
tr, err := NewReader(bytes.NewReader(buf.Bytes()), int64(buf.Len()), storage.FileDesc{}, nil, nil, o)
Expect(err).ShouldNot(HaveOccurred())
CheckOffset := func(key string, expect, threshold int) {
offset, err := tr.OffsetOf([]byte(key))
Expect(err).ShouldNot(HaveOccurred())
Expect(offset).Should(BeNumerically("~", expect, threshold), "Offset of key %q", key)
}
CheckOffset("k0", 0, 0)
CheckOffset("k01a", 0, 0)
CheckOffset("k02", 0, 0)
CheckOffset("k03", 0, 0)
CheckOffset("k04", 10000, 1000)
CheckOffset("k04a", 210000, 1000)
CheckOffset("k05", 210000, 1000)
CheckOffset("k06", 510000, 1000)
CheckOffset("k07", 510000, 1000)
CheckOffset("xyz", 610000, 2000)
})
})
Describe("read test", func() {
Build := func(kv testutil.KeyValue) testutil.DB {
o := &opt.Options{
BlockSize: 512,
BlockRestartInterval: 3,
}
buf := &bytes.Buffer{}
// Building the table.
tw := NewWriter(buf, o)
kv.Iterate(func(i int, key, value []byte) {
tw.Append(key, value)
})
tw.Close()
// Opening the table.
tr, _ := NewReader(bytes.NewReader(buf.Bytes()), int64(buf.Len()), storage.FileDesc{}, nil, nil, o)
return tableWrapper{tr}
}
Test := func(kv *testutil.KeyValue, body func(r *Reader)) func() {
return func() {<|fim▁hole|> if body != nil {
body(db.(tableWrapper).Reader)
}
testutil.KeyValueTesting(nil, *kv, db, nil, nil)
}
}
testutil.AllKeyValueTesting(nil, Build, nil, nil)
Describe("with one key per block", Test(testutil.KeyValue_Generate(nil, 9, 1, 10, 512, 512), func(r *Reader) {
It("should have correct blocks number", func() {
indexBlock, err := r.readBlock(r.indexBH, true)
Expect(err).To(BeNil())
Expect(indexBlock.restartsLen).Should(Equal(9))
})
}))
})
})
})<|fim▁end|> | db := Build(*kv) |
<|file_name|>read.js<|end_file_name|><|fim▁begin|><|fim▁hole|> console.log(JSON.stringify(ENV, null, 4));
});<|fim▁end|> | require('..').config({namespace:'NVM'})
.read({})
.on('read', function(ENV){ |
<|file_name|>boxCap.py<|end_file_name|><|fim▁begin|>""" Capturing and analyzing the box information
Author: Lyu Yaopengfei
Date: 23-May-2016
"""
import cv2
import threading
import time
from PIL import Image
import Lego.dsOperation as dso
import Lego.imgPreprocessing as imgprep
from Lego.ocr import tesserOcr
capImg = None
resImg = None
stopFlat = 0
lock = threading.Lock()
def capFrame(cap):
global capImg
global stopFlat
while(1):
lock.acquire()
try:
_,capImg = cap.read()
finally:
lock.release()
if (stopFlat > 0):
break
clickCnt = 0
clickFlag = 0
def detect_circle(event,x,y,flags,param):
global clickFlag
if event==cv2.EVENT_LBUTTONUP:
clickFlag = clickFlag+1
elif event==cv2.EVENT_RBUTTONUP:
clickFlag = -1
# lock.acquire()
# try:
# cv2.imwrite('cap.png',capImg)
# finally:
# clickCnt = clickCnt+1
# lock.release()
# detect the useful information from the selected image
def detectImg(logoAffinePos,img,idx):
_, _, _, _, affinedCropedImg, rtnFlag = logoAffinePos.rcvAffinedAll(img)
if (rtnFlag is False):
return None,None,None,False
filtedCroped = imgprep.imgFilter(affinedCropedImg)
filtedCroped = cv2.cvtColor(filtedCroped,cv2.COLOR_GRAY2RGB)
filtedCropedPIL = Image.fromarray(filtedCroped)
numStr = tesserOcr(filtedCropedPIL)
return affinedCropedImg,filtedCroped,numStr,True
def analyseBoxInfo(bds,imgfolder):
maxCnt = 0
tempCnt = 0
tempNumSet = set(bds.tempNumList)
bds.setImgFolder(imgfolder)
for item in tempNumSet:
tempCnt = bds.tempNumList.count(item)
if(tempCnt > maxCnt):
maxCnt = tempCnt
bds.number = item
def exportLog(lf, expStr):
print(expStr)
expStr = expStr+'\n'
lf.writelines(expStr)
if __name__ == '__main__':
bxnm = input('Input the box name: ')
# time.strftime('%Y-%m-%d-%H%M%S',time.localtime(time.time()))
bx1 = dso.boxds(bxnm)
settingInfo = open('../data/setting','r')
settingInfo.readline()
PATH = settingInfo.readline().strip().lstrip().rstrip(',')
DATAPATH = settingInfo.readline().strip().lstrip().rstrip(',')
FEATURE_IMG_FOLDER = settingInfo.readline().strip().lstrip().rstrip(',')
MATERIAL_IMG_FOLDER = settingInfo.readline().strip().lstrip().rstrip(',')
BOX_DATA_PATH = settingInfo.readline().strip().lstrip().rstrip(',')
LOG_PATH = settingInfo.readline().strip().lstrip().rstrip(',')
curTime = time.strftime('%Y-%m-%d-%H%M%S',time.localtime(time.time()))
LOG_PATH = LOG_PATH+curTime+bx1.boxname+'.log'
logFile = open(LOG_PATH,'w+')
boxData = open(BOX_DATA_PATH,'r')
logoTp = cv2.imread(MATERIAL_IMG_FOLDER + 'purelogo256.png')
logoAffinePos = imgprep.LogoAffinePos(logoTp)
cv2.namedWindow('capFrame')
cv2.setMouseCallback('capFrame',detect_circle)
VWIDTH = 1280
VHIGH = 720
cap = cv2.VideoCapture(0)
cap.set(3,VWIDTH)
cap.set(4,VHIGH)
cap.read();cap.read();cap.read()
tCapFrame = threading.Thread(target=capFrame, args=(cap,))
tCapFrame.start()
while(capImg is None):
pass
dtrtnFlag = False
showFlag = 0
while(1):<|fim▁hole|> stopFlat = 1
break
resImg = capImg.copy()
showImg = resImg.copy()
logoContourPts,logoContour,rtnFlag = logoAffinePos.extLegoLogo(resImg, minArea=5000)
if (rtnFlag is True):
# draw contour we finding
cv2.drawContours(showImg, [logoContourPts], -1, (0,255,0), 2)
cPts,rtnFlag = logoAffinePos.extQuadrangleCpts(logoContourPts, logoContour)
if (rtnFlag is True):
# draw corner points we finding
for idx, cPt in enumerate(cPts):
cPt = cPt.flatten()
ptsize = int(logoAffinePos.estLength/20)
showImg[cPt[1]-ptsize:cPt[1]+ptsize,cPt[0]-ptsize:cPt[0]+ptsize,:] = [255,255,0]
showImg = cv2.resize(showImg,(0,0),fx=0.4,fy=0.4)
# right click, discard the data and re-capturing
if(clickFlag < 0):
clickFlag = 0
exportLog(logFile, 'Data was discarded')
cv2.destroyWindow('filted')
# capturing image
if(clickFlag is 0):
dtrtnFlag = False
showFlag = 0
cv2.putText(showImg,'Capturing '+bx1.boxname+'_'+dso.SUF_DEF[clickCnt]+' picture',(10,250), cv2.FONT_HERSHEY_SIMPLEX, 0.5,(0,255,255),1)
# fisrt time left click, detect the image and output the result
elif(clickFlag is 1):
if(dtrtnFlag is False):
affinedCropedImg,filtedCroped,numStr,dtrtnFlag = detectImg(logoAffinePos,resImg,clickCnt)
if(dtrtnFlag is False):
# if detect result is False, set clickFlag 0, re-capturing
clickFlag = 0
exportLog(logFile, 'Detecting fault, re-capturing')
elif(dtrtnFlag is True):
cv2.imshow('filted',filtedCroped)
cv2.moveWindow('filted',50+int(0.4*VWIDTH),50)
exportLog(logFile, bx1.boxname+'_'+dso.SUF_DEF[clickCnt]+' OCR: '+str(numStr))
dtrtnFlag = None
else:
cv2.putText(showImg,'Do you save this result? Lclick Save, Rclick Discard',(10,250), cv2.FONT_HERSHEY_SIMPLEX, 0.5,(0,255,255),1)
elif(clickFlag is 2):
exportLog(logFile, 'Saving '+bx1.boxname+'_'+dso.SUF_DEF[clickCnt]+' data')
imgName = bx1.boxname+'_'+str(clickCnt)+'.tiff'
savingPath = FEATURE_IMG_FOLDER + imgName
savingPath2 = FEATURE_IMG_FOLDER + 'color/c' + imgName
cv2.imwrite(savingPath, filtedCroped)
cv2.imwrite(savingPath2, affinedCropedImg)
bx1.setSingleFeatureImgsName(dso.SUF_DEF[clickCnt], imgName)
exportLog(logFile, '--------Finish capturing--------\n')
if(numStr is not None):
bx1.appendTempNumList(numStr)
clickCnt = clickCnt + 1
clickFlag = 0
cv2.destroyWindow('filted')
else:
clickFlag = 0
cv2.destroyWindow('filted')
cv2.imshow('capFrame',showImg)
analyseBoxInfo(bx1,FEATURE_IMG_FOLDER)
dso.dsWrite(BOX_DATA_PATH,bx1)
print('\n')
logFile.close()
boxData.close()
cap.release()
cv2.destroyAllWindows()<|fim▁end|> | if ((cv2.waitKey(1) & 0xFF == 27) | (clickCnt>=6) ): |
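`analyseBoxInfo` is a hand-rolled majority vote over the OCR strings collected frame by frame; `collections.Counter` gives the same answer in two lines (a sketch over the same `tempNumList` idea):

```python
from collections import Counter

def most_common_reading(temp_num_list):
    # pick the OCR string that appeared most often across captured frames
    value, count = Counter(temp_num_list).most_common(1)[0]
    return value
```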
<|file_name|>Solid.py<|end_file_name|><|fim▁begin|>import Gears as gears
from .. import *
from .Base import *
class Solid(Base) :
def applyWithArgs(
self,
spass,
functionName,<|fim▁hole|> *,
color : 'Solid pattern color, or Interactive.*'
= 'white'
) :
color = processColor(color, self.tb)
if not isGrey(color):
spass.enableColorMode()
stimulus = spass.getStimulus()
self.registerInteractiveControls(
spass, stimulus,
functionName+'_',
color = color,
)
spass.setShaderFunction( name = functionName, src = self.glslEsc( '''
vec3 @<X>@ (vec2 x, float time){
return @<X>@_color; }
''').format( X=functionName ) )<|fim▁end|> | |
<|file_name|>demo.js<|end_file_name|><|fim▁begin|>var reportResource = "/public/Samples/Reports/States";
visualize({
auth: {
name: "joeuser",
password: "joeuser",
organization: "organization_1"
}
}, function (v) {
v("#main").report({
resource: reportResource,
linkOptions: {
events: {
"click": function (ev, link) {
if (link.type == "ReportExecution") {
v("#drill-down").report({
resource: link.parameters._report,
params: {
state: [link.parameters.store_state]
},
});
console.log(link.parameters.store_state);
}
}
}<|fim▁hole|> error: function (err) {
alert(err.message);
}
});
});<|fim▁end|> | }, |
<|file_name|>block-hanging-single.tsx<|end_file_name|><|fim▁begin|>/** @jsx jsx */
import { Transforms } from 'slate'
import { jsx } from '../../..'
export const run = editor => {
Transforms.delete(editor)
}
export const input = (
<editor>
<block>
<anchor />
one
</block>
<block><|fim▁hole|> two
</block>
</editor>
)
export const output = (
<editor>
<block>
<cursor />
</block>
<block>two</block>
</editor>
)<|fim▁end|> | <focus /> |
<|file_name|>js_-uAWVNNLs-CDqxUul18yVdZpeIHUZse1JmT6XcSAYNU.js<|end_file_name|><|fim▁begin|>(function ($) {
/**
* Attaches double-click behavior to toggle full path of Krumo elements.
*/
Drupal.behaviors.devel = {
attach: function (context, settings) {
// Add hint to footnote
$('.krumo-footnote .krumo-call').once().before('<img style="vertical-align: middle;" title="Click to expand. Double-click to show path." src="' + settings.basePath + 'misc/help.png"/>');
var krumo_name = [];
var krumo_type = [];
function krumo_traverse(el) {
krumo_name.push($(el).html());
krumo_type.push($(el).siblings('em').html().match(/\w*/)[0]);
if ($(el).closest('.krumo-nest').length > 0) {
krumo_traverse($(el).closest('.krumo-nest').prev().find('.krumo-name'));
}
}
$('.krumo-child > div:first-child', context).dblclick(
function(e) {
if ($(this).find('> .krumo-php-path').length > 0) {
// Remove path if shown.
$(this).find('> .krumo-php-path').remove();
}
else {
// Get elements.
krumo_traverse($(this).find('> a.krumo-name'));
// Create path.
var krumo_path_string = '';
for (var i = krumo_name.length - 1; i >= 0; --i) {
// Start element.
if ((krumo_name.length - 1) == i)
krumo_path_string += '$' + krumo_name[i];
if (typeof krumo_name[(i-1)] !== 'undefined') {
if (krumo_type[i] == 'Array') {
krumo_path_string += "[";
if (!/^\d*$/.test(krumo_name[(i-1)]))
krumo_path_string += "'";
krumo_path_string += krumo_name[(i-1)];
if (!/^\d*$/.test(krumo_name[(i-1)]))
krumo_path_string += "'";
krumo_path_string += "]";
}
if (krumo_type[i] == 'Object')
krumo_path_string += '->' + krumo_name[(i-1)];
}
}
$(this).append('<div class="krumo-php-path" style="font-family: Courier, monospace; font-weight: bold;">' + krumo_path_string + '</div>');
// Reset arrays.
krumo_name = [];
krumo_type = [];
}
}
);
}
};
})(jQuery);
;
/**
* Cookie plugin 1.0
*
* Copyright (c) 2006 Klaus Hartl (stilbuero.de)
* Dual licensed under the MIT and GPL licenses:
* http://www.opensource.org/licenses/mit-license.php
* http://www.gnu.org/licenses/gpl.html
*
*/
jQuery.cookie=function(b,j,m){if(typeof j!="undefined"){m=m||{};if(j===null){j="";m.expires=-1}var e="";if(m.expires&&(typeof m.expires=="number"||m.expires.toUTCString)){var f;if(typeof m.expires=="number"){f=new Date();f.setTime(f.getTime()+(m.expires*24*60*60*1000))}else{f=m.expires}e="; expires="+f.toUTCString()}var l=m.path?"; path="+(m.path):"";var g=m.domain?"; domain="+(m.domain):"";var a=m.secure?"; secure":"";document.cookie=[b,"=",encodeURIComponent(j),e,l,g,a].join("")}else{var d=null;if(document.cookie&&document.cookie!=""){var k=document.cookie.split(";");for(var h=0;h<k.length;h++){var c=jQuery.trim(k[h]);if(c.substring(0,b.length+1)==(b+"=")){d=decodeURIComponent(c.substring(b.length+1));break}}}return d}};
;
(function ($) {
Drupal.ModuleFilter = {};
Drupal.ModuleFilter.explode = function(string) {
var queryArray = string.match(/([a-zA-Z]+\:(\w+|"[^"]+")*)|\w+|"[^"]+"/g);
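// For example (hypothetical input): explode('name:"foo bar" baz') matches
// ['name:"foo bar"', 'baz']; the loop below then strips the double quotes,
// yielding ['name:foo bar', 'baz'].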
if (!queryArray) {
queryArray = new Array();
}
var i = queryArray.length;
while (i--) {
queryArray[i] = queryArray[i].replace(/"/g, "");
}
return queryArray;
};
Drupal.ModuleFilter.getState = function(key) {
if (!Drupal.ModuleFilter.state) {
Drupal.ModuleFilter.state = {};
var cookie = $.cookie('DrupalModuleFilter');
var query = cookie ? cookie.split('&') : [];
if (query) {
for (var i in query) {
// Extra check to avoid js errors in Chrome, IE and Safari when
// combined with JS like twitter's widget.js.
// See http://drupal.org/node/798764.
if (typeof(query[i]) == 'string' && query[i].indexOf('=') != -1) {
var values = query[i].split('=');
if (values.length === 2) {
Drupal.ModuleFilter.state[values[0]] = values[1];
}
}
}
}
}
return Drupal.ModuleFilter.state[key] ? Drupal.ModuleFilter.state[key] : false;
};
Drupal.ModuleFilter.setState = function(key, value) {
var existing = Drupal.ModuleFilter.getState(key);
if (existing != value) {
Drupal.ModuleFilter.state[key] = value;
var query = [];
for (var i in Drupal.ModuleFilter.state) {
query.push(i + '=' + Drupal.ModuleFilter.state[i]);
}
$.cookie('DrupalModuleFilter', query.join('&'), { expires: 7, path: '/' });
}
};
Drupal.ModuleFilter.Filter = function(element, selector, options) {
var self = this;
this.element = element;
this.text = $(this.element).val();
this.settings = Drupal.settings.moduleFilter;
this.selector = selector;
this.options = $.extend({
delay: 500,
striping: false,
childSelector: null,
empty: Drupal.t('No results'),
rules: new Array()
}, options);
if (this.options.wrapper == undefined) {
this.options.wrapper = $(self.selector).parent();
}
// Add clear button.
this.element.after('<div class="module-filter-clear"><a href="#" class="js-hide">' + Drupal.t('clear') + '</a></div>');
if (this.text) {
$('.module-filter-clear a', this.element.parent()).removeClass('js-hide');
}
$('.module-filter-clear a', this.element.parent()).click(function() {
self.element.val('');
self.text = '';
delete self.queries;
self.applyFilter();
self.element.focus();
$(this).addClass('js-hide');
return false;
});
this.updateQueries = function() {
var queryStrings = Drupal.ModuleFilter.explode(self.text);
self.queries = new Array();
for (var i in queryStrings) {
var query = { operator: 'text', string: queryStrings[i] };
if (self.operators != undefined) {
// Check if an operator is possibly used.
if (queryStrings[i].indexOf(':') > 0) {
// Determine operator used.
var args = queryStrings[i].split(':', 2);
var operator = args.shift();
if (self.operators[operator] != undefined) {
query.operator = operator;
query.string = args.shift();
}
}
}
query.string = query.string.toLowerCase();
self.queries.push(query);
}
if (self.queries.length <= 0) {
// Add a blank string query.
self.queries.push({ operator: 'text', string: '' });
}
};
this.applyFilter = function() {
self.results = new Array();
self.updateQueries();
if (self.index == undefined) {
self.buildIndex();
}
self.element.trigger('moduleFilter:start');
$.each(self.index, function(key, item) {
var $item = item.element;
for (var i in self.queries) {
var query = self.queries[i];
if (query.operator == 'text') {
if (item.text.indexOf(query.string) < 0) {
continue;
}
}
else {
var func = self.operators[query.operator];
if (!(func(query.string, self, item))) {
continue;
}
}
var rulesResult = self.processRules(item);
if (rulesResult !== false) {
return true;
<|fim▁hole|> $item.addClass('js-hide');
});
self.element.trigger('moduleFilter:finish', { results: self.results });
if (self.options.striping) {
self.stripe();
}
if (self.results.length > 0) {
self.options.wrapper.find('.module-filter-no-results').remove();
}
else {
if (!self.options.wrapper.find('.module-filter-no-results').length) {
self.options.wrapper.append($('<p class="module-filter-no-results"/>').text(self.options.empty));
}
}
};
self.element.keyup(function(e) {
switch (e.which) {
case 13:
if (self.timeOut) {
clearTimeout(self.timeOut);
}
self.applyFilter();
break;
default:
if (self.text != $(this).val()) {
if (self.timeOut) {
clearTimeout(self.timeOut);
}
self.text = $(this).val();
if (self.text) {
self.element.parent().find('.module-filter-clear a').removeClass('js-hide');
}
else {
self.element.parent().find('.module-filter-clear a').addClass('js-hide');
}
self.element.trigger('moduleFilter:keyup');
self.timeOut = setTimeout(self.applyFilter, self.options.delay);
}
break;
}
});
self.element.keypress(function(e) {
if (e.which == 13) e.preventDefault();
});
};
Drupal.ModuleFilter.Filter.prototype.buildIndex = function() {
var self = this;
var index = new Array();
$(this.selector).each(function(i) {
var text = (self.options.childSelector) ? $(self.options.childSelector, this).text() : $(this).text();
var item = {
key: i,
element: $(this),
text: text.toLowerCase()
};
for (var j in self.options.buildIndex) {
var func = self.options.buildIndex[j];
item = $.extend(func(self, item), item);
}
$(this).data('indexKey', i);
index.push(item);
delete item;
});
this.index = index;
};
Drupal.ModuleFilter.Filter.prototype.processRules = function(item) {
var self = this;
var $item = item.element;
var rulesResult = true;
if (self.options.rules.length > 0) {
for (var i in self.options.rules) {
var func = self.options.rules[i];
rulesResult = func(self, item);
if (rulesResult === false) {
break;
}
}
}
if (rulesResult !== false) {
$item.removeClass('js-hide');
self.results.push(item);
}
return rulesResult;
};
Drupal.ModuleFilter.Filter.prototype.stripe = function() {
var self = this;
var flip = { even: 'odd', odd: 'even' };
var stripe = 'odd';
$.each(self.index, function(key, item) {
if (!item.element.hasClass('js-hide')) {
item.element.removeClass('odd even')
.addClass(stripe);
stripe = flip[stripe];
}
});
};
$.fn.moduleFilter = function(selector, options) {
var filterInput = this;
filterInput.parents('.module-filter-inputs-wrapper').show();
if (Drupal.settings.moduleFilter.setFocus) {
filterInput.focus();
}
filterInput.data('moduleFilter', new Drupal.ModuleFilter.Filter(this, selector, options));
};
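// Example wiring (hypothetical selector/options; the update-status behavior
// further down shows a real call):
//   $('input[name="module_filter[name]"]').moduleFilter('table tbody > tr', {
//     delay: 250,
//     childSelector: 'td strong'
//   });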
})(jQuery);
;
(function($) {
Drupal.behaviors.moduleFilterUpdateStatus = {
attach: function(context) {
$('#module-filter-update-status-form').once('update-status', function() {
var filterInput = $('input[name="module_filter[name]"]', context);
filterInput.moduleFilter('table.update > tbody > tr', {
wrapper: $('table.update:first').parent(),
delay: 300,
childSelector: 'div.project a',
rules: [
function(moduleFilter, item) {
switch (moduleFilter.options.show) {
case 'all':
return true;
case 'updates':
if (item.state == 'warning' || item.state == 'error') {
return true;
}
break;
case 'security':
if (item.state == 'error') {
return true;
}
break;
case 'ignore':
if (item.state == 'ignored') {
return true;
}
break;
case 'unknown':
if (item.state == 'unknown') {
return true;
}
break;
}
return false;
}
],
buildIndex: [
function(moduleFilter, item) {
if ($('.version-status', item.element).text() == Drupal.t('Ignored from settings')) {
item.state = 'ignored';
return item;
}
if (item.element.is('.ok')) {
item.state = 'ok';
}
else if (item.element.is('.warning')) {
item.state = 'warning';
}
else if (item.element.is('.error')) {
item.state = 'error';
}
else if (item.element.is('.unknown')) {
item.state = 'unknown';
}
return item;
}
],
show: $('#edit-module-filter-show input[name="module_filter[show]"]', context).val()
});
var moduleFilter = filterInput.data('moduleFilter');
if (Drupal.settings.moduleFilter.rememberUpdateState) {
var updateShow = Drupal.ModuleFilter.getState('updateShow');
if (updateShow) {
moduleFilter.options.show = updateShow;
$('#edit-module-filter-show input[name="module_filter[show]"][value="' + updateShow + '"]', context).click();
}
}
$('#edit-module-filter-show input[name="module_filter[show]"]', context).change(function() {
moduleFilter.options.show = $(this).val();
Drupal.ModuleFilter.setState('updateShow', moduleFilter.options.show);
moduleFilter.applyFilter();
});
moduleFilter.element.bind('moduleFilter:start', function() {
$('table.update').each(function() {
$(this).show().prev('h3').show();
});
});
moduleFilter.element.bind('moduleFilter:finish', function(e, data) {
$('table.update').each(function() {
var $table = $(this);
if ($('tbody tr', $(this)).filter(':visible').length == 0) {
$table.hide().prev('h3').hide();
}
});
});
moduleFilter.element.bind('moduleFilter:keyup', function() {
if (moduleFilter.clearOffset == undefined) {
moduleFilter.inputWidth = filterInput.width();
moduleFilter.clearOffset = moduleFilter.element.parent().find('.module-filter-clear a').width();
}
if (moduleFilter.text) {
filterInput.width(moduleFilter.inputWidth - moduleFilter.clearOffset - 5).parent().css('margin-right', moduleFilter.clearOffset + 5);
}
else {
filterInput.width(moduleFilter.inputWidth).parent().css('margin-right', 0);
}
});
moduleFilter.element.parent().find('.module-filter-clear a').click(function() {
filterInput.width(moduleFilter.inputWidth).parent().css('margin-right', 0);
});
moduleFilter.applyFilter();
});
}
};
})(jQuery);
;<|fim▁end|> | }
}
|
<|file_name|>commentsPylintNoElseBoth.py<|end_file_name|><|fim▁begin|><|fim▁hole|>def func():
value = "not-none"
# pylint: disable=unused-argument1
<caret>if value is None:
print("None")
# pylint: disable=unused-argument2
print(value)<|fim▁end|> | |
<|file_name|>test_multipart.py<|end_file_name|><|fim▁begin|>import asyncio
import functools
import io
import unittest
import zlib
from unittest import mock
import pytest
import aiohttp.multipart
from aiohttp import helpers, payload
from aiohttp.hdrs import (CONTENT_DISPOSITION, CONTENT_ENCODING,
CONTENT_TRANSFER_ENCODING, CONTENT_TYPE)
from aiohttp.helpers import parse_mimetype
from aiohttp.multipart import (content_disposition_filename,
parse_content_disposition)
from aiohttp.streams import DEFAULT_LIMIT as stream_reader_default_limit
from aiohttp.streams import StreamReader
@pytest.fixture
def buf():
return bytearray()
@pytest.fixture
def stream(buf):
writer = mock.Mock()
def write(chunk):
buf.extend(chunk)
return ()
writer.write.side_effect = write
return writer
@pytest.fixture
def writer():
return aiohttp.multipart.MultipartWriter(boundary=':')
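# The buf/stream/writer fixtures serve the module-level pytest coroutine
# tests further down: 'stream' records every chunk written into 'buf', so
# serialized multipart bytes can be asserted against directly.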
def run_in_loop(f):
@functools.wraps(f)
def wrapper(testcase, *args, **kwargs):
coro = asyncio.coroutine(f)
future = asyncio.wait_for(coro(testcase, *args, **kwargs), timeout=5)
return testcase.loop.run_until_complete(future)
return wrapper
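# MetaAioTestCase wraps every 'test_*' method in run_in_loop() at class
# creation time, so plain generator-style tests using 'yield from' run on
# the TestCase's event loop with a 5-second timeout.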
class MetaAioTestCase(type):
def __new__(cls, name, bases, attrs):
for key, obj in attrs.items():
if key.startswith('test_'):
attrs[key] = run_in_loop(obj)
return super().__new__(cls, name, bases, attrs)
class TestCase(unittest.TestCase, metaclass=MetaAioTestCase):
def setUp(self):
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(self.loop)
def tearDown(self):
self.loop.close()
def future(self, obj):
fut = helpers.create_future(self.loop)
fut.set_result(obj)
return fut
class Response(object):
def __init__(self, headers, content):
self.headers = headers
self.content = content
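# Stream is a minimal stand-in for an aiohttp stream: only the
# read/readline/at_eof/unread_data surface that BodyPartReader and
# MultipartReader touch, backed by an in-memory BytesIO.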
class Stream(object):
def __init__(self, content):
self.content = io.BytesIO(content)
@asyncio.coroutine
def read(self, size=None):
return self.content.read(size)
def at_eof(self):
return self.content.tell() == len(self.content.getbuffer())
@asyncio.coroutine
def readline(self):
return self.content.readline()
def unread_data(self, data):
self.content = io.BytesIO(data + self.content.read())
class StreamWithShortenRead(Stream):
def __init__(self, content):
self._first = True
super().__init__(content)
@asyncio.coroutine
def read(self, size=None):
if size is not None and self._first:
self._first = False
size = size // 2
return (yield from super().read(size))
class MultipartResponseWrapperTestCase(TestCase):
def setUp(self):
super().setUp()
wrapper = aiohttp.multipart.MultipartResponseWrapper(mock.Mock(),
mock.Mock())
self.wrapper = wrapper
def test_at_eof(self):
self.wrapper.at_eof()
self.assertTrue(self.wrapper.resp.content.at_eof.called)
def test_next(self):
self.wrapper.stream.next.return_value = self.future(b'')
self.wrapper.stream.at_eof.return_value = False
yield from self.wrapper.next()
self.assertTrue(self.wrapper.stream.next.called)
def test_release(self):
self.wrapper.resp.release.return_value = self.future(None)
yield from self.wrapper.release()
self.assertTrue(self.wrapper.resp.release.called)
def test_release_when_stream_at_eof(self):
self.wrapper.resp.release.return_value = self.future(None)
self.wrapper.stream.next.return_value = self.future(b'')
self.wrapper.stream.at_eof.return_value = True
yield from self.wrapper.next()
self.assertTrue(self.wrapper.stream.next.called)
self.assertTrue(self.wrapper.resp.release.called)
class PartReaderTestCase(TestCase):
def setUp(self):
super().setUp()
self.boundary = b'--:'
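# The delimiter is b'--:' (boundary ':'): a part's payload ends at the
# b'\r\n--:' delimiter and the final part at b'\r\n--:--'.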
def test_next(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {}, Stream(b'Hello, world!\r\n--:'))
result = yield from obj.next()
self.assertEqual(b'Hello, world!', result)
self.assertTrue(obj.at_eof())
def test_next_next(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {}, Stream(b'Hello, world!\r\n--:'))
result = yield from obj.next()
self.assertEqual(b'Hello, world!', result)
self.assertTrue(obj.at_eof())
result = yield from obj.next()
self.assertIsNone(result)
def test_read(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {}, Stream(b'Hello, world!\r\n--:'))
result = yield from obj.read()
self.assertEqual(b'Hello, world!', result)
self.assertTrue(obj.at_eof())
def test_read_chunk_at_eof(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {}, Stream(b'--:'))
obj._at_eof = True
result = yield from obj.read_chunk()
self.assertEqual(b'', result)
def test_read_chunk_without_content_length(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {}, Stream(b'Hello, world!\r\n--:'))
c1 = yield from obj.read_chunk(8)
c2 = yield from obj.read_chunk(8)
c3 = yield from obj.read_chunk(8)
self.assertEqual(c1 + c2, b'Hello, world!')
self.assertEqual(c3, b'')
def test_read_incomplete_chunk(self):
stream = Stream(b'')
def prepare(data):
f = helpers.create_future(self.loop)
f.set_result(data)
return f
with mock.patch.object(stream, 'read', side_effect=[
prepare(b'Hello, '),
prepare(b'World'),
prepare(b'!\r\n--:'),
prepare(b'')
]):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {}, stream)
c1 = yield from obj.read_chunk(8)
self.assertEqual(c1, b'Hello, ')
c2 = yield from obj.read_chunk(8)
self.assertEqual(c2, b'World')
c3 = yield from obj.read_chunk(8)
self.assertEqual(c3, b'!')
def test_read_all_at_once(self):
stream = Stream(b'Hello, World!\r\n--:--\r\n')
obj = aiohttp.multipart.BodyPartReader(self.boundary, {}, stream)
result = yield from obj.read_chunk()
self.assertEqual(b'Hello, World!', result)
result = yield from obj.read_chunk()
self.assertEqual(b'', result)
self.assertTrue(obj.at_eof())
def test_read_incomplete_body_chunked(self):
stream = Stream(b'Hello, World!\r\n-')
obj = aiohttp.multipart.BodyPartReader(self.boundary, {}, stream)
result = b''
with self.assertRaises(AssertionError):
for _ in range(4):
result += yield from obj.read_chunk(7)
self.assertEqual(b'Hello, World!\r\n-', result)
def test_read_boundary_with_incomplete_chunk(self):
stream = Stream(b'')
def prepare(data):
f = helpers.create_future(self.loop)
f.set_result(data)
return f
with mock.patch.object(stream, 'read', side_effect=[
prepare(b'Hello, World'),
prepare(b'!\r\n'),
prepare(b'--:'),
prepare(b'')
]):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {}, stream)
c1 = yield from obj.read_chunk(12)
self.assertEqual(c1, b'Hello, World')
c2 = yield from obj.read_chunk(8)
self.assertEqual(c2, b'!')
c3 = yield from obj.read_chunk(8)
self.assertEqual(c3, b'')
def test_multi_read_chunk(self):
stream = Stream(b'Hello,\r\n--:\r\n\r\nworld!\r\n--:--')
obj = aiohttp.multipart.BodyPartReader(self.boundary, {}, stream)
result = yield from obj.read_chunk(8)
self.assertEqual(b'Hello,', result)
result = yield from obj.read_chunk(8)
self.assertEqual(b'', result)
self.assertTrue(obj.at_eof())
def test_read_chunk_properly_counts_read_bytes(self):
expected = b'.' * 10
size = len(expected)
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {'CONTENT-LENGTH': size},
StreamWithShortenRead(expected + b'\r\n--:--'))
result = bytearray()
while True:
chunk = yield from obj.read_chunk()
if not chunk:
break
result.extend(chunk)
self.assertEqual(size, len(result))
self.assertEqual(b'.' * size, result)
self.assertTrue(obj.at_eof())
def test_read_does_not_read_boundary(self):
stream = Stream(b'Hello, world!\r\n--:')
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {}, stream)
result = yield from obj.read()
self.assertEqual(b'Hello, world!', result)
self.assertEqual(b'--:', (yield from stream.read()))
def test_multiread(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {}, Stream(b'Hello,\r\n--:\r\n\r\nworld!\r\n--:--'))
result = yield from obj.read()
self.assertEqual(b'Hello,', result)
result = yield from obj.read()
self.assertEqual(b'', result)
self.assertTrue(obj.at_eof())
def test_read_multiline(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {}, Stream(b'Hello\n,\r\nworld!\r\n--:--'))
result = yield from obj.read()
self.assertEqual(b'Hello\n,\r\nworld!', result)
result = yield from obj.read()
self.assertEqual(b'', result)
self.assertTrue(obj.at_eof())
def test_read_respects_content_length(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {'CONTENT-LENGTH': 100500},
Stream(b'.' * 100500 + b'\r\n--:--'))
result = yield from obj.read()
self.assertEqual(b'.' * 100500, result)
self.assertTrue(obj.at_eof())
def test_read_with_content_encoding_gzip(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {CONTENT_ENCODING: 'gzip'},
Stream(b'\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\x03\x0b\xc9\xccMU'
b'(\xc9W\x08J\xcdI\xacP\x04\x00$\xfb\x9eV\x0e\x00\x00\x00'
b'\r\n--:--'))
result = yield from obj.read(decode=True)
self.assertEqual(b'Time to Relax!', result)
def test_read_with_content_encoding_deflate(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {CONTENT_ENCODING: 'deflate'},
Stream(b'\x0b\xc9\xccMU(\xc9W\x08J\xcdI\xacP\x04\x00\r\n--:--'))
result = yield from obj.read(decode=True)
self.assertEqual(b'Time to Relax!', result)
def test_read_with_content_encoding_identity(self):
thing = (b'\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\x03\x0b\xc9\xccMU'
b'(\xc9W\x08J\xcdI\xacP\x04\x00$\xfb\x9eV\x0e\x00\x00\x00'
b'\r\n')
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {CONTENT_ENCODING: 'identity'},
Stream(thing + b'--:--'))
result = yield from obj.read(decode=True)
self.assertEqual(thing[:-2], result)
def test_read_with_content_encoding_unknown(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {CONTENT_ENCODING: 'snappy'},
Stream(b'\x0e4Time to Relax!\r\n--:--'))
with self.assertRaises(RuntimeError):
yield from obj.read(decode=True)
def test_read_with_content_transfer_encoding_base64(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {CONTENT_TRANSFER_ENCODING: 'base64'},
Stream(b'VGltZSB0byBSZWxheCE=\r\n--:--'))
result = yield from obj.read(decode=True)
self.assertEqual(b'Time to Relax!', result)
def test_read_with_content_transfer_encoding_quoted_printable(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {CONTENT_TRANSFER_ENCODING: 'quoted-printable'},
Stream(b'=D0=9F=D1=80=D0=B8=D0=B2=D0=B5=D1=82,'
b' =D0=BC=D0=B8=D1=80!\r\n--:--'))
result = yield from obj.read(decode=True)
self.assertEqual(b'\xd0\x9f\xd1\x80\xd0\xb8\xd0\xb2\xd0\xb5\xd1\x82,'
b' \xd0\xbc\xd0\xb8\xd1\x80!', result)
def test_read_with_content_transfer_encoding_binary(self):
data = b'\xd0\x9f\xd1\x80\xd0\xb8\xd0\xb2\xd0\xb5\xd1\x82,' \
b' \xd0\xbc\xd0\xb8\xd1\x80!'
for encoding in ('binary', '8bit', '7bit'):
with self.subTest(encoding):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {CONTENT_TRANSFER_ENCODING: encoding},
Stream(data + b'\r\n--:--'))
result = yield from obj.read(decode=True)
self.assertEqual(data, result)
def test_read_with_content_transfer_encoding_unknown(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {CONTENT_TRANSFER_ENCODING: 'unknown'},
Stream(b'\x0e4Time to Relax!\r\n--:--'))
with self.assertRaises(RuntimeError):
yield from obj.read(decode=True)
def test_read_text(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {}, Stream(b'Hello, world!\r\n--:--'))
result = yield from obj.text()
self.assertEqual('Hello, world!', result)
def test_read_text_default_encoding(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {},
Stream('Привет, Мир!\r\n--:--'.encode('utf-8')))
result = yield from obj.text()
self.assertEqual('Привет, Мир!', result)
def test_read_text_encoding(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {},
Stream('Привет, Мир!\r\n--:--'.encode('cp1251')))
result = yield from obj.text(encoding='cp1251')
self.assertEqual('Привет, Мир!', result)
def test_read_text_guess_encoding(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {CONTENT_TYPE: 'text/plain;charset=cp1251'},
Stream('Привет, Мир!\r\n--:--'.encode('cp1251')))
result = yield from obj.text()
self.assertEqual('Привет, Мир!', result)
def test_read_text_compressed(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {CONTENT_ENCODING: 'deflate',
CONTENT_TYPE: 'text/plain'},
Stream(b'\x0b\xc9\xccMU(\xc9W\x08J\xcdI\xacP\x04\x00\r\n--:--'))
result = yield from obj.text()
self.assertEqual('Time to Relax!', result)
def test_read_text_while_closed(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {CONTENT_TYPE: 'text/plain'}, Stream(b''))
obj._at_eof = True
result = yield from obj.text()
self.assertEqual('', result)
def test_read_json(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {CONTENT_TYPE: 'application/json'},
Stream(b'{"test": "passed"}\r\n--:--'))
result = yield from obj.json()
self.assertEqual({'test': 'passed'}, result)
def test_read_json_encoding(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {CONTENT_TYPE: 'application/json'},
Stream('{"тест": "пассед"}\r\n--:--'.encode('cp1251')))
result = yield from obj.json(encoding='cp1251')
self.assertEqual({'тест': 'пассед'}, result)
def test_read_json_guess_encoding(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {CONTENT_TYPE: 'application/json; charset=cp1251'},
Stream('{"тест": "пассед"}\r\n--:--'.encode('cp1251')))
result = yield from obj.json()
self.assertEqual({'тест': 'пассед'}, result)
def test_read_json_compressed(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {CONTENT_ENCODING: 'deflate',
CONTENT_TYPE: 'application/json'},
Stream(b'\xabV*I-.Q\xb2RP*H,.NMQ\xaa\x05\x00\r\n--:--'))
result = yield from obj.json()
self.assertEqual({'test': 'passed'}, result)
def test_read_json_while_closed(self):
stream = Stream(b'')
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {CONTENT_TYPE: 'application/json'}, stream)
obj._at_eof = True
result = yield from obj.json()
self.assertEqual(None, result)
def test_read_form(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {CONTENT_TYPE: 'application/x-www-form-urlencoded'},
Stream(b'foo=bar&foo=baz&boo=\r\n--:--'))
result = yield from obj.form()
self.assertEqual([('foo', 'bar'), ('foo', 'baz'), ('boo', '')],
result)
def test_read_form_encoding(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {CONTENT_TYPE: 'application/x-www-form-urlencoded'},
Stream('foo=bar&foo=baz&boo=\r\n--:--'.encode('cp1251')))
result = yield from obj.form(encoding='cp1251')
self.assertEqual([('foo', 'bar'), ('foo', 'baz'), ('boo', '')],
result)
def test_read_form_guess_encoding(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary,
{CONTENT_TYPE: 'application/x-www-form-urlencoded; charset=utf-8'},
Stream('foo=bar&foo=baz&boo=\r\n--:--'.encode('utf-8')))
result = yield from obj.form()
self.assertEqual([('foo', 'bar'), ('foo', 'baz'), ('boo', '')],
result)
def test_read_form_while_closed(self):
stream = Stream(b'')
obj = aiohttp.multipart.BodyPartReader(
self.boundary,
{CONTENT_TYPE: 'application/x-www-form-urlencoded'}, stream)
obj._at_eof = True
result = yield from obj.form()
self.assertEqual(None, result)
def test_readline(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {}, Stream(b'Hello\n,\r\nworld!\r\n--:--'))
result = yield from obj.readline()
self.assertEqual(b'Hello\n', result)
result = yield from obj.readline()
self.assertEqual(b',\r\n', result)
result = yield from obj.readline()
self.assertEqual(b'world!', result)
result = yield from obj.readline()
self.assertEqual(b'', result)
self.assertTrue(obj.at_eof())
def test_release(self):
stream = Stream(b'Hello,\r\n--:\r\n\r\nworld!\r\n--:--')
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {}, stream)
yield from obj.release()
self.assertTrue(obj.at_eof())
self.assertEqual(b'--:\r\n\r\nworld!\r\n--:--', stream.content.read())
def test_release_respects_content_length(self):
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {'CONTENT-LENGTH': 100500},
Stream(b'.' * 100500 + b'\r\n--:--'))
result = yield from obj.release()
self.assertIsNone(result)
self.assertTrue(obj.at_eof())
def test_release_release(self):
stream = Stream(b'Hello,\r\n--:\r\n\r\nworld!\r\n--:--')
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {}, stream)
yield from obj.release()
yield from obj.release()
self.assertEqual(b'--:\r\n\r\nworld!\r\n--:--', stream.content.read())
def test_filename(self):
part = aiohttp.multipart.BodyPartReader(
self.boundary,
{CONTENT_DISPOSITION: 'attachment; filename=foo.html'},
None)
self.assertEqual('foo.html', part.filename)
def test_reading_long_part(self):
size = 2 * stream_reader_default_limit
stream = StreamReader()
stream.feed_data(b'0' * size + b'\r\n--:--')
stream.feed_eof()
obj = aiohttp.multipart.BodyPartReader(
self.boundary, {}, stream)
data = yield from obj.read()
self.assertEqual(len(data), size)
class MultipartReaderTestCase(TestCase):
def test_from_response(self):
resp = Response({CONTENT_TYPE: 'multipart/related;boundary=":"'},
Stream(b'--:\r\n\r\nhello\r\n--:--'))
res = aiohttp.multipart.MultipartReader.from_response(resp)
self.assertIsInstance(res,
aiohttp.multipart.MultipartResponseWrapper)
self.assertIsInstance(res.stream,
aiohttp.multipart.MultipartReader)
def test_bad_boundary(self):
resp = Response(
{CONTENT_TYPE: 'multipart/related;boundary=' + 'a' * 80},
Stream(b''))
with self.assertRaises(ValueError):
aiohttp.multipart.MultipartReader.from_response(resp)
def test_dispatch(self):
reader = aiohttp.multipart.MultipartReader(
{CONTENT_TYPE: 'multipart/related;boundary=":"'},
Stream(b'--:\r\n\r\necho\r\n--:--'))
res = reader._get_part_reader({CONTENT_TYPE: 'text/plain'})
self.assertIsInstance(res, reader.part_reader_cls)
def test_dispatch_bodypart(self):
reader = aiohttp.multipart.MultipartReader(
{CONTENT_TYPE: 'multipart/related;boundary=":"'},
Stream(b'--:\r\n\r\necho\r\n--:--'))
res = reader._get_part_reader({CONTENT_TYPE: 'text/plain'})
self.assertIsInstance(res, reader.part_reader_cls)
def test_dispatch_multipart(self):
reader = aiohttp.multipart.MultipartReader(
{CONTENT_TYPE: 'multipart/related;boundary=":"'},
Stream(b'----:--\r\n'
b'\r\n'
b'test\r\n'
b'----:--\r\n'
b'\r\n'
b'passed\r\n'
b'----:----\r\n'
b'--:--'))
res = reader._get_part_reader(
{CONTENT_TYPE: 'multipart/related;boundary=--:--'})
self.assertIsInstance(res, reader.__class__)
def test_dispatch_custom_multipart_reader(self):
class CustomReader(aiohttp.multipart.MultipartReader):
pass
reader = aiohttp.multipart.MultipartReader(
{CONTENT_TYPE: 'multipart/related;boundary=":"'},
Stream(b'----:--\r\n'
b'\r\n'
b'test\r\n'
b'----:--\r\n'
b'\r\n'
b'passed\r\n'
b'----:----\r\n'
b'--:--'))
reader.multipart_reader_cls = CustomReader
res = reader._get_part_reader(
{CONTENT_TYPE: 'multipart/related;boundary=--:--'})
self.assertIsInstance(res, CustomReader)
def test_emit_next(self):
reader = aiohttp.multipart.MultipartReader(
{CONTENT_TYPE: 'multipart/related;boundary=":"'},
Stream(b'--:\r\n\r\necho\r\n--:--'))
res = yield from reader.next()
self.assertIsInstance(res, reader.part_reader_cls)
def test_invalid_boundary(self):
reader = aiohttp.multipart.MultipartReader(
{CONTENT_TYPE: 'multipart/related;boundary=":"'},
Stream(b'---:\r\n\r\necho\r\n---:--'))
with self.assertRaises(ValueError):
yield from reader.next()
def test_release(self):
reader = aiohttp.multipart.MultipartReader(
{CONTENT_TYPE: 'multipart/mixed;boundary=":"'},
Stream(b'--:\r\n'
b'Content-Type: multipart/related;boundary=--:--\r\n'
b'\r\n'
b'----:--\r\n'
b'\r\n'
b'test\r\n'
b'----:--\r\n'
b'\r\n'
b'passed\r\n'
b'----:----\r\n'
b'\r\n'
b'--:--'))
yield from reader.release()
self.assertTrue(reader.at_eof())
def test_release_release(self):
reader = aiohttp.multipart.MultipartReader(
{CONTENT_TYPE: 'multipart/related;boundary=":"'},
Stream(b'--:\r\n\r\necho\r\n--:--'))
yield from reader.release()
self.assertTrue(reader.at_eof())
yield from reader.release()
self.assertTrue(reader.at_eof())
def test_release_next(self):
reader = aiohttp.multipart.MultipartReader(
{CONTENT_TYPE: 'multipart/related;boundary=":"'},
Stream(b'--:\r\n\r\necho\r\n--:--'))
yield from reader.release()
self.assertTrue(reader.at_eof())
res = yield from reader.next()
self.assertIsNone(res)<|fim▁hole|> {CONTENT_TYPE: 'multipart/related;boundary=":"'},
Stream(b'--:\r\n'
b'\r\n'
b'test\r\n'
b'--:\r\n'
b'\r\n'
b'passed\r\n'
b'--:--'))
first = yield from reader.next()
self.assertIsInstance(first, aiohttp.multipart.BodyPartReader)
second = yield from reader.next()
self.assertTrue(first.at_eof())
self.assertFalse(second.at_eof())
def test_release_without_read_the_last_object(self):
reader = aiohttp.multipart.MultipartReader(
{CONTENT_TYPE: 'multipart/related;boundary=":"'},
Stream(b'--:\r\n'
b'\r\n'
b'test\r\n'
b'--:\r\n'
b'\r\n'
b'passed\r\n'
b'--:--'))
first = yield from reader.next()
second = yield from reader.next()
third = yield from reader.next()
self.assertTrue(first.at_eof())
self.assertTrue(second.at_eof())
self.assertIsNone(third)
def test_read_chunk_by_length_doesnt_breaks_reader(self):
reader = aiohttp.multipart.MultipartReader(
{CONTENT_TYPE: 'multipart/related;boundary=":"'},
Stream(b'--:\r\n'
b'Content-Length: 4\r\n\r\n'
b'test'
b'\r\n--:\r\n'
b'Content-Length: 6\r\n\r\n'
b'passed'
b'\r\n--:--'))
body_parts = []
while True:
read_part = b''
part = yield from reader.next()
if part is None:
break
while not part.at_eof():
read_part += yield from part.read_chunk(3)
body_parts.append(read_part)
self.assertListEqual(body_parts, [b'test', b'passed'])
def test_read_chunk_from_stream_doesnt_breaks_reader(self):
reader = aiohttp.multipart.MultipartReader(
{CONTENT_TYPE: 'multipart/related;boundary=":"'},
Stream(b'--:\r\n'
b'\r\n'
b'chunk'
b'\r\n--:\r\n'
b'\r\n'
b'two_chunks'
b'\r\n--:--'))
body_parts = []
while True:
read_part = b''
part = yield from reader.next()
if part is None:
break
while not part.at_eof():
chunk = yield from part.read_chunk(5)
self.assertTrue(chunk)
read_part += chunk
body_parts.append(read_part)
self.assertListEqual(body_parts, [b'chunk', b'two_chunks'])
def test_reading_skips_prelude(self):
reader = aiohttp.multipart.MultipartReader(
{CONTENT_TYPE: 'multipart/related;boundary=":"'},
Stream(b'Multi-part data is not supported.\r\n'
b'\r\n'
b'--:\r\n'
b'\r\n'
b'test\r\n'
b'--:\r\n'
b'\r\n'
b'passed\r\n'
b'--:--'))
first = yield from reader.next()
self.assertIsInstance(first, aiohttp.multipart.BodyPartReader)
second = yield from reader.next()
self.assertTrue(first.at_eof())
self.assertFalse(second.at_eof())
@asyncio.coroutine
def test_writer(writer):
assert writer.size == 0
assert writer.boundary == b':'
@asyncio.coroutine
def test_writer_serialize_io_chunk(buf, stream, writer):
flo = io.BytesIO(b'foobarbaz')
writer.append(flo)
yield from writer.write(stream)
assert (buf == b'--:\r\nContent-Type: application/octet-stream'
b'\r\nContent-Length: 9\r\n\r\nfoobarbaz\r\n--:--\r\n')
@asyncio.coroutine
def test_writer_serialize_json(buf, stream, writer):
writer.append_json({'привет': 'мир'})
yield from writer.write(stream)
assert (b'{"\\u043f\\u0440\\u0438\\u0432\\u0435\\u0442":'
b' "\\u043c\\u0438\\u0440"}' in buf)
@asyncio.coroutine
def test_writer_serialize_form(buf, stream, writer):
data = [('foo', 'bar'), ('foo', 'baz'), ('boo', 'zoo')]
writer.append_form(data)
yield from writer.write(stream)
assert (b'foo=bar&foo=baz&boo=zoo' in buf)
@asyncio.coroutine
def test_writer_serialize_form_dict(buf, stream, writer):
data = {'hello': 'мир'}
writer.append_form(data)
yield from writer.write(stream)
assert (b'hello=%D0%BC%D0%B8%D1%80' in buf)
@asyncio.coroutine
def test_writer_write(buf, stream, writer):
writer.append('foo-bar-baz')
writer.append_json({'test': 'passed'})
writer.append_form({'test': 'passed'})
writer.append_form([('one', 1), ('two', 2)])
sub_multipart = aiohttp.multipart.MultipartWriter(boundary='::')
sub_multipart.append('nested content')
sub_multipart.headers['X-CUSTOM'] = 'test'
writer.append(sub_multipart)
yield from writer.write(stream)
assert (
(b'--:\r\n'
b'Content-Type: text/plain; charset=utf-8\r\n'
b'Content-Length: 11\r\n\r\n'
b'foo-bar-baz'
b'\r\n'
b'--:\r\n'
b'Content-Type: application/json\r\n'
b'Content-Length: 18\r\n\r\n'
b'{"test": "passed"}'
b'\r\n'
b'--:\r\n'
b'Content-Type: application/x-www-form-urlencoded\r\n'
b'Content-Length: 11\r\n\r\n'
b'test=passed'
b'\r\n'
b'--:\r\n'
b'Content-Type: application/x-www-form-urlencoded\r\n'
b'Content-Length: 11\r\n\r\n'
b'one=1&two=2'
b'\r\n'
b'--:\r\n'
b'Content-Type: multipart/mixed; boundary="::"\r\n'
b'X-Custom: test\r\nContent-Length: 93\r\n\r\n'
b'--::\r\n'
b'Content-Type: text/plain; charset=utf-8\r\n'
b'Content-Length: 14\r\n\r\n'
b'nested content\r\n'
b'--::--\r\n'
b'\r\n'
b'--:--\r\n') == bytes(buf))
@asyncio.coroutine
def test_writer_serialize_with_content_encoding_gzip(buf, stream, writer):
writer.append('Time to Relax!', {CONTENT_ENCODING: 'gzip'})
yield from writer.write(stream)
headers, message = bytes(buf).split(b'\r\n\r\n', 1)
assert (b'--:\r\nContent-Encoding: gzip\r\n'
b'Content-Type: text/plain; charset=utf-8' == headers)
decompressor = zlib.decompressobj(wbits=16+zlib.MAX_WBITS)
data = decompressor.decompress(message.split(b'\r\n')[0])
data += decompressor.flush()
assert b'Time to Relax!' == data
@asyncio.coroutine
def test_writer_serialize_with_content_encoding_deflate(buf, stream, writer):
writer.append('Time to Relax!', {CONTENT_ENCODING: 'deflate'})
yield from writer.write(stream)
headers, message = bytes(buf).split(b'\r\n\r\n', 1)
assert (b'--:\r\nContent-Encoding: deflate\r\n'
b'Content-Type: text/plain; charset=utf-8' == headers)
thing = b'\x0b\xc9\xccMU(\xc9W\x08J\xcdI\xacP\x04\x00\r\n--:--\r\n'
assert thing == message
@asyncio.coroutine
def test_writer_serialize_with_content_encoding_identity(buf, stream, writer):
thing = b'\x0b\xc9\xccMU(\xc9W\x08J\xcdI\xacP\x04\x00'
writer.append(thing, {CONTENT_ENCODING: 'identity'})
yield from writer.write(stream)
headers, message = bytes(buf).split(b'\r\n\r\n', 1)
assert (b'--:\r\nContent-Encoding: identity\r\n'
b'Content-Type: application/octet-stream\r\n'
b'Content-Length: 16' == headers)
assert thing == message.split(b'\r\n')[0]
def test_writer_serialize_with_content_encoding_unknown(buf, stream, writer):
with pytest.raises(RuntimeError):
writer.append('Time to Relax!', {CONTENT_ENCODING: 'snappy'})
@asyncio.coroutine
def test_writer_with_content_transfer_encoding_base64(buf, stream, writer):
writer.append('Time to Relax!', {CONTENT_TRANSFER_ENCODING: 'base64'})
yield from writer.write(stream)
headers, message = bytes(buf).split(b'\r\n\r\n', 1)
assert (b'--:\r\nContent-Transfer-Encoding: base64\r\n'
b'Content-Type: text/plain; charset=utf-8' ==
headers)
assert b'VGltZSB0byBSZWxheCE=' == message.split(b'\r\n')[0]
@asyncio.coroutine
def test_writer_content_transfer_encoding_quote_printable(buf, stream, writer):
writer.append('Привет, мир!',
{CONTENT_TRANSFER_ENCODING: 'quoted-printable'})
yield from writer.write(stream)
headers, message = bytes(buf).split(b'\r\n\r\n', 1)
assert (b'--:\r\nContent-Transfer-Encoding: quoted-printable\r\n'
b'Content-Type: text/plain; charset=utf-8' == headers)
assert (b'=D0=9F=D1=80=D0=B8=D0=B2=D0=B5=D1=82,'
b' =D0=BC=D0=B8=D1=80!' == message.split(b'\r\n')[0])
def test_writer_content_transfer_encoding_unknown(buf, stream, writer):
with pytest.raises(RuntimeError):
writer.append('Time to Relax!', {CONTENT_TRANSFER_ENCODING: 'unknown'})
class MultipartWriterTestCase(unittest.TestCase):
def setUp(self):
self.buf = bytearray()
self.stream = mock.Mock()
def write(chunk):
self.buf.extend(chunk)
return ()
self.stream.write.side_effect = write
self.writer = aiohttp.multipart.MultipartWriter(boundary=':')
def test_default_subtype(self):
mtype, stype, *_ = parse_mimetype(
self.writer.headers.get(CONTENT_TYPE))
self.assertEqual('multipart', mtype)
self.assertEqual('mixed', stype)
def test_bad_boundary(self):
with self.assertRaises(ValueError):
aiohttp.multipart.MultipartWriter(boundary='тест')
def test_default_headers(self):
self.assertEqual({CONTENT_TYPE: 'multipart/mixed; boundary=":"'},
self.writer.headers)
def test_iter_parts(self):
self.writer.append('foo')
self.writer.append('bar')
self.writer.append('baz')
self.assertEqual(3, len(list(self.writer)))
def test_append(self):
self.assertEqual(0, len(self.writer))
self.writer.append('hello, world!')
self.assertEqual(1, len(self.writer))
self.assertIsInstance(self.writer._parts[0][0], payload.Payload)
def test_append_with_headers(self):
self.writer.append('hello, world!', {'x-foo': 'bar'})
self.assertEqual(1, len(self.writer))
self.assertIn('x-foo', self.writer._parts[0][0].headers)
self.assertEqual(self.writer._parts[0][0].headers['x-foo'], 'bar')
def test_append_json(self):
self.writer.append_json({'foo': 'bar'})
self.assertEqual(1, len(self.writer))
part = self.writer._parts[0][0]
self.assertEqual(part.headers[CONTENT_TYPE], 'application/json')
def test_append_part(self):
part = payload.get_payload(
'test', headers={CONTENT_TYPE: 'text/plain'})
self.writer.append(part, {CONTENT_TYPE: 'test/passed'})
self.assertEqual(1, len(self.writer))
part = self.writer._parts[0][0]
self.assertEqual(part.headers[CONTENT_TYPE], 'test/passed')
def test_append_json_overrides_content_type(self):
self.writer.append_json({'foo': 'bar'}, {CONTENT_TYPE: 'test/passed'})
self.assertEqual(1, len(self.writer))
part = self.writer._parts[0][0]
self.assertEqual(part.headers[CONTENT_TYPE], 'test/passed')
def test_append_form(self):
self.writer.append_form({'foo': 'bar'}, {CONTENT_TYPE: 'test/passed'})
self.assertEqual(1, len(self.writer))
part = self.writer._parts[0][0]
self.assertEqual(part.headers[CONTENT_TYPE], 'test/passed')
def test_append_multipart(self):
subwriter = aiohttp.multipart.MultipartWriter(boundary=':')
subwriter.append_json({'foo': 'bar'})
self.writer.append(subwriter, {CONTENT_TYPE: 'test/passed'})
self.assertEqual(1, len(self.writer))
part = self.writer._parts[0][0]
self.assertEqual(part.headers[CONTENT_TYPE], 'test/passed')
def test_write(self):
self.assertEqual([], list(self.writer.write(self.stream)))
def test_with(self):
with aiohttp.multipart.MultipartWriter(boundary=':') as writer:
writer.append('foo')
writer.append(b'bar')
writer.append_json({'baz': True})
self.assertEqual(3, len(writer))
def test_append_int_not_allowed(self):
with self.assertRaises(TypeError):
with aiohttp.multipart.MultipartWriter(boundary=':') as writer:
writer.append(1)
def test_append_float_not_allowed(self):
with self.assertRaises(TypeError):
with aiohttp.multipart.MultipartWriter(boundary=':') as writer:
writer.append(1.1)
def test_append_none_not_allowed(self):
with self.assertRaises(TypeError):
with aiohttp.multipart.MultipartWriter(boundary=':') as writer:
writer.append(None)
class ParseContentDispositionTestCase(unittest.TestCase):
# http://greenbytes.de/tech/tc2231/
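# The cases below mirror the Content-Disposition test matrix published at
# that URL (RFC 6266 / RFC 2231 behavior); malformed headers are expected
# to emit a BadContentDispositionHeader warning and parse as (None, {}).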
def test_parse_empty(self):
disptype, params = parse_content_disposition(None)
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_inlonly(self):
disptype, params = parse_content_disposition('inline')
self.assertEqual('inline', disptype)
self.assertEqual({}, params)
def test_inlonlyquoted(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition('"inline"')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_semicolon(self):
disptype, params = parse_content_disposition(
'form-data; name="data"; filename="file ; name.mp4"')
self.assertEqual(disptype, 'form-data')
self.assertEqual(
params, {'name': 'data', 'filename': 'file ; name.mp4'})
def test_inlwithasciifilename(self):
disptype, params = parse_content_disposition(
'inline; filename="foo.html"')
self.assertEqual('inline', disptype)
self.assertEqual({'filename': 'foo.html'}, params)
def test_inlwithfnattach(self):
disptype, params = parse_content_disposition(
'inline; filename="Not an attachment!"')
self.assertEqual('inline', disptype)
self.assertEqual({'filename': 'Not an attachment!'}, params)
def test_attonly(self):
disptype, params = parse_content_disposition('attachment')
self.assertEqual('attachment', disptype)
self.assertEqual({}, params)
def test_attonlyquoted(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition('"attachment"')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_attonlyucase(self):
disptype, params = parse_content_disposition('ATTACHMENT')
self.assertEqual('attachment', disptype)
self.assertEqual({}, params)
def test_attwithasciifilename(self):
disptype, params = parse_content_disposition(
'attachment; filename="foo.html"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': 'foo.html'}, params)
def test_inlwithasciifilenamepdf(self):
disptype, params = parse_content_disposition(
'attachment; filename="foo.pdf"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': 'foo.pdf'}, params)
def test_attwithasciifilename25(self):
disptype, params = parse_content_disposition(
'attachment; filename="0000000000111111111122222"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': '0000000000111111111122222'}, params)
def test_attwithasciifilename35(self):
disptype, params = parse_content_disposition(
'attachment; filename="00000000001111111111222222222233333"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': '00000000001111111111222222222233333'},
params)
def test_attwithasciifnescapedchar(self):
disptype, params = parse_content_disposition(
r'attachment; filename="f\oo.html"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': 'foo.html'}, params)
def test_attwithasciifnescapedquote(self):
disptype, params = parse_content_disposition(
'attachment; filename="\"quoting\" tested.html"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': '"quoting" tested.html'}, params)
@unittest.skip('needs a smarter parser that respects quoted text')
def test_attwithquotedsemicolon(self):
disptype, params = parse_content_disposition(
'attachment; filename="Here\'s a semicolon;.html"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': 'Here\'s a semicolon;.html'}, params)
def test_attwithfilenameandextparam(self):
disptype, params = parse_content_disposition(
'attachment; foo="bar"; filename="foo.html"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': 'foo.html', 'foo': 'bar'}, params)
def test_attwithfilenameandextparamescaped(self):
disptype, params = parse_content_disposition(
'attachment; foo="\"\\";filename="foo.html"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': 'foo.html', 'foo': '"\\'}, params)
def test_attwithasciifilenameucase(self):
disptype, params = parse_content_disposition(
'attachment; FILENAME="foo.html"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': 'foo.html'}, params)
def test_attwithasciifilenamenq(self):
disptype, params = parse_content_disposition(
'attachment; filename=foo.html')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': 'foo.html'}, params)
def test_attwithtokfncommanq(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename=foo,bar.html')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_attwithasciifilenamenqs(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename=foo.html ;')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_attemptyparam(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; ;filename=foo')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_attwithasciifilenamenqws(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename=foo bar.html')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_attwithfntokensq(self):
disptype, params = parse_content_disposition(
"attachment; filename='foo.html'")
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': "'foo.html'"}, params)
def test_attwithisofnplain(self):
disptype, params = parse_content_disposition(
'attachment; filename="foo-ä.html"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': 'foo-ä.html'}, params)
def test_attwithutf8fnplain(self):
disptype, params = parse_content_disposition(
'attachment; filename="foo-ä.html"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': 'foo-ä.html'}, params)
def test_attwithfnrawpctenca(self):
disptype, params = parse_content_disposition(
'attachment; filename="foo-%41.html"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': 'foo-%41.html'}, params)
def test_attwithfnusingpct(self):
disptype, params = parse_content_disposition(
'attachment; filename="50%.html"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': '50%.html'}, params)
def test_attwithfnrawpctencaq(self):
disptype, params = parse_content_disposition(
r'attachment; filename="foo-%\41.html"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': r'foo-%41.html'}, params)
def test_attwithnamepct(self):
disptype, params = parse_content_disposition(
'attachment; filename="foo-%41.html"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': 'foo-%41.html'}, params)
def test_attwithfilenamepctandiso(self):
disptype, params = parse_content_disposition(
'attachment; filename="ä-%41.html"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': 'ä-%41.html'}, params)
def test_attwithfnrawpctenclong(self):
disptype, params = parse_content_disposition(
'attachment; filename="foo-%c3%a4-%e2%82%ac.html"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': 'foo-%c3%a4-%e2%82%ac.html'}, params)
def test_attwithasciifilenamews1(self):
disptype, params = parse_content_disposition(
'attachment; filename ="foo.html"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': 'foo.html'}, params)
def test_attwith2filenames(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename="foo.html"; filename="bar.html"')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_attfnbrokentoken(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename=foo[1](2).html')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_attfnbrokentokeniso(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename=foo-ä.html')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_attfnbrokentokenutf(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename=foo-ä.html')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_attmissingdisposition(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'filename=foo.html')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_attmissingdisposition2(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'x=y; filename=foo.html')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_attmissingdisposition3(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'"foo; filename=bar;baz"; filename=qux')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_attmissingdisposition4(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'filename=foo.html, filename=bar.html')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_emptydisposition(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'; filename=foo.html')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_doublecolon(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
': inline; attachment; filename=foo.html')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_attandinline(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'inline; attachment; filename=foo.html')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_attandinline2(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; inline; filename=foo.html')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_attbrokenquotedfn(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename="foo.html".txt')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_attbrokenquotedfn2(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename="bar')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_attbrokenquotedfn3(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename=foo"bar;baz"qux')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_attmultinstances(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename=foo.html, attachment; filename=bar.html')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_attmissingdelim(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; foo=foo filename=bar')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_attmissingdelim2(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename=bar foo=foo')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_attmissingdelim3(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment filename=bar')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_attreversed(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'filename=foo.html; attachment')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_attconfusedparam(self):
disptype, params = parse_content_disposition(
'attachment; xfilename=foo.html')
self.assertEqual('attachment', disptype)
self.assertEqual({'xfilename': 'foo.html'}, params)
def test_attabspath(self):
disptype, params = parse_content_disposition(
'attachment; filename="/foo.html"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': 'foo.html'}, params)
def test_attabspathwin(self):
disptype, params = parse_content_disposition(
'attachment; filename="\\foo.html"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': 'foo.html'}, params)
def test_attcdate(self):
disptype, params = parse_content_disposition(
'attachment; creation-date="Wed, 12 Feb 1997 16:29:51 -0500"')
self.assertEqual('attachment', disptype)
self.assertEqual({'creation-date': 'Wed, 12 Feb 1997 16:29:51 -0500'},
params)
def test_attmdate(self):
disptype, params = parse_content_disposition(
'attachment; modification-date="Wed, 12 Feb 1997 16:29:51 -0500"')
self.assertEqual('attachment', disptype)
self.assertEqual(
{'modification-date': 'Wed, 12 Feb 1997 16:29:51 -0500'},
params)
def test_dispext(self):
disptype, params = parse_content_disposition('foobar')
self.assertEqual('foobar', disptype)
self.assertEqual({}, params)
def test_dispextbadfn(self):
disptype, params = parse_content_disposition(
'attachment; example="filename=example.txt"')
self.assertEqual('attachment', disptype)
self.assertEqual({'example': 'filename=example.txt'}, params)
def test_attwithisofn2231iso(self):
disptype, params = parse_content_disposition(
"attachment; filename*=iso-8859-1''foo-%E4.html")
self.assertEqual('attachment', disptype)
self.assertEqual({'filename*': 'foo-ä.html'}, params)
def test_attwithfn2231utf8(self):
disptype, params = parse_content_disposition(
"attachment; filename*=UTF-8''foo-%c3%a4-%e2%82%ac.html")
self.assertEqual('attachment', disptype)
self.assertEqual({'filename*': 'foo-ä-€.html'}, params)
def test_attwithfn2231noc(self):
disptype, params = parse_content_disposition(
"attachment; filename*=''foo-%c3%a4-%e2%82%ac.html")
self.assertEqual('attachment', disptype)
self.assertEqual({'filename*': 'foo-ä-€.html'}, params)
def test_attwithfn2231utf8comp(self):
disptype, params = parse_content_disposition(
"attachment; filename*=UTF-8''foo-a%cc%88.html")
self.assertEqual('attachment', disptype)
self.assertEqual({'filename*': 'foo-ä.html'}, params)
@unittest.skip('should raise decoding error: %82 is invalid for latin1')
def test_attwithfn2231utf8_bad(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionParam):
disptype, params = parse_content_disposition(
"attachment; filename*=iso-8859-1''foo-%c3%a4-%e2%82%ac.html")
self.assertEqual('attachment', disptype)
self.assertEqual({}, params)
@unittest.skip('should raise decoding error: %E4 is invalid for utf-8')
def test_attwithfn2231iso_bad(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionParam):
disptype, params = parse_content_disposition(
"attachment; filename*=utf-8''foo-%E4.html")
self.assertEqual('attachment', disptype)
self.assertEqual({}, params)
def test_attwithfn2231ws1(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionParam):
disptype, params = parse_content_disposition(
"attachment; filename *=UTF-8''foo-%c3%a4.html")
self.assertEqual('attachment', disptype)
self.assertEqual({}, params)
def test_attwithfn2231ws2(self):
disptype, params = parse_content_disposition(
"attachment; filename*= UTF-8''foo-%c3%a4.html")
self.assertEqual('attachment', disptype)
self.assertEqual({'filename*': 'foo-ä.html'}, params)
def test_attwithfn2231ws3(self):
disptype, params = parse_content_disposition(
"attachment; filename* =UTF-8''foo-%c3%a4.html")
self.assertEqual('attachment', disptype)
self.assertEqual({'filename*': 'foo-ä.html'}, params)
def test_attwithfn2231quot(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionParam):
disptype, params = parse_content_disposition(
"attachment; filename*=\"UTF-8''foo-%c3%a4.html\"")
self.assertEqual('attachment', disptype)
self.assertEqual({}, params)
def test_attwithfn2231quot2(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionParam):
disptype, params = parse_content_disposition(
"attachment; filename*=\"foo%20bar.html\"")
self.assertEqual('attachment', disptype)
self.assertEqual({}, params)
def test_attwithfn2231singleqmissing(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionParam):
disptype, params = parse_content_disposition(
"attachment; filename*=UTF-8'foo-%c3%a4.html")
self.assertEqual('attachment', disptype)
self.assertEqual({}, params)
@unittest.skip('urllib.parse.unquote is tolerant of standalone % chars')
def test_attwithfn2231nbadpct1(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionParam):
disptype, params = parse_content_disposition(
"attachment; filename*=UTF-8''foo%")
self.assertEqual('attachment', disptype)
self.assertEqual({}, params)
@unittest.skip('urllib.parse.unquote is tolerant of standalone % chars')
def test_attwithfn2231nbadpct2(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionParam):
disptype, params = parse_content_disposition(
"attachment; filename*=UTF-8''f%oo.html")
self.assertEqual('attachment', disptype)
self.assertEqual({}, params)
def test_attwithfn2231dpct(self):
disptype, params = parse_content_disposition(
"attachment; filename*=UTF-8''A-%2541.html")
self.assertEqual('attachment', disptype)
self.assertEqual({'filename*': 'A-%41.html'}, params)
def test_attwithfn2231abspathdisguised(self):
disptype, params = parse_content_disposition(
"attachment; filename*=UTF-8''%5cfoo.html")
self.assertEqual('attachment', disptype)
self.assertEqual({'filename*': '\\foo.html'}, params)
def test_attfncont(self):
disptype, params = parse_content_disposition(
'attachment; filename*0="foo."; filename*1="html"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename*0': 'foo.',
'filename*1': 'html'}, params)
def test_attfncontqs(self):
disptype, params = parse_content_disposition(
r'attachment; filename*0="foo"; filename*1="\b\a\r.html"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename*0': 'foo',
'filename*1': 'bar.html'}, params)
def test_attfncontenc(self):
disptype, params = parse_content_disposition(
"attachment; filename*0*=UTF-8''foo-%c3%a4; filename*1=\".html\"")
self.assertEqual('attachment', disptype)
self.assertEqual({'filename*0*': "UTF-8''foo-%c3%a4",
'filename*1': '.html'}, params)
def test_attfncontlz(self):
disptype, params = parse_content_disposition(
'attachment; filename*0="foo"; filename*01="bar"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename*0': 'foo',
'filename*01': 'bar'}, params)
def test_attfncontnc(self):
disptype, params = parse_content_disposition(
'attachment; filename*0="foo"; filename*2="bar"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename*0': 'foo',
'filename*2': 'bar'}, params)
def test_attfnconts1(self):
disptype, params = parse_content_disposition(
'attachment; filename*0="foo."; filename*2="html"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename*0': 'foo.',
'filename*2': 'html'}, params)
def test_attfncontord(self):
disptype, params = parse_content_disposition(
'attachment; filename*1="bar"; filename*0="foo"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename*0': 'foo',
'filename*1': 'bar'}, params)
def test_attfnboth(self):
disptype, params = parse_content_disposition(
'attachment; filename="foo-ae.html";'
" filename*=UTF-8''foo-%c3%a4.html")
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': 'foo-ae.html',
'filename*': 'foo-ä.html'}, params)
def test_attfnboth2(self):
disptype, params = parse_content_disposition(
"attachment; filename*=UTF-8''foo-%c3%a4.html;"
' filename="foo-ae.html"')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': 'foo-ae.html',
'filename*': 'foo-ä.html'}, params)
def test_attfnboth3(self):
disptype, params = parse_content_disposition(
"attachment; filename*0*=ISO-8859-15''euro-sign%3d%a4;"
" filename*=ISO-8859-1''currency-sign%3d%a4")
self.assertEqual('attachment', disptype)
self.assertEqual({'filename*': 'currency-sign=¤',
'filename*0*': "ISO-8859-15''euro-sign%3d%a4"},
params)
def test_attnewandfn(self):
disptype, params = parse_content_disposition(
'attachment; foobar=x; filename="foo.html"')
self.assertEqual('attachment', disptype)
self.assertEqual({'foobar': 'x',
'filename': 'foo.html'}, params)
def test_attrfc2047token(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename==?ISO-8859-1?Q?foo-=E4.html?=')
self.assertEqual(None, disptype)
self.assertEqual({}, params)
def test_attrfc2047quoted(self):
disptype, params = parse_content_disposition(
'attachment; filename="=?ISO-8859-1?Q?foo-=E4.html?="')
self.assertEqual('attachment', disptype)
self.assertEqual({'filename': '=?ISO-8859-1?Q?foo-=E4.html?='}, params)
def test_bad_continuous_param(self):
with self.assertWarns(aiohttp.multipart.BadContentDispositionParam):
disptype, params = parse_content_disposition(
'attachment; filename*0=foo bar')
self.assertEqual('attachment', disptype)
self.assertEqual({}, params)
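# A minimal usage sketch tying the two helpers under test together (header
# value illustrative, taken from the cases above): parse the header, then
# resolve the preferred filename from the resulting params:
#
#     disptype, params = parse_content_disposition(
#         "attachment; filename*=UTF-8''foo-%c3%a4.html")
#     assert disptype == 'attachment'
#     assert content_disposition_filename(params) == 'foo-ä.html'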
class ContentDispositionFilenameTestCase(unittest.TestCase):
# http://greenbytes.de/tech/tc2231/
def test_no_filename(self):
self.assertIsNone(content_disposition_filename({}))
self.assertIsNone(content_disposition_filename({'foo': 'bar'}))
def test_filename(self):
params = {'filename': 'foo.html'}
self.assertEqual('foo.html', content_disposition_filename(params))
def test_filename_ext(self):
params = {'filename*': 'файл.html'}
self.assertEqual('файл.html', content_disposition_filename(params))
def test_attfncont(self):
params = {'filename*0': 'foo.', 'filename*1': 'html'}
self.assertEqual('foo.html', content_disposition_filename(params))
def test_attfncontqs(self):
params = {'filename*0': 'foo', 'filename*1': 'bar.html'}
self.assertEqual('foobar.html', content_disposition_filename(params))
def test_attfncontenc(self):
params = {'filename*0*': "UTF-8''foo-%c3%a4",
'filename*1': '.html'}
self.assertEqual('foo-ä.html', content_disposition_filename(params))
def test_attfncontlz(self):
params = {'filename*0': 'foo',
'filename*01': 'bar'}
self.assertEqual('foo', content_disposition_filename(params))
def test_attfncontnc(self):
params = {'filename*0': 'foo',
'filename*2': 'bar'}
self.assertEqual('foo', content_disposition_filename(params))
def test_attfnconts1(self):
params = {'filename*1': 'foo',
'filename*2': 'bar'}
self.assertEqual(None, content_disposition_filename(params))
def test_attfnboth(self):
params = {'filename': 'foo-ae.html',
'filename*': 'foo-ä.html'}
self.assertEqual('foo-ä.html', content_disposition_filename(params))
def test_attfnboth3(self):
params = {'filename*0*': "ISO-8859-15''euro-sign%3d%a4",
'filename*': 'currency-sign=¤'}
self.assertEqual('currency-sign=¤',
content_disposition_filename(params))
def test_attrfc2047quoted(self):
params = {'filename': '=?ISO-8859-1?Q?foo-=E4.html?='}
self.assertEqual('=?ISO-8859-1?Q?foo-=E4.html?=',
content_disposition_filename(params))<|fim▁end|> |
def test_second_next_releases_previous_object(self):
reader = aiohttp.multipart.MultipartReader( |
<|file_name|>test_version.py<|end_file_name|><|fim▁begin|>#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2020, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
import pytest ; pytest
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
import re
# External imports
import mock
# Bokeh imports
from bokeh._version import get_versions
# Module under test
import bokeh.util.version as buv # isort:skip
#-----------------------------------------------------------------------------
# Setup
#-----------------------------------------------------------------------------
VERSION_PAT = re.compile(r"^(\d+\.\d+\.\d+)$")
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
class Test___version__(object):
def test_basic(self) -> None:
assert isinstance(buv.__version__, str)
assert buv.__version__ == get_versions()['version']
class Test_base_version(object):
def test_returns_helper(self) -> None:
with mock.patch('bokeh.util.version._base_version_helper') as helper:
buv.base_version()
assert helper.called<|fim▁hole|> assert buv.is_full_release() == bool(VERSION_PAT.match(buv.__version__))
def test_mock_full(self, monkeypatch) -> None:
monkeypatch.setattr(buv, '__version__', "1.5.0")
assert buv.is_full_release()
@pytest.mark.parametrize('v', ("1.2.3dev2", "1.4.5rc3", "junk"))
def test_mock_not_full(self, monkeypatch, v) -> None:
monkeypatch.setattr(buv, '__version__', v)
assert not buv.is_full_release()
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
class Test__base_version_helper(object):
def test_release_version_unchanged(self) -> None:
assert buv._base_version_helper("0.2.3") == "0.2.3"
assert buv._base_version_helper("1.2.3") == "1.2.3"
def test_dev_version_stripped(self) -> None:
assert buv._base_version_helper("0.2.3dev2") == "0.2.3"
assert buv._base_version_helper("1.2.3dev10") == "1.2.3"
def test_rc_version_stripped(self) -> None:
assert buv._base_version_helper("0.2.3rc2") == "0.2.3"
assert buv._base_version_helper("1.2.3rc10") == "1.2.3"
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------<|fim▁end|> |
class Test_is_full_release(object):
def test_actual(self) -> None: |
<|file_name|>ads1256_analog_ph_ec.py<|end_file_name|><|fim▁begin|># coding=utf-8
import traceback
from flask_babel import lazy_gettext
from mycodo.config import SQL_DATABASE_MYCODO
from mycodo.databases.models import Conversion
from mycodo.databases.models import DeviceMeasurements
from mycodo.databases.utils import session_scope
from mycodo.inputs.base_input import AbstractInput
from mycodo.inputs.sensorutils import convert_from_x_to_y_unit
from mycodo.utils.database import db_retrieve_table_daemon
from mycodo.utils.system_pi import get_measurement
from mycodo.utils.system_pi import return_measurement_info
MYCODO_DB_PATH = 'sqlite:///' + SQL_DATABASE_MYCODO
def constraints_pass_positive_value(mod_input, value):
"""
Check if the user input is acceptable
:param mod_input: SQL object with user-saved Input options
:param value: float or int
:return: tuple: (bool, list of strings)
"""
errors = []
all_passed = True
# Ensure value is positive
if value <= 0:
all_passed = False
errors.append("Must be a positive value")
return all_passed, errors, mod_input
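# Illustrative behaviour of the constraint above: a positive value such as
# constraints_pass_positive_value(mod_input, 120) yields
# (True, [], mod_input), while 0 or a negative value yields
# (False, ["Must be a positive value"], mod_input).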
def execute_at_modification(
messages,
mod_input,
request_form,
custom_options_dict_presave,
custom_options_channels_dict_presave,
custom_options_dict_postsave,
custom_options_channels_dict_postsave):
try:
if (custom_options_dict_postsave['adc_channel_ph'] ==
custom_options_dict_postsave['adc_channel_ec']):
messages["error"].append("Cannot set pH and EC to be measured from the same channel.")
else:
with session_scope(MYCODO_DB_PATH) as new_session:
measurements = new_session.query(DeviceMeasurements).filter(
DeviceMeasurements.device_id == mod_input.unique_id).all()
for each_measure in measurements:
if each_measure.channel == int(custom_options_dict_postsave['adc_channel_ph']):
if each_measure.measurement != 'ion_concentration':
messages["page_refresh"] = True
each_measure.conversion_id = ''
each_measure.measurement = 'ion_concentration'
each_measure.unit = 'pH'
elif each_measure.channel == int(custom_options_dict_postsave['adc_channel_ec']):
if each_measure.measurement != 'electrical_conductivity':
messages["page_refresh"] = True
each_measure.conversion_id = ''
each_measure.measurement = 'electrical_conductivity'
each_measure.unit = 'uS_cm'
else:
if each_measure.measurement != 'electrical_potential':
messages["page_refresh"] = True
each_measure.conversion_id = ''
each_measure.measurement = 'electrical_potential'
each_measure.unit = 'V'
new_session.commit()
except Exception:
messages["error"].append("execute_at_modification() Error: {}".format(traceback.print_exc()))
return (messages,
mod_input,
custom_options_dict_postsave,
custom_options_channels_dict_postsave)
# Measurements
measurements_dict = {
0: {
'measurement': 'ion_concentration',
'unit': 'pH'
},
1: {
'measurement': 'electrical_conductivity',
'unit': 'uS_cm'
},
2: {
'measurement': 'electrical_potential',
'unit': 'V'
},
3: {
'measurement': 'electrical_potential',
'unit': 'V'
},
4: {
'measurement': 'electrical_potential',
'unit': 'V'
},
5: {
'measurement': 'electrical_potential',
'unit': 'V'
},
6: {
'measurement': 'electrical_potential',
'unit': 'V'
},
7: {
'measurement': 'electrical_potential',
'unit': 'V'
}
}
# Input information
INPUT_INFORMATION = {
'input_name_unique': 'ADS1256_ANALOG_PH_EC',
'input_manufacturer': 'Texas Instruments',
'input_name': 'ADS1256: Generic Analog pH/EC',
'input_library': 'wiringpi, kizniche/PiPyADC-py3',
'measurements_name': 'Ion Concentration/Electrical Conductivity',
'measurements_dict': measurements_dict,
'execute_at_modification': execute_at_modification,
'message': 'This input relies on an ADS1256 analog-to-digital converter (ADC) to measure pH and/or electrical conductivity (EC) from analog sensors. You can enable or disable either measurement if you want to only connect a pH sensor or an EC sensor by selecting which measurements you want to under Measurements Enabled. Select which channel each sensor is connected to on the ADC. There are default calibration values initially set for the Input. There are also functions to allow you to easily calibrate your sensors with calibration solutions. If you use the Calibrate Slot actions, these values will be calculated and will replace the currently-set values. You can use the Clear Calibration action to delete the database values and return to using the default values. If you delete the Input or create a new Input to use your ADC/sensors with, you will need to recalibrate in order to store new calibration data.',
'options_enabled': [
'measurements_select',
'adc_gain',
'adc_sample_speed',
'period',
'pre_output'
],
'options_disabled': ['interface'],
'dependencies_module': [
('pip-pypi', 'wiringpi', 'wiringpi'),
('pip-pypi', 'pipyadc_py3', 'git+https://github.com/kizniche/PiPyADC-py3.git') # PiPyADC ported to Python3
],
'interfaces': ['UART'],
# TODO: Next major revision, move settings such as these to custom_options
'adc_gain': [
(1, '1 (±5 V)'),
(2, '2 (±2.5 V)'),
(4, '4 (±1.25 V)'),
(8, '8 (±0.5 V)'),
(16, '16 (±0.25 V)'),
(32, '32 (±0.125 V)'),
(64, '64 (±0.0625 V)')
],
'adc_sample_speed': [
('30000', '30,000'),
('15000', '15,000'),
('7500', '7,500'),
('3750', '3,750'),
('2000', '2,000'),
('1000', '1,000'),
('500', '500'),
('100', '100'),
('60', '60'),
('50', '50'),
('30', '30'),
('25', '25'),
('15', '15'),
('10', '10'),
('5', '5'),
('2d5', '2.5')
],
'custom_options': [
{
'id': 'adc_channel_ph',
'type': 'select',
'default_value': '0',
'options_select': [
('-1', 'Not Connected'),
('0', 'Channel 0'),
('1', 'Channel 1'),
('2', 'Channel 2'),
('3', 'Channel 3'),
('4', 'Channel 4'),
('5', 'Channel 5'),
('6', 'Channel 6'),
('7', 'Channel 7'),
],
'name': 'ADC Channel: pH',
'phrase': 'The ADC channel the pH sensor is connected'
},
{
'id': 'adc_channel_ec',
'type': 'select',
'default_value': '1',
'options_select': [
('-1', 'Not Connected'),
('0', 'Channel 0'),
('1', 'Channel 1'),
('2', 'Channel 2'),
('3', 'Channel 3'),
('4', 'Channel 4'),
('5', 'Channel 5'),
('6', 'Channel 6'),
('7', 'Channel 7'),
],
'name': 'ADC Channel: EC',
'phrase': 'The ADC channel the EC sensor is connected'
},
{
'type': 'message',
'default_value': 'Temperature Compensation',
},
{
'id': 'temperature_comp_meas',
'type': 'select_measurement',
'default_value': '',
'options_select': [
'Input',
'Function',
'Math'
],
'name': "{}: {}".format(lazy_gettext('Temperature Compensation'), lazy_gettext('Measurement')),
'phrase': lazy_gettext('Select a measurement for temperature compensation')
},
{
'id': 'max_age',
'type': 'integer',
'default_value': 120,
'required': True,
'constraints_pass': constraints_pass_positive_value,
'name': "{}: {}".format(lazy_gettext('Temperature Compensation'), lazy_gettext('Max Age')),
'phrase': lazy_gettext('The maximum age (seconds) of the measurement to use')
},
{
'type': 'message',
'default_value': 'pH Calibration Data',
},
{
'id': 'ph_cal_v1',
'type': 'float',
'default_value': 1.500,
'name': 'Cal data: V1 (internal)',
'phrase': 'Calibration data: Voltage'
},
{
'id': 'ph_cal_ph1',
'type': 'float',
'default_value': 7.0,
'name': 'Cal data: pH1 (internal)',
'phrase': 'Calibration data: pH'
},
{
'id': 'ph_cal_t1',
'type': 'float',
'default_value': 25.0,
'name': 'Cal data: T1 (internal)',
'phrase': 'Calibration data: Temperature'
},
{
'type': 'new_line'
},
{
'id': 'ph_cal_v2',
'type': 'float',
'default_value': 2.032,
'name': 'Cal data: V2 (internal)',
'phrase': 'Calibration data: Voltage'
},
{
'id': 'ph_cal_ph2',
'type': 'float',
'default_value': 4.0,
'name': 'Cal data: pH2 (internal)',
'phrase': 'Calibration data: pH'
},
{
'id': 'ph_cal_t2',
'type': 'float',
'default_value': 25.0,
'name': 'Cal data: T2 (internal)',
'phrase': 'Calibration data: Temperature'
},
{
'type': 'message',
'default_value': 'EC Calibration Data'
},
{
'id': 'ec_cal_v1',
'type': 'float',
'default_value': 0.232,
'name': 'EC cal data: V1 (internal)',
'phrase': 'EC calibration data: Voltage'
},
{
'id': 'ec_cal_ec1',
'type': 'float',
'default_value': 1413.0,
'name': 'EC cal data: EC1 (internal)',
'phrase': 'EC calibration data: EC'
},
{
'id': 'ec_cal_t1',
'type': 'float',
'default_value': 25.0,
'name': 'EC cal data: T1 (internal)',
            'phrase': 'EC calibration data: Temperature'
},
{
'type': 'new_line'
},
{
'id': 'ec_cal_v2',
'type': 'float',
'default_value': 2.112,
'name': 'EC cal data: V2 (internal)',
'phrase': 'EC calibration data: Voltage'
},
{
'id': 'ec_cal_ec2',
'type': 'float',
'default_value': 12880.0,
'name': 'EC cal data: EC2 (internal)',
'phrase': 'EC calibration data: EC'
},
{
'id': 'ec_cal_t2',
'type': 'float',
'default_value': 25.0,
'name': 'EC cal data: T2 (internal)',
            'phrase': 'EC calibration data: Temperature'
},
{
'type': 'new_line'
},
{
'id': 'adc_calibration',
'type': 'select',
'default_value': '',
'options_select': [
('', 'No Calibration'),
('SELFOCAL', 'Self Offset'),
('SELFGCAL', 'Self Gain'),
('SELFCAL', 'Self Offset + Self Gain'),
('SYSOCAL', 'System Offset'),
('SYSGCAL', 'System Gain')
],
'name': lazy_gettext('Calibration'),
'phrase': lazy_gettext('Set the calibration method to perform during Input activation')
},
],
'custom_actions': [
{
'type': 'message',
'default_value': """pH Calibration Actions: Place your probe in a solution of known pH.
Set the known pH value in the `Calibration buffer pH` field, and press `Calibrate pH, slot 1`.
Repeat with a second buffer, and press `Calibrate pH, slot 2`.
You don't need to change the values under `Custom Options`."""
},
{
'id': 'calibration_ph',
'type': 'float',
'default_value': 7.0,
'name': 'Calibration buffer pH',
'phrase': 'This is the nominal pH of the calibration buffer, usually labelled on the bottle.'
},
{
'id': 'calibrate_ph_slot_1',
'type': 'button',
'wait_for_return': True,
'name': 'Calibrate pH, slot 1'
},
{
'id': 'calibrate_ph_slot_2',
'type': 'button',
'wait_for_return': True,
'name': 'Calibrate pH, slot 2'
},
{
'id': 'clear_ph_calibrate_slots',
'type': 'button',
'wait_for_return': True,
'name': 'Clear pH Calibration Slots'
},
{
'type': 'message',
'default_value': """EC Calibration Actions: Place your probe in a solution of known EC.
Set the known EC value in the `Calibration standard EC` field, and press `Calibrate EC, slot 1`.
Repeat with a second standard, and press `Calibrate EC, slot 2`.
You don't need to change the values under `Custom Options`."""
},
{
'id': 'calibration_ec',
'type': 'float',
'default_value': 1413.0,
'name': 'Calibration standard EC',
'phrase': 'This is the nominal EC of the calibration standard, usually labelled on the bottle.'
},
{
'id': 'calibrate_ec_slot_1',
'type': 'button',
'wait_for_return': True,
'name': 'Calibrate EC, slot 1'
},
{
'id': 'calibrate_ec_slot_2',
'type': 'button',
'wait_for_return': True,
'name': 'Calibrate EC, slot 2'
},
{
'id': 'clear_ec_calibrate_slots',
'type': 'button',
'wait_for_return': True,
'name': 'Clear EC Calibration Slots'
},
]
}
class InputModule(AbstractInput):
"""Read ADC
    Choose a gain of 1 for reading measurements up to ±5 V.
Or pick a different gain to change the range of measurements that are read:
- 1 = ±5 V
- 2 = ±2.5 V
- 4 = ±1.25 V
- 8 = ±0.5 V
- 16 = ±0.25 V
- 32 = ±0.125 V
- 64 = ±0.0625 V
See table 3 in the ADS1256 datasheet for more info on gain.
"""
def __init__(self, input_dev, testing=False):
super(InputModule, self).__init__(input_dev, testing=testing, name=__name__)
self.sensor = None
self.CH_SEQUENCE = None
self.adc_gain = None
self.adc_sample_speed = None
self.adc_calibration = None
self.dict_gains = {
1: 0.125,
2: 0.0625,
4: 0.03125,
8: 0.015625,
16: 0.0078125,
32: 0.00390625,
64: 0.00195312,
}
self.adc_channel_ph = None
self.adc_channel_ec = None
self.temperature_comp_meas_device_id = None
self.temperature_comp_meas_measurement_id = None
self.max_age = None
self.ph_cal_v1 = None
self.ph_cal_ph1 = None
self.ph_cal_t1 = None
self.ph_cal_v2 = None
self.ph_cal_ph2 = None
self.ph_cal_t2 = None
self.ec_cal_v1 = None
self.ec_cal_ec1 = None
self.ec_cal_t1 = None
self.ec_cal_v2 = None
self.ec_cal_ec2 = None
self.ec_cal_t2 = None
self.slope = None
self.intercept = None
if not testing:
self.setup_custom_options(
INPUT_INFORMATION['custom_options'], input_dev)
self.initialize_input()
def initialize_input(self):
#import adafruit_ads1x15.ads1115 as ADS
#from adafruit_ads1x15.analog_in import AnalogIn
#from adafruit_extended_bus import ExtendedI2C
import glob
from pipyadc_py3 import ADS1256
from pipyadc_py3.ADS1256_definitions import POS_AIN0
from pipyadc_py3.ADS1256_definitions import POS_AIN1
from pipyadc_py3.ADS1256_definitions import POS_AIN2
from pipyadc_py3.ADS1256_definitions import POS_AIN3
from pipyadc_py3.ADS1256_definitions import POS_AIN4
from pipyadc_py3.ADS1256_definitions import POS_AIN5
from pipyadc_py3.ADS1256_definitions import POS_AIN6
from pipyadc_py3.ADS1256_definitions import POS_AIN7
from pipyadc_py3.ADS1256_definitions import NEG_AINCOM
# Input pin for the potentiometer on the Waveshare Precision ADC board
POTI = POS_AIN0 | NEG_AINCOM
# Light dependant resistor
LDR = POS_AIN1 | NEG_AINCOM
# The other external input screw terminals of the Waveshare board
EXT2, EXT3, EXT4 = POS_AIN2 | NEG_AINCOM, POS_AIN3 | NEG_AINCOM, POS_AIN4 | NEG_AINCOM
EXT5, EXT6, EXT7 = POS_AIN5 | NEG_AINCOM, POS_AIN6 | NEG_AINCOM, POS_AIN7 | NEG_AINCOM
channels = {
0: POTI,
1: LDR,
2: EXT2,
3: EXT3,
4: EXT4,
5: EXT5,
6: EXT6,
7: EXT7,
}
#self.analog_in = AnalogIn
#self.ads = ADS
# Generate the channel sequence for enabled channels
self.CH_SEQUENCE = []
for channel in self.channels_measurement:
if self.is_enabled(channel):
self.CH_SEQUENCE.append(channels[channel])
self.CH_SEQUENCE = tuple(self.CH_SEQUENCE)
if self.input_dev.adc_gain == 0:
self.adc_gain = 1
else:
self.adc_gain = self.input_dev.adc_gain
self.adc_sample_speed = self.input_dev.adc_sample_speed
        if glob.glob('/dev/spi*'):
            self.sensor = ADS1256()

            # Perform the selected calibration
            if self.adc_calibration == 'SELFOCAL':
                self.sensor.cal_self_offset()
            elif self.adc_calibration == 'SELFGCAL':
                self.sensor.cal_self_gain()
            elif self.adc_calibration == 'SELFCAL':
                self.sensor.cal_self()
            elif self.adc_calibration == 'SYSOCAL':
                self.sensor.cal_system_offset()
            elif self.adc_calibration == 'SYSGCAL':
                self.sensor.cal_system_gain()
        else:
            raise Exception(
                "SPI device /dev/spi* not found. Ensure SPI is enabled and the device is recognized/setup by linux.")
# self.adc = ADS.ADS1115(
# ExtendedI2C(self.input_dev.i2c_bus),
# address=int(str(self.input_dev.i2c_location), 16))
def calibrate_ph(self, cal_slot, args_dict):
"""Calibration helper method."""
if 'calibration_ph' not in args_dict:
self.logger.error("Cannot conduct calibration without a buffer pH value")
return
if (not isinstance(args_dict['calibration_ph'], float) and
not isinstance(args_dict['calibration_ph'], int)):
self.logger.error("buffer value does not represent a number: '{}', type: {}".format(
args_dict['calibration_ph'], type(args_dict['calibration_ph'])))
return
v = self.get_volt_data(self.get_voltages(), int(self.adc_channel_ph)) # pH
temp = self.get_temp_data()
if temp is not None:
# Use measured temperature
t = temp
else:
# Assume room temperature of 25C
t = 25
self.logger.debug("Assigning voltage {} and temperature {} to pH {}".format(
v, t, args_dict['calibration_ph']))
if cal_slot == 1:
# set values currently being used
self.ph_cal_v1 = v
self.ph_cal_ph1 = args_dict['calibration_ph']
self.ph_cal_t1 = t
# save values for next startup
self.set_custom_option("ph_cal_v1", v)
self.set_custom_option("ph_cal_ph1", args_dict['calibration_ph'])
self.set_custom_option("ph_cal_t1", t)
elif cal_slot == 2:
# set values currently being used
self.ph_cal_v2 = v
self.ph_cal_ph2 = args_dict['calibration_ph']
self.ph_cal_t2 = t
# save values for next startup
self.set_custom_option("ph_cal_v2", v)
self.set_custom_option("ph_cal_ph2", args_dict['calibration_ph'])
self.set_custom_option("ph_cal_t2", t)
def calibrate_ph_slot_1(self, args_dict):
"""calibrate."""
self.calibrate_ph(1, args_dict)
def calibrate_ph_slot_2(self, args_dict):
"""calibrate."""
self.calibrate_ph(2, args_dict)
def clear_ph_calibrate_slots(self, args_dict):
self.delete_custom_option("ph_cal_v1")
self.delete_custom_option("ph_cal_ph1")
self.delete_custom_option("ph_cal_t1")
self.delete_custom_option("ph_cal_v2")
self.delete_custom_option("ph_cal_ph2")
self.delete_custom_option("ph_cal_t2")
self.setup_custom_options(
INPUT_INFORMATION['custom_options'], self.input_dev)
def calibrate_ec(self, cal_slot, args_dict):
"""Calibration helper method."""
if 'calibration_ec' not in args_dict:
self.logger.error("Cannot conduct calibration without a standard EC value")
return
if (not isinstance(args_dict['calibration_ec'], float) and
not isinstance(args_dict['calibration_ec'], int)):<|fim▁hole|> args_dict['calibration_ec'], type(args_dict['calibration_ec'])))
return
v = self.get_volt_data(self.get_voltages(), int(self.adc_channel_ec)) # EC
temp = self.get_temp_data()
if temp is not None:
# Use measured temperature
t = temp
else:
# Assume room temperature of 25C
t = 25
self.logger.debug("Assigning voltage {} and temperature {} to EC {}".format(
v, t, args_dict['calibration_ec']))
# For future sessions
if cal_slot == 1:
# set values currently being used
self.ec_cal_v1 = v
self.ec_cal_ec1 = args_dict['calibration_ec']
self.ec_cal_t1 = t
# save values for next startup
self.set_custom_option("ec_cal_v1", v)
self.set_custom_option("ec_cal_ec1", args_dict['calibration_ec'])
self.set_custom_option("ec_cal_t1", t)
elif cal_slot == 2:
self.ec_cal_v2 = v
self.ec_cal_ec2 = args_dict['calibration_ec']
self.ec_cal_t2 = t
self.set_custom_option("ec_cal_v2", v)
self.set_custom_option("ec_cal_ec2", args_dict['calibration_ec'])
self.set_custom_option("ec_cal_t2", t)
def calibrate_ec_slot_1(self, args_dict):
"""calibrate."""
self.calibrate_ec(1, args_dict)
def calibrate_ec_slot_2(self, args_dict):
"""calibrate."""
self.calibrate_ec(2, args_dict)
def clear_ec_calibrate_slots(self, args_dict):
self.delete_custom_option("ec_cal_v1")
self.delete_custom_option("ec_cal_ec1")
self.delete_custom_option("ec_cal_t1")
self.delete_custom_option("ec_cal_v2")
self.delete_custom_option("ec_cal_ec2")
self.delete_custom_option("ec_cal_t2")
self.setup_custom_options(
INPUT_INFORMATION['custom_options'], self.input_dev)
@staticmethod
def nernst_correction(volt, temp):
"""Apply temperature correction for pH. This provides the voltage as if it were measured at 25C.
Based on the Nernst equation: E = E0 - ln(10) * RT/nF * pH; this gives E = E0 - 0.198 * T * pH.
The correction is a simple ratio of absolute temperature."""
volt_25C = volt * 298/(temp+273)
return volt_25C
@staticmethod
def viscosity_correction(volt, temp):
"""Apply temperature correction for EC. This provides the voltage as if it were measured at 25C.
Based on the Nernst-Einstein and Stokes-Einstein relations, related to viscosity: EC/EC25 = vis25/vis.
The correction is a linear approximation to the full curve, valid for 10-30C."""
volt_25C = volt / (1 + 0.020 * (temp - 25))
return volt_25C
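    # Worked example of the two corrections above (illustrative readings,
    # not from a real probe): a pH probe reading 2.000 V at 30 C is
    # normalized to 25 C as
    #     volt_25C = 2.000 * 298 / (30 + 273) ≈ 1.967 V
    # while an EC probe reading 2.000 V at 30 C is normalized as
    #     volt_25C = 2.000 / (1 + 0.020 * (30 - 25)) ≈ 1.818 V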
def get_voltages(self):
voltages_list = []
for _ in range(2):
raw_channels = self.sensor.read_sequence(self.CH_SEQUENCE)
voltages_list = [i * self.sensor.v_per_digit for i in raw_channels]
if 0 not in voltages_list:
break
return voltages_list
def get_temp_data(self):
"""Get the temperature."""
if self.temperature_comp_meas_measurement_id:
self.logger.debug("Temperature corrections will be applied")
last_measurement = self.get_last_measurement(
self.temperature_comp_meas_device_id,
self.temperature_comp_meas_measurement_id,
max_age=self.max_age
)
if last_measurement and len(last_measurement) > 1:
device_measurement = get_measurement(
self.temperature_comp_meas_measurement_id)
conversion = db_retrieve_table_daemon(
Conversion, unique_id=device_measurement.conversion_id)
_, unit, _ = return_measurement_info(
device_measurement, conversion)
if unit != "C":
out_value = convert_from_x_to_y_unit(
unit, "C", last_measurement[1])
else:
out_value = last_measurement[1]
self.logger.debug("Latest temperature: {temp} C".format(
temp=out_value))
else:
self.logger.error(
"Temperature measurement not found within the "
"past {} seconds".format(self.max_age))
out_value = None
else:
self.logger.debug("No temperature corrections applied")
out_value = None
return out_value
def get_volt_data(self, voltages, channel):
"""Measure voltage at ADC channel."""
if not voltages or 0 in voltages:
self.logger.error("ADC returned measurement of 0 (indicating something is wrong).")
return
volt_data = voltages[channel]
# chan = self.analog_in(self.adc, channel)
# self.adc.gain = self.adc_gain
# self.logger.debug("Channel {}: Gain {}, {} raw, {} volts".format(
# channel, self.adc_gain, chan.value, chan.voltage))
# volt_data = chan.voltage
# raw_channel2 = self.sensor.read_oneshot(self.chan)
# volt_data2 = raw_channel2 * self.sensor.v_per_digit
return volt_data
def convert_volt_to_ph(self, volt, temp):
"""Convert voltage to pH."""
# Calculate slope and intercept from calibration points.
self.slope = ((self.ph_cal_ph1 - self.ph_cal_ph2) /
(self.nernst_correction(self.ph_cal_v1, self.ph_cal_t1) -
self.nernst_correction(self.ph_cal_v2, self.ph_cal_t2)))
self.intercept = (self.ph_cal_ph1 -
self.slope *
self.nernst_correction(self.ph_cal_v1, self.ph_cal_t1))
if temp is not None:
# Perform temperature corrections
ph = self.slope * self.nernst_correction(volt, temp) + self.intercept
else:
# Don't perform temperature corrections
ph = self.slope * volt + self.intercept
return ph
def convert_volt_to_ec(self, volt, temp):
"""Convert voltage to EC."""
# Calculate slope and intercept from calibration points.
self.slope = ((self.ec_cal_ec1 - self.ec_cal_ec2) /
(self.viscosity_correction(self.ec_cal_v1, self.ec_cal_t1) -
self.viscosity_correction(self.ec_cal_v2, self.ec_cal_t2)))
self.intercept = (self.ec_cal_ec1 -
self.slope *
self.viscosity_correction(self.ec_cal_v1, self.ec_cal_t1))
if temp is not None:
# Perform temperature corrections
ec = self.slope * self.viscosity_correction(volt, temp) + self.intercept
else:
# Don't perform temperature corrections
ec = self.slope * volt + self.intercept
return ec
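    # Worked example of the two-point calibration used by both converters,
    # with this module's default EC calibration values: the points
    # (0.232 V, 1413 uS/cm) and (2.112 V, 12880 uS/cm), both at 25 C, give
    #     slope = (1413 - 12880) / (0.232 - 2.112) ≈ 6099.5 uS/cm per volt
    #     intercept = 1413 - 6099.5 * 0.232 ≈ -2.1 uS/cm
    # so a temperature-corrected reading of 1.000 V maps to roughly
    # 6097 uS/cm.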
def generate_dict(self):
return_dict = {}
with session_scope(MYCODO_DB_PATH) as new_session:
measurements = new_session.query(DeviceMeasurements).filter(
DeviceMeasurements.device_id == self.unique_id).all()
for each_measure in measurements:
return_dict[each_measure.channel] = {
'measurement': each_measure.measurement,
'unit': each_measure.unit
}
return return_dict
def get_measurement(self):
"""Gets the measurement."""
if not self.sensor:
self.logger.error("Error 101: Device not set up. See https://kizniche.github.io/Mycodo/Error-Codes#error-101 for more info.")
return
self.return_dict = self.generate_dict()
voltages = self.get_voltages()
for each_channel in range(8):
if (each_channel == int(self.adc_channel_ph) and
self.is_enabled(int(self.adc_channel_ph))): # pH
self.value_set(
int(self.adc_channel_ph),
self.convert_volt_to_ph(
self.get_volt_data(voltages, int(self.adc_channel_ph)),
self.get_temp_data()))
elif (each_channel == int(self.adc_channel_ec) and
self.is_enabled(int(self.adc_channel_ec))): # EC
self.value_set(
int(self.adc_channel_ec),
self.convert_volt_to_ec(
self.get_volt_data(voltages, int(self.adc_channel_ec)),
self.get_temp_data()))
elif self.is_enabled(each_channel):
self.value_set(
each_channel, self.get_volt_data(voltages, each_channel))
return self.return_dict<|fim▁end|> | self.logger.error("standard value does not represent a number: '{}', type: {}".format( |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|><|fim▁hole|> println!("{}", get_middle("A"));
println!("{}", get_middle("of"));
}
fn get_middle(s: &str) -> &str {
    // Shift the length by one so the midpoint lands on the middle byte
    // (odd-length input) or between the two middle bytes (even-length).
    let s_len = s.len() + 1;
    let mid = s_len / 2;
    if s_len % 2 == 0 {
        // Odd-length input: return the single middle character.
        &s[mid - 1..mid]
    } else {
        // Even-length input: return the two middle characters.
        &s[mid - 1..mid + 1]
    }
}
#[test]
fn example_tests() {
assert_eq!(get_middle("test"),"es");
assert_eq!(get_middle("testing"),"t");
assert_eq!(get_middle("middle"),"dd");
assert_eq!(get_middle("A"),"A");
assert_eq!(get_middle("of"),"of");
}<|fim▁end|> | fn main() {
println!("{}", get_middle("test"));
println!("{}", get_middle("testing"));
println!("{}", get_middle("middle")); |
<|file_name|>chario.rs<|end_file_name|><|fim▁begin|>// Zinc, the bare metal stack for rust.
// Copyright 2014 Vladimir "farcaller" Pouzanov <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Generic char output trait.
use core::str::{Str, StrSlice};
use core::slice::{Slice, ImmutableSlice};
use core::collections::Collection;
use core::iter::range;
use core::mem::zeroed;
use lib::strconv;
/// CharIO provides interface for outputting characters.
///
/// This trait implements the common functions to output strings and numbers,
/// requiring only one method: `putc`.
pub trait CharIO {
/// Outputs a character.
fn putc(&self, value: char);
/// Outputs a string.
fn puts(&self, s: &str) {
let chars : &[u8] = s.as_slice().as_bytes();
for i in range(0, s.len()) {
let c : char = chars[i] as char;
self.putc(c);
}
}
/// Outputs an integer with given base.
fn putint(&self, i: u32, base: u32) {
let mut buf : [u8, ..32] = unsafe { zeroed() };
let bsl : &mut [u8] = buf;
strconv::itoa(i, bsl, base);
for &i in bsl.iter() {
if i == 0 {
break;
}
self.putc(i as char);
}
}
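    // For example, `putint(255, 16)` fills the buffer via `strconv::itoa`
    // and emits 'f', 'f', stopping at the first NUL byte. This assumes
    // `itoa` left-aligns the digits and NUL-pads the rest of the buffer,
    // which is what the early `break` above relies on.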
/// Outputs an integer.
fn puti(&self, i: u32) {
self.putint(i, 10);
}
/// Outputs an integer as a hex string.
fn puth(&self, i: u32) {
self.putint(i, 16);
}
}
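// A minimal sketch of an implementation over a memory-mapped UART data
// register; the `Uart` type and the `UART0_DR` address below are
// hypothetical and only illustrate the shape of an impl. Everything else
// (`puts`, `puti`, `puth`) then comes from the default methods:
//
//     struct Uart;
//
//     impl CharIO for Uart {
//         fn putc(&self, value: char) {
//             let dr = 0x4000_c000 as *mut u32; // hypothetical UART0_DR
//             unsafe { *dr = value as u32; }
//         }
//     }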
#[cfg(test)]
pub mod test {
use core::cell::RefCell;
use drivers::chario::CharIO;
pub struct TestCharIOData {
last_char: char,
putc_calls: uint,
}
pub struct TestCharIO {
data: RefCell<TestCharIOData>
}
impl CharIO for TestCharIO {
fn putc(&self, value: char) {
let mut data = self.data.borrow_mut();
data.putc_calls += 1;
data.last_char = value;
}
}
impl TestCharIO {
pub fn new() -> TestCharIO {
TestCharIO {
data: RefCell::new(TestCharIOData {
last_char: '\0',
putc_calls: 0,
}),
}
}
fn get_last_char(&self) -> char {
self.data.borrow().last_char
}
fn get_and_reset_putc_calls(&self) -> uint {
let current = self.data.borrow().putc_calls;
self.data.borrow_mut().putc_calls = 0;
current
}
}
#[test]
fn putc_should_store_a_char() {
let io = TestCharIO::new();
io.putc('a');
assert!(io.get_last_char() == 'a');
io.putc('z');
assert!(io.get_last_char() == 'z');
}
#[test]
fn puti_should_store_a_number_as_char() {
let io = TestCharIO::new();
io.puti(3);
assert!(io.get_last_char() == '3');
io.puti(9);
assert!(io.get_last_char() == '9');
io.puti(10);
assert!(io.get_last_char() == '0');
io.puti(11);
assert!(io.get_last_char() == '1');
}
<|fim▁hole|> fn puth_should_store_a_number_as_char() {
let io = TestCharIO::new();
io.puth(3);
assert!(io.get_last_char() == '3');
io.puth(9);
assert!(io.get_last_char() == '9');
io.puth(10);
assert!(io.get_last_char() == 'a');
io.puth(11);
assert!(io.get_last_char() == 'b');
io.puth(16);
assert!(io.get_last_char() == '0');
io.puth(17);
assert!(io.get_last_char() == '1');
}
#[test]
fn putint_should_work_with_different_bases() {
let io = TestCharIO::new();
io.putint(0, 2);
assert!(io.get_last_char() == '0');
io.putint(1, 2);
assert!(io.get_last_char() == '1');
io.putint(2, 2);
assert!(io.get_last_char() == '0');
io.putint(3, 2);
assert!(io.get_last_char() == '1');
io.putint(7, 7);
assert!(io.get_last_char() == '0');
io.putint(8, 7);
assert!(io.get_last_char() == '1');
io.putint(12, 7);
assert!(io.get_last_char() == '5');
io.putint(14, 7);
assert!(io.get_last_char() == '0');
}
#[test]
fn puts_should_leave_us_with_just_the_last_char() {
let io = TestCharIO::new();
io.puts("fu!");
assert!(io.get_last_char() == '!');
assert!(io.get_and_reset_putc_calls() == 3);
io.puts("\n\t");
assert!(io.get_last_char() == '\t');
assert!(io.get_and_reset_putc_calls() == 2);
}
}<|fim▁end|> | #[test] |
<|file_name|>ChoiceContentImpl.cpp<|end_file_name|><|fim▁begin|>// -*- mode: c++; c-basic-style: "bsd"; c-basic-offset: 4; -*-
/*
* kdm/data/ChoiceContentImpl.cpp
* Copyright (C) Cátedra SAES-UMU 2010 <[email protected]>
* Copyright (C) INCHRON GmbH 2016 <[email protected]>
*
* EMF4CPP is free software: you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* EMF4CPP is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License<|fim▁hole|>
#include "ChoiceContent.hpp"
#include <kdm/data/DataPackage.hpp>
#include <kdm/data/ComplexContentType.hpp>
#include <kdm/kdm/Attribute.hpp>
#include <kdm/kdm/Annotation.hpp>
#include <kdm/kdm/Stereotype.hpp>
#include <kdm/kdm/ExtendedValue.hpp>
#include <kdm/source/SourceRef.hpp>
#include <kdm/data/AbstractDataRelationship.hpp>
#include <kdm/action/ActionElement.hpp>
#include <kdm/data/AbstractContentElement.hpp>
#include <kdm/core/KDMRelationship.hpp>
#include <kdm/core/AggregatedRelationship.hpp>
#include <kdm/core/KDMEntity.hpp>
#include <kdm/kdm/KDMModel.hpp>
#include <ecore/EObject.hpp>
#include <ecore/EClass.hpp>
#include <ecore/EStructuralFeature.hpp>
#include <ecore/EReference.hpp>
#include <ecore/EObject.hpp>
#include <ecorecpp/mapping.hpp>
/*PROTECTED REGION ID(ChoiceContentImpl.cpp) START*/
// Please, enable the protected region if you add manually written code.
// To do this, add the keyword ENABLED before START.
/*PROTECTED REGION END*/
using namespace ::kdm::data;
void ChoiceContent::_initialize()
{
// Supertypes
::kdm::data::ComplexContentType::_initialize();
// References
/*PROTECTED REGION ID(ChoiceContentImpl__initialize) START*/
// Please, enable the protected region if you add manually written code.
// To do this, add the keyword ENABLED before START.
/*PROTECTED REGION END*/
}
// Operations
// EObject
::ecore::EJavaObject ChoiceContent::eGet(::ecore::EInt _featureID,
::ecore::EBoolean _resolve)
{
::ecore::EJavaObject _any;
switch (_featureID)
{
case ::kdm::core::CorePackage::ELEMENT__ATTRIBUTE:
{
_any = m_attribute->asEListOf< ::ecore::EObject_ptr >();
}
return _any;
case ::kdm::core::CorePackage::ELEMENT__ANNOTATION:
{
_any = m_annotation->asEListOf< ::ecore::EObject_ptr >();
}
return _any;
case ::kdm::core::CorePackage::MODELELEMENT__STEREOTYPE:
{
_any = m_stereotype->asEListOf< ::ecore::EObject_ptr >();
}
return _any;
case ::kdm::core::CorePackage::MODELELEMENT__TAGGEDVALUE:
{
_any = m_taggedValue->asEListOf< ::ecore::EObject_ptr >();
}
return _any;
case ::kdm::core::CorePackage::KDMENTITY__NAME:
{
::ecorecpp::mapping::any_traits < ::kdm::core::String
> ::toAny(_any, m_name);
}
return _any;
case ::kdm::data::DataPackage::ABSTRACTDATAELEMENT__SOURCE:
{
_any = m_source->asEListOf< ::ecore::EObject_ptr >();
}
return _any;
case ::kdm::data::DataPackage::ABSTRACTDATAELEMENT__DATARELATION:
{
_any = m_dataRelation->asEListOf< ::ecore::EObject_ptr >();
}
return _any;
case ::kdm::data::DataPackage::ABSTRACTDATAELEMENT__ABSTRACTION:
{
_any = m_abstraction->asEListOf< ::ecore::EObject_ptr >();
}
return _any;
case ::kdm::data::DataPackage::COMPLEXCONTENTTYPE__CONTENTELEMENT:
{
_any = m_contentElement->asEListOf< ::ecore::EObject_ptr >();
}
return _any;
}
throw "Error";
}
void ChoiceContent::eSet(::ecore::EInt _featureID,
::ecore::EJavaObject const& _newValue)
{
switch (_featureID)
{
case ::kdm::core::CorePackage::ELEMENT__ATTRIBUTE:
{
::ecorecpp::mapping::EList< ::ecore::EObject_ptr >::ptr_type _t0 =
::ecorecpp::mapping::any::any_cast < ::ecorecpp::mapping::EList
< ::ecore::EObject_ptr > ::ptr_type > (_newValue);
::kdm::core::Element::getAttribute().clear();
::kdm::core::Element::getAttribute().insert_all(*_t0);
}
return;
case ::kdm::core::CorePackage::ELEMENT__ANNOTATION:
{
::ecorecpp::mapping::EList< ::ecore::EObject_ptr >::ptr_type _t0 =
::ecorecpp::mapping::any::any_cast < ::ecorecpp::mapping::EList
< ::ecore::EObject_ptr > ::ptr_type > (_newValue);
::kdm::core::Element::getAnnotation().clear();
::kdm::core::Element::getAnnotation().insert_all(*_t0);
}
return;
case ::kdm::core::CorePackage::MODELELEMENT__STEREOTYPE:
{
::ecorecpp::mapping::EList< ::ecore::EObject_ptr >::ptr_type _t0 =
::ecorecpp::mapping::any::any_cast < ::ecorecpp::mapping::EList
< ::ecore::EObject_ptr > ::ptr_type > (_newValue);
::kdm::core::ModelElement::getStereotype().clear();
::kdm::core::ModelElement::getStereotype().insert_all(*_t0);
}
return;
case ::kdm::core::CorePackage::MODELELEMENT__TAGGEDVALUE:
{
::ecorecpp::mapping::EList< ::ecore::EObject_ptr >::ptr_type _t0 =
::ecorecpp::mapping::any::any_cast < ::ecorecpp::mapping::EList
< ::ecore::EObject_ptr > ::ptr_type > (_newValue);
::kdm::core::ModelElement::getTaggedValue().clear();
::kdm::core::ModelElement::getTaggedValue().insert_all(*_t0);
}
return;
case ::kdm::core::CorePackage::KDMENTITY__NAME:
{
::kdm::core::String _t0;
::ecorecpp::mapping::any_traits < ::kdm::core::String
> ::fromAny(_newValue, _t0);
::kdm::core::KDMEntity::setName(_t0);
}
return;
case ::kdm::data::DataPackage::ABSTRACTDATAELEMENT__SOURCE:
{
::ecorecpp::mapping::EList< ::ecore::EObject_ptr >::ptr_type _t0 =
::ecorecpp::mapping::any::any_cast < ::ecorecpp::mapping::EList
< ::ecore::EObject_ptr > ::ptr_type > (_newValue);
::kdm::data::AbstractDataElement::getSource().clear();
::kdm::data::AbstractDataElement::getSource().insert_all(*_t0);
}
return;
case ::kdm::data::DataPackage::ABSTRACTDATAELEMENT__DATARELATION:
{
::ecorecpp::mapping::EList< ::ecore::EObject_ptr >::ptr_type _t0 =
::ecorecpp::mapping::any::any_cast < ::ecorecpp::mapping::EList
< ::ecore::EObject_ptr > ::ptr_type > (_newValue);
::kdm::data::AbstractDataElement::getDataRelation().clear();
::kdm::data::AbstractDataElement::getDataRelation().insert_all(*_t0);
}
return;
case ::kdm::data::DataPackage::ABSTRACTDATAELEMENT__ABSTRACTION:
{
::ecorecpp::mapping::EList< ::ecore::EObject_ptr >::ptr_type _t0 =
::ecorecpp::mapping::any::any_cast < ::ecorecpp::mapping::EList
< ::ecore::EObject_ptr > ::ptr_type > (_newValue);
::kdm::data::AbstractDataElement::getAbstraction().clear();
::kdm::data::AbstractDataElement::getAbstraction().insert_all(*_t0);
}
return;
case ::kdm::data::DataPackage::COMPLEXCONTENTTYPE__CONTENTELEMENT:
{
::ecorecpp::mapping::EList< ::ecore::EObject_ptr >::ptr_type _t0 =
::ecorecpp::mapping::any::any_cast < ::ecorecpp::mapping::EList
< ::ecore::EObject_ptr > ::ptr_type > (_newValue);
::kdm::data::ComplexContentType::getContentElement().clear();
::kdm::data::ComplexContentType::getContentElement().insert_all(*_t0);
}
return;
}
throw "Error";
}
::ecore::EBoolean ChoiceContent::eIsSet(::ecore::EInt _featureID)
{
switch (_featureID)
{
case ::kdm::core::CorePackage::ELEMENT__ATTRIBUTE:
return m_attribute && m_attribute->size();
case ::kdm::core::CorePackage::ELEMENT__ANNOTATION:
return m_annotation && m_annotation->size();
case ::kdm::core::CorePackage::MODELELEMENT__STEREOTYPE:
return m_stereotype && m_stereotype->size();
case ::kdm::core::CorePackage::MODELELEMENT__TAGGEDVALUE:
return m_taggedValue && m_taggedValue->size();
case ::kdm::core::CorePackage::KDMENTITY__NAME:
return ::ecorecpp::mapping::set_traits < ::kdm::core::String
> ::is_set(m_name);
case ::kdm::data::DataPackage::ABSTRACTDATAELEMENT__SOURCE:
return m_source && m_source->size();
case ::kdm::data::DataPackage::ABSTRACTDATAELEMENT__DATARELATION:
return m_dataRelation && m_dataRelation->size();
case ::kdm::data::DataPackage::ABSTRACTDATAELEMENT__ABSTRACTION:
return m_abstraction && m_abstraction->size();
case ::kdm::data::DataPackage::COMPLEXCONTENTTYPE__CONTENTELEMENT:
return m_contentElement && m_contentElement->size();
}
throw "Error";
}
void ChoiceContent::eUnset(::ecore::EInt _featureID)
{
switch (_featureID)
{
}
throw "Error";
}
::ecore::EClass_ptr ChoiceContent::_eClass()
{
static ::ecore::EClass_ptr _eclass =
dynamic_cast< ::kdm::data::DataPackage* >(::kdm::data::DataPackage::_instance().get())->getChoiceContent();
return _eclass;
}
/** Set the local end of a reference with an EOpposite property.
*/
void ChoiceContent::_inverseAdd(::ecore::EInt _featureID,
::ecore::EJavaObject const& _newValue)
{
switch (_featureID)
{
case ::kdm::core::CorePackage::ELEMENT__ATTRIBUTE:
{
}
return;
case ::kdm::core::CorePackage::ELEMENT__ANNOTATION:
{
}
return;
case ::kdm::core::CorePackage::MODELELEMENT__STEREOTYPE:
{
}
return;
case ::kdm::core::CorePackage::MODELELEMENT__TAGGEDVALUE:
{
}
return;
case ::kdm::data::DataPackage::ABSTRACTDATAELEMENT__SOURCE:
{
}
return;
case ::kdm::data::DataPackage::ABSTRACTDATAELEMENT__DATARELATION:
{
}
return;
case ::kdm::data::DataPackage::ABSTRACTDATAELEMENT__ABSTRACTION:
{
}
return;
case ::kdm::data::DataPackage::COMPLEXCONTENTTYPE__CONTENTELEMENT:
{
}
return;
}
throw "Error: _inverseAdd() does not handle this featureID";
}
/** Unset the local end of a reference with an EOpposite property.
*/
void ChoiceContent::_inverseRemove(::ecore::EInt _featureID,
::ecore::EJavaObject const& _oldValue)
{
switch (_featureID)
{
case ::kdm::core::CorePackage::ELEMENT__ATTRIBUTE:
{
}
return;
case ::kdm::core::CorePackage::ELEMENT__ANNOTATION:
{
}
return;
case ::kdm::core::CorePackage::MODELELEMENT__STEREOTYPE:
{
}
return;
case ::kdm::core::CorePackage::MODELELEMENT__TAGGEDVALUE:
{
}
return;
case ::kdm::data::DataPackage::ABSTRACTDATAELEMENT__SOURCE:
{
}
return;
case ::kdm::data::DataPackage::ABSTRACTDATAELEMENT__DATARELATION:
{
}
return;
case ::kdm::data::DataPackage::ABSTRACTDATAELEMENT__ABSTRACTION:
{
}
return;
case ::kdm::data::DataPackage::COMPLEXCONTENTTYPE__CONTENTELEMENT:
{
}
return;
}
throw "Error: _inverseRemove() does not handle this featureID";
}<|fim▁end|> | * along with this program. If not, see <http://www.gnu.org/licenses/>.
*/ |
<|file_name|>dom.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Types and traits used to access the DOM from style calculation.
#![allow(unsafe_code)]
#![deny(missing_docs)]
use {Atom, Namespace, LocalName};
use applicable_declarations::ApplicableDeclarationBlock;
use atomic_refcell::{AtomicRef, AtomicRefCell, AtomicRefMut};
#[cfg(feature = "gecko")] use context::PostAnimationTasks;
#[cfg(feature = "gecko")] use context::UpdateAnimationsTasks;
use data::ElementData;
use element_state::ElementState;
use font_metrics::FontMetricsProvider;
use media_queries::Device;
use properties::{AnimationRules, ComputedValues, PropertyDeclarationBlock};
#[cfg(feature = "gecko")] use properties::LonghandId;
#[cfg(feature = "gecko")] use properties::animated_properties::AnimationValue;
use rule_tree::CascadeLevel;
use selector_parser::{AttrValue, PseudoClassStringArg, PseudoElement, SelectorImpl};
use selectors::Element as SelectorsElement;
use selectors::matching::{ElementSelectorFlags, QuirksMode, VisitedHandlingMode};
use selectors::sink::Push;
use servo_arc::{Arc, ArcBorrow};
use shared_lock::Locked;
use std::fmt;
#[cfg(feature = "gecko")] use hash::FnvHashMap;
use std::fmt::Debug;
use std::hash::Hash;
use std::ops::Deref;
use stylist::Stylist;
use traversal_flags::TraversalFlags;
/// An opaque handle to a node, which, unlike UnsafeNode, cannot be transformed
/// back into a non-opaque representation. The only safe operation that can be
/// performed on this node is to compare it to another opaque handle or to another
/// OpaqueNode.
///
/// Layout and Graphics use this to safely represent nodes for comparison purposes.
/// Because the script task's GC does not trace layout, node data cannot be safely stored in layout
/// data structures. Also, layout code tends to be faster when the DOM is not being accessed, for
/// locality reasons. Using `OpaqueNode` enforces this invariant.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
#[cfg_attr(feature = "servo", derive(MallocSizeOf, Deserialize, Serialize))]
pub struct OpaqueNode(pub usize);
impl OpaqueNode {
/// Returns the address of this node, for debugging purposes.
#[inline]
pub fn id(&self) -> usize {
self.0
}
}
/// Simple trait to provide basic information about the type of an element.
///
/// We avoid exposing the full type id, since computing it in the general case
/// would be difficult for Gecko nodes.
pub trait NodeInfo {
/// Whether this node is an element.
fn is_element(&self) -> bool;
/// Whether this node is a text node.
fn is_text_node(&self) -> bool;
}
/// A node iterator that skips the nodes layout should ignore, yielding only
/// elements and text nodes.
pub struct LayoutIterator<T>(pub T);
impl<T, N> Iterator for LayoutIterator<T>
where
T: Iterator<Item = N>,
N: NodeInfo,
{
type Item = N;
fn next(&mut self) -> Option<N> {
loop {
match self.0.next() {
Some(n) => {
// Filter out nodes that layout should ignore.
if n.is_text_node() || n.is_element() {
return Some(n)
}
}
None => return None,
}
}
}
}
/// An iterator over the DOM children of a node.
pub struct DomChildren<N>(Option<N>);
impl<N> Iterator for DomChildren<N>
where
N: TNode
{
type Item = N;
fn next(&mut self) -> Option<N> {
match self.0.take() {
Some(n) => {
self.0 = n.next_sibling();
Some(n)
}
None => None,
}
}
}
/// An iterator over the DOM descendants of a node in pre-order.
pub struct DomDescendants<N> {
previous: Option<N>,
scope: N,
}
impl<N> Iterator for DomDescendants<N>
where
N: TNode
{
type Item = N;
#[inline]
fn next(&mut self) -> Option<N> {
let prev = match self.previous.take() {
None => return None,
Some(n) => n,
};
self.previous = prev.next_in_preorder(Some(self.scope));
self.previous
}
}
/// The `TDocument` trait, to represent a document node.
pub trait TDocument : Sized + Copy + Clone {
/// The concrete `TNode` type.
type ConcreteNode: TNode<ConcreteDocument = Self>;
/// Get this document as a `TNode`.
fn as_node(&self) -> Self::ConcreteNode;
/// Returns whether this document is an HTML document.
fn is_html_document(&self) -> bool;
/// Returns the quirks mode of this document.
fn quirks_mode(&self) -> QuirksMode;
/// Get a list of elements with a given ID in this document, sorted by
/// document position.
///
/// Can return an error to signal that this list is not available, or also
/// return an empty slice.
fn elements_with_id(
&self,
_id: &Atom,
) -> Result<&[<Self::ConcreteNode as TNode>::ConcreteElement], ()> {
Err(())
}
}
/// The `TNode` trait. This is the main generic trait over which the style
/// system can be implemented.
pub trait TNode : Sized + Copy + Clone + Debug + NodeInfo + PartialEq {
/// The concrete `TElement` type.
type ConcreteElement: TElement<ConcreteNode = Self>;
/// The concrete `TDocument` type.
type ConcreteDocument: TDocument<ConcreteNode = Self>;
/// Get this node's parent node.
fn parent_node(&self) -> Option<Self>;
/// Get this node's first child.
fn first_child(&self) -> Option<Self>;
    /// Get this node's last child.
fn last_child(&self) -> Option<Self>;
/// Get this node's previous sibling.
fn prev_sibling(&self) -> Option<Self>;
/// Get this node's next sibling.
fn next_sibling(&self) -> Option<Self>;
/// Get the owner document of this node.
fn owner_doc(&self) -> Self::ConcreteDocument;
/// Iterate over the DOM children of a node.
fn dom_children(&self) -> DomChildren<Self> {
DomChildren(self.first_child())
}
/// Returns whether the node is attached to a document.
fn is_in_document(&self) -> bool;
/// Iterate over the DOM children of a node, in preorder.
fn dom_descendants(&self) -> DomDescendants<Self> {
DomDescendants {
previous: Some(*self),
scope: *self,
}
}
/// Returns the next children in pre-order, optionally scoped to a subtree
/// root.
#[inline]
fn next_in_preorder(&self, scoped_to: Option<Self>) -> Option<Self> {
if let Some(c) = self.first_child() {
return Some(c);
}
if Some(*self) == scoped_to {
return None;
}
let mut current = *self;
loop {
if let Some(s) = current.next_sibling() {
return Some(s);
}
let parent = current.parent_node();
if parent == scoped_to {
return None;
}
current = parent.expect("Not a descendant of the scope?");
}
}
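    // Illustrative traversal: for a tree `A(B(D, E), C)`, starting from `A`
    // with `scoped_to == Some(A)`, successive calls yield `B, D, E, C` and
    // finally `None` once `C` has to climb back up to the scope root.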
/// Get this node's parent element from the perspective of a restyle
/// traversal.
fn traversal_parent(&self) -> Option<Self::ConcreteElement>;
/// Get this node's parent element if present.
fn parent_element(&self) -> Option<Self::ConcreteElement> {
self.parent_node().and_then(|n| n.as_element())
}
/// Converts self into an `OpaqueNode`.
fn opaque(&self) -> OpaqueNode;
/// A debug id, only useful, mm... for debugging.
fn debug_id(self) -> usize;
/// Get this node as an element, if it's one.
fn as_element(&self) -> Option<Self::ConcreteElement>;
/// Get this node as a document, if it's one.
fn as_document(&self) -> Option<Self::ConcreteDocument>;
/// Whether this node can be fragmented. This is used for multicol, and only
/// for Servo.
fn can_be_fragmented(&self) -> bool;
/// Set whether this node can be fragmented.
unsafe fn set_can_be_fragmented(&self, value: bool);
}
/// Wrapper to output the subtree rather than the single node when formatting
/// for Debug.
pub struct ShowSubtree<N: TNode>(pub N);
impl<N: TNode> Debug for ShowSubtree<N> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f, "DOM Subtree:")?;
fmt_subtree(f, &|f, n| write!(f, "{:?}", n), self.0, 1)
}
}
/// Wrapper to output the subtree along with the ElementData when formatting
/// for Debug.
pub struct ShowSubtreeData<N: TNode>(pub N);
impl<N: TNode> Debug for ShowSubtreeData<N> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f, "DOM Subtree:")?;
fmt_subtree(f, &|f, n| fmt_with_data(f, n), self.0, 1)
}
}
/// Wrapper to output the subtree along with the ElementData and primary
/// ComputedValues when formatting for Debug. This is extremely verbose.
#[cfg(feature = "servo")]
pub struct ShowSubtreeDataAndPrimaryValues<N: TNode>(pub N);
#[cfg(feature = "servo")]
impl<N: TNode> Debug for ShowSubtreeDataAndPrimaryValues<N> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f, "DOM Subtree:")?;
fmt_subtree(f, &|f, n| fmt_with_data_and_primary_values(f, n), self.0, 1)
}
}
fn fmt_with_data<N: TNode>(f: &mut fmt::Formatter, n: N) -> fmt::Result {
if let Some(el) = n.as_element() {
write!(
f, "{:?} dd={} aodd={} data={:?}",
el,
el.has_dirty_descendants(),
el.has_animation_only_dirty_descendants(),
el.borrow_data(),
)
} else {
write!(f, "{:?}", n)
}
}
#[cfg(feature = "servo")]
fn fmt_with_data_and_primary_values<N: TNode>(f: &mut fmt::Formatter, n: N) -> fmt::Result {
if let Some(el) = n.as_element() {
let dd = el.has_dirty_descendants();
let aodd = el.has_animation_only_dirty_descendants();
let data = el.borrow_data();
let values = data.as_ref().and_then(|d| d.styles.get_primary());
write!(f, "{:?} dd={} aodd={} data={:?} values={:?}", el, dd, aodd, &data, values)
} else {
write!(f, "{:?}", n)
}
}
fn fmt_subtree<F, N: TNode>(f: &mut fmt::Formatter, stringify: &F, n: N, indent: u32)
-> fmt::Result
where F: Fn(&mut fmt::Formatter, N) -> fmt::Result
{
for _ in 0..indent {
write!(f, " ")?;
}
stringify(f, n)?;
if let Some(e) = n.as_element() {
for kid in e.traversal_children() {
writeln!(f, "")?;
fmt_subtree(f, stringify, kid, indent + 1)?;
}
}
Ok(())
}
/// The element trait, the main abstraction the style crate acts over.
pub trait TElement
: Eq
+ PartialEq
+ Debug
+ Hash
+ Sized
+ Copy
+ Clone
+ SelectorsElement<Impl = SelectorImpl>
{
/// The concrete node type.
type ConcreteNode: TNode<ConcreteElement = Self>;
/// A concrete children iterator type in order to iterate over the `Node`s.
///
/// TODO(emilio): We should eventually replace this with the `impl Trait`
/// syntax.
type TraversalChildrenIterator: Iterator<Item = Self::ConcreteNode>;
/// Type of the font metrics provider
///
/// XXXManishearth It would be better to make this a type parameter on
/// ThreadLocalStyleContext and StyleContext
type FontMetricsProvider: FontMetricsProvider + Send;
/// Get this element as a node.
fn as_node(&self) -> Self::ConcreteNode;
/// A debug-only check that the device's owner doc matches the actual doc
/// we're the root of.
///
/// Otherwise we may set document-level state incorrectly, like the root
/// font-size used for rem units.
fn owner_doc_matches_for_testing(&self, _: &Device) -> bool { true }
/// Whether this element should match user and author rules.
///
/// We use this for Native Anonymous Content in Gecko.
fn matches_user_and_author_rules(&self) -> bool { true }
/// Returns the depth of this element in the DOM.
fn depth(&self) -> usize {
let mut depth = 0;
let mut curr = *self;
while let Some(parent) = curr.traversal_parent() {
depth += 1;
curr = parent;
}
depth
}
/// The style scope of this element is a node that represents which rules
/// apply to the element.
///
/// In Servo, where we don't know about Shadow DOM or XBL, the style scope
/// is always the document.
fn style_scope(&self) -> Self::ConcreteNode {
self.as_node().owner_doc().as_node()
}
/// Get this node's parent element from the perspective of a restyle
/// traversal.
fn traversal_parent(&self) -> Option<Self> {
self.as_node().traversal_parent()
}
/// Get this node's children from the perspective of a restyle traversal.
fn traversal_children(&self) -> LayoutIterator<Self::TraversalChildrenIterator>;
/// Returns the parent element we should inherit from.
///
/// This is pretty much always the parent element itself, except in the case
/// of Gecko's Native Anonymous Content, which uses the traversal parent
/// (i.e. the flattened tree parent) and which also may need to find the
/// closest non-NAC ancestor.
fn inheritance_parent(&self) -> Option<Self> {
self.parent_element()
}
/// The ::before pseudo-element of this element, if it exists.
fn before_pseudo_element(&self) -> Option<Self> {
None
}
/// The ::after pseudo-element of this element, if it exists.
fn after_pseudo_element(&self) -> Option<Self> {
None
}
/// Execute `f` for each anonymous content child (apart from ::before and
/// ::after) whose originating element is `self`.
fn each_anonymous_content_child<F>(&self, _f: F)
where
F: FnMut(Self),
{}
/// For a given NAC element, return the closest non-NAC ancestor, which is
/// guaranteed to exist.
fn closest_non_native_anonymous_ancestor(&self) -> Option<Self> {
unreachable!("Servo doesn't know about NAC");
}
/// Get this element's style attribute.
fn style_attribute(&self) -> Option<ArcBorrow<Locked<PropertyDeclarationBlock>>>;
/// Unset the style attribute's dirty bit.
    /// Servo doesn't need to manage the dirty bit for the style attribute.
fn unset_dirty_style_attribute(&self) {
}
/// Get this element's SMIL override declarations.
fn get_smil_override(&self) -> Option<ArcBorrow<Locked<PropertyDeclarationBlock>>> {
None
}
/// Get this element's animation rule by the cascade level.
fn get_animation_rule_by_cascade(&self,
_cascade_level: CascadeLevel)
-> Option<Arc<Locked<PropertyDeclarationBlock>>> {
None
}
/// Get the combined animation and transition rules.
fn get_animation_rules(&self) -> AnimationRules {
if !self.may_have_animations() {
return AnimationRules(None, None)
}
AnimationRules(
self.get_animation_rule(),
self.get_transition_rule(),
)
}
/// Get this element's animation rule.
fn get_animation_rule(&self)
-> Option<Arc<Locked<PropertyDeclarationBlock>>> {
None
}
/// Get this element's transition rule.
fn get_transition_rule(&self)
-> Option<Arc<Locked<PropertyDeclarationBlock>>> {
None
}
/// Get this element's state, for non-tree-structural pseudos.
fn get_state(&self) -> ElementState;
/// Whether this element has an attribute with a given namespace.
fn has_attr(&self, namespace: &Namespace, attr: &LocalName) -> bool;
/// The ID for this element.
fn get_id(&self) -> Option<Atom>;
/// Internal iterator for the classes of this element.
fn each_class<F>(&self, callback: F) where F: FnMut(&Atom);
/// Whether a given element may generate a pseudo-element.
///
/// This is useful to avoid computing, for example, pseudo styles for
/// `::-first-line` or `::-first-letter`, when we know it won't affect us.
///
/// TODO(emilio, bz): actually implement the logic for it.
fn may_generate_pseudo(
&self,
pseudo: &PseudoElement,
_primary_style: &ComputedValues,
) -> bool {
// ::before/::after are always supported for now, though we could try to
// optimize out leaf elements.
// ::first-letter and ::first-line are only supported for block-inside
// things, and only in Gecko, not Servo. Unfortunately, Gecko has
// block-inside things that might have any computed display value due to
// things like fieldsets, legends, etc. Need to figure out how this
// should work.
debug_assert!(pseudo.is_eager(),
"Someone called may_generate_pseudo with a non-eager pseudo.");
true
}
/// Returns true if this element may have a descendant needing style processing.
///
/// Note that we cannot guarantee the existence of such an element, because
/// it may have been removed from the DOM between marking it for restyle and
/// the actual restyle traversal.
fn has_dirty_descendants(&self) -> bool;
/// Returns whether state or attributes that may change style have changed
/// on the element, and thus whether the element has been snapshotted to do
/// restyle hint computation.
fn has_snapshot(&self) -> bool;
/// Returns whether the current snapshot if present has been handled.
fn handled_snapshot(&self) -> bool;
/// Flags this element as having handled already its snapshot.
unsafe fn set_handled_snapshot(&self);
/// Returns whether the element's styles are up-to-date for |traversal_flags|.
fn has_current_styles_for_traversal(
&self,
data: &ElementData,
traversal_flags: TraversalFlags,
) -> bool {
if traversal_flags.for_animation_only() {
// In animation-only restyle we never touch snapshots and don't
// care about them. But we can't assert '!self.handled_snapshot()'
// here since there are some cases that a second animation-only
// restyle which is a result of normal restyle (e.g. setting
// animation-name in normal restyle and creating a new CSS
// animation in a SequentialTask) is processed after the normal
// traversal in that we had elements that handled snapshot.
return data.has_styles() &&
!data.hint.has_animation_hint_or_recascade();
}
if traversal_flags.contains(TraversalFlags::UnstyledOnly) {
// We don't process invalidations in UnstyledOnly mode.
return data.has_styles();
}
if self.has_snapshot() && !self.handled_snapshot() {
return false;
}
data.has_styles() && !data.hint.has_non_animation_invalidations()
}
/// Returns whether the element's styles are up-to-date after traversal
/// (i.e. in post traversal).
fn has_current_styles(&self, data: &ElementData) -> bool {
if self.has_snapshot() && !self.handled_snapshot() {
return false;
}
data.has_styles() &&
// TODO(hiro): When an animating element moved into subtree of
// contenteditable element, there remains animation restyle hints in
// post traversal. It's generally harmless since the hints will be
// processed in a next styling but ideally it should be processed soon.
//
// Without this, we get failures in:
// layout/style/crashtests/1383319.html
// layout/style/crashtests/1383001.html
//
// https://bugzilla.mozilla.org/show_bug.cgi?id=1389675 tracks fixing
// this.
!data.hint.has_non_animation_invalidations()
}
/// Flag that this element has a descendant for style processing.
///
/// Only safe to call with exclusive access to the element.
unsafe fn set_dirty_descendants(&self);
/// Flag that this element has no descendant for style processing.
///
/// Only safe to call with exclusive access to the element.
unsafe fn unset_dirty_descendants(&self);
    /// Similar to dirty_descendants, but representing that a descendant of
    /// the element needs to be updated in the animation-only traversal.
fn has_animation_only_dirty_descendants(&self) -> bool {
false
}
/// Flag that this element has a descendant for animation-only restyle
/// processing.
///
/// Only safe to call with exclusive access to the element.
unsafe fn set_animation_only_dirty_descendants(&self) {
}
/// Flag that this element has no descendant for animation-only restyle processing.
///
/// Only safe to call with exclusive access to the element.
unsafe fn unset_animation_only_dirty_descendants(&self) {
}
    /// Clear all bits describing the dirtiness of descendants.
///
/// In Gecko, this corresponds to the regular dirty descendants bit, the
/// animation-only dirty descendants bit, and the lazy frame construction
/// descendants bit.
unsafe fn clear_descendant_bits(&self) { self.unset_dirty_descendants(); }
/// Clear all element flags related to dirtiness.
///
/// In Gecko, this corresponds to the regular dirty descendants bit, the
/// animation-only dirty descendants bit, the lazy frame construction bit,
/// and the lazy frame construction descendants bit.
unsafe fn clear_dirty_bits(&self) { self.unset_dirty_descendants(); }
/// Returns true if this element is a visited link.
///
/// Servo doesn't support visited styles yet.
fn is_visited_link(&self) -> bool { false }
/// Returns true if this element is native anonymous (only Gecko has native
/// anonymous content).
fn is_native_anonymous(&self) -> bool { false }
/// Returns the pseudo-element implemented by this element, if any.
///
/// Gecko traverses pseudo-elements during the style traversal, and we need
/// to know this so we can properly grab the pseudo-element style from the
/// parent element.
///
    /// Note that we still need to compute the pseudo-elements beforehand,
    /// since otherwise we don't know whether we need to create an element or not.
///
/// Servo doesn't have to deal with this.<|fim▁hole|>
/// Atomically stores the number of children of this node that we will
/// need to process during bottom-up traversal.
fn store_children_to_process(&self, n: isize);
/// Atomically notes that a child has been processed during bottom-up
/// traversal. Returns the number of children left to process.
fn did_process_child(&self) -> isize;
/// Gets a reference to the ElementData container, or creates one.
///
/// Unsafe because it can race to allocate and leak if not used with
/// exclusive access to the element.
unsafe fn ensure_data(&self) -> AtomicRefMut<ElementData>;
/// Clears the element data reference, if any.
///
/// Unsafe following the same reasoning as ensure_data.
unsafe fn clear_data(&self);
/// Gets a reference to the ElementData container.
fn get_data(&self) -> Option<&AtomicRefCell<ElementData>>;
/// Immutably borrows the ElementData.
fn borrow_data(&self) -> Option<AtomicRef<ElementData>> {
self.get_data().map(|x| x.borrow())
}
/// Mutably borrows the ElementData.
fn mutate_data(&self) -> Option<AtomicRefMut<ElementData>> {
self.get_data().map(|x| x.borrow_mut())
}
/// Whether we should skip any root- or item-based display property
/// blockification on this element. (This function exists so that Gecko
/// native anonymous content can opt out of this style fixup.)
fn skip_root_and_item_based_display_fixup(&self) -> bool;
/// Sets selector flags, which indicate what kinds of selectors may have
/// matched on this element and therefore what kind of work may need to
/// be performed when DOM state changes.
///
/// This is unsafe, like all the flag-setting methods, because it's only safe
/// to call with exclusive access to the element. When setting flags on the
/// parent during parallel traversal, we use SequentialTask to queue up the
/// set to run after the threads join.
unsafe fn set_selector_flags(&self, flags: ElementSelectorFlags);
/// Returns true if the element has all the specified selector flags.
fn has_selector_flags(&self, flags: ElementSelectorFlags) -> bool;
    /// In Gecko, the element has a flag representing whether it may have any
    /// kind of animation, used to bail out of animation processing early.
    /// Servo doesn't have such a flag.
fn may_have_animations(&self) -> bool { false }
/// Creates a task to update various animation state on a given (pseudo-)element.
#[cfg(feature = "gecko")]
fn update_animations(&self,
before_change_style: Option<Arc<ComputedValues>>,
tasks: UpdateAnimationsTasks);
/// Creates a task to process post animation on a given element.
#[cfg(feature = "gecko")]
fn process_post_animation(&self, tasks: PostAnimationTasks);
/// Returns true if the element has relevant animations. Relevant
/// animations are those animations that are affecting the element's style
/// or are scheduled to do so in the future.
fn has_animations(&self) -> bool;
/// Returns true if the element has a CSS animation.
fn has_css_animations(&self) -> bool;
/// Returns true if the element has a CSS transition (including running transitions and
/// completed transitions).
fn has_css_transitions(&self) -> bool;
/// Returns true if the element has animation restyle hints.
fn has_animation_restyle_hints(&self) -> bool {
let data = match self.borrow_data() {
Some(d) => d,
None => return false,
};
return data.hint.has_animation_hint()
}
/// Returns the anonymous content for the current element's XBL binding,
    /// if any.
///
/// This is used in Gecko for XBL and shadow DOM.
fn xbl_binding_anonymous_content(&self) -> Option<Self::ConcreteNode> {
None
}
/// Return the element which we can use to look up rules in the selector
/// maps.
///
/// This is always the element itself, except in the case where we are an
/// element-backed pseudo-element, in which case we return the originating
/// element.
fn rule_hash_target(&self) -> Self {
let is_implemented_pseudo =
self.implemented_pseudo_element().is_some();
if is_implemented_pseudo {
self.closest_non_native_anonymous_ancestor().unwrap()
} else {
*self
}
}
/// Implements Gecko's `nsBindingManager::WalkRules`.
///
/// Returns whether to cut off the inheritance.
fn each_xbl_stylist<'a, F>(&self, _: F) -> bool
where
Self: 'a,
F: FnMut(AtomicRef<'a, Stylist>),
{
false
}
/// Gets the current existing CSS transitions, by |property, end value| pairs in a FnvHashMap.
#[cfg(feature = "gecko")]
fn get_css_transitions_info(&self)
-> FnvHashMap<LonghandId, Arc<AnimationValue>>;
/// Does a rough (and cheap) check for whether or not transitions might need to be updated that
/// will quickly return false for the common case of no transitions specified or running. If
/// this returns false, we definitely don't need to update transitions but if it returns true
/// we can perform the more thoroughgoing check, needs_transitions_update, to further
/// reduce the possibility of false positives.
#[cfg(feature = "gecko")]
fn might_need_transitions_update(
&self,
old_values: Option<&ComputedValues>,
new_values: &ComputedValues
) -> bool;
/// Returns true if one of the transitions needs to be updated on this element. We check all
/// the transition properties to make sure that updating transitions is necessary.
    /// This method should only be called if might_need_transitions_update returns true when
/// passed the same parameters.
#[cfg(feature = "gecko")]
fn needs_transitions_update(
&self,
before_change_style: &ComputedValues,
after_change_style: &ComputedValues
) -> bool;
/// Returns true if we need to update transitions for the specified property on this element.
#[cfg(feature = "gecko")]
fn needs_transitions_update_per_property(
&self,
property: &LonghandId,
combined_duration: f32,
before_change_style: &ComputedValues,
after_change_style: &ComputedValues,
existing_transitions: &FnvHashMap<LonghandId, Arc<AnimationValue>>
) -> bool;
/// Returns the value of the `xml:lang=""` attribute (or, if appropriate,
/// the `lang=""` attribute) on this element.
fn lang_attr(&self) -> Option<AttrValue>;
/// Returns whether this element's language matches the language tag
/// `value`. If `override_lang` is not `None`, it specifies the value
/// of the `xml:lang=""` or `lang=""` attribute to use in place of
/// looking at the element and its ancestors. (This argument is used
/// to implement matching of `:lang()` against snapshots.)
fn match_element_lang(
&self,
override_lang: Option<Option<AttrValue>>,
value: &PseudoClassStringArg
) -> bool;
/// Returns whether this element is the main body element of the HTML
/// document it is on.
fn is_html_document_body_element(&self) -> bool;
/// Generate the proper applicable declarations due to presentational hints,
/// and insert them into `hints`.
fn synthesize_presentational_hints_for_legacy_attributes<V>(
&self,
visited_handling: VisitedHandlingMode,
hints: &mut V,
)
where
V: Push<ApplicableDeclarationBlock>;
}
/// TNode and TElement aren't Send because we want to be careful and explicit
/// about our parallel traversal. However, there are certain situations
/// (including but not limited to the traversal) where we need to send DOM
/// objects to other threads.
///
/// That's the reason why `SendNode` exists.
#[derive(Clone, Debug, PartialEq)]
pub struct SendNode<N: TNode>(N);
unsafe impl<N: TNode> Send for SendNode<N> {}
impl<N: TNode> SendNode<N> {
/// Unsafely construct a SendNode.
pub unsafe fn new(node: N) -> Self {
SendNode(node)
}
}
impl<N: TNode> Deref for SendNode<N> {
type Target = N;
fn deref(&self) -> &N {
&self.0
}
}
/// Same reason as for the existence of SendNode, SendElement does the proper
/// things for a given `TElement`.
#[derive(Debug, Eq, Hash, PartialEq)]
pub struct SendElement<E: TElement>(E);
unsafe impl<E: TElement> Send for SendElement<E> {}
impl<E: TElement> SendElement<E> {
/// Unsafely construct a SendElement.
pub unsafe fn new(el: E) -> Self {
SendElement(el)
}
}
impl<E: TElement> Deref for SendElement<E> {
type Target = E;
fn deref(&self) -> &E {
&self.0
}
}<|fim▁end|> | fn implemented_pseudo_element(&self) -> Option<PseudoElement> { None } |
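// A minimal, self-contained sketch (not from the Servo sources above) of the
// SendNode/SendElement pattern: wrap a handle that is not Send and unsafely
// assert Send so a scoped parallel traversal can move it across threads.
// `FakeNode` is a hypothetical stand-in for a real TNode implementation.
use std::ops::Deref;

#[derive(Clone, Copy, Debug)]
struct FakeNode(*const u8); // raw pointers are !Send by default

struct SendFakeNode(FakeNode);

// Safety: sound only while the traversal guarantees scoped, exclusive access.
unsafe impl Send for SendFakeNode {}

impl Deref for SendFakeNode {
    type Target = FakeNode;
    fn deref(&self) -> &FakeNode {
        &self.0
    }
}

fn main() {
    let send = SendFakeNode(FakeNode(std::ptr::null()));
    std::thread::spawn(move || println!("{:?}", *send))
        .join()
        .unwrap();
}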
<|file_name|>ExportGeometryInfo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
***************************************************************************
ExportGeometryInfo.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from qgis.PyQt.QtGui import QIcon
from qgis.PyQt.QtCore import QVariant
from qgis.core import (QgsCoordinateTransform,
QgsField,
QgsFields,
QgsWkbTypes,
QgsFeatureSink,
QgsDistanceArea,
QgsProcessingUtils,
QgsProcessingParameterFeatureSource,
QgsProcessingParameterEnum,
QgsProcessingParameterFeatureSink)
from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm
from processing.tools import vector
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
<|fim▁hole|> OUTPUT = 'OUTPUT'
def icon(self):
return QIcon(os.path.join(pluginPath, 'images', 'ftools', 'export_geometry.png'))
def tags(self):
return self.tr('export,add,information,measurements,areas,lengths,perimeters,latitudes,longitudes,x,y,z,extract,points,lines,polygons').split(',')
def group(self):
return self.tr('Vector geometry')
def __init__(self):
super().__init__()
self.export_z = False
self.export_m = False
self.distance_area = None
self.calc_methods = [self.tr('Layer CRS'),
self.tr('Project CRS'),
self.tr('Ellipsoidal')]
def initAlgorithm(self, config=None):
self.addParameter(QgsProcessingParameterFeatureSource(self.INPUT,
self.tr('Input layer')))
self.addParameter(QgsProcessingParameterEnum(self.METHOD,
self.tr('Calculate using'), options=self.calc_methods, defaultValue=0))
self.addParameter(QgsProcessingParameterFeatureSink(self.OUTPUT, self.tr('Added geom info')))
def name(self):
return 'exportaddgeometrycolumns'
def displayName(self):
return self.tr('Export geometry columns')
def processAlgorithm(self, parameters, context, feedback):
source = self.parameterAsSource(parameters, self.INPUT, context)
method = self.parameterAsEnum(parameters, self.METHOD, context)
wkb_type = source.wkbType()
fields = source.fields()
new_fields = QgsFields()
if QgsWkbTypes.geometryType(wkb_type) == QgsWkbTypes.PolygonGeometry:
new_fields.append(QgsField('area', QVariant.Double))
new_fields.append(QgsField('perimeter', QVariant.Double))
elif QgsWkbTypes.geometryType(wkb_type) == QgsWkbTypes.LineGeometry:
new_fields.append(QgsField('length', QVariant.Double))
else:
new_fields.append(QgsField('xcoord', QVariant.Double))
new_fields.append(QgsField('ycoord', QVariant.Double))
if QgsWkbTypes.hasZ(source.wkbType()):
self.export_z = True
new_fields.append(QgsField('zcoord', QVariant.Double))
if QgsWkbTypes.hasM(source.wkbType()):
self.export_m = True
new_fields.append(QgsField('mvalue', QVariant.Double))
fields = QgsProcessingUtils.combineFields(fields, new_fields)
(sink, dest_id) = self.parameterAsSink(parameters, self.OUTPUT, context,
fields, wkb_type, source.sourceCrs())
coordTransform = None
# Calculate with:
# 0 - layer CRS
# 1 - project CRS
# 2 - ellipsoidal
self.distance_area = QgsDistanceArea()
if method == 2:
self.distance_area.setSourceCrs(source.sourceCrs())
self.distance_area.setEllipsoid(context.project().ellipsoid())
elif method == 1:
coordTransform = QgsCoordinateTransform(source.sourceCrs(), context.project().crs())
features = source.getFeatures()
total = 100.0 / source.featureCount() if source.featureCount() else 0
for current, f in enumerate(features):
if feedback.isCanceled():
break
outFeat = f
attrs = f.attributes()
inGeom = f.geometry()
if inGeom:
if coordTransform is not None:
inGeom.transform(coordTransform)
if inGeom.type() == QgsWkbTypes.PointGeometry:
attrs.extend(self.point_attributes(inGeom))
elif inGeom.type() == QgsWkbTypes.PolygonGeometry:
attrs.extend(self.polygon_attributes(inGeom))
else:
attrs.extend(self.line_attributes(inGeom))
outFeat.setAttributes(attrs)
sink.addFeature(outFeat, QgsFeatureSink.FastInsert)
feedback.setProgress(int(current * total))
return {self.OUTPUT: dest_id}
def point_attributes(self, geometry):
pt = None
if not geometry.isMultipart():
pt = geometry.geometry()
else:
if geometry.numGeometries() > 0:
pt = geometry.geometryN(0)
attrs = []
if pt:
attrs.append(pt.x())
attrs.append(pt.y())
# add point z/m
if self.export_z:
attrs.append(pt.z())
if self.export_m:
attrs.append(pt.m())
return attrs
def line_attributes(self, geometry):
return [self.distance_area.measureLength(geometry)]
def polygon_attributes(self, geometry):
area = self.distance_area.measureArea(geometry)
perimeter = self.distance_area.measurePerimeter(geometry)
return [area, perimeter]<|fim▁end|> | class ExportGeometryInfo(QgisAlgorithm):
INPUT = 'INPUT'
METHOD = 'CALC_METHOD' |
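# A minimal sketch (assumption: run inside a QGIS session; not part of the
# plugin above) of how the three CALC_METHOD choices map onto QgsDistanceArea
# configuration, mirroring what processAlgorithm does.
from qgis.core import QgsCoordinateTransform, QgsDistanceArea

def configure_measurement(method, source_crs, project):
    """Return a (QgsDistanceArea, optional QgsCoordinateTransform) pair."""
    distance_area = QgsDistanceArea()
    transform = None
    if method == 2:
        # Ellipsoidal: measure on the project ellipsoid in the layer CRS.
        distance_area.setSourceCrs(source_crs)
        distance_area.setEllipsoid(project.ellipsoid())
    elif method == 1:
        # Project CRS: reproject each geometry before measuring.
        transform = QgsCoordinateTransform(source_crs, project.crs())
    # method == 0: planar measurement in the layer CRS; nothing to configure.
    return distance_area, transform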
<|file_name|>test_admin_views.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import json
from django.test import TestCase, override_settings
from django.utils.http import urlquote
from django.core.urlresolvers import reverse
from django.contrib.auth.models import Permission
from django.core.files.uploadedfile import SimpleUploadedFile
from django.template.defaultfilters import filesizeformat
# Get the chars that Django considers safe to leave unescaped in a URL
# This list changed in Django 1.8: https://github.com/django/django/commit/e167e96cfea670422ca75d0b35fe7c4195f25b63
try:
from django.utils.http import RFC3986_SUBDELIMS
urlquote_safechars = RFC3986_SUBDELIMS + str('/~:@')
except ImportError: # < Django 1.8
urlquote_safechars = '/'
from wagtail.tests.utils import WagtailTestUtils
from wagtail.wagtailimages.utils import generate_signature
from .utils import Image, get_test_image_file
class TestImageIndexView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
def get(self, params={}):
return self.client.get(reverse('wagtailimages:index'), params)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/images/index.html')
def test_search(self):
response = self.get({'q': "Hello"})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['query_string'], "Hello")
def test_pagination(self):
pages = ['0', '1', '-1', '9999', 'Not a page']
for page in pages:
response = self.get({'p': page})
self.assertEqual(response.status_code, 200)
def test_ordering(self):
orderings = ['title', '-created_at']
for ordering in orderings:
response = self.get({'ordering': ordering})
self.assertEqual(response.status_code, 200)
class TestImageAddView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
def get(self, params={}):
return self.client.get(reverse('wagtailimages:add'), params)
def post(self, post_data={}):
return self.client.post(reverse('wagtailimages:add'), post_data)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/images/add.html')
def test_add(self):
response = self.post({
'title': "Test image",
'file': SimpleUploadedFile('test.png', get_test_image_file().file.getvalue()),
})
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailimages:index'))
# Check that the image was created
images = Image.objects.filter(title="Test image")
self.assertEqual(images.count(), 1)
# Test that size was populated correctly
image = images.first()
self.assertEqual(image.width, 640)
self.assertEqual(image.height, 480)
# Test that the file_size field was set
self.assertTrue(image.file_size)
def test_add_no_file_selected(self):
response = self.post({
'title': "Test image",
})
# Shouldn't redirect anywhere
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/images/add.html')
# The form should have an error
self.assertFormError(response, 'form', 'file', "This field is required.")
@override_settings(WAGTAILIMAGES_MAX_UPLOAD_SIZE=1)
def test_add_too_large_file(self):
file_content = get_test_image_file().file.getvalue()
response = self.post({
'title': "Test image",
'file': SimpleUploadedFile('test.png', file_content),
})
# Shouldn't redirect anywhere
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/images/add.html')
# The form should have an error
self.assertFormError(response, 'form', 'file', "This file is too big ({file_size}). Maximum filesize {max_file_size}.".format(
file_size=filesizeformat(len(file_content)),
max_file_size=filesizeformat(1),
))
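# Illustrative helper (not part of Wagtail's test suite) showing how the
# oversize-upload message asserted above is assembled; filesizeformat comes
# from the imports at the top of this module.
def _expected_upload_error(file_size, max_file_size):
    return "This file is too big ({file_size}). Maximum filesize {max_file_size}.".format(
        file_size=filesizeformat(file_size),
        max_file_size=filesizeformat(max_file_size),
    )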
class TestImageEditView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
# Create an image to edit
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
def get(self, params={}):
return self.client.get(reverse('wagtailimages:edit', args=(self.image.id,)), params)
def post(self, post_data={}):
return self.client.post(reverse('wagtailimages:edit', args=(self.image.id,)), post_data)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/images/edit.html')
def test_edit(self):
response = self.post({
'title': "Edited",
})
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailimages:index'))
# Check that the image was edited
image = Image.objects.get(id=self.image.id)
self.assertEqual(image.title, "Edited")
def test_edit_with_new_image_file(self):
file_content = get_test_image_file().file.getvalue()
# Change the file size of the image
self.image.file_size = 100000
self.image.save()
response = self.post({
'title': "Edited",
'file': SimpleUploadedFile('new.png', file_content),
})
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailimages:index'))
# Check that the image file size changed (assume it changed to the correct value)
image = Image.objects.get(id=self.image.id)
self.assertNotEqual(image.file_size, 100000)
def test_with_missing_image_file(self):
self.image.file.delete(False)
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/images/edit.html')
class TestImageDeleteView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
# Create an image to edit
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
def get(self, params={}):
return self.client.get(reverse('wagtailimages:delete', args=(self.image.id,)), params)
def post(self, post_data={}):
return self.client.post(reverse('wagtailimages:delete', args=(self.image.id,)), post_data)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/images/confirm_delete.html')
def test_delete(self):
response = self.post({
'hello': 'world'
})
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailimages:index'))
# Check that the image was deleted
images = Image.objects.filter(title="Test image")
self.assertEqual(images.count(), 0)
class TestImageChooserView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
def get(self, params={}):
return self.client.get(reverse('wagtailimages:chooser'), params)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/chooser/chooser.html')
self.assertTemplateUsed(response, 'wagtailimages/chooser/chooser.js')
def test_search(self):
response = self.get({'q': "Hello"})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['query_string'], "Hello")
def test_pagination(self):
pages = ['0', '1', '-1', '9999', 'Not a page']
for page in pages:
response = self.get({'p': page})
self.assertEqual(response.status_code, 200)
class TestImageChooserChosenView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
# Create an image to edit
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
def get(self, params={}):
return self.client.get(reverse('wagtailimages:image_chosen', args=(self.image.id,)), params)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/chooser/image_chosen.js')
# TODO: Test posting
class TestImageChooserUploadView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
def get(self, params={}):
return self.client.get(reverse('wagtailimages:chooser_upload'), params)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/chooser/chooser.html')
self.assertTemplateUsed(response, 'wagtailimages/chooser/chooser.js')
def test_upload(self):
response = self.client.post(reverse('wagtailimages:chooser_upload'), {
'title': "Test image",
'file': SimpleUploadedFile('test.png', get_test_image_file().file.getvalue()),
})
# Check response
self.assertEqual(response.status_code, 200)
# Check that the image was created
images = Image.objects.filter(title="Test image")
self.assertEqual(images.count(), 1)
# Test that size was populated correctly
image = images.first()
self.assertEqual(image.width, 640)
self.assertEqual(image.height, 480)
def test_upload_no_file_selected(self):
response = self.client.post(reverse('wagtailimages:chooser_upload'), {
'title': "Test image",
})
# Shouldn't redirect anywhere
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/chooser/chooser.html')
# The form should have an error
self.assertFormError(response, 'uploadform', 'file', "This field is required.")
class TestMultipleImageUploader(TestCase, WagtailTestUtils):
"""
This tests the multiple image upload views located in wagtailimages/views/multiple.py
"""
def setUp(self):
self.login()
# Create an image for running tests on
self.image = Image.objects.create(
title="Test image",<|fim▁hole|> )
def test_add(self):
"""
This tests that the add view responds correctly on a GET request
"""
# Send request
response = self.client.get(reverse('wagtailimages:add_multiple'))
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/multiple/add.html')
@override_settings(WAGTAILIMAGES_MAX_UPLOAD_SIZE=1000)
def test_add_max_file_size_context_variables(self):
response = self.client.get(reverse('wagtailimages:add_multiple'))
self.assertEqual(response.context['max_filesize'], 1000)
self.assertEqual(response.context['error_max_file_size'], "This file is too big. Maximum filesize 1000\xa0bytes.")
def test_add_post(self):
"""
This tests that a POST request to the add view saves the image and returns an edit form
"""
response = self.client.post(reverse('wagtailimages:add_multiple'), {
'files[]': SimpleUploadedFile('test.png', get_test_image_file().file.getvalue()),
}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
# Check response
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertTemplateUsed(response, 'wagtailimages/multiple/edit_form.html')
# Check image
self.assertIn('image', response.context)
self.assertEqual(response.context['image'].title, 'test.png')
self.assertTrue(response.context['image'].file_size)
# Check form
self.assertIn('form', response.context)
self.assertEqual(response.context['form'].initial['title'], 'test.png')
# Check JSON
response_json = json.loads(response.content.decode())
self.assertIn('image_id', response_json)
self.assertIn('form', response_json)
self.assertIn('success', response_json)
self.assertEqual(response_json['image_id'], response.context['image'].id)
self.assertTrue(response_json['success'])
def test_add_post_noajax(self):
"""
This tests that only AJAX requests are allowed to POST to the add view
"""
response = self.client.post(reverse('wagtailimages:add_multiple'), {})
# Check response
self.assertEqual(response.status_code, 400)
def test_add_post_nofile(self):
"""
This tests that the add view checks for a file when a user POSTs to it
"""
response = self.client.post(reverse('wagtailimages:add_multiple'), {}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
# Check response
self.assertEqual(response.status_code, 400)
def test_add_post_badfile(self):
"""
This tests that the add view checks for a file when a user POSTs to it
"""
response = self.client.post(reverse('wagtailimages:add_multiple'), {
'files[]': SimpleUploadedFile('test.png', b"This is not an image!"),
}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
# Check response
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
# Check JSON
response_json = json.loads(response.content.decode())
self.assertNotIn('image_id', response_json)
self.assertNotIn('form', response_json)
self.assertIn('success', response_json)
self.assertIn('error_message', response_json)
self.assertFalse(response_json['success'])
self.assertEqual(response_json['error_message'], "Not a supported image format. Supported formats: GIF, JPEG, PNG.")
def test_edit_get(self):
"""
This tests that a GET request to the edit view returns a 405 "METHOD NOT ALLOWED" response
"""
# Send request
response = self.client.get(reverse('wagtailimages:edit_multiple', args=(self.image.id, )))
# Check response
self.assertEqual(response.status_code, 405)
def test_edit_post(self):
"""
This tests that a POST request to the edit view edits the image
"""
# Send request
response = self.client.post(reverse('wagtailimages:edit_multiple', args=(self.image.id, )), {
('image-%d-title' % self.image.id): "New title!",
('image-%d-tags' % self.image.id): "",
}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
# Check response
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
# Check JSON
response_json = json.loads(response.content.decode())
self.assertIn('image_id', response_json)
self.assertNotIn('form', response_json)
self.assertIn('success', response_json)
self.assertEqual(response_json['image_id'], self.image.id)
self.assertTrue(response_json['success'])
def test_edit_post_noajax(self):
"""
This tests that a POST request to the edit view without AJAX returns a 400 response
"""
# Send request
response = self.client.post(reverse('wagtailimages:edit_multiple', args=(self.image.id, )), {
('image-%d-title' % self.image.id): "New title!",
('image-%d-tags' % self.image.id): "",
})
# Check response
self.assertEqual(response.status_code, 400)
def test_edit_post_validation_error(self):
"""
This tests that a POST request to the edit page returns a json document with "success=False"
and a form with the validation error indicated
"""
# Send request
response = self.client.post(reverse('wagtailimages:edit_multiple', args=(self.image.id, )), {
('image-%d-title' % self.image.id): "", # Required
('image-%d-tags' % self.image.id): "",
}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
# Check response
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertTemplateUsed(response, 'wagtailimages/multiple/edit_form.html')
# Check that a form error was raised
self.assertFormError(response, 'form', 'title', "This field is required.")
# Check JSON
response_json = json.loads(response.content.decode())
self.assertIn('image_id', response_json)
self.assertIn('form', response_json)
self.assertIn('success', response_json)
self.assertEqual(response_json['image_id'], self.image.id)
self.assertFalse(response_json['success'])
def test_delete_get(self):
"""
This tests that a GET request to the delete view returns a 405 "METHOD NOT ALLOWED" response
"""
# Send request
response = self.client.get(reverse('wagtailimages:delete_multiple', args=(self.image.id, )))
# Check response
self.assertEqual(response.status_code, 405)
def test_delete_post(self):
"""
This tests that a POST request to the delete view deletes the image
"""
# Send request
response = self.client.post(reverse('wagtailimages:delete_multiple', args=(self.image.id, )), HTTP_X_REQUESTED_WITH='XMLHttpRequest')
# Check response
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
# Make sure the image is deleted
self.assertFalse(Image.objects.filter(id=self.image.id).exists())
# Check JSON
response_json = json.loads(response.content.decode())
self.assertIn('image_id', response_json)
self.assertIn('success', response_json)
self.assertEqual(response_json['image_id'], self.image.id)
self.assertTrue(response_json['success'])
def test_delete_post_noajax(self):
"""
This tests that a POST request to the delete view without AJAX returns a 400 response
"""
# Send request
response = self.client.post(reverse('wagtailimages:delete_multiple', args=(self.image.id, )))
# Check response
self.assertEqual(response.status_code, 400)
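# Hedged sketch (not part of the suite) of how the signed image URL checked
# by TestGenerateURLView below is assembled; the image_id and filter spec are
# illustrative, while generate_signature and urlquote_safechars come from the
# imports at the top of this module.
def _signed_serve_url(image_id, filter_spec):
    signature = generate_signature(image_id, filter_spec).decode()
    return '/images/%s/%d/%s/' % (
        urlquote(signature, safe=urlquote_safechars),
        image_id,
        filter_spec,
    )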
class TestURLGeneratorView(TestCase, WagtailTestUtils):
def setUp(self):
# Create an image for running tests on
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
# Login
self.user = self.login()
def test_get(self):
"""
This tests that the view responds correctly for a user with edit permissions on this image
"""
# Get
response = self.client.get(reverse('wagtailimages:url_generator', args=(self.image.id, )))
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/images/url_generator.html')
def test_get_bad_permissions(self):
"""
        This tests that the view gives a 403 if a user without correct permissions attempts to access it
"""
# Remove privileges from user
self.user.is_superuser = False
self.user.user_permissions.add(
Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
)
self.user.save()
# Get
response = self.client.get(reverse('wagtailimages:url_generator', args=(self.image.id, )))
# Check response
self.assertEqual(response.status_code, 403)
class TestGenerateURLView(TestCase, WagtailTestUtils):
def setUp(self):
# Create an image for running tests on
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
# Login
self.user = self.login()
def test_get(self):
"""
This tests that the view responds correctly for a user with edit permissions on this image
"""
# Get
response = self.client.get(reverse('wagtailimages:generate_url', args=(self.image.id, 'fill-800x600')))
# Check response
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
# Check JSON
content_json = json.loads(response.content.decode())
self.assertEqual(set(content_json.keys()), set(['url', 'preview_url']))
expected_url = 'http://localhost/images/%(signature)s/%(image_id)d/fill-800x600/' % {
'signature': urlquote(generate_signature(self.image.id, 'fill-800x600').decode(), safe=urlquote_safechars),
'image_id': self.image.id,
}
self.assertEqual(content_json['url'], expected_url)
expected_preview_url = reverse('wagtailimages:preview', args=(self.image.id, 'fill-800x600'))
self.assertEqual(content_json['preview_url'], expected_preview_url)
def test_get_bad_permissions(self):
"""
        This tests that the view gives a 403 if a user without correct permissions attempts to access it
"""
# Remove privileges from user
self.user.is_superuser = False
self.user.user_permissions.add(
Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
)
self.user.save()
# Get
response = self.client.get(reverse('wagtailimages:generate_url', args=(self.image.id, 'fill-800x600')))
# Check response
self.assertEqual(response.status_code, 403)
self.assertEqual(response['Content-Type'], 'application/json')
# Check JSON
self.assertJSONEqual(response.content.decode(), json.dumps({
'error': 'You do not have permission to generate a URL for this image.',
}))
def test_get_bad_image(self):
"""
This tests that the view gives a 404 response if a user attempts to use it with an image which doesn't exist
"""
# Get
response = self.client.get(reverse('wagtailimages:generate_url', args=(self.image.id + 1, 'fill-800x600')))
# Check response
self.assertEqual(response.status_code, 404)
self.assertEqual(response['Content-Type'], 'application/json')
# Check JSON
self.assertJSONEqual(response.content.decode(), json.dumps({
'error': 'Cannot find image.',
}))
def test_get_bad_filter_spec(self):
"""
This tests that the view gives a 400 response if the user attempts to use it with an invalid filter spec
"""
# Get
response = self.client.get(reverse('wagtailimages:generate_url', args=(self.image.id, 'bad-filter-spec')))
# Check response
self.assertEqual(response.status_code, 400)
self.assertEqual(response['Content-Type'], 'application/json')
# Check JSON
self.assertJSONEqual(response.content.decode(), json.dumps({
'error': 'Invalid filter spec.',
}))
class TestPreviewView(TestCase, WagtailTestUtils):
def setUp(self):
# Create an image for running tests on
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
# Login
self.user = self.login()
def test_get(self):
"""
Test a valid GET request to the view
"""
# Get the image
response = self.client.get(reverse('wagtailimages:preview', args=(self.image.id, 'fill-800x600')))
# Check response
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'image/png')
def test_get_invalid_filter_spec(self):
"""
Test that an invalid filter spec returns a 400 response
        This is very unlikely to happen in reality. A user would have
        to create a signature for the invalid filter spec, which can't
        be done with Wagtail's built-in URL generator. We should test
        it anyway though.
"""
# Get the image
response = self.client.get(reverse('wagtailimages:preview', args=(self.image.id, 'bad-filter-spec')))
# Check response
self.assertEqual(response.status_code, 400)<|fim▁end|> | file=get_test_image_file(), |
<|file_name|>util.py<|end_file_name|><|fim▁begin|># collections.abc is new as of Python 3.3, and the ABC aliases in collections
# are deprecated; they will be unavailable in 3.9
try:
import collections.abc as collections
except ImportError:
import collections
import datetime
import logging
try:
import json
except ImportError:
import simplejson as json
import re
def get_log():
return logging.getLogger(__name__.split('.')[0])
class MarathonJsonEncoder(json.JSONEncoder):
"""Custom JSON encoder for Marathon object serialization."""
def default(self, obj):
if hasattr(obj, 'json_repr'):
return self.default(obj.json_repr())
if isinstance(obj, datetime.datetime):
return obj.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
if isinstance(obj, collections.Iterable) and not isinstance(obj, str):
try:
return {k: self.default(v) for k, v in obj.items()}
except AttributeError:
return [self.default(e) for e in obj]
return obj
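# Hypothetical usage sketch (not part of the library): any object exposing
# json_repr() serializes through the encoder above; the task fields are made up.
class _FakeTask(object):
    def json_repr(self, minimal=False):
        return {'id': 'demo-task', 'started_at': datetime.datetime(2020, 1, 1)}

# json.dumps(_FakeTask(), cls=MarathonJsonEncoder)
# -> '{"id": "demo-task", "started_at": "2020-01-01T00:00:00.000000Z"}'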
class MarathonMinimalJsonEncoder(json.JSONEncoder):
"""Custom JSON encoder for Marathon object serialization."""
def default(self, obj):
if hasattr(obj, 'json_repr'):
return self.default(obj.json_repr(minimal=True))
if isinstance(obj, datetime.datetime):
return obj.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
if isinstance(obj, collections.Iterable) and not isinstance(obj, str):
try:
return {k: self.default(v) for k, v in obj.items() if (v or v in (False, 0))}
except AttributeError:
return [self.default(e) for e in obj if (e or e in (False, 0))]
<|fim▁hole|> words = snake_str.split('_')
return words[0] + ''.join(w.capitalize() for w in words[1:])
def to_snake_case(camel_str):
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', camel_str)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
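# Round-trip sketch of the two converters above (illustrative values only,
# assuming both helpers are in scope):
def _demo_case_conversion():
    assert to_camel_case('health_check_results') == 'healthCheckResults'
    assert to_snake_case('healthCheckResults') == 'health_check_results'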
DATETIME_FORMATS = [
'%Y-%m-%dT%H:%M:%S.%fZ',
'%Y-%m-%dT%H:%M:%SZ', # Marathon omits milliseconds when they would be .000
]
def to_datetime(timestamp):
if (timestamp is None or isinstance(timestamp, datetime.datetime)):
return timestamp
else:
for fmt in DATETIME_FORMATS:
try:
return datetime.datetime.strptime(timestamp, fmt).replace(tzinfo=datetime.timezone.utc)
except ValueError:
pass
raise ValueError(f'Unrecognized datetime format: {timestamp}')<|fim▁end|> | return obj
def to_camel_case(snake_str): |
<|file_name|>disk_sync.py<|end_file_name|><|fim▁begin|>import os
import unicodedata
from tendrl.commons.event import Event
from tendrl.commons.message import ExceptionMessage
from tendrl.commons.utils import cmd_utils
from tendrl.commons.utils import etcd_utils
from tendrl.commons.utils import log_utils as logger
def sync():
try:
_keep_alive_for = int(NS.config.data.get("sync_interval", 10)) + 250
disks = get_node_disks()
disk_map = {}
for disk in disks:
            # Create a dict with disk name as key and disk_id as value;
            # it is used to populate the block device disk_id attribute.
_map = dict(disk_id=disks[disk]['disk_id'], ssd=False)
disk_map[disks[disk]['disk_name']] = _map
block_devices = get_node_block_devices(disk_map)
for disk in disks:
if disk_map[disks[disk]['disk_name']]:
disks[disk]['ssd'] = disk_map[disks[disk][
'disk_name']]['ssd']
if "virtio" in disks[disk]["driver"]:
# Virtual disk
NS.tendrl.objects.VirtualDisk(**disks[disk]).save(
ttl=_keep_alive_for
)
else:
# physical disk
NS.tendrl.objects.Disk(**disks[disk]).save(ttl=_keep_alive_for)
for device in block_devices['all']:
NS.tendrl.objects.BlockDevice(**device).save(ttl=_keep_alive_for)
for device_id in block_devices['used']:
etcd_utils.write(
"nodes/%s/LocalStorage/BlockDevices/used/%s" %
(NS.node_context.node_id,
device_id.replace("/", "_").replace("_", "", 1)),
device_id, ttl=_keep_alive_for<|fim▁hole|> etcd_utils.write(
"nodes/%s/LocalStorage/BlockDevices/free/%s" %
(NS.node_context.node_id,
device_id.replace("/", "_").replace("_", "", 1)),
device_id, ttl=_keep_alive_for
)
raw_reference = get_raw_reference()
etcd_utils.write(
"nodes/%s/LocalStorage/DiskRawReference" %
NS.node_context.node_id,
raw_reference,
ttl=_keep_alive_for,
)
except(Exception, KeyError) as ex:
_msg = "node_sync disks sync failed: " + ex.message
Event(
ExceptionMessage(
priority="error",
publisher=NS.publisher_id,
payload={"message": _msg,
"exception": ex}
)
)
def get_node_disks():
disks, disks_map, err = get_disk_details()
if not err:
cmd = cmd_utils.Command('hwinfo --partition')
out, err, rc = cmd.run()
if not err:
for partitions in out.split('\n\n'):
devlist = {"hardware_id": "",
"parent_hardware_id": "",
"sysfs_id": "",
"hardware_class": "",
"model": "",
"partition_name": "",
"device_files": "",
"config_status": "",
}
for partition in partitions.split('\n'):
key = partition.split(':')[0]
if key.strip() == "Unique ID":
devlist["hardware_id"] = \
partition.split(':')[1].lstrip()
if key.strip() == "Parent ID":
devlist["parent_hardware_id"] = \
partition.split(':')[1].lstrip()
if key.strip() == "SysFS ID":
devlist["sysfs_id"] = \
partition.split(':')[1].lstrip()
if key.strip() == "Hardware Class":
devlist["hardware_class"] = \
partition.split(':')[1].lstrip()
if key.strip() == "Model":
devlist["model"] = \
partition.split(':')[1].lstrip().replace('"', "")
if key.strip() == "Device File":
_name = partition.split(':')[1].lstrip()
devlist["partition_name"] = \
"".join(_name.split(" ")[0])
if key.strip() == "Device Files":
devlist["device_files"] = \
partition.split(':')[1].lstrip()
if key.strip() == "Config Status":
devlist["config_status"] = \
partition.split(':')[1].lstrip()
# checking if partition parent id is in collected
# disk_ids or not
if devlist["parent_hardware_id"] in disks_map:
part_name = devlist["partition_name"]
parent = disks_map[devlist["parent_hardware_id"]]
disks[parent]["partitions"][part_name] = devlist
return disks
def get_disk_details():
disks = {}
disks_map = {}
cmd = cmd_utils.Command('hwinfo --disk')
out, err, rc = cmd.run()
if not err:
out = unicodedata.normalize('NFKD', out).encode('utf8', 'ignore') \
if isinstance(out, unicode) \
else unicode(out, errors="ignore").encode('utf8')
for all_disks in out.split('\n\n'):
devlist = {"disk_id": "",
"hardware_id": "",
"parent_id": "",
"disk_name": "",
"sysfs_id": "",
"sysfs_busid": "",
"sysfs_device_link": "",
"hardware_class": "",
"model": "",
"vendor": "",
"device": "",
"rmversion": "",
"serial_no": "",
"driver_modules": "",
"driver": "",
"device_files": "",
"device_number": "",
"bios_id": "",
"geo_bios_edd": "",
"geo_logical": "",
"size": "",
"size_bios_edd": "",
"geo_bios_legacy": "",
"config_status": "",
"partitions": {}
}
for disk in all_disks.split('\n'):
key = disk.split(':')[0]
if key.strip() == "Unique ID":
devlist["hardware_id"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "Parent ID":
devlist["parent_id"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "SysFS ID":
devlist["sysfs_id"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "SysFS BusID":
devlist["sysfs_busid"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "SysFS Device Link":
devlist["sysfs_device_link"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "Hardware Class":
devlist["hardware_class"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "Model":
devlist["model"] = \
disk.split(':')[1].lstrip().replace('"', "")
elif key.strip() == "Vendor":
devlist["vendor"] = \
disk.split(':')[1].replace(" ", "").replace('"', "")
elif key.strip() == "Device":
devlist["device"] = \
disk.split(':')[1].replace(" ", "").replace('"', "")
elif key.strip() == "Revision":
devlist["rmversion"] = \
disk.split(':')[1].lstrip().replace('"', "")
elif key.strip() == "Serial ID":
devlist["serial_no"] = \
disk.split(':')[1].replace(" ", "").replace('"', "")
elif key.strip() == "Driver":
devlist["driver"] = \
disk.split(':')[1].lstrip().replace('"', "")
elif key.strip() == "Driver Modules":
devlist["driver_modules"] = \
disk.split(':')[1].lstrip().replace('"', "")
elif key.strip() == "Device File":
_name = disk.split(':')[1].lstrip()
devlist["disk_name"] = \
"".join(_name.split(" ")[0])
elif key.strip() == "Device Files":
devlist["device_files"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "Device Number":
devlist["device_number"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "BIOS id":
devlist["bios_id"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "Geometry (Logical)":
devlist["geo_logical"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "Capacity":
devlist["size"] = \
disk.split('(')[1].split()[0]
elif key.strip() == "Geometry (BIOS EDD)":
devlist["geo_bios_edd"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "Size (BIOS EDD)":
devlist["size_bios_edd"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "Geometry (BIOS Legacy)":
devlist["geo_bios_legacy"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "Config Status":
devlist["config_status"] = \
disk.split(':')[1].lstrip()
if ("virtio" in devlist["driver"] and
"by-id/virtio" in devlist['device_files']):
# split from:
# /dev/vdc, /dev/disk/by-id/virtio-0200f64e-5892-40ee-8,
# /dev/disk/by-path/virtio-pci-0000:00:08.0
for entry in devlist['device_files'].split(','):
if "by-id/virtio" in entry:
devlist['disk_id'] = entry.split('/')[-1]
break
elif "VMware" in devlist["vendor"]:
devlist["disk_id"] = \
"{vendor}_{device}_{parent_id}_{hardware_id}".format(**devlist)
elif (devlist["vendor"] != "" and
devlist["device"] != "" and
devlist["serial_no"] != ""):
devlist["disk_id"] = (devlist["vendor"] + "_" +
devlist["device"] + "_" + devlist[
"serial_no"])
else:
devlist['disk_id'] = devlist['disk_name']
if devlist["disk_id"] in disks.keys():
                # Multipath presents multiple I/O paths between the server
                # node and a storage array as a single device. If one device
                # is reachable through more than one path, hwinfo and lsblk
                # report the same device details under different device names.
                # To avoid duplicate entries, when multiple devices exist with
                # the same disk_id, the device_name that sorts first
                # alphabetically is stored. This avoids redundant disk
                # entries, and the next sync will populate the same device
                # detail.
if devlist["disk_name"] < disks[
devlist['disk_id']]['disk_name']:
disks[devlist["disk_id"]] = devlist
disks_map[devlist['hardware_id']] = devlist["disk_id"]
else:
disks[devlist["disk_id"]] = devlist
disks_map[devlist['hardware_id']] = devlist["disk_id"]
return disks, disks_map, err
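# Condensed sketch (not part of the module) of the disk_id fallback chain
# implemented above, for a single hwinfo device dict; the keys mirror devlist.
def _derive_disk_id(devlist):
    if "virtio" in devlist["driver"] and "by-id/virtio" in devlist["device_files"]:
        # Prefer the stable /dev/disk/by-id/virtio-* alias for virtual disks.
        for entry in devlist["device_files"].split(','):
            if "by-id/virtio" in entry:
                return entry.split('/')[-1]
    if "VMware" in devlist["vendor"]:
        return "{vendor}_{device}_{parent_id}_{hardware_id}".format(**devlist)
    if devlist["vendor"] and devlist["device"] and devlist["serial_no"]:
        return devlist["vendor"] + "_" + devlist["device"] + "_" + devlist["serial_no"]
    # Fall back to the kernel device name when nothing stable is available.
    return devlist["disk_name"]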
def get_node_block_devices(disks_map):
block_devices = dict(all=list(), free=list(), used=list())
columns = 'NAME,KNAME,PKNAME,MAJ:MIN,FSTYPE,MOUNTPOINT,LABEL,' \
'UUID,RA,RO,RM,SIZE,STATE,OWNER,GROUP,MODE,ALIGNMENT,' \
'MIN-IO,OPT-IO,PHY-SEC,LOG-SEC,ROTA,SCHED,RQ-SIZE,' \
'DISC-ALN,DISC-GRAN,DISC-MAX,DISC-ZERO,TYPE'
keys = columns.split(',')
lsblk = (
"lsblk --all --bytes --noheadings --output='%s' --path --raw" %
columns)
cmd = cmd_utils.Command(lsblk)
out, err, rc = cmd.run()
if not err:
out = unicodedata.normalize('NFKD', out).encode('utf8', 'ignore') \
if isinstance(out, unicode) \
else unicode(out, errors="ignore").encode('utf8')
devlist = map(
lambda line: dict(zip(keys, line.split(' '))),
out.splitlines())
all_parents = []
parent_ids = []
multipath = {}
for dev_info in devlist:
device = dict()
device['device_name'] = dev_info['NAME']
device['device_kernel_name'] = dev_info['KNAME']
device['parent_name'] = dev_info['PKNAME']
device['major_to_minor_no'] = dev_info['MAJ:MIN']
device['fstype'] = dev_info['FSTYPE']
device['mount_point'] = dev_info['MOUNTPOINT']
device['label'] = dev_info['LABEL']
device['fsuuid'] = dev_info['UUID']
device['read_ahead'] = dev_info['RA']
if dev_info['RO'] == '0':
device['read_only'] = False
else:
device['read_only'] = True
if dev_info['RM'] == '0':
device['removable_device'] = False
else:
device['removable_device'] = True
device['size'] = dev_info['SIZE']
device['state'] = dev_info['STATE']
device['owner'] = dev_info['OWNER']
device['group'] = dev_info['GROUP']
device['mode'] = dev_info['MODE']
device['alignment'] = dev_info['ALIGNMENT']
device['min_io_size'] = dev_info['MIN-IO']
device['optimal_io_size'] = dev_info['OPT-IO']
device['phy_sector_size'] = dev_info['PHY-SEC']
device['log_sector_size'] = dev_info['LOG-SEC']
device['device_type'] = dev_info['TYPE']
device['scheduler_name'] = dev_info['SCHED']
device['req_queue_size'] = dev_info['RQ-SIZE']
device['discard_align_offset'] = dev_info['DISC-ALN']
device['discard_granularity'] = dev_info['DISC-GRAN']
device['discard_max_bytes'] = dev_info['DISC-MAX']
device['discard_zeros_data'] = dev_info['DISC-ZERO']
device['rotational'] = dev_info['ROTA']
if dev_info['TYPE'] == 'disk':
device['ssd'] = is_ssd(dev_info['ROTA'])
else:
device['ssd'] = False
if dev_info['TYPE'] == 'part':
device['used'] = True
# if partition is under multipath then parent of multipath
# is assigned
if dev_info['PKNAME'] in multipath.keys():
dev_info['PKNAME'] = multipath[dev_info['PKNAME']]
if dev_info['PKNAME'] in disks_map.keys():
device['disk_id'] = disks_map[
dev_info['PKNAME']]['disk_id']
block_devices['all'].append(device)
block_devices['used'].append(device['device_name'])
if dev_info['TYPE'] == 'disk':
if dev_info['NAME'] in disks_map.keys():
device['disk_id'] = disks_map[dev_info['NAME']]['disk_id']
disks_map[dev_info['NAME']]['ssd'] = device['ssd']
all_parents.append(device)
if dev_info['TYPE'] == 'mpath':
multipath[device['device_kernel_name']] = dev_info['PKNAME']
else:
if dev_info['PKNAME'] in multipath.keys():
dev_info['PKNAME'] = multipath[dev_info['PKNAME']]
parent_ids.append(dev_info['PKNAME'])
for parent in all_parents:
if parent['device_name'] in parent_ids:
parent['used'] = True
block_devices['used'].append(parent['device_name'])
else:
parent['used'] = False
block_devices['free'].append(parent['device_name'])
block_devices['all'].append(parent)
else:
logger.log(
"debug",
NS.publisher_id,
{"message": err}
)
return block_devices
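# Minimal sketch (made-up input) of the positional lsblk parsing used above:
# each --raw output line splits on single spaces and zips onto the column
# names requested in `columns`.
def _parse_lsblk_line(keys, line):
    # e.g. keys[0] == 'NAME' and line starts with '/dev/sda ...'
    return dict(zip(keys, line.split(' ')))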
def get_raw_reference():
base_path = '/dev/disk/'
paths = os.listdir(base_path)
raw_reference = {}
for path in paths:
raw_reference[path] = []
full_path = base_path + path
cmd = cmd_utils.Command("ls -l %s" % full_path)
out, err, rc = cmd.run()
if not err:
out = unicodedata.normalize('NFKD', out).encode('utf8', 'ignore') \
if isinstance(out, unicode) \
else unicode(out, errors="ignore").encode('utf8')
count = 0
for line in out.split('\n'):
if count == 0:
# to skip first line
count = count + 1
continue
line = line.replace(" ", " ")
raw_reference[path].append(line.split(' ', 8)[-1])
else:
logger.log(
"debug",
NS.publisher_id,
{"message": err}
)
return raw_reference
def is_ssd(rotational):
if rotational == '0':
return True
if rotational == '1':
return False
"""Rotational attribute not found for
this device which is not either SSD or HD
"""
return False<|fim▁end|> | )
for device_id in block_devices['free']: |
<|file_name|>apiUtils.js<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (c) 2019 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
var apiUtils = (function() {
"use strict";
var oauthProvider;
var __initOauthProvider = function() {
if (!oauthProvider) {
var pathname = window.location.pathname;
var urlToMatch = ".*/oidc/endpoint/([\\s\\S]*)/usersTokenManagement";
var regExpToMatch = new RegExp(urlToMatch, "g");
var groups = regExpToMatch.exec(pathname);
oauthProvider = groups[1];
}
};
var getAccountAppPasswords = function(userID) {
__initOauthProvider();
var deferred = new $.Deferred();
$.ajax({
url: "/oidc/endpoint/" + oauthProvider + "/app-passwords",
dataType: "json",
data: {user_id: encodeURIComponent(userID)},
headers: {
"Authorization": window.globalAuthHeader, // Basic clientID:clientSecret
"access_token" : window.globalAccessToken // The OAuth access_token acquired in OIDC login,
// which identifies an authenticated user.
},
success: function(response) {
deferred.resolve(response);
},
error: function(jqXHR) {
// Ajax request failed.
console.log('Error on GET for app-passwords: ' + jqXHR.responseText);
deferred.reject(jqXHR);
}
});
return deferred;
};
var getAccountAppTokens = function(userID) {
__initOauthProvider();
var deferred = new $.Deferred();
$.ajax({
url: "/oidc/endpoint/" + oauthProvider + "/app-tokens",
dataType: "json",
data: {user_id: encodeURIComponent(userID)},
headers: {
"Authorization": window.globalAuthHeader, // Basic clientID:clientSecret
"access_token" : window.globalAccessToken // The OAuth access_token acquired in OIDC login,
// which identifies an authenticated user.
},
success: function(response) {
deferred.resolve(response);
},
error: function(jqXHR) {
// Ajax request failed.
console.log('Error on GET for app-tokens: ' + jqXHR.responseText);
deferred.reject(jqXHR);
}
});
return deferred;
};
var deleteAcctAppPasswordToken = function(authID, authType, userID) {
__initOauthProvider();
var deferred = new $.Deferred();
var authTypes = authType + 's';
$.ajax({
url: "/oidc/endpoint/" + oauthProvider + "/" + authTypes + "/" + authID + "?user_id=" + encodeURIComponent(userID),
type: "DELETE",
contentType: "application/x-www-form-urlencoded",
headers: {
"Authorization": window.globalAuthHeader, // Basic clientID:clientSecret
"access_token" : window.globalAccessToken // The OAuth access_token acquired in OIDC login,
// which identifies an authenticated user.
},
success: function(response) {
deferred.resolve(response);
},
error: function(jqXHR) {
deferred.reject(jqXHR);
}
});
return deferred;
};
var deleteAllAppPasswordsTokens = function(userID, authType) {
__initOauthProvider();
var deferred = new $.Deferred();
var authTypes = authType + 's';
$.ajax({
url: "/oidc/endpoint/" + oauthProvider + "/" + authTypes + "?user_id=" + encodeURIComponent(userID),
type: "DELETE",
accept: "application/json",
headers: {
"Authorization": window.globalAuthHeader, // Basic clientID:clientSecret
"access_token" : window.globalAccessToken // The OAuth access_token acquired in OIDC login,
// which identifies an authenticated user.
},
success: function(response) {
deferred.resolve(response);
},
error: function(jqXHR) {
deferred.reject(jqXHR);
}
});
return deferred;
};
var deleteSelectedAppPasswordsTokens = function(authID, authType, name, userID) {
__initOauthProvider();
var deferred = new $.Deferred();
var authTypes = authType + 's';
$.ajax({
url: "/oidc/endpoint/" + oauthProvider + "/" + authTypes + "/" + authID + "?user_id=" + encodeURIComponent(userID),
type: "DELETE",
contentType: "application/x-www-form-urlencoded",
headers: {
"Authorization": window.globalAuthHeader, // Basic clientID:clientSecret
"access_token" : window.globalAccessToken // The OAuth access_token acquired in OIDC login,
// which identifies an authenticated user.
},<|fim▁hole|> deferred.resolve();
},
error: function(jqXHR) {
// Record the authentication that had the error and return it for processing
var response = {status: "failure",
authType: authType,
authID: authID,
name: name
};
deferred.resolve(response);
}
});
return deferred;
};
return {
getAccountAppPasswords: getAccountAppPasswords,
getAccountAppTokens: getAccountAppTokens,
deleteAcctAppPasswordToken: deleteAcctAppPasswordToken,
deleteAllAppPasswordsTokens: deleteAllAppPasswordsTokens,
deleteSelectedAppPasswordsTokens: deleteSelectedAppPasswordsTokens
};
})();<|fim▁end|> | success: function(response) {
table.deleteTableRow(authID); |
<|file_name|>e2e.ts<|end_file_name|><|fim▁begin|><|fim▁hole|> const page = await newE2EPage({
url: '/src/components/select/test/standalone?ionic:_testing=true'
});
const compare = await page.compareScreenshot();
expect(compare).toMatchScreenshot();
});<|fim▁end|> | import { newE2EPage } from '@stencil/core/testing';
test('select: standalone', async () => { |
<|file_name|>box.js<|end_file_name|><|fim▁begin|><|fim▁hole|>var crypto = require('crypto');
var util = require('hexo-util');
var Pattern = util.Pattern;
var testUtil = require('../../util');
function shasum(content){
var hash = crypto.createHash('sha1');
hash.update(content);
return hash.digest('hex');
}
describe('Box', function(){
var Hexo = require('../../../lib/hexo');
var baseDir = pathFn.join(__dirname, 'box_tmp');
var Box = require('../../../lib/box');
function newBox(path){
var hexo = new Hexo(baseDir, {silent: true});
var base = path ? pathFn.join(baseDir, path) : baseDir;
return new Box(hexo, base);
}
before(function(){
return fs.mkdir(baseDir);
});
after(function(){
return fs.rmdir(baseDir);
});
it('constructor - add trailing "/" to the base path', function(){
var box = newBox('foo');
box.base.should.eql(pathFn.join(baseDir, 'foo') + pathFn.sep);
});
it('addProcessor() - no pattern', function(){
var box = newBox();
box.addProcessor(function(){
return 'test';
});
var p = box.processors[0];
p.pattern.match('').should.eql({});
p.process().should.eql('test');
});
it('addProcessor() - with regex', function(){
var box = newBox();
box.addProcessor(/^foo/, function(){
return 'test';
});
var p = box.processors[0];
p.pattern.match('foobar').should.be.ok;
p.pattern.should.be.an.instanceof(Pattern);
p.process().should.eql('test');
});
it('addProcessor() - with pattern', function(){
var box = newBox();
box.addProcessor(new Pattern(/^foo/), function(){
return 'test';
});
var p = box.processors[0];
p.pattern.match('foobar').should.be.ok;
p.pattern.should.be.an.instanceof(Pattern);
p.process().should.eql('test');
});
it('addProcessor() - no fn', function(){
var box = newBox();
try {
box.addProcessor('test');
} catch (err){
err.should.have.property('message', 'fn must be a function');
}
});
it('_loadFiles() - create', function(){
var box = newBox('test');
var path = pathFn.join(box.base, 'a.txt');
return fs.writeFile(path, 'a').then(function(){
return Promise.all([
box._loadFiles(),
fs.stat(path)
]);
}).spread(function(files, stats){
var cacheId = 'test/a.txt';
files.should.eql([
{path: 'a.txt', type: 'create'}
]);
box.Cache.toArray({lean: true}).should.eql([
{_id: cacheId, shasum: shasum('a'), modified: stats.mtime.getTime()}
]);
return fs.rmdir(box.base);
});
});
it('_loadFiles() - update', function(){
var box = newBox('test');
var path = pathFn.join(box.base, 'a.txt');
var cacheId = 'test/a.txt';
var Cache = box.Cache;
return Promise.all([
fs.writeFile(path, 'a'),
Cache.insert({_id: cacheId, shasum: 'a'})
]).then(function(){
return Promise.all([
box._loadFiles(),
fs.stat(path)
]);
}).spread(function(files, stats){
files.should.eql([
{path: 'a.txt', type: 'update'}
]);
Cache.toArray({lean: true}).should.eql([
{_id: cacheId, shasum: shasum('a'), modified: stats.mtime.getTime()}
]);
return fs.rmdir(box.base);
});
});
it('_loadFiles() - skip', function(){
var box = newBox('test');
var path = pathFn.join(box.base, 'a.txt');
var cacheId = 'test/a.txt';
var hash = shasum('a');
var Cache = box.Cache;
var mtime = Date.now();
return Promise.all([
fs.writeFile(path, 'a'),
Cache.insert({_id: cacheId, shasum: hash, modified: mtime})
]).then(function(){
return box._loadFiles();
}).then(function(files){
files.should.eql([
{type: 'skip', path: 'a.txt'}
]);
Cache.toArray({lean: true}).should.eql([
{_id: cacheId, shasum: hash, modified: mtime}
]);
return fs.rmdir(box.base);
});
});
it('_loadFiles() - delete', function(){
var box = newBox('test');
var cacheId = 'test/a.txt';
var Cache = box.Cache;
return Cache.insert({
_id: cacheId,
shasum: 'a'
}).then(function(){
return box._loadFiles();
}).then(function(files){
files.should.eql([
{type: 'delete', path: 'a.txt'}
]);
should.not.exist(Cache.findById(cacheId));
});
});
it('_dispatch()', function(){
var box = newBox();
var path = 'a.txt';
var data;
box.addProcessor(function(file){
box.processingFiles[path].should.be.true;
data = file;
});
return box._dispatch({
path: path,
type: 'create'
}).then(function(){
box.processingFiles[path].should.be.false;
data.source.should.eql(pathFn.join(box.base, path));
data.path.should.eql(path);
data.type.should.eql('create');
data.params.should.eql({});
});
});
it('_dispatch() - params', function(){
var box = newBox();
var data = new Array(2);
box.addProcessor(/(.*).js/, function(file){
data[0] = file;
});
box.addProcessor(function(file){
data[1] = file;
});
return box._dispatch({
path: 'server.js',
type: 'create'
}).then(function(){
data[0].params[1].should.eql('server');
data[1].params.should.eql({});
});
});
it('process()', function(){
var box = newBox('test');
var data = {};
box.addProcessor(function(file){
data[file.path] = file;
});
return Promise.all([
fs.writeFile(pathFn.join(box.base, 'a.txt'), 'a'),
fs.writeFile(pathFn.join(box.base, 'b', 'c.js'), 'c')
]).then(function(){
return box.process();
}).then(function(){
var keys = Object.keys(data);
var key, item;
for (var i = 0, len = keys.length; i < len; i++){
key = keys[i];
item = data[key];
item.path.should.eql(key);
item.source.should.eql(pathFn.join(box.base, key));
item.type.should.eql('create');
item.params.should.eql({});
}
return fs.rmdir(box.base);
});
});
it('process() - do nothing if target does not exist', function(){
var box = newBox('test');
return box.process();
});
it('watch() - create', function(callback){
var box = newBox('test');
var path = 'a.txt';
var src = pathFn.join(box.base, path);
box.watch().then(function(){
box.isWatching().should.be.true;
box.addProcessor(function(file){
file.source.should.eql(src);
file.path.should.eql(path);
file.type.should.eql('create');
file.params.should.eql({});
file.content.toString().should.eql('a');
box.unwatch();
fs.rmdir(box.base, callback);
});
fs.writeFile(src, 'a');
});
});
it('watch() - update', function(callback){
var box = newBox('test');
var path = 'a.txt';
var src = pathFn.join(box.base, path);
var cacheId = 'test/' + path;
var Cache = box.Cache;
Promise.all([
fs.writeFile(src, 'a'),
Cache.insert({_id: cacheId, shasum: 'a'})
]).then(function(){
return box.watch();
}).then(function(){
return testUtil.wait(300);
}).then(function(){
box.addProcessor(function(file){
file.source.should.eql(src);
file.path.should.eql(path);
file.type.should.eql('update');
file.params.should.eql({});
file.content.should.eql(new Buffer('ab'));
box.unwatch();
fs.rmdir(box.base, callback);
});
fs.appendFile(src, 'b');
});
});
it('watch() - delete', function(callback){
var box = newBox('test');
var path = 'a.txt';
var src = pathFn.join(box.base, path);
var cacheId = 'test/' + path;
var Cache = box.Cache;
Promise.all([
fs.writeFile(src, 'a'),
Cache.insert({_id: cacheId, shasum: 'a'})
]).then(function(){
return box.watch();
}).then(function(){
return testUtil.wait(300);
}).then(function(){
box.addProcessor(function(file){
file.source.should.eql(src);
file.path.should.eql(path);
file.type.should.eql('delete');
file.params.should.eql({});
should.not.exist(file.content);
box.unwatch();
fs.rmdir(box.base, callback);
});
fs.unlink(src);
});
});
it.skip('watch() - watcher has started', function(callback){
var box = newBox();
box.watch().then(function(){
box.watch().catch(function(err){
err.should.have.property('message', 'Watcher has already started.');
box.unwatch();
callback();
});
});
});
it('watch() - run process() before start watching', function(){
var box = newBox('test');
var data = [];
box.addProcessor(function(file){
data.push(file.path);
});
return Promise.all([
fs.writeFile(pathFn.join(box.base, 'a.txt'), 'a'),
fs.writeFile(pathFn.join(box.base, 'b', 'c.js'), 'c')
]).then(function(){
return box.watch();
}).then(function(){
data.should.have.members(['a.txt', 'b/c.js']);
box.unwatch();
return fs.rmdir(box.base);
});
});
it.skip('unwatch()', function(callback){
var box = newBox('test');
box.watch().then(function(){
var emitted = false;
box.addProcessor(function(file){
emitted = true;
});
box.unwatch();
fs.writeFile(pathFn.join(box.base, 'a.txt'), 'a').then(function(){
emitted.should.be.false;
fs.rmdir(box.base, callback);
});
});
});
it('unwatch() - watcher not started', function(){
var box = newBox();
try {
box.unwatch();
} catch (err){
err.should.have.property('message', 'Watcher hasn\'t started yet.');
}
});
it.skip('isWatching()', function(){
var box = newBox();
return box.watch().then(function(){
box.isWatching().should.be.true;
return box.unwatch();
}).then(function(){
box.isWatching().should.be.false;
});
});
it('processBefore & processAfter events');
});<|fim▁end|> | var should = require('chai').should();
var pathFn = require('path');
var fs = require('hexo-fs');
var Promise = require('bluebird'); |
<|file_name|>parse.py<|end_file_name|><|fim▁begin|>import sys, math
from test import goertzel
import wave
import pyaudio
import Queue
import numpy as np
if len(sys.argv) < 2:
print "Usage: %s <filename> " % sys.argv[0]
sys.exit(1)
filename = sys.argv[1]
w = wave.open(filename)
fs = w.getframerate()
width = w.getsampwidth()
chunkDuration = .2 #.2 second chunks
chunk = int(chunkDuration*fs)
window = np.blackman(chunk)
p = pyaudio.PyAudio()
stream = p.open(format = p.get_format_from_width(w.getsampwidth()), channels = w.getnchannels(),rate = fs, output=True)
#read .2 second chunk
data = w.readframes(chunk)
chunk_data = []
#find the frequencies of each chunk
print "Running calculations on wav file"
num = 0
while data != '':
print "Calculating Chunk " + str(num)
stream.write(data)
indata = np.array(wave.struct.unpack("%dh"%(len(data)/width),\
data))
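# The three Goertzel bands below (an assumption inferred from the
# thresholds used later) bracket the carrier tones: ~1.0 kHz ("lo"),
# ~1.57 kHz ("mid") and ~2.1 kHz ("hi").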
freqs, results = goertzel(indata, fs, (1036, 1058), (1567, 1569), (2082, 2104))
chunk_data.append((freqs,results))
data = w.readframes(chunk)
num+=.2
stream.close()
p.terminate()
#finished getting data from chunks, now to parse the data
hi = []
lo = []
mid = []
#average first second of audio to get frequency baselines
for i in range (5):
a = chunk_data[i][0]
b = chunk_data[i][1]<|fim▁hole|> hi.append(b[j])
elif a[j] < 1300:
lo.append(b[j])
else:
mid.append(b[j])
hi_average = sum(hi)/float(len(hi))
lo_average = sum(lo)/float(len(lo))
mid_average = sum(mid)/float(len(mid))
"""
For each .2 second chunk, pick the frequency band whose amplitude rose the
most above its baseline average; then classify each second of data by the
majority frequency among its 5 chunks
"""
#looks for start signal in last 3 seconds of audio
def signal_found(arr):
lst = arr[-15:]
first = 0
second = 0
third = 0
for i in range(0,5):
if lst[i]=="mid":
first += 1
for i in range(5,10):
if lst[i]=="mid":
second += 1
for i in range(10,15):
if lst[i]=="mid":
third += 1
if first >= 5 and second >= 5 and third >= 5:
return True
else:
return False
#gets freq of 1 second of audio
def get_freq(arr):
lo_count = 0
hi_count = 0
mid_count = 0
for i in arr:
if i=="lo":
lo_count+=1
if i=="hi":
hi_count+=1
if i=="mid":
mid_count+=1
if mid_count > hi_count and mid_count > lo_count:
return 2
if lo_count>hi_count:
return 0
else:
return 1
start = False
freq_list = []
offset = 0
bits = []
for i in range(5,len(chunk_data)):
a = chunk_data[i][0]
b = chunk_data[i][1]
hi_amp = []
lo_amp = []
mid_amp = []
#get averages for each freq
for j in range(len(a)):
if a[j] > 1700:
hi_amp.append(b[j])
elif a[j] < 1300:
lo_amp.append(b[j])
else:
mid_amp.append(b[j])
hi_av = sum(hi_amp)/float(len(hi_amp))
lo_av = sum(lo_amp)/float(len(lo_amp))
mid_av = sum(mid_amp)/float(len(mid_amp))
#get freq of this chunk
diff = [lo_av-lo_average,mid_av-mid_average,hi_av-hi_average]
index = diff.index(max(diff))
if(index==0):
freq_list.append("lo")
if(index==1):
freq_list.append("mid")
if(index==2):
freq_list.append("hi")
print(freq_list[len(freq_list)-1])
if len(freq_list) > 5:
if start:
if len(freq_list)%5 == offset:
bit = get_freq(freq_list[-5:])
if bit != 2:
bits.append(bit)
else:
print "Stop Signal Detected"
break
elif len(freq_list) >= 15:
if signal_found(freq_list):
print "signal found"
start = True
offset = len(freq_list)%5
print bits<|fim▁end|> | for j in range(len(a)):
if a[j] > 1700: |
<|file_name|>matrix.py<|end_file_name|><|fim▁begin|>"""
Created by Emille Ishida in May, 2015.
Class to implement calculations on data matrix.
"""
import os
import sys
import matplotlib.pylab as plt
import numpy as np
from multiprocessing import Pool
from snclass.treat_lc import LC
from snclass.util import read_user_input, read_snana_lc, translate_snid
from snclass.functions import core_cross_val, screen
##############################################
class DataMatrix(object):
"""
Data matrix class.
Methods:
- build: Build data matrix according to user input file specifications.
- reduce_dimension: Perform dimensionality reduction.
- cross_val: Perform cross-validation.
Attributes:
- user_choices: dict, user input choices
- snid: vector, list of object identifiers
- datam: array, data matrix for training
- redshift: vector, redshift for training data
- sntype: vector, classification of training data
- low_dim_matrix: array, data matrix in KernelPC space
- transf_test: function, project argument into KernelPC space
- final: dict, optimized parameter values
"""
def __init__(self, input_file=None):
"""
Read user input file.
input: input_file -> str
name of user input file
"""
self.datam = None
self.snid = []
self.redshift = None
self.sntype = None
self.low_dim_matrix = None
self.transf_test = None
self.final = None
self.test_projection = []
if input_file is not None:
self.user_choices = read_user_input(input_file)
def check_file(self, filename, epoch=True, ref_filter=None):<|fim▁hole|> file of raw data for 1 supernova
epoch, bool - optional
If true, check if SN satisfies epoch cuts
Default is True
ref_filter, str - optional
Reference filter for peak MJD calculation
Default is None
"""
screen('Fitting ' + filename, self.user_choices)
# translate identifier
self.user_choices['path_to_lc'] = [translate_snid(filename, self.user_choices['photon_flag'][0])[0]]
# read light curve raw data
raw = read_snana_lc(self.user_choices)
# initiate light curve object
lc_obj = LC(raw, self.user_choices)
# load GP fit
lc_obj.load_fit_GP(self.user_choices['samples_dir'][0] + filename)
# normalize
lc_obj.normalize(ref_filter=ref_filter)
# shift to peak mjd
lc_obj.mjd_shift()
if epoch:
# check epoch requirements
lc_obj.check_epoch()
else:
lc_obj.epoch_cuts = True
if lc_obj.epoch_cuts:
# build data matrix lines
lc_obj.build_steps()
# store
obj_line = []
for fil in self.user_choices['filters']:
for item in lc_obj.flux_for_matrix[fil]:
obj_line.append(item)
rflag = self.user_choices['redshift_flag'][0]
redshift = raw[rflag][0]
obj_class = raw[self.user_choices['type_flag'][0]][0]
self.snid.append(raw['SNID:'][0])
return obj_line, redshift, obj_class
else:
screen('... Failed to pass epoch cuts!', self.user_choices)
screen('\n', self.user_choices)
return None
def store_training(self, file_out):
"""
Store complete training matrix.
input: file_out, str
output file name
"""
# write to file
if file_out is not None:
op1 = open(file_out, 'w')
op1.write('SNID type z LC...\n')
for i in xrange(len(self.datam)):
op1.write(str(self.snid[i]) + ' ' + str(self.sntype[i]) +
' ' + str(self.redshift[i]) + ' ')
for j in xrange(len(self.datam[i])):
op1.write(str(self.datam[i][j]) + ' ')
op1.write('\n')
op1.close()
def build(self, file_out=None, check_epoch=True, ref_filter=None):
"""
Build data matrix according to user input file specifications.
input: file_out -> str, optional
file to store data matrix (str). Default is None
check_epoch -> bool, optional
If True check if SN satisfies epoch cuts
Default is True
ref_filter -> str, optional
Reference filter for MJD calculation
Default is None
"""
# list all files in sample directory
file_list = os.listdir(self.user_choices['samples_dir'][0])
datam = []
redshift = []
sntype = []
for obj in file_list:
if 'mean' in obj:
sn_char = self.check_file(obj, epoch=check_epoch,
ref_filter=ref_filter)
if sn_char is not None:
datam.append(sn_char[0])
redshift.append(sn_char[1])
sntype.append(sn_char[2])
self.datam = np.array(datam)
self.redshift = np.array(redshift)
self.sntype = np.array(sntype)
# store results
self.store_training(file_out)
def reduce_dimension(self):
"""Perform dimensionality reduction with user defined function."""
# define dimensionality reduction function
func = self.user_choices['dim_reduction_func']
# reduce dimensionality
self.low_dim_matrix = func(self.datam, self.user_choices)
# define transformation function
self.transf_test = func(self.datam, self.user_choices, transform=True)
def cross_val(self):
"""Optimize the hyperparameters for RBF kernel and ncomp."""
# correct type parameters if necessary
types_func = self.user_choices['transform_types_func']
if types_func is not None:
self.sntype = types_func(self.sntype, self.user_choices['Ia_flag'][0])
# initialize parameters
data = self.datam
types = self.sntype
choices = self.user_choices
nparticles = self.user_choices['n_cross_val_particles']
parameters = []
for i in xrange(nparticles):
pars = {}
pars['data'] = data
pars['types'] = types
pars['user_choices'] = choices
parameters.append(pars)
if int(self.user_choices['n_proc'][0]) > 0:
cv_func = self.user_choices['cross_validation_func']
pool = Pool(processes=int(self.user_choices['n_proc'][0]))
my_pool = pool.map_async(cv_func, parameters)
try:
results = my_pool.get(0xFFFF)
except KeyboardInterrupt:
print 'Interrupted by the user!'
sys.exit()
pool.close()
pool.join()
results = np.array(results)
else:
number = self.user_choices['n_cross_val_particles']
results = np.array([core_cross_val(pars)
for pars in parameters])
flist = list(results[:,len(results[0])-1])
max_success = max(flist)
indx_max = flist.index(max_success)
self.final = {}
for i in xrange(len(self.user_choices['cross_val_par'])):
par_list = self.user_choices['cross_val_par']
self.final[par_list[i]] = results[indx_max][i]
def final_configuration(self):
"""Determine final configuraton based on cross-validation results."""
#update optimized hyper-parameters
for par in self.user_choices['cross_val_par']:
indx = self.user_choices['cross_val_par'].index(par)
self.user_choices[par] = self.final[par]
#update low dimensional matrix
self.reduce_dimension()
def plot(self, pcs, file_out, show=False, test=None):
"""
Plot 2-dimensional scatter of data matrix in kPCA space.
input: pcs, vector of int
kernel PCs to be used as horizontal and vertical axis
file_out, str
file name to store final plot
show, bool, optional
if True show plot in screen
Default is False
test, dict, optional
keywords: data, type
if not None plot the projection of 1 photometric object
Default is None
"""
#define vectors to plot
xdata = self.low_dim_matrix[:,pcs[0]]
ydata = self.low_dim_matrix[:,pcs[1]]
if '0' in self.sntype:
snIa = self.sntype == '0'
nonIa = self.sntype != '0'
else:
snIa = self.sntype == 'Ia'
snIbc = self.sntype == 'Ibc'
snII = self.sntype == 'II'
plt.figure(figsize=(10,10))
if '0' in self.sntype:
plt.scatter(xdata[nonIa], ydata[nonIa], color='purple', marker='s',
label='spec non-Ia')
plt.scatter(xdata[snIa], ydata[snIa], color='blue', marker='o',
label='spec Ia')
else:
plt.scatter(xdata[snII], ydata[snII], color='purple', marker='s',
label='spec II')
plt.scatter(xdata[snIbc], ydata[snIbc], color='green', marker='^',
s=30, label='spec Ibc')
plt.scatter(xdata[snIa], ydata[snIa], color='blue', marker='o',
label='spec Ia')
if test is not None:
if len(test.samples_for_matrix) > 0:
plt.title('prob_Ia = ' + str(round(test['prob_Ia'], 2)))
if test.raw['SIM_NON1a:'][0] == '0':
sntype = 'Ia'
else:
sntype = 'nonIa'
plt.scatter([test.test_proj[0][pcs[0]]], [test.test_proj[0][pcs[1]]],
marker='*', color='red', s=75,
label='photo ' + sntype)
plt.xlabel('kPC' + str(pcs[0] + 1), fontsize=14)
plt.ylabel('kPC' + str(pcs[1] + 1), fontsize=14)
plt.legend(fontsize=12)
if show:
plt.show()
if file_out is not None:
plt.savefig(file_out)
plt.close()
def main():
"""Print documentation."""
print __doc__
if __name__ == '__main__':
main()<|fim▁end|> | """
Construct one line of the data matrix.
input: filename, str |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Resource data implementations.
//!
//! This module will eventually contain implementations for the record data
//! for all defined resource record types.
//!
//! The types are named identically to the [`Rtype`] variant they implement.
//! They are grouped into submodules for the RFCs they are defined in. All
//! types are also re-exported at the top level here. I.e., for the AAAA
//! record type, you can simply `use domain::rdata::Aaaa` instead of
//! `use domain::rdata::rfc3596::Aaaa` which nobody could possibly remember.
//! There are, however, some helper data types defined here and there which
//! are not re-exported to keep things somewhat tidy.
//!
//! See the [`Rtype`] enum for the complete set of record types and,
//! consequently, those types that are still missing.
//!
//! [`Rtype`]: ../iana/enum.Rtype.html
pub mod rfc1035;
pub mod rfc2782;
pub mod rfc3596;
#[macro_use] mod macros;
mod generic;
use ::bits::{CharStrBuf, DNameBuf};
// The master_types! macro (defined in self::macros) creates the
// MasterRecordData enum produced when parsing master files (aka zone files).
//
// Include all record types that can occur in master files. Place the name of
// the variant (identical to the type name) on the left side of the double
// arrow and the name of the type on the right. If the type is generic, use
// the owned version.
//
// The macro creates the re-export of the record data type.
master_types!{
rfc1035::{
A => A,
Cname => Cname<DNameBuf>,
Hinfo => Hinfo<CharStrBuf>,
Mb => Mb<DNameBuf>,
Md => Md<DNameBuf>,
Mf => Mf<DNameBuf>,
Mg => Mg<DNameBuf>,
Minfo => Minfo<DNameBuf>,
Mr => Mr<DNameBuf>,
Mx => Mx<DNameBuf>,
Ns => Ns<DNameBuf>,
Ptr => Ptr<DNameBuf>,
Soa => Soa<DNameBuf>,
Txt => Txt<Vec<u8>>,
Wks => Wks<rfc1035::WksBitmapBuf>,
}
rfc2782::{
Srv => Srv<DNameBuf>,
}
rfc3596::{
Aaaa => Aaaa,
}
}
// The pseudo_types! macro (defined in self::macros) creates the re-exports
// for all the types not part of master_types! above.
pseudo_types!{
rfc1035::{Null};
//rfc6891::{Opt};<|fim▁hole|>///
/// This helper function formats the record data at the start of `parser`
/// using the formatter `f`. It assumes that the record data is for a
/// record of record type `rtype`.
///
/// If the record type is known, the function tries to use the type’s
/// proper master data format. Otherwise the generic format is used.
pub fn fmt_rdata(rtype: ::iana::Rtype, parser: &mut ::bits::Parser,
f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
match try!(fmt_master_data(rtype, parser, f)) {
Some(res) => Ok(res),
None => {
let mut parser = parser.clone();
let len = parser.remaining();
let data = parser.parse_bytes(len).unwrap();
generic::fmt(data, f)
}
}
}
/// Parsed versions of all record data types.
///
/// This module defines or re-exports type aliases for all record data
/// types that use parsed domain names and references to bytes slices where
/// applicable. For convenience, it also includes re-exports for those types
/// that are not in fact generic.
///
/// Use the types from this module when working with wire format DNS messages.
pub mod parsed {
pub use super::rfc1035::parsed::*;
pub use super::rfc3596::Aaaa;
pub type Srv<'a> = super::rfc2782::Srv<::bits::ParsedDName<'a>>;
}
/// Owned versions of all record data types.
///
/// This module defines or re-exports type aliases for all record data
/// types using owned data only. For convenience, it also includes re-exports
/// for those types that are not generic.
///
/// Use the types from this module if you are working with master file data
/// or if you are constructing your own values.
pub mod owned {
pub use super::rfc1035::owned::*;
pub use super::rfc3596::Aaaa;
pub type Srv = super::rfc2782::Srv<::bits::DNameBuf>;
}<|fim▁end|> | }
/// Formats record data from a message parser in master file format. |
<|file_name|>test_stringify.py<|end_file_name|><|fim▁begin|>import unittest
from .context import json_stable_stringify_python as stringify
class TestStringify(unittest.TestCase):
def test_simple_object(self):
node = {'c':6, 'b': [4,5], 'a': 3, 'z': None}
actual = stringify.stringify(node)
expected = '{"a":3,"b":[4,5],"c":6,"z":null}'
self.assertEqual(actual, expected)
def test_object_with_empty_string(self):
node = {'a': 3, 'z': ''}
actual = stringify.stringify(node)
expected = '{"a":3,"z":""}'
self.assertEqual(actual, expected)
def test_nested_object(self):
node = {
'a': {
'b': {
'c': [1,2,3,None]
}
}
}
actual = stringify.stringify(node)
expected = '{"a":{"b":{"c":[1,2,3,null]}}}'
self.assertEqual(actual, expected)
def test_array_with_objects(self):
node = [{'z': 1, 'a': 2}]
actual = stringify.stringify(node)<|fim▁hole|> expected = '[{"a":2,"z":1}]'
self.assertEqual(actual, expected)
def test_nested_array_objects(self):
node = [{'z': [[{'y': 1, 'b': 2}]], 'a': 2}]
actual = stringify.stringify(node)
expected = '[{"a":2,"z":[[{"b":2,"y":1}]]}]'
self.assertEqual(actual, expected)
def test_array_with_none(self):
node = [1, None]
actual = stringify.stringify(node)
expected = '[1,null]'
self.assertEqual(actual, expected)
def test_array_with_empty_string(self):
node = [1, '']
actual = stringify.stringify(node)
expected = '[1,""]'
self.assertEqual(actual, expected)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>RxBaseClient.s-spec.ts<|end_file_name|><|fim▁begin|>import "rxjs/add/operator/finally";
import { RxBaseClient } from "./RxBaseClient";
import proxyquire = require("proxyquire");
import { Subject } from "rxjs/Subject";
describe("RxBaseClient", () => {
let RxSocket;
let rxSocket;
let ClientConnector;
let clientConnector;
let client: RxBaseClient;
let socket;
let connectTcp;
let connectTls;
let RxBaseClientConstructor: typeof RxBaseClient;
beforeEach(() => {
rxSocket = jasmine.createSpyObj("rxSocket", ["filter", "sendMessage"]);
RxSocket = jasmine.createSpy("RxSocket").and.returnValue(rxSocket);
clientConnector = jasmine.createSpyObj("clientConnector", ["connect", "disconnect"]);
clientConnector.events$ = new Subject();
ClientConnector = jasmine.createSpy("ClientConnector").and.returnValue(clientConnector);
socket = jasmine.createSpyObj("socket", ["on"]);
connectTcp = jasmine.createSpy("connect").and.returnValue(socket);
connectTls = jasmine.createSpy("connect").and.returnValue(socket);
RxBaseClientConstructor = (proxyquire("./RxBaseClient", {
"./ClientConnector": { ClientConnector },
"./RxSocket": RxSocket,
"net": { connect: connectTcp },
"tls": { connect: connectTls },
}) as { RxBaseClient: typeof RxBaseClient }).RxBaseClient;
});
describe("new()", () => {
it("should not immediately start connecting", () => {
client = new RxBaseClientConstructor({ port: 1234 });
expect(connectTcp).not.toHaveBeenCalled();
});
});
describe("connect()", () => {<|fim▁hole|>
it("should connect", () => {
client = new RxBaseClientConstructor({ port: 1234 });
client.connect();
expect(clientConnector.connect).toHaveBeenCalled();
});
});
});<|fim▁end|> | |
<|file_name|>opp.py<|end_file_name|><|fim▁begin|># pylint: disable-msg=too-many-lines
"""OPP Hardware interface.
Contains the hardware interface and drivers for the Open Pinball Project
platform hardware, including the solenoid, input, incandescent, and neopixel
boards.
"""
import asyncio
from collections import defaultdict
from typing import Dict, List, Set, Union, Tuple, Optional # pylint: disable-msg=cyclic-import,unused-import
from mpf.core.platform_batch_light_system import PlatformBatchLightSystem
from mpf.core.utility_functions import Util
from mpf.platforms.base_serial_communicator import HEX_FORMAT
from mpf.platforms.interfaces.driver_platform_interface import PulseSettings, HoldSettings
from mpf.platforms.opp.opp_coil import OPPSolenoidCard
from mpf.platforms.opp.opp_incand import OPPIncandCard
from mpf.platforms.opp.opp_modern_lights import OPPModernLightChannel, OPPNeopixelCard, OPPModernMatrixLightsCard
from mpf.platforms.opp.opp_serial_communicator import OPPSerialCommunicator, BAD_FW_VERSION
from mpf.platforms.opp.opp_switch import OPPInputCard
from mpf.platforms.opp.opp_switch import OPPMatrixCard
from mpf.platforms.opp.opp_rs232_intf import OppRs232Intf
from mpf.core.platform import SwitchPlatform, DriverPlatform, LightsPlatform, SwitchSettings, DriverSettings, \
DriverConfig, SwitchConfig, RepulseSettings
MYPY = False
if MYPY: # pragma: no cover
from mpf.platforms.opp.opp_coil import OPPSolenoid # pylint: disable-msg=cyclic-import,unused-import
from mpf.platforms.opp.opp_incand import OPPIncand # pylint: disable-msg=cyclic-import,unused-import
from mpf.platforms.opp.opp_switch import OPPSwitch # pylint: disable-msg=cyclic-import,unused-import
# pylint: disable-msg=too-many-instance-attributes
class OppHardwarePlatform(LightsPlatform, SwitchPlatform, DriverPlatform):
"""Platform class for the OPP hardware.
Args:
----
machine: The main ``MachineController`` instance.
"""
__slots__ = ["opp_connection", "serial_connections", "opp_incands", "opp_solenoid", "sol_dict",
"opp_inputs", "inp_dict", "inp_addr_dict", "matrix_inp_addr_dict", "read_input_msg",
"neo_card_dict", "num_gen2_brd", "gen2_addr_arr", "bad_crc", "min_version", "_poll_task",
"config", "_poll_response_received", "machine_type", "opp_commands", "_incand_task", "_light_system",
"matrix_light_cards"]
def __init__(self, machine) -> None:
"""Initialise OPP platform."""
super().__init__(machine)
self.opp_connection = {} # type: Dict[str, OPPSerialCommunicator]
self.serial_connections = set() # type: Set[OPPSerialCommunicator]
self.opp_incands = dict() # type: Dict[str, OPPIncandCard]
self.opp_solenoid = [] # type: List[OPPSolenoidCard]
self.sol_dict = dict() # type: Dict[str, OPPSolenoid]
self.opp_inputs = [] # type: List[Union[OPPInputCard, OPPMatrixCard]]
self.inp_dict = dict() # type: Dict[str, OPPSwitch]
self.inp_addr_dict = dict() # type: Dict[str, OPPInputCard]
self.matrix_inp_addr_dict = dict() # type: Dict[str, OPPMatrixCard]
self.read_input_msg = {} # type: Dict[str, bytes]
self.neo_card_dict = dict() # type: Dict[str, OPPNeopixelCard]
self.matrix_light_cards = dict() # type: Dict[str, OPPModernMatrixLightsCard]
self.num_gen2_brd = 0
self.gen2_addr_arr = {} # type: Dict[str, Dict[int, Optional[int]]]
self.bad_crc = defaultdict(lambda: 0)
self.min_version = defaultdict(lambda: 0xffffffff) # type: Dict[str, int]
self._poll_task = {} # type: Dict[str, asyncio.Task]
self._incand_task = None # type: Optional[asyncio.Task]
self._light_system = None # type: Optional[PlatformBatchLightSystem]
self.features['tickless'] = True
self.config = self.machine.config_validator.validate_config("opp", self.machine.config.get('opp', {}))
self._configure_device_logging_and_debug("OPP", self.config)
self._poll_response_received = {} # type: Dict[str, asyncio.Event]
assert self.log is not None
if self.config['driverboards']:
self.machine_type = self.config['driverboards']
else:
self.machine_type = self.machine.config['hardware']['driverboards'].lower()
if self.machine_type == 'gen1':
raise AssertionError("Original OPP boards not currently supported.")
if self.machine_type == 'gen2':
self.debug_log("Configuring the OPP Gen2 boards")
else:
self.raise_config_error('Invalid driverboards type: {}'.format(self.machine_type), 15)
# Only including responses that should be received
self.opp_commands = {
ord(OppRs232Intf.INV_CMD): self.inv_resp,
ord(OppRs232Intf.EOM_CMD): self.eom_resp,
ord(OppRs232Intf.GET_GEN2_CFG): self.get_gen2_cfg_resp,
ord(OppRs232Intf.READ_GEN2_INP_CMD): self.read_gen2_inp_resp_initial,
ord(OppRs232Intf.GET_VERS_CMD): self.vers_resp,
ord(OppRs232Intf.READ_MATRIX_INP): self.read_matrix_inp_resp_initial,
}
async def initialize(self):
"""Initialise connections to OPP hardware."""
await self._connect_to_hardware()
self.opp_commands[ord(OppRs232Intf.READ_GEN2_INP_CMD)] = self.read_gen2_inp_resp
self.opp_commands[ord(OppRs232Intf.READ_MATRIX_INP)] = self.read_matrix_inp_resp
self._light_system = PlatformBatchLightSystem(self.machine.clock, self._send_multiple_light_update,
self.machine.config['mpf']['default_light_hw_update_hz'],
128)
async def _send_multiple_light_update(self, sequential_brightness_list: List[Tuple[OPPModernLightChannel,
float, int]]):
first_light, _, common_fade_ms = sequential_brightness_list[0]
number_leds = len(sequential_brightness_list)
msg = bytearray()
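# Fade command layout (as assembled below): card address, command byte,
# 16-bit start pixel, 16-bit LED count, 16-bit common fade time in ms,
# one brightness byte per LED, then a trailing CRC8.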
msg.append(int(ord(OppRs232Intf.CARD_ID_GEN2_CARD) + first_light.addr))
msg.append(OppRs232Intf.SERIAL_LED_CMD_FADE)
msg.append(int(first_light.pixel_num / 256))
msg.append(int(first_light.pixel_num % 256))
msg.append(int(number_leds / 256))
msg.append(int(number_leds % 256))
msg.append(int(common_fade_ms / 256))
msg.append(int(common_fade_ms % 256))
for _, brightness, _ in sequential_brightness_list:
msg.append(int(brightness * 255))
msg.extend(OppRs232Intf.calc_crc8_whole_msg(msg))
cmd = bytes(msg)
if self.debug:
self.debug_log("Set color on %s: %s", first_light.chain_serial, "".join(HEX_FORMAT % b for b in cmd))
self.send_to_processor(first_light.chain_serial, cmd)
async def start(self):
"""Start polling and listening for commands."""
# start polling
for chain_serial in self.read_input_msg:
self._poll_task[chain_serial] = self.machine.clock.loop.create_task(self._poll_sender(chain_serial))
self._poll_task[chain_serial].add_done_callback(Util.raise_exceptions)
# start listening for commands
for connection in self.serial_connections:
await connection.start_read_loop()
if [version for version in self.min_version.values() if version < 0x02010000]:
# if we run any CPUs with firmware prior to 2.1.0 start incands updater
self._incand_task = self.machine.clock.schedule_interval(self.update_incand,
1 / self.config['incand_update_hz'])
self._light_system.start()
def stop(self):
"""Stop hardware and close connections."""
if self._light_system:
self._light_system.stop()
for task in self._poll_task.values():
task.cancel()
self._poll_task = {}
if self._incand_task:
self._incand_task.cancel()
self._incand_task = None
for connections in self.serial_connections:
connections.stop()
self.serial_connections = []
def __repr__(self):
"""Return string representation."""
return '<Platform.OPP>'
def process_received_message(self, chain_serial, msg):
"""Send an incoming message from the OPP hardware to the proper method for servicing.
Args:
----
chain_serial: Serial of the chain which received the message.
msg: Message to parse.
"""
if len(msg) >= 1:
# Verify valid Gen2 address
if (msg[0] & 0xe0) == 0x20:
if len(msg) >= 2:
cmd = msg[1]
else:
cmd = OppRs232Intf.ILLEGAL_CMD
# Look for EOM or INV commands
elif msg[0] == ord(OppRs232Intf.INV_CMD) or msg[0] == ord(OppRs232Intf.EOM_CMD):
cmd = msg[0]
else:
cmd = OppRs232Intf.ILLEGAL_CMD
else:
# No messages received, fake an EOM
cmd = OppRs232Intf.EOM_CMD
# Can't use try since it swallows too many errors for now
if cmd in self.opp_commands:
self.opp_commands[cmd](chain_serial, msg)
else:
self.log.warning("Received unknown serial command?%s. (This is "
"very worrisome.)", "".join(HEX_FORMAT % b for b in msg))
# TODO: This means synchronization is lost. Send EOM characters
# until they come back
self.opp_connection[chain_serial].lost_synch()
@staticmethod
def _get_numbers(mask):
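"""Return the list of bit positions that are set in mask.
Illustrative example: a mask of 0b1010 yields [1, 3].
"""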
number = 0
ref = 1
result = []
while mask >= ref:  # >= so a mask equal to its highest bit keeps that bit
if mask & ref:
result.append(number)
number += 1
ref = ref << 1
return result
def get_info_string(self):
"""Dump infos about boards."""
if not self.serial_connections:
return "No connection to any CPU board."
infos = "Connected CPUs:\n"
for connection in sorted(self.serial_connections, key=lambda x: x.chain_serial):
infos += " - Port: {} at {} baud. Chain Serial: {}\n".format(connection.port, connection.baud,
connection.chain_serial)
for board_id, board_firmware in self.gen2_addr_arr[connection.chain_serial].items():
if board_firmware is None:
infos += " -> Board: 0x{:02x} Firmware: broken\n".format(board_id)
else:
infos += " -> Board: 0x{:02x} Firmware: 0x{:02x}\n".format(board_id, board_firmware)
infos += "\nIncand cards:\n" if self.opp_incands else ""
card_format_string = " - Chain: {} Board: 0x{:02x} Card: {} Numbers: {}\n"
for incand in self.opp_incands.values():
infos += card_format_string.format(incand.chain_serial, incand.addr,
incand.card_num,
self._get_numbers(incand.mask))
infos += "\nInput cards:\n"
for inputs in self.opp_inputs:
infos += card_format_string.format(inputs.chain_serial, inputs.addr,
inputs.card_num,
self._get_numbers(inputs.mask))
infos += "\nSolenoid cards:\n"
for outputs in self.opp_solenoid:
infos += card_format_string.format(outputs.chain_serial, outputs.addr,
outputs.card_num,
self._get_numbers(outputs.mask))
infos += "\nLEDs:\n" if self.neo_card_dict else ""
for leds in self.neo_card_dict.values():
infos += " - Chain: {} Board: 0x{:02x} Card: {}\n".format(leds.chain_serial, leds.addr, leds.card_num)
infos += "\nMatrix lights:\n" if self.matrix_light_cards else ''
for matrix_light in self.matrix_light_cards.values():
infos += " - Chain: {} Board: 0x{:02x} Card: {} Numbers: 0 - 63\n".format(
matrix_light.chain_serial, matrix_light.addr, matrix_light.card_num)
return infos
async def _connect_to_hardware(self):
"""Connect to each port from the config.
This process will cause the OPPSerialCommunicator to figure out which chains they've connected to
and to register themselves.
"""
port_chain_serial_map = {v: k for k, v in self.config['chains'].items()}
for port in self.config['ports']:
# overwrite serial if defined for port
overwrite_chain_serial = port_chain_serial_map.get(port, None)
if overwrite_chain_serial is None and len(self.config['ports']) == 1:
overwrite_chain_serial = port
comm = OPPSerialCommunicator(platform=self, port=port, baud=self.config['baud'],
overwrite_serial=overwrite_chain_serial)
await comm.connect()
self.serial_connections.add(comm)
for chain_serial, versions in self.gen2_addr_arr.items():
for chain_id, version in versions.items():
if not version:
self.raise_config_error("Could not read version for board {}-{}.".format(chain_serial, chain_id),
16)
if self.min_version[chain_serial] != version:
self.raise_config_error("Version mismatch. Board {}-{} has version {:d}.{:d}.{:d}.{:d} which is not"
" the minimal version "
"{:d}.{:d}.{:d}.{:d}".format(chain_serial, chain_id, (version >> 24) & 0xFF,
(version >> 16) & 0xFF, (version >> 8) & 0xFF,
version & 0xFF,
(self.min_version[chain_serial] >> 24) & 0xFF,
(self.min_version[chain_serial] >> 16) & 0xFF,
(self.min_version[chain_serial] >> 8) & 0xFF,
self.min_version[chain_serial] & 0xFF), 1)
def register_processor_connection(self, serial_number, communicator):
"""Register the processors to the platform.
Args:
----
serial_number: Serial number of chain.
communicator: Instance of OPPSerialCommunicator
"""
self.opp_connection[serial_number] = communicator
def send_to_processor(self, chain_serial, msg):
"""Send message to processor with specific serial number.
Args:
----
chain_serial: Serial of the processor.
msg: Message to send.
"""
self.opp_connection[chain_serial].send(msg)
def update_incand(self):
"""Update all the incandescents connected to OPP hardware.
This is done once per game loop if changes have been made.
It is currently assumed that the UART oversampling will guarantee proper
communication with the boards. If this does not end up being the case,
this will be changed to update all the incandescents each loop.
This is used for board with firmware < 2.1.0
"""
for incand in self.opp_incands.values():
if self.min_version[incand.chain_serial] >= 0x02010000:
continue
whole_msg = bytearray()
# Check if any changes have been made
if incand.old_state is None or (incand.old_state ^ incand.new_state) != 0:
# Update card
incand.old_state = incand.new_state
msg = bytearray()
msg.append(incand.addr)
msg.extend(OppRs232Intf.INCAND_CMD)
msg.extend(OppRs232Intf.INCAND_SET_ON_OFF)
msg.append((incand.new_state >> 24) & 0xff)
msg.append((incand.new_state >> 16) & 0xff)
msg.append((incand.new_state >> 8) & 0xff)
msg.append(incand.new_state & 0xff)
msg.extend(OppRs232Intf.calc_crc8_whole_msg(msg))
whole_msg.extend(msg)
if whole_msg:
# Note: No need to send EOM at end of cmds
send_cmd = bytes(whole_msg)
if self.debug:
self.debug_log("Update incand on %s cmd:%s", incand.chain_serial,
"".join(HEX_FORMAT % b for b in send_cmd))
self.send_to_processor(incand.chain_serial, send_cmd)
@classmethod
def get_coil_config_section(cls):
"""Return coil config section."""
return "opp_coils"
async def get_hw_switch_states(self):
"""Get initial hardware switch states.
This changes switches from active low to active high
"""
hw_states = dict()
for opp_inp in self.opp_inputs:
if not opp_inp.is_matrix:
curr_bit = 1
for index in range(0, 32):
if (curr_bit & opp_inp.mask) != 0:
if (curr_bit & opp_inp.old_state) == 0:
hw_states[opp_inp.chain_serial + '-' + opp_inp.card_num + '-' + str(index)] = 1
else:
hw_states[opp_inp.chain_serial + '-' + opp_inp.card_num + '-' + str(index)] = 0
curr_bit <<= 1
else:
for index in range(0, 64):
if ((1 << index) & opp_inp.old_state) == 0:
hw_states[opp_inp.chain_serial + '-' + opp_inp.card_num + '-' + str(index + 32)] = 1
else:
hw_states[opp_inp.chain_serial + '-' + opp_inp.card_num + '-' + str(index + 32)] = 0
return hw_states
def inv_resp(self, chain_serial, msg):
"""Parse inventory response.
Args:
----
chain_serial: Serial of the chain which received the message.
msg: Message to parse.
"""
self.debug_log("Received Inventory Response: %s for %s", "".join(HEX_FORMAT % b for b in msg), chain_serial)
index = 1
self.gen2_addr_arr[chain_serial] = {}
while msg[index] != ord(OppRs232Intf.EOM_CMD):
if (msg[index] & ord(OppRs232Intf.CARD_ID_TYPE_MASK)) == ord(OppRs232Intf.CARD_ID_GEN2_CARD):
self.num_gen2_brd += 1
self.gen2_addr_arr[chain_serial][msg[index]] = None
else:
self.log.warning("Invalid inventory response %s for %s.", msg[index], chain_serial)
index += 1
self.debug_log("Found %d Gen2 OPP boards on %s.", self.num_gen2_brd, chain_serial)
# pylint: disable-msg=too-many-statements
@staticmethod
def eom_resp(chain_serial, msg):
"""Process an EOM.
Args:
----
chain_serial: Serial of the chain which received the message.
msg: Message to parse.
"""
# An EOM command can be used to resynchronize communications if message synch is lost
def _parse_gen2_board(self, chain_serial, msg, read_input_msg):
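"""Decode one Gen2 board config frame and register its wings.
Frame layout (as consumed here): address byte, command byte, one
wing-type byte per wing, then a trailing CRC8.
"""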
has_neo = False
has_sw_matrix = False
has_lamp_matrix = False
wing_index = 0
sol_mask = 0
inp_mask = 0
incand_mask = 0
while wing_index < OppRs232Intf.NUM_G2_WING_PER_BRD:
if msg[2 + wing_index] == ord(OppRs232Intf.WING_SOL):
sol_mask |= (0x0f << (4 * wing_index))
inp_mask |= (0x0f << (8 * wing_index))
elif msg[2 + wing_index] == ord(OppRs232Intf.WING_INP):
inp_mask |= (0xff << (8 * wing_index))
elif msg[2 + wing_index] == ord(OppRs232Intf.WING_INCAND):
incand_mask |= (0xff << (8 * wing_index))
elif msg[2 + wing_index] in (ord(OppRs232Intf.WING_SW_MATRIX_OUT),
ord(OppRs232Intf.WING_SW_MATRIX_OUT_LOW_WING)):
has_sw_matrix = True
elif msg[2 + wing_index] == ord(OppRs232Intf.WING_NEO):
has_neo = True
inp_mask |= (0xef << (8 * wing_index))
elif msg[2 + wing_index] == ord(OppRs232Intf.WING_HI_SIDE_INCAND):
incand_mask |= (0xff << (8 * wing_index))
elif msg[2 + wing_index] == ord(OppRs232Intf.WING_NEO_SOL):
inp_mask |= (0x0e << (8 * wing_index))
sol_mask |= (0x0f << (4 * wing_index))
has_neo = True
elif msg[2 + wing_index] in (ord(OppRs232Intf.WING_LAMP_MATRIX_COL_WING),
ord(OppRs232Intf.WING_LAMP_MATRIX_ROW_WING)):
has_lamp_matrix = True
wing_index += 1
if incand_mask != 0:
card = OPPIncandCard(chain_serial, msg[0], incand_mask, self.machine)
self.opp_incands["{}-{}".format(chain_serial, card.card_num)] = card
if sol_mask != 0:
self.opp_solenoid.append(
OPPSolenoidCard(chain_serial, msg[0], sol_mask, self.sol_dict, self))
if inp_mask != 0:
# Create the input object, and add to the command to read all inputs
self.opp_inputs.append(OPPInputCard(chain_serial, msg[0], inp_mask, self.inp_dict,
self.inp_addr_dict, self))
# Add command to read all inputs to read input message
inp_msg = bytearray()
inp_msg.append(msg[0])
inp_msg.extend(OppRs232Intf.READ_GEN2_INP_CMD)
inp_msg.append(0)
inp_msg.append(0)
inp_msg.append(0)
inp_msg.append(0)
inp_msg.extend(OppRs232Intf.calc_crc8_whole_msg(inp_msg))
read_input_msg.extend(inp_msg)
if has_sw_matrix:
# Create the matrix object, and add to the command to read all matrix inputs
self.opp_inputs.append(OPPMatrixCard(chain_serial, msg[0], self.inp_dict,
self.matrix_inp_addr_dict, self))
# Add command to read all matrix inputs to read input message
inp_msg = bytearray()
inp_msg.append(msg[0])
inp_msg.extend(OppRs232Intf.READ_MATRIX_INP)
inp_msg.append(0)
inp_msg.append(0)
inp_msg.append(0)
inp_msg.append(0)
inp_msg.append(0)
inp_msg.append(0)
inp_msg.append(0)
inp_msg.append(0)
inp_msg.extend(OppRs232Intf.calc_crc8_whole_msg(inp_msg))
read_input_msg.extend(inp_msg)
if has_neo:
card = OPPNeopixelCard(chain_serial, msg[0], self)
self.neo_card_dict[chain_serial + '-' + card.card_num] = card
if has_lamp_matrix:
card = OPPModernMatrixLightsCard(chain_serial, msg[0], self)
self.matrix_light_cards[chain_serial + '-' + card.card_num] = card
def _bad_crc(self, chain_serial, msg):
"""Show warning and increase counter."""
self.bad_crc[chain_serial] += 1
self.log.warning("Chain: %sMsg contains bad CRC: %s.", chain_serial, "".join(HEX_FORMAT % b for b in msg))
def get_gen2_cfg_resp(self, chain_serial, msg):
"""Process cfg response.
Args:
----
chain_serial: Serial of the chain which received the message.
msg: Message to parse.
"""
# Multiple get gen2 cfg responses can be received at once
self.debug_log("Received Gen2 Cfg Response:%s", "".join(HEX_FORMAT % b for b in msg))
curr_index = 0
read_input_msg = bytearray()
while True:
# check that message is long enough, must include crc8
if len(msg) < curr_index + 7:
self.log.warning("Msg is too short: %s.", "".join(HEX_FORMAT % b for b in msg))
self.opp_connection[chain_serial].lost_synch()
break
# Verify the CRC8 is correct
crc8 = OppRs232Intf.calc_crc8_part_msg(msg, curr_index, 6)
if msg[curr_index + 6] != ord(crc8):
self._bad_crc(chain_serial, msg)
break
self._parse_gen2_board(chain_serial, msg[curr_index:curr_index + 6], read_input_msg)
if (len(msg) > curr_index + 7) and (msg[curr_index + 7] == ord(OppRs232Intf.EOM_CMD)):
break
if (len(msg) > curr_index + 8) and (msg[curr_index + 8] == ord(OppRs232Intf.GET_GEN2_CFG)):
curr_index += 7
else:
self.log.warning("Malformed GET_GEN2_CFG response:%s.",
"".join(HEX_FORMAT % b for b in msg))
self.opp_connection[chain_serial].lost_synch()
break
read_input_msg.extend(OppRs232Intf.EOM_CMD)
self.read_input_msg[chain_serial] = bytes(read_input_msg)
self._poll_response_received[chain_serial] = asyncio.Event()
self._poll_response_received[chain_serial].set()
def vers_resp(self, chain_serial, msg):
"""Process version response.
Args:
----
chain_serial: Serial of the chain which received the message.
msg: Message to parse.
"""
# Multiple get version responses can be received at once
self.debug_log("Received Version Response (Chain: %s): %s", chain_serial, "".join(HEX_FORMAT % b for b in msg))
curr_index = 0
while True:
# check that message is long enough, must include crc8
if len(msg) < curr_index + 7:
self.log.warning("Msg is too short (Chain: %s): %s.", chain_serial,
"".join(HEX_FORMAT % b for b in msg))
self.opp_connection[chain_serial].lost_synch()
break
# Verify the CRC8 is correct
crc8 = OppRs232Intf.calc_crc8_part_msg(msg, curr_index, 6)
if msg[curr_index + 6] != ord(crc8):
self._bad_crc(chain_serial, msg)
break
version = (msg[curr_index + 2] << 24) | \
(msg[curr_index + 3] << 16) | \
(msg[curr_index + 4] << 8) | \
msg[curr_index + 5]
self.debug_log("Firmware version of board 0x%02x (Chain: %s): %d.%d.%d.%d", msg[curr_index], chain_serial,
msg[curr_index + 2], msg[curr_index + 3], msg[curr_index + 4], msg[curr_index + 5])
if msg[curr_index] not in self.gen2_addr_arr[chain_serial]:
self.log.warning("Got firmware response for %s but not in inventory at %s", msg[curr_index],
chain_serial)
else:
self.gen2_addr_arr[chain_serial][msg[curr_index]] = version
if version < self.min_version[chain_serial]:
self.min_version[chain_serial] = version
if version == BAD_FW_VERSION:
raise AssertionError("Original firmware sent only to Brian before adding "
"real version numbers. The firmware must be updated before "
"MPF will work.")
if (len(msg) > curr_index + 7) and (msg[curr_index + 7] == ord(OppRs232Intf.EOM_CMD)):
break
if (len(msg) > curr_index + 8) and (msg[curr_index + 8] == ord(OppRs232Intf.GET_VERS_CMD)):
curr_index += 7
else:
self.log.warning("Malformed GET_VERS_CMD response (Chain %s): %s.", chain_serial,
"".join(HEX_FORMAT % b for b in msg))
self.opp_connection[chain_serial].lost_synch()
break
def read_gen2_inp_resp_initial(self, chain_serial, msg):
"""Read initial switch states.
Args:
----
chain_serial: Serial of the chain which received the message.
msg: Message to parse.
"""
# Verify the CRC8 is correct
if len(msg) < 7:
raise AssertionError("Received too short initial input response: " + "".join(HEX_FORMAT % b for b in msg))
crc8 = OppRs232Intf.calc_crc8_part_msg(msg, 0, 6)
if msg[6] != ord(crc8):
self._bad_crc(chain_serial, msg)
else:
if chain_serial + '-' + str(msg[0]) not in self.inp_addr_dict:
self.log.warning("Got input response for invalid card at initial request: %s. Msg: %s.", msg[0],
"".join(HEX_FORMAT % b for b in msg))
return
opp_inp = self.inp_addr_dict[chain_serial + '-' + str(msg[0])]
new_state = (msg[2] << 24) | \
(msg[3] << 16) | \
(msg[4] << 8) | \
msg[5]
opp_inp.old_state = new_state
def read_gen2_inp_resp(self, chain_serial, msg):
"""Read switch changes.
Args:
----
chain_serial: Serial of the chain which received the message.
msg: Message to parse.
"""
# Single read gen2 input response. Receive function breaks them down
# Verify the CRC8 is correct
if len(msg) < 7:
self.log.warning("Msg too short: %s.", "".join(HEX_FORMAT % b for b in msg))
self.opp_connection[chain_serial].lost_synch()
return
crc8 = OppRs232Intf.calc_crc8_part_msg(msg, 0, 6)
if msg[6] != ord(crc8):
self._bad_crc(chain_serial, msg)
else:
if chain_serial + '-' + str(msg[0]) not in self.inp_addr_dict:
self.log.warning("Got input response for invalid card: %s. Msg: %s.", msg[0],
"".join(HEX_FORMAT % b for b in msg))
return
opp_inp = self.inp_addr_dict[chain_serial + '-' + str(msg[0])]
new_state = (msg[2] << 24) | \
(msg[3] << 16) | \
(msg[4] << 8) | \
msg[5]
# Update the state which holds inputs that are active
changes = opp_inp.old_state ^ new_state
if changes != 0:
curr_bit = 1
for index in range(0, 32):
if (curr_bit & changes) != 0:
if (curr_bit & new_state) == 0:
self.machine.switch_controller.process_switch_by_num(
state=1,
num=opp_inp.chain_serial + '-' + opp_inp.card_num + '-' + str(index),
platform=self)
else:
self.machine.switch_controller.process_switch_by_num(
state=0,
num=opp_inp.chain_serial + '-' + opp_inp.card_num + '-' + str(index),
platform=self)
curr_bit <<= 1
opp_inp.old_state = new_state
# we can continue to poll
self._poll_response_received[chain_serial].set()
def read_matrix_inp_resp_initial(self, chain_serial, msg):
"""Read initial matrix switch states.
Args:
----
chain_serial: Serial of the chain which received the message.
msg: Message to parse.
"""
# Verify the CRC8 is correct
if len(msg) < 11:
raise AssertionError("Received too short initial input response: " + "".join(HEX_FORMAT % b for b in msg))
crc8 = OppRs232Intf.calc_crc8_part_msg(msg, 0, 10)
if msg[10] != ord(crc8):
self._bad_crc(chain_serial, msg)
else:
if chain_serial + '-' + str(msg[0]) not in self.matrix_inp_addr_dict:
self.log.warning("Got input response for invalid matrix card at initial request: %s. Msg: %s.", msg[0],
"".join(HEX_FORMAT % b for b in msg))
return
opp_inp = self.matrix_inp_addr_dict[chain_serial + '-' + str(msg[0])]
opp_inp.old_state = ((msg[2] << 56) | (msg[3] << 48) | (msg[4] << 40) | (msg[5] << 32) |
(msg[6] << 24) | (msg[7] << 16) | (msg[8] << 8) | msg[9])
# pylint: disable-msg=too-many-nested-blocks
def read_matrix_inp_resp(self, chain_serial, msg):
"""Read matrix switch changes.
Args:
----
chain_serial: Serial of the chain which received the message.
msg: Message to parse.
"""
# Single read gen2 input response. Receive function breaks them down
# Verify the CRC8 is correct
if len(msg) < 11:
self.log.warning("Msg too short: %s.", "".join(HEX_FORMAT % b for b in msg))
self.opp_connection[chain_serial].lost_synch()
return
crc8 = OppRs232Intf.calc_crc8_part_msg(msg, 0, 10)
if msg[10] != ord(crc8):
self._bad_crc(chain_serial, msg)
else:
if chain_serial + '-' + str(msg[0]) not in self.matrix_inp_addr_dict:
self.log.warning("Got input response for invalid matrix card: %s. Msg: %s.", msg[0],
"".join(HEX_FORMAT % b for b in msg))
return
opp_inp = self.matrix_inp_addr_dict[chain_serial + '-' + str(msg[0])]
new_state = ((msg[2] << 56) | (msg[3] << 48) | (msg[4] << 40) | (msg[5] << 32) |
(msg[6] << 24) | (msg[7] << 16) | (msg[8] << 8) | msg[9])
changes = opp_inp.old_state ^ new_state
if changes != 0:
curr_bit = 1
for index in range(32, 96):
if (curr_bit & changes) != 0:
if (curr_bit & new_state) == 0:
self.machine.switch_controller.process_switch_by_num(
state=1,
num=opp_inp.chain_serial + '-' + opp_inp.card_num + '-' + str(index),
platform=self)
else:
self.machine.switch_controller.process_switch_by_num(
state=0,
num=opp_inp.chain_serial + '-' + opp_inp.card_num + '-' + str(index),
platform=self)
curr_bit <<= 1
opp_inp.old_state = new_state
# we can continue to poll
self._poll_response_received[chain_serial].set()
def _get_dict_index(self, input_str):
if not isinstance(input_str, str):
self.raise_config_error("Invalid number format for OPP. Number should be card-number or chain-card-number "
"(e.g. 0-1)", 2)
try:
chain_str, card_str, number_str = input_str.split("-")
except ValueError:
if len(self.serial_connections) > 1:
self.raise_config_error("You need to specify a chain as chain-card-number in: {}".format(input_str), 17)
else:
chain_str = list(self.serial_connections)[0].chain_serial
try:
card_str, number_str = input_str.split("-")
except ValueError:
card_str = '0'
number_str = input_str
if chain_str not in self.opp_connection:
self.raise_config_error("Chain {} does not exist. Existing chains: {}".format(
chain_str, list(self.opp_connection.keys())), 3)
return chain_str + "-" + card_str + "-" + number_str
def configure_driver(self, config: DriverConfig, number: str, platform_settings: dict):
"""Configure a driver.
Args:
----
config: Config dict.
number: Number of this driver.
platform_settings: Platform specific settings.
"""
if not self.opp_connection:
self.raise_config_error("A request was made to configure an OPP solenoid, "
"but no OPP connection is available", 4)
number = self._get_dict_index(number)
if number not in self.sol_dict:
self.raise_config_error("A request was made to configure an OPP solenoid "
"with number {} which doesn't exist".format(number), 5)
# Use new update individual solenoid command
opp_sol = self.sol_dict[number]
opp_sol.config = config
opp_sol.platform_settings = platform_settings
if self.debug:
self.debug_log("Configure driver %s", number)
default_pulse = PulseSettings(config.default_pulse_power, config.default_pulse_ms)
default_hold = HoldSettings(config.default_hold_power)
opp_sol.reconfigure_driver(default_pulse, default_hold)
# Removing the default input is not necessary since the
# CFG_SOL_USE_SWITCH is not being set
return opp_sol
def configure_switch(self, number: str, config: SwitchConfig, platform_config: dict):
"""Configure a switch.
Args:
----
number: Number of this switch.
config: Config dict.
platform_config: Platform specific settings.
"""
del platform_config
del config
# A switch is termed an "input" in OPP terminology
if not self.opp_connection:
self.raise_config_error("A request was made to configure an OPP switch, "
"but no OPP connection is available", 6)
number = self._get_dict_index(number)
if number not in self.inp_dict:
self.raise_config_error("A request was made to configure an OPP switch "
"with number {} which doesn't exist".format(number), 7)
return self.inp_dict[number]
def parse_light_number_to_channels(self, number: str, subtype: str):
"""Parse number and subtype to channel."""
if subtype in ("matrix", "incand"):
return [
{
"number": self._get_dict_index(number)
}
]
if not subtype or subtype == "led":
full_index = self._get_dict_index(number)
chain_serial, card, index = full_index.split('-')
number_format = "{}-{}-{}"
return [
{
"number": number_format.format(chain_serial, card, int(index) * 3)
},
{
"number": number_format.format(chain_serial, card, int(index) * 3 + 1)
},
{
"number": number_format.format(chain_serial, card, int(index) * 3 + 2)
},
]
self.raise_config_error("Unknown subtype {}".format(subtype), 8)
return []
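# Sketch of the LED fan-out above (hypothetical numbers): a light configured
# as "0-0-4" with subtype "led" expands to the three consecutive channels
# "0-0-12", "0-0-13" and "0-0-14" (index * 3 .. index * 3 + 2), whereas
# "matrix" and "incand" lights map to a single channel.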
def configure_light(self, number, subtype, config, platform_settings):
"""Configure a led or matrix light."""
del config
if not self.opp_connection:
self.raise_config_error("A request was made to configure an OPP light, "
"but no OPP connection is available", 9)
chain_serial, card, light_num = number.split('-')
index = chain_serial + '-' + card
if not subtype or subtype == "led":
if index not in self.neo_card_dict:
self.raise_config_error("A request was made to configure an OPP neopixel "
"with card number {} which doesn't exist".format(card), 10)
if not self.neo_card_dict[index].is_valid_light_number(light_num):
self.raise_config_error("A request was made to configure an OPP neopixel "
"with card number {} but number '{}' is "
"invalid".format(card, light_num), 22)
light = OPPModernLightChannel(chain_serial, int(card), int(light_num), self._light_system)
self._light_system.mark_dirty(light)
return light
if subtype == "matrix" and self.min_version[chain_serial] >= 0x02010000:
# modern matrix lights
if index not in self.matrix_light_cards:
self.raise_config_error("A request was made to configure an OPP matrix light "
"with card number {} which doesn't exist".format(card), 18)
if not self.matrix_light_cards[index].is_valid_light_number(light_num):
self.raise_config_error("A request was made to configure an OPP matrix light "
"with card number {} but number '{}' is "
"invalid".format(card, light_num), 19)
light = OPPModernLightChannel(chain_serial, int(card), int(light_num) + 0x2000, self._light_system)
self._light_system.mark_dirty(light)
return light
if subtype in ("incand", "matrix"):
if index not in self.opp_incands:
self.raise_config_error("A request was made to configure an OPP incand light "
"with card number {} which doesn't exist".format(card), 20)
if not self.opp_incands[index].is_valid_light_number(light_num):
self.raise_config_error("A request was made to configure an OPP incand light "
"with card number {} but number '{}' is "
"invalid".format(card, light_num), 21)
if self.min_version[chain_serial] >= 0x02010000:
light = self.opp_incands[index].configure_modern_fade_incand(light_num, self._light_system)
self._light_system.mark_dirty(light)
return light
# legacy incands with new or old subtype
return self.opp_incands[index].configure_software_fade_incand(light_num)
self.raise_config_error("Unknown subtype {}".format(subtype), 12)
return None
async def _poll_sender(self, chain_serial):
"""Poll switches."""
if len(self.read_input_msg[chain_serial]) <= 1:
# there is no point in polling without switches
return
while True:
# wait for previous poll response<|fim▁hole|> self.log.warning("Poll took more than %sms for %s", timeout * 1000, chain_serial)
else:
self._poll_response_received[chain_serial].clear()
# send poll
self.send_to_processor(chain_serial, self.read_input_msg[chain_serial])
await self.opp_connection[chain_serial].writer.drain()
# polling flat out saturates the link and seems to overwhelm the hardware, so limit the rate to the configured poll_hz (100Hz by default)
await asyncio.sleep(1 / self.config['poll_hz'])
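# Pacing sketch for the loop above (assumes poll_hz is configured as 100):
# the sleep spaces polls 1 / 100 = 10 ms apart, and the timeout of
# 1 / poll_hz * 25 = 250 ms tolerates a response being up to 25 poll
# periods late before the warning above is logged.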
def _verify_coil_and_switch_fit(self, switch, coil):
chain_serial, card, solenoid = coil.hw_driver.number.split('-')
sw_chain_serial, sw_card, sw_num = switch.hw_switch.number.split('-')
if self.min_version[chain_serial] >= 0x20000:
if chain_serial != sw_chain_serial or card != sw_card:
self.raise_config_error('Invalid switch being configured for driver. Driver = {} '
'Switch = {}. Driver and switch have to be on the same '
'board.'.format(coil.hw_driver.number, switch.hw_switch.number), 13)
else:
matching_sw = ((int(solenoid) & 0x0c) << 1) | (int(solenoid) & 0x03)
if chain_serial != sw_chain_serial or card != sw_card or matching_sw != int(sw_num):
self.raise_config_error('Invalid switch being configured for driver. Driver = {} '
'Switch = {}. For Firmware < 0.2.0 they have to be on the same board and '
'have the same number'.format(coil.hw_driver.number, switch.hw_switch.number),
14)
def set_pulse_on_hit_rule(self, enable_switch: SwitchSettings, coil: DriverSettings):
"""Set pulse on hit rule on driver.
Pulses a driver when a switch is hit. When the switch is released the pulse continues. Typically used for
autofire coils such as pop bumpers.
"""
self._write_hw_rule(enable_switch, coil, use_hold=False, can_cancel=False)
def set_delayed_pulse_on_hit_rule(self, enable_switch: SwitchSettings, coil: DriverSettings, delay_ms: int):
"""Set pulse on hit and release rule to driver.
When a switch is hit and a certain delay has passed, it pulses a driver.
When the switch is released the pulse continues.
Typically used for kickbacks.
"""
if delay_ms <= 0:
raise AssertionError("set_delayed_pulse_on_hit_rule should be used with a positive delay "
"not {}".format(delay_ms))
if delay_ms > 255:
raise AssertionError("set_delayed_pulse_on_hit_rule is limited to max 255ms "
"(was {})".format(delay_ms))
self._write_hw_rule(enable_switch, coil, use_hold=False, can_cancel=False, delay_ms=int(delay_ms))
def set_pulse_on_hit_and_release_rule(self, enable_switch: SwitchSettings, coil: DriverSettings):
"""Set pulse on hit and release rule to driver.
Pulses a driver when a switch is hit. When the switch is released the pulse is canceled. Typically used on
the main coil for dual-coil flippers without an EOS switch.
"""
self._write_hw_rule(enable_switch, coil, use_hold=False, can_cancel=True)
def set_pulse_on_hit_and_enable_and_release_rule(self, enable_switch: SwitchSettings, coil: DriverSettings):
"""Set pulse on hit and enable and relase rule on driver.
Pulses a driver when a switch is hit. Then enables the driver (may be with pwm). When the switch is released
the pulse is canceled and the driver gets disabled. Typically used for single coil flippers.
"""
self._write_hw_rule(enable_switch, coil, use_hold=True, can_cancel=True)
def set_pulse_on_hit_and_release_and_disable_rule(self, enable_switch: SwitchSettings,
eos_switch: SwitchSettings, coil: DriverSettings,
repulse_settings: Optional[RepulseSettings]):
"""Set pulse on hit and release and disable rule on driver.
Pulses a driver when a switch is hit. Then enables the driver (possibly with PWM). When the switch is released
the pulse is canceled and the driver gets disabled. When the eos_switch is hit the pulse is also canceled
and the driver gets disabled. Typically used on the main coil for dual-coil flippers with an EOS switch.
"""
raise AssertionError("Not implemented in OPP currently")
def set_pulse_on_hit_and_enable_and_release_and_disable_rule(self, enable_switch: SwitchSettings,
eos_switch: SwitchSettings, coil: DriverSettings,
repulse_settings: Optional[RepulseSettings]):
"""Set pulse on hit and enable and release and disable rule on driver.
Pulses a driver when a switch is hit. Then enables the driver (possibly with PWM). When the switch is released
the pulse is canceled and the driver becomes disabled. When the eos_switch is hit the pulse is canceled
and the driver becomes enabled (likely with PWM).
Typically used on the coil for single-wound-coil flippers with an EOS switch.
"""
raise AssertionError("Not implemented in OPP currently")
# pylint: disable-msg=too-many-arguments
def _write_hw_rule(self, switch_obj: SwitchSettings, driver_obj: DriverSettings, use_hold, can_cancel,
delay_ms=None):
if switch_obj.invert:
raise AssertionError("Cannot handle inverted switches")
if driver_obj.hold_settings and not use_hold:
raise AssertionError("Invalid call")
self._verify_coil_and_switch_fit(switch_obj, driver_obj)
self.debug_log("Setting HW Rule. Driver: %s", driver_obj.hw_driver.number)
driver_obj.hw_driver.switches.append(switch_obj.hw_switch.number)
driver_obj.hw_driver.set_switch_rule(driver_obj.pulse_settings, driver_obj.hold_settings, driver_obj.recycle,
can_cancel, delay_ms)
_, _, switch_num = switch_obj.hw_switch.number.split("-")
switch_num = int(switch_num)
self._add_switch_coil_mapping(switch_num, driver_obj.hw_driver)
def _remove_switch_coil_mapping(self, switch_num, driver: "OPPSolenoid"):
"""Remove mapping between switch and coil."""
if self.min_version[driver.sol_card.chain_serial] < 0x20000:
return
_, _, coil_num = driver.number.split('-')
# mirror switch matrix columns to handle the fact that OPP matrix is in reverse column order
if switch_num >= 32:
switch_num = 8 * (15 - (switch_num // 8)) + switch_num % 8
msg = bytearray()
msg.append(driver.sol_card.addr)
msg.extend(OppRs232Intf.SET_SOL_INP_CMD)
msg.append(int(switch_num))
msg.append(int(coil_num) + ord(OppRs232Intf.CFG_SOL_INP_REMOVE))
msg.extend(OppRs232Intf.calc_crc8_whole_msg(msg))
msg.extend(OppRs232Intf.EOM_CMD)
final_cmd = bytes(msg)
if self.debug:
self.debug_log("Unmapping input %s and coil %s on %s", switch_num, coil_num, driver.sol_card.chain_serial)
self.send_to_processor(driver.sol_card.chain_serial, final_cmd)
def _add_switch_coil_mapping(self, switch_num, driver: "OPPSolenoid"):
"""Add mapping between switch and coil."""
if self.min_version[driver.sol_card.chain_serial] < 0x20000:
return
_, _, coil_num = driver.number.split('-')
# mirror switch matrix columns to handle the fact that OPP matrix is in reverse column order
if switch_num >= 32:
switch_num = 8 * (15 - (switch_num // 8)) + switch_num % 8
msg = bytearray()
msg.append(driver.sol_card.addr)
msg.extend(OppRs232Intf.SET_SOL_INP_CMD)
msg.append(int(switch_num))
msg.append(int(coil_num))
msg.extend(OppRs232Intf.calc_crc8_whole_msg(msg))
msg.extend(OppRs232Intf.EOM_CMD)
final_cmd = bytes(msg)
if self.debug:
self.debug_log("Mapping input %s and coil %s on %s", switch_num, coil_num, driver.sol_card.chain_serial)
self.send_to_processor(driver.sol_card.chain_serial, final_cmd)
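# Quick standalone check of the column-mirroring formula used above
# (illustrative only, not called by the platform code):
#     for n in (32, 39, 88, 95):
#         print(n, "->", 8 * (15 - (n // 8)) + n % 8)
#     # 32 -> 88, 39 -> 95, 88 -> 32, 95 -> 39: columns swap end-for-end
#     # while the row offset (n % 8) within each column is preserved.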
def clear_hw_rule(self, switch: SwitchSettings, coil: DriverSettings):
"""Clear a hardware rule.
This is used if you want to remove the linkage between a switch and
some driver activity. For example, if you wanted to disable your
flippers (so that a player pushing the flipper buttons wouldn't cause
the flippers to flip), you'd call this method with your flipper button
as the *sw_num*.
"""
if switch.hw_switch.number in coil.hw_driver.switches:
if self.debug:
self.debug_log("Clearing HW Rule for switch: %s, coils: %s", switch.hw_switch.number,
coil.hw_driver.number)
coil.hw_driver.switches.remove(switch.hw_switch.number)
_, _, switch_num = switch.hw_switch.number.split("-")
switch_num = int(switch_num)
self._remove_switch_coil_mapping(switch_num, coil.hw_driver)
# disable rule if there are no more switches
# Technically not necessary unless the solenoid parameters are
# changing. MPF may not know when initial kick and hold values
# are changed, so this might need to be called each time.
if not coil.hw_driver.switches:
coil.hw_driver.remove_switch_rule()<|fim▁end|> | timeout = 1 / self.config['poll_hz'] * 25
try:
await asyncio.wait_for(self._poll_response_received[chain_serial].wait(), timeout)
except asyncio.TimeoutError: |
<|file_name|>switched-expectations.rs<|end_file_name|><|fim▁begin|>fn main() {
let var = 10i32;
let ref string: String = var; //~ ERROR mismatched types [E0308]<|fim▁hole|>}<|fim▁end|> | |
<|file_name|>number.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Immutable numbers.
#[allow(non_uppercase_statics)];
use base::{CFAllocatorRef, CFRelease, CFTypeID, TCFType, kCFAllocatorDefault};
use std::cast;
use std::libc::c_void;
pub type CFNumberType = u32;
// members of enum CFNumberType
// static kCFNumberSInt8Type: CFNumberType = 1;
// static kCFNumberSInt16Type: CFNumberType = 2;
// static kCFNumberSInt32Type: CFNumberType = 3;
static kCFNumberSInt64Type: CFNumberType = 4;
// static kCFNumberFloat32Type: CFNumberType = 5;
static kCFNumberFloat64Type: CFNumberType = 6;
// static kCFNumberCharType: CFNumberType = 7;
// static kCFNumberShortType: CFNumberType = 8;
// static kCFNumberIntType: CFNumberType = 9;
// static kCFNumberLongType: CFNumberType = 10;
// static kCFNumberLongLongType: CFNumberType = 11;
// static kCFNumberFloatType: CFNumberType = 12;
// static kCFNumberDoubleType: CFNumberType = 13;
// static kCFNumberCFIndexType: CFNumberType = 14;
// static kCFNumberNSIntegerType: CFNumberType = 15;
// static kCFNumberCGFloatType: CFNumberType = 16;
// static kCFNumberMaxType: CFNumberType = 16;
struct __CFNumber;
pub type CFNumberRef = *__CFNumber;
/// An immutable numeric value.
///
/// FIXME(pcwalton): Should be a newtype struct, but that fails due to a Rust compiler bug.
pub struct CFNumber {
priv obj: CFNumberRef,
}
impl Drop for CFNumber {
fn drop(&mut self) {
unsafe {
CFRelease(self.as_CFTypeRef())
}
}
}
impl TCFType<CFNumberRef> for CFNumber {
fn as_concrete_TypeRef(&self) -> CFNumberRef {
self.obj
}
unsafe fn wrap_under_create_rule(obj: CFNumberRef) -> CFNumber {
CFNumber {
obj: obj,
}<|fim▁hole|> fn type_id(_: Option<CFNumber>) -> CFTypeID {
unsafe {
CFNumberGetTypeID()
}
}
}
// TODO(pcwalton): Floating point.
impl ToPrimitive for CFNumber {
#[inline]
fn to_i64(&self) -> Option<i64> {
unsafe {
let mut value: i64 = 0;
let ok = CFNumberGetValue(self.obj, kCFNumberSInt64Type, cast::transmute(&mut value));
assert!(ok);
Some(value)
}
}
#[inline]
fn to_u64(&self) -> Option<u64> {
// CFNumber does not support unsigned 64-bit values.
None
}
#[inline]
fn to_f64(&self) -> Option<f64> {
unsafe {
let mut value: f64 = 0.0;
let ok = CFNumberGetValue(self.obj, kCFNumberFloat64Type, cast::transmute(&mut value));
assert!(ok);
Some(value)
}
}
}
// TODO(pcwalton): Floating point.
impl FromPrimitive for CFNumber {
#[inline]
fn from_i64(value: i64) -> Option<CFNumber> {
unsafe {
let number_ref = CFNumberCreate(kCFAllocatorDefault,
kCFNumberSInt64Type,
cast::transmute(&value));
Some(TCFType::wrap_under_create_rule(number_ref))
}
}
#[inline]
fn from_u64(_: u64) -> Option<CFNumber> {
// CFNumber does not support unsigned 64-bit values.
None
}
#[inline]
fn from_f64(value: f64) -> Option<CFNumber> {
unsafe {
let number_ref = CFNumberCreate(kCFAllocatorDefault,
kCFNumberFloat64Type,
cast::transmute(&value));
Some(TCFType::wrap_under_create_rule(number_ref))
}
}
}
/// A convenience function to create CFNumbers.
pub fn number(value: i64) -> CFNumber {
FromPrimitive::from_i64(value).unwrap()
}
#[link(name = "CoreFoundation", kind = "framework")]
extern {
/*
* CFNumber.h
*/
fn CFNumberCreate(allocator: CFAllocatorRef, theType: CFNumberType, valuePtr: *c_void)
-> CFNumberRef;
//fn CFNumberGetByteSize
fn CFNumberGetValue(number: CFNumberRef, theType: CFNumberType, valuePtr: *mut c_void) -> bool;
//fn CFNumberCompare
fn CFNumberGetTypeID() -> CFTypeID;
}<|fim▁end|> | }
#[inline] |
<|file_name|>mymovies.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import sys
from PyQt4.QtCore import *
from PyQt4.QtGui import *
import moviedata
class MainWindow(QMainWindow):
def __init__(self, parent=None):
super(MainWindow, self).__init__(parent)
self.movies = moviedata.MovieContainer()
self.table = QTableWidget()
self.setCentralWidget(self.table)
def updateTable(self, current=None):<|fim▁hole|> self.table.setHorizontalHeaderLabels(['Title',
'Year', 'Mins', 'Acquired', 'Notes'])
self.table.setAlternatingRowColors(True)
self.table.setEditTriggers(QTableWidget.NoEditTriggers)
self.table.setSelectionBehavior(QTableWidget.SelectRows)
self.table.setSelectionMode(QTableWidget.SingleSelection)
selected = None<|fim▁end|> | self.table.clear()
self.table.setRowCount(len(self.movies))
self.table.setColumnCount(5) |
<|file_name|>logout.js<|end_file_name|><|fim▁begin|>module.exports = function (app) {
app.get('/', function (req, res) {
if (req.logout) {
req.logout();
}
res.redirect('/');<|fim▁hole|><|fim▁end|> | });
}; |
<|file_name|>ipo.py<|end_file_name|><|fim▁begin|>from heapq import *
from typing import List
class Solution:
def findMaximizedCapital(self, k: int, wealth: int, profits: List[int], capitals: List[int]) -> int:
minCapitalHeap, maxProfitHeap = [], []
for i in range(len(capitals)):
heappush(minCapitalHeap, (capitals[i], profits[i]))
for _ in range(k):
# find the projects which require less capital than available wealth
while minCapitalHeap and minCapitalHeap[0][0] <= wealth:
_, profit = heappop(minCapitalHeap)
heappush(maxProfitHeap, -profit)
if not maxProfitHeap:
break
wealth += -heappop(maxProfitHeap)
return wealth
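# Worked trace of the greedy strategy (illustrative run with k=2, W=0 and the
# sample data below): initially only the project with capital 0 is affordable,
# so its profit 1 is taken -> wealth 1; that unlocks the capital <= 1 projects,
# and the max-profit heap then yields profit 2 -> final wealth 3.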
k=0
W=0<|fim▁hole|>Profits=[1,2,3,5]
Capital=[0,1,2,3]
ob = Solution()
print(ob.findMaximizedCapital(k, W, Profits, Capital))<|fim▁end|> | |
<|file_name|>download_test.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
"""Unit tests for the image downloader."""
import unittest
import download
__author__ = "Nick Pascucci ([email protected])"
class DownloadTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_img_matcher(self):
html = """<html>
<body>
<b>Hi there!</b>
<img src="abcd-(myfile)[1].jpg"><|fim▁hole|></html>
"""
paths = download.get_image_paths(html)
assert paths == ["abcd-(myfile)[1].jpg"]
def test_img_matcher_http(self):
html = """<html>
<body>
<b>Hi there!</b>
<img src="http://www.def.com/abcd-(myfile)[1].jpg">
</body>
</html>
"""
paths = download.get_image_paths(html)
assert paths == ["http://www.def.com/abcd-(myfile)[1].jpg"]
def test_extension_matcher(self):
filename = "abcdef.jpg"
assert download.match_extension(filename)
filename = "abcdef.txt"
assert not download.match_extension(filename)
def test_sitename_matcher(self):
site = "http://www.xkcd.com/208/"
sitename = download.sitename(site)
assert "http://www.xkcd.com" == sitename
if __name__ == "__main__":
unittest.main()<|fim▁end|> | </body> |
<|file_name|>attach.js<|end_file_name|><|fim▁begin|>import {ATTACHMENTS} from "../constants";
export default (...args) => {
// Use the explicitly passed attachments if any, otherwise the full default list
const attachments = args.length ? args : ATTACHMENTS;
return {
props: {
attach: {
type: String,
validator: value => value === "" || attachments.includes(value)
}
},
computed: {
getAttach() {
if(typeof this.attach === "string") {
return this.attach ? `${this.attach} attached` : "attached";
}
}
}
};<|fim▁hole|><|fim▁end|> | }; |
<|file_name|>macro-backtrace-nested.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// In expression position, but not statement position, when we expand a macro,
// we replace the span of the expanded expression with that of the call site.
macro_rules! nested_expr {
() => (fake)
}
macro_rules! call_nested_expr {
() => (nested_expr!())
}
macro_rules! call_nested_expr_sum { //~ NOTE in expansion of
() => { 1 + nested_expr!(); } //~ ERROR unresolved name
}<|fim▁hole|>}<|fim▁end|> |
fn main() {
1 + call_nested_expr!(); //~ ERROR unresolved name
call_nested_expr_sum!(); //~ NOTE expansion site |
<|file_name|>basic_stats.py<|end_file_name|><|fim▁begin|># This script calculates how many error reports are in each subdirectory
# and how many error reports are in total.
# Edit in_dir and out_file parameters as you need.
import os
in_dir = "D:/Projects/CrashRpt/valid_reports"
out_file = "stats.txt"
f = open(out_file, "w")
def get_txt_file_count(dirname):
count = 0
for root, dirs, files in os.walk(dirname, True):
for file in files:
if file[-4:] != ".txt":
continue
count += 1
break
return count
multimap = dict()
for root, dirs, files in os.walk(in_dir):
for dir in dirs:
dir_name = os.path.join(root, dir)
report_count_in_dir = get_txt_file_count(dir_name)
if report_count_in_dir in multimap.keys():
multimap[report_count_in_dir].append(dir)
else:
multimap[report_count_in_dir] = [dir]
ordered_list = list(multimap.keys())
ordered_list.sort()
ordered_list.reverse()
total_count = 0
total_groups = 0
for count in ordered_list:
total_groups += len(multimap[count]);
total_count += count * len(multimap[count])
f.write("Total %d reports (100%%) in %d directories\n"%(total_count, total_groups))
n = 1
for key in ordered_list:
for dir in multimap[key]:
percent = key/total_count*100
f.write("%d. %d reports (%0.1f%%) in '%s'\n"%(n, key, percent, dir))
n = n+1<|fim▁hole|>f.close()<|fim▁end|> | |
<|file_name|>test_notification.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Integration tests for notification command."""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import re
import time
import uuid
import boto
import gslib.tests.testcase as testcase
from gslib.tests.util import ObjectToURI as suri
from gslib.tests.util import unittest
from gslib.utils.retry_util import Retry
def _LoadNotificationUrl():
return boto.config.get_value('GSUtil', 'test_notification_url')
NOTIFICATION_URL = _LoadNotificationUrl()
class TestNotification(testcase.GsUtilIntegrationTestCase):
"""Integration tests for notification command."""
@unittest.skipUnless(NOTIFICATION_URL,
'Test requires notification URL configuration.')
def test_watch_bucket(self):
"""Tests creating a notification channel on a bucket."""
bucket_uri = self.CreateBucket()
self.RunGsUtil(
['notification', 'watchbucket', NOTIFICATION_URL,
suri(bucket_uri)])
identifier = str(uuid.uuid4())
token = str(uuid.uuid4())
stderr = self.RunGsUtil([
'notification', 'watchbucket', '-i', identifier, '-t', token,
NOTIFICATION_URL,
suri(bucket_uri)
],
return_stderr=True)
self.assertIn('token: %s' % token, stderr)
self.assertIn('identifier: %s' % identifier, stderr)
@unittest.skipUnless(NOTIFICATION_URL,
'Test requires notification URL configuration.')
def test_stop_channel(self):
"""Tests stopping a notification channel on a bucket."""
bucket_uri = self.CreateBucket()
stderr = self.RunGsUtil(
['notification', 'watchbucket', NOTIFICATION_URL,
suri(bucket_uri)],
return_stderr=True)
channel_id = re.findall(r'channel identifier: (?P<id>.*)', stderr)<|fim▁hole|> resource_id = re.findall(r'resource identifier: (?P<id>.*)', stderr)
self.assertEqual(len(resource_id), 1)
channel_id = channel_id[0]
resource_id = resource_id[0]
self.RunGsUtil(['notification', 'stopchannel', channel_id, resource_id])
@unittest.skipUnless(NOTIFICATION_URL,
'Test requires notification URL configuration.')
def test_list_one_channel(self):
"""Tests listing notification channel on a bucket."""
# TODO(b/132277269): Re-enable these once the service-side bug is fixed.
return unittest.skip('Functionality has been disabled due to b/132277269')
bucket_uri = self.CreateBucket()
# Set up an OCN (object change notification) on the newly created bucket.
self.RunGsUtil(
['notification', 'watchbucket', NOTIFICATION_URL,
suri(bucket_uri)],
return_stderr=False)
# The OCN listing in the service is eventually consistent. In initial
# tests, it almost never was ready immediately after calling WatchBucket
# above, so we A) sleep for a few seconds before the first OCN listing
# attempt, and B) wrap the OCN listing attempt in retry logic in case
# it raises a BucketNotFoundException (note that RunGsUtil will raise this
# as an AssertionError due to the exit status not being 0).
@Retry(AssertionError, tries=3, timeout_secs=5)
def _ListObjectChangeNotifications():
stderr = self.RunGsUtil(['notification', 'list', '-o',
suri(bucket_uri)],
return_stderr=True)
return stderr
time.sleep(5)
stderr = _ListObjectChangeNotifications()
channel_id = re.findall(r'Channel identifier: (?P<id>.*)', stderr)
self.assertEqual(len(channel_id), 1)
resource_id = re.findall(r'Resource identifier: (?P<id>.*)', stderr)
self.assertEqual(len(resource_id), 1)
push_url = re.findall(r'Application URL: (?P<id>.*)', stderr)
self.assertEqual(len(push_url), 1)
subscriber_email = re.findall(r'Created by: (?P<id>.*)', stderr)
self.assertEqual(len(subscriber_email), 1)
creation_time = re.findall(r'Creation time: (?P<id>.*)', stderr)
self.assertEqual(len(creation_time), 1)
def test_invalid_subcommand(self):
stderr = self.RunGsUtil(['notification', 'foo', 'bar', 'baz'],
return_stderr=True,
expected_status=1)
self.assertIn('Invalid subcommand', stderr)<|fim▁end|> | self.assertEqual(len(channel_id), 1) |
<|file_name|>atomic_usize.rs<|end_file_name|><|fim▁begin|>use std::cell::UnsafeCell;
use std::fmt;
use std::ops;
/// `AtomicUsize` providing an additional `load_unsync` function.
pub(crate) struct AtomicUsize {
inner: UnsafeCell<std::sync::atomic::AtomicUsize>,
}
unsafe impl Send for AtomicUsize {}
unsafe impl Sync for AtomicUsize {}
impl AtomicUsize {
pub(crate) const fn new(val: usize) -> AtomicUsize {
let inner = UnsafeCell::new(std::sync::atomic::AtomicUsize::new(val));
AtomicUsize { inner }
}
/// Performs an unsynchronized load.
///<|fim▁hole|> /// All mutations must have happened before the unsynchronized load.
/// Additionally, there must be no concurrent mutations.
pub(crate) unsafe fn unsync_load(&self) -> usize {
*(*self.inner.get()).get_mut()
}
pub(crate) fn with_mut<R>(&mut self, f: impl FnOnce(&mut usize) -> R) -> R {
// safety: we have mutable access
f(unsafe { (*self.inner.get()).get_mut() })
}
}
impl ops::Deref for AtomicUsize {
type Target = std::sync::atomic::AtomicUsize;
fn deref(&self) -> &Self::Target {
// safety: it is always safe to access `&self` fns on the inner value as
// we never perform unsafe mutations.
unsafe { &*self.inner.get() }
}
}
impl ops::DerefMut for AtomicUsize {
fn deref_mut(&mut self) -> &mut Self::Target {
// safety: we hold `&mut self`
unsafe { &mut *self.inner.get() }
}
}
impl fmt::Debug for AtomicUsize {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
(**self).fmt(fmt)
}
}<|fim▁end|> | /// # Safety
/// |
<|file_name|>test.js<|end_file_name|><|fim▁begin|>const path = require('path');
const { expect } = require('chai');
const delay = require('../../../../lib/utils/delay');
describe('Compiler service', () => {
it('Should execute a basic test', async () => {
await runTests('testcafe-fixtures/basic-test.js', 'Basic test');
});
it('Should handle an error', async () => {
try {<|fim▁hole|> catch (err) {
expect(err[0].startsWith([
`The specified selector does not match any element in the DOM tree. ` +
` > | Selector('#not-exists') ` +
` [[user-agent]] ` +
` 1 |fixture \`Compiler service\`;` +
` 2 |` +
` 3 |test(\`Throw an error\`, async t => {` +
` > 4 | await t.click('#not-exists');` +
` 5 |});` +
` 6 | at <anonymous> (${path.join(__dirname, 'testcafe-fixtures/error-test.js')}:4:13)`
])).to.be.true;
}
});
it('Should allow using ClientFunction in assertions', async () => {
await runTests('testcafe-fixtures/client-function-in-assertions.js', 'ClientFunction in assertions');
});
it('Should execute Selectors in sync mode', async () => {
await runTests('testcafe-fixtures/synchronous-selectors.js');
});
it('debug', async () => {
let resolver = null;
const result = new Promise(resolve => {
resolver = resolve;
});
runTests('testcafe-fixtures/debug.js')
.then(() => resolver());
setTimeout(async () => {
const client = global.testCafe.runner.compilerService.cdp;
await client.Debugger.resume();
await delay(1000);
await client.Debugger.resume();
}, 10000);
return result;
});
});<|fim▁end|> | await runTests('testcafe-fixtures/error-test.js', 'Throw an error', { shouldFail: true });
} |
<|file_name|>0068_auto_20160413_0650.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('backend', '0067_auto_20160411_1224'),
]
operations = [
migrations.CreateModel(
name='Brick',
fields=[
('crafteditem_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='backend.CraftedItem')),
],
bases=('backend.crafteditem',),
),
migrations.AddField(
model_name='crafteditem',
name='energy',<|fim▁hole|><|fim▁end|> | field=models.IntegerField(default=0),
),
] |
<|file_name|>CargaFacturaOracle.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from BaseOracle import *
from Factura import *
from ConfigDB import *
class CargaFacturaOracle(object):
def __init__(self):
pass
def carga(self, factura):
cfgOra = ConfigDB("oracle")
cfgOra.getConfig()<|fim▁hole|> oracle = BaseOracle(cfgOra.maquina, cfgOra.usuario, cfgOra.clave, cfgOra.servicio)
oracle.conectar()
oracle.ejecutar("delete ELE_DOCUMENTOS where CLAVE_ACCESO = '" + factura.claveAcceso + "'")
oracle.ejecutar("INSERT INTO ELE_DOCUMENTOS VALUES ('"
+ factura.claveAcceso + "','" + factura.documento + "','" + factura.razonSocial + "','"
+ factura.nombreComercial + "','" + factura.direccion + "','" + factura.establecimiento
+ "','"
+ factura.puntoEmision + "','" + factura.secuencial + "',TO_DATE('" + factura.fechaEmision
+ "', 'dd/mm/yyyy'),'" + factura.autorizacion + "','" + factura.tipo + "')")
i = 1
for det in factura.detalle:
oracle.ejecutar("INSERT INTO ELE_FACTURA_DETALLES"
+ "(CLAVE_ACCESO_ELE_DOCUMENTOS,NUMFILA,CODIGO_PRINCIPAL,DESCRIPCION,CANTIDAD,"
+ "PRECIO_UNITARIO,DESCUENTO,PRECIO_TOTAL_SIN_IMPUESTO)"
+ "VALUES ('" + factura.claveAcceso + "'," + str(i) + ",'" + det.codigoPrincipal + "','"
+ det.descripcion + "'," + str(det.cantidad) + "," + str(det.precioUnitario) + ","
+ str(det.descuento) + ","
+ str(det.total) + ")")
j = 1
for imp in det.impuesto:
oracle.ejecutar("INSERT INTO ELE_FACTURA_IMPUESTOS(CLAVE_ACCESO_ELE_DOCUMENTOS,"
+ "NUM_FILA_ELE_FACTURA_DETALLES,NUM_FILA,CODIGO,CODIGO_PORCENTAJE,TARIFA,"
+ "BASE_IMPONIBLE,VALOR) VALUES ('" + factura.claveAcceso + "'," + str(i) + ","
+ str(j) + ",'" + imp.codigo + "','" + imp.codigoPorcentaje + "',"
+ imp.tarifa + "," + imp.baseImponible + "," + imp.valor + ")")
j = j + 1
i = i + 1
oracle.desconectar()<|fim▁end|> | #cfgOra.imprimir()
|
<|file_name|>126_sdp_with_port_0_and_no_rtpmap_for_dynamic_pt.py<|end_file_name|><|fim▁begin|># $Id: 126_sdp_with_port_0_and_no_rtpmap_for_dynamic_pt.py 369517 2012-07-01 17:28:57Z file $
import inc_sip as sip
import inc_sdp as sdp
sdp = \
"""
v=0
o=- 0 0 IN IP4 127.0.0.1
s=-
c=IN IP4 127.0.0.1
t=0 0
m=video 0 RTP/AVP 100
m=audio 5000 RTP/AVP 0
"""
pjsua_args = "--null-audio --auto-answer 200"
extra_headers = ""
include = ["Content-Type: application/sdp", # response must include SDP
"m=video 0 RTP/AVP[\\s\\S]+m=audio [1-9]+[0-9]* RTP/AVP"
]
exclude = []
<|fim▁hole|> resp_inc=include, resp_exc=exclude)<|fim▁end|> | sendto_cfg = sip.SendtoCfg("SDP media with port 0 and no rtpmap for dynamic PT", pjsua_args, sdp, 200,
extra_headers=extra_headers, |
<|file_name|>LiveStreamEvent.java<|end_file_name|><|fim▁begin|>package com.google.api.ads.dfp.jaxws.v201408;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
/**
*
* A {@code LiveStreamEvent} encapsulates all the information necessary
* to enable DAI (Dynamic Ad Insertion) into a live video stream.<|fim▁hole|> *
*
* <p>Java class for LiveStreamEvent complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="LiveStreamEvent">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="id" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="name" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="description" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="status" type="{https://www.google.com/apis/ads/publisher/v201408}LiveStreamEventStatus" minOccurs="0"/>
* <element name="creationDateTime" type="{https://www.google.com/apis/ads/publisher/v201408}DateTime" minOccurs="0"/>
* <element name="lastModifiedDateTime" type="{https://www.google.com/apis/ads/publisher/v201408}DateTime" minOccurs="0"/>
* <element name="startDateTime" type="{https://www.google.com/apis/ads/publisher/v201408}DateTime" minOccurs="0"/>
* <element name="endDateTime" type="{https://www.google.com/apis/ads/publisher/v201408}DateTime" minOccurs="0"/>
* <element name="totalEstimatedConcurrentUsers" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="contentUrls" type="{http://www.w3.org/2001/XMLSchema}string" maxOccurs="unbounded" minOccurs="0"/>
* <element name="adTags" type="{http://www.w3.org/2001/XMLSchema}string" maxOccurs="unbounded" minOccurs="0"/>
* <element name="liveStreamEventCode" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "LiveStreamEvent", propOrder = {
"id",
"name",
"description",
"status",
"creationDateTime",
"lastModifiedDateTime",
"startDateTime",
"endDateTime",
"totalEstimatedConcurrentUsers",
"contentUrls",
"adTags",
"liveStreamEventCode"
})
public class LiveStreamEvent {
protected Long id;
protected String name;
protected String description;
@XmlSchemaType(name = "string")
protected LiveStreamEventStatus status;
protected DateTime creationDateTime;
protected DateTime lastModifiedDateTime;
protected DateTime startDateTime;
protected DateTime endDateTime;
protected Long totalEstimatedConcurrentUsers;
protected List<String> contentUrls;
protected List<String> adTags;
protected String liveStreamEventCode;
/**
* Gets the value of the id property.
*
* @return
* possible object is
* {@link Long }
*
*/
public Long getId() {
return id;
}
/**
* Sets the value of the id property.
*
* @param value
* allowed object is
* {@link Long }
*
*/
public void setId(Long value) {
this.id = value;
}
/**
* Gets the value of the name property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getName() {
return name;
}
/**
* Sets the value of the name property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setName(String value) {
this.name = value;
}
/**
* Gets the value of the description property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getDescription() {
return description;
}
/**
* Sets the value of the description property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setDescription(String value) {
this.description = value;
}
/**
* Gets the value of the status property.
*
* @return
* possible object is
* {@link LiveStreamEventStatus }
*
*/
public LiveStreamEventStatus getStatus() {
return status;
}
/**
* Sets the value of the status property.
*
* @param value
* allowed object is
* {@link LiveStreamEventStatus }
*
*/
public void setStatus(LiveStreamEventStatus value) {
this.status = value;
}
/**
* Gets the value of the creationDateTime property.
*
* @return
* possible object is
* {@link DateTime }
*
*/
public DateTime getCreationDateTime() {
return creationDateTime;
}
/**
* Sets the value of the creationDateTime property.
*
* @param value
* allowed object is
* {@link DateTime }
*
*/
public void setCreationDateTime(DateTime value) {
this.creationDateTime = value;
}
/**
* Gets the value of the lastModifiedDateTime property.
*
* @return
* possible object is
* {@link DateTime }
*
*/
public DateTime getLastModifiedDateTime() {
return lastModifiedDateTime;
}
/**
* Sets the value of the lastModifiedDateTime property.
*
* @param value
* allowed object is
* {@link DateTime }
*
*/
public void setLastModifiedDateTime(DateTime value) {
this.lastModifiedDateTime = value;
}
/**
* Gets the value of the startDateTime property.
*
* @return
* possible object is
* {@link DateTime }
*
*/
public DateTime getStartDateTime() {
return startDateTime;
}
/**
* Sets the value of the startDateTime property.
*
* @param value
* allowed object is
* {@link DateTime }
*
*/
public void setStartDateTime(DateTime value) {
this.startDateTime = value;
}
/**
* Gets the value of the endDateTime property.
*
* @return
* possible object is
* {@link DateTime }
*
*/
public DateTime getEndDateTime() {
return endDateTime;
}
/**
* Sets the value of the endDateTime property.
*
* @param value
* allowed object is
* {@link DateTime }
*
*/
public void setEndDateTime(DateTime value) {
this.endDateTime = value;
}
/**
* Gets the value of the totalEstimatedConcurrentUsers property.
*
* @return
* possible object is
* {@link Long }
*
*/
public Long getTotalEstimatedConcurrentUsers() {
return totalEstimatedConcurrentUsers;
}
/**
* Sets the value of the totalEstimatedConcurrentUsers property.
*
* @param value
* allowed object is
* {@link Long }
*
*/
public void setTotalEstimatedConcurrentUsers(Long value) {
this.totalEstimatedConcurrentUsers = value;
}
/**
* Gets the value of the contentUrls property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the contentUrls property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getContentUrls().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link String }
*
*
*/
public List<String> getContentUrls() {
if (contentUrls == null) {
contentUrls = new ArrayList<String>();
}
return this.contentUrls;
}
/**
* Gets the value of the adTags property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the adTags property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getAdTags().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link String }
*
*
*/
public List<String> getAdTags() {
if (adTags == null) {
adTags = new ArrayList<String>();
}
return this.adTags;
}
/**
* Gets the value of the liveStreamEventCode property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getLiveStreamEventCode() {
return liveStreamEventCode;
}
/**
* Sets the value of the liveStreamEventCode property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setLiveStreamEventCode(String value) {
this.liveStreamEventCode = value;
}
}<|fim▁end|> | *
* <p>This includes information such as the start and expected end time of
* the event, the URL of the actual content for DFP to pull and insert ads into,
* as well as the metadata necessary to generate ad requests during the event. |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from collections import OrderedDict
import logging
import json
import re
import itertools
import sublime
import sublime_plugin
from ..lib import inhibit_word_completions
from .commandinfo import (
get_command_name,
get_builtin_command_meta_data,
get_builtin_commands,
iter_python_command_classes,
get_args_from_command_name
)
__all__ = (
"SublimeTextCommandCompletionPythonListener",
"SublimeTextCommandArgsCompletionListener",
"SublimeTextCommandArgsCompletionPythonListener",
"SublimeTextCommandCompletionListener",
)
KIND_APPLICATION = (sublime.KIND_ID_FUNCTION, "A", "Application Command")
KIND_WINDOW = (sublime.KIND_ID_FUNCTION, "W", "Window Command")
KIND_TEXT = (sublime.KIND_ID_FUNCTION, "T", "Text Command")
KIND_MAP = {
'application': KIND_APPLICATION,
'window': KIND_WINDOW,
'text': KIND_TEXT,
}
KIND_COMMAND = (sublime.KIND_ID_FUNCTION, "C", "Command") # fallback
KIND_SNIPPET = sublime.KIND_SNIPPET
logger = logging.getLogger(__name__)
def _escape_in_snippet(v):
return v.replace("}", "\\}").replace("$", "\\$")
def is_plugin(view):
"""Use some heuristics to determine whether a Python view shows a plugin.
Or the console input widget, should it be using the Python syntax.
"""
return (view.find("import sublime", 0, sublime.LITERAL) is not None<|fim▁hole|>
def create_args_snippet_from_command_args(command_args, quote_char='"', for_json=True):
"""Create an argument snippet to insert from the arguments to run a command.
Parameters:
command_args (dict)
The arguments with their default value.
quote_char (str)
Which char should be used for string quoting.
for_json (bool)
Whether it should be done for a json or a python file.
Returns (str)
The formatted entry to insert into the sublime text package
file.
"""
counter = itertools.count(1)
def make_snippet_item(k, v):
if v is not None:
if isinstance(v, str):
v = '{q}${{{i}:{v}}}{q}'.format(i=next(counter),
v=_escape_in_snippet(v),
q=quote_char)
else:
if for_json:
dumps = json.dumps(v)
else: # python
dumps = repr(v)
v = '${{{i}:{v}}}'.format(i=next(counter), v=_escape_in_snippet(dumps))
else:
v = '${i}'.format(i=next(counter))
return '{q}{k}{q}: {v}'.format(k=k, v=v, q=quote_char)
keys = iter(command_args)
if not isinstance(command_args, OrderedDict):
keys = sorted(keys)
snippet_items = (make_snippet_item(k, command_args[k]) for k in keys)
if for_json:
args_content = ",\n\t".join(snippet_items)
args_snippet = '"args": {{\n\t{0}\n}},$0'.format(args_content)
else:
args_content = ", ".join(snippet_items)
args_snippet = '{{{0}}}'.format(args_content)
return args_snippet
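# Illustrative input/output (hypothetical command args): calling
# create_args_snippet_from_command_args({"file": "x.txt", "line": 1})
# yields, in the JSON flavour, roughly
#     "args": {
#         "file": "${1:x.txt}",
#         "line": ${2:1}
#     },$0
# and with for_json=False the inline Python form
#     {"file": "${1:x.txt}", "line": ${2:1}}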
def _builtin_completions(names):
_, data = get_builtin_command_meta_data()
for name in names:
yield sublime.CompletionItem(
trigger=name,
annotation="built-in",
completion=name,
kind=KIND_MAP.get(data[name].get("command_type"), KIND_COMMAND),
details=data[name].get('doc_string') or "",
# TODO link to show full description
)
def _plugin_completions(cmd_classes):
for cmd_class in cmd_classes:
name = get_command_name(cmd_class)
module = cmd_class.__module__
package_name = module.split(".")[0]
if issubclass(cmd_class, sublime_plugin.TextCommand):
kind = KIND_TEXT
elif issubclass(cmd_class, sublime_plugin.WindowCommand):
kind = KIND_WINDOW
elif issubclass(cmd_class, sublime_plugin.ApplicationCommand):
kind = KIND_APPLICATION
else:
kind = KIND_COMMAND
yield sublime.CompletionItem(
trigger=name,
annotation=package_name,
completion=name,
kind=kind,
details=(cmd_class.__doc__ or "").strip(),
# TODO link to show full description
)
def _create_completions(command_type=""):
completions = []
completions.extend(_builtin_completions(get_builtin_commands(command_type)))
completions.extend(_plugin_completions(iter_python_command_classes(command_type)))
logger.debug("Collected %d command completions", len(completions))
return completions
class SublimeTextCommandCompletionListener(sublime_plugin.EventListener):
@inhibit_word_completions
def on_query_completions(self, view, prefix, locations):
keymap_scope = "source.json.sublime meta.command-name"
loc = locations[0]
if not view.score_selector(loc, keymap_scope):
return
return _create_completions()
class SublimeTextCommandCompletionPythonListener(sublime_plugin.EventListener):
_RE_LINE_BEFORE = re.compile(
r"(?P<callervar>\w+)\s*\.\s*run_command\s*\("
r"\s*['\"]\w*$",
re.MULTILINE
)
@inhibit_word_completions
def on_query_completions(self, view, prefix, locations):
loc = locations[0]
python_arg_scope = ("source.python meta.function-call.arguments.python string.quoted")
if not view.score_selector(loc, python_arg_scope) or not is_plugin(view):
return None
before_region = sublime.Region(view.line(loc).a, loc)
lines = view.line(sublime.Region(view.line(locations[0]).a - 1, loc))
before_region = sublime.Region(lines.a, loc)
before = view.substr(before_region)
m = self._RE_LINE_BEFORE.search(before)
if not m:
return None
# get the command type
caller_var = m.group('callervar')
logger.debug("caller_var: %s", caller_var)
if "view" in caller_var or caller_var == "v":
command_type = 'text'
elif caller_var == "sublime":
command_type = 'app'
else:
# window.run_command allows all command types
command_type = ''
return _create_completions(command_type)
class SublimeTextCommandArgsCompletionListener(sublime_plugin.EventListener):
_default_args = [("args\targuments", '"args": {\n\t"$1": "$2"$0\n},')]
_st_insert_arg_scope = (
"("
" ("
+ ", ".join("source.json.sublime.{}".format(suffix)
for suffix in ("commands", "keymap", "macro", "menu", "mousemap"))
+ ")"
" & "
" meta.sequence meta.mapping"
" - meta.sequence meta.mapping meta.mapping"
")"
"- string "
"- comment "
"- ("
" meta.value.json "
" | meta.mapping.json meta.mapping.json "
" | meta.sequence.json meta.sequence.json "
" - meta.menu.collection.sublime-menu"
")"
)
_RE_COMMAND_SEARCH = re.compile(r'\"command\"\s*\:\s*\"(\w+)\"')
def on_query_completions(self, view, prefix, locations):
if not view.score_selector(locations[0], self._st_insert_arg_scope):
return
# extract the line and the line above to search for the command
lines_reg = view.line(sublime.Region(view.line(locations[0]).a - 1, locations[0]))
lines = view.substr(lines_reg)
results = self._RE_COMMAND_SEARCH.findall(lines)
if not results:
return self._default_args
command_name = results[-1]
logger.debug("building args completions for command %r", command_name)
command_args = get_args_from_command_name(command_name)
if not command_args:
return self._default_args
completion = create_args_snippet_from_command_args(command_args, for_json=True)
return [sublime.CompletionItem(
trigger="args",
annotation="auto-detected",
completion=completion,
completion_format=sublime.COMPLETION_FORMAT_SNIPPET,
kind=KIND_SNIPPET,
)]
class SublimeTextCommandArgsCompletionPythonListener(sublime_plugin.EventListener):
_default_args_dict = {
c: sublime.CompletionItem(
trigger="args",
completion="{{{q}$1{q}: $0}}".format(q=c),
completion_format=sublime.COMPLETION_FORMAT_SNIPPET,
kind=KIND_SNIPPET,
)
for c in "'\""
}
_RE_LINE_BEFORE = re.compile(
r"\w+\s*\.\s*run_command\s*\("
r"\s*(['\"])(\w+)\1,\s*\w*$"
)
def on_query_completions(self, view, prefix, locations):
loc = locations[0]
python_arg_scope = "source.python meta.function-call.arguments.python,"
if not view.score_selector(loc, python_arg_scope) or not is_plugin(view):
return
before_region = sublime.Region(view.line(loc).a, loc)
before = view.substr(before_region)
m = self._RE_LINE_BEFORE.search(before)
if not m:
return
quote_char, command_name = m.groups()
logger.debug("building args completions for command %r", command_name)
command_args = get_args_from_command_name(command_name)
if command_args is None:
return self._default_args_dict[quote_char]
completion = create_args_snippet_from_command_args(command_args, quote_char,
for_json=False)
return [sublime.CompletionItem(
trigger="args",
annotation="auto-detected",
completion=completion,
completion_format=sublime.COMPLETION_FORMAT_SNIPPET,
kind=KIND_SNIPPET,
)]<|fim▁end|> | or sublime.packages_path() in (view.file_name() or "")
or view.settings().get('is_widget'))
|
<|file_name|>celery.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from __future__ import absolute_import
import os
from celery import Celery
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sugarcub.settings')
from django.conf import settings # noqa
app = Celery('sugarcub')
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
@app.task(bind=True)
def debug_task(self):
print('Request: {0!r}'.format(self.request))<|fim▁end|> | |
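# Typical smoke test from a Django shell (sketch; assumes a broker and a
# running worker configured via the settings above):
#     from sugarcub.celery import debug_task
#     debug_task.delay()  # the worker process logs "Request: <Context ...>"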
<|file_name|>MonitoringCatalog.py<|end_file_name|><|fim▁begin|>""" Interacts with sqlite3 db
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
import sqlite3
import os
import hashlib
import random
import time
import DIRAC
from DIRAC import gLogger, S_OK, S_ERROR<|fim▁hole|>class MonitoringCatalog(object):
"""
This class is used to perform all kinds queries to the sqlite3 database.
"""
def __init__(self, dataPath):
"""
Initialize monitoring catalog
"""
self.dbConn = False
self.dataPath = dataPath
self.log = gLogger.getSubLogger("ActivityCatalog")
self.createSchema()
def __connect(self):
"""
Connects to database
"""
if not self.dbConn:
dbPath = "%s/monitoring.db" % self.dataPath
self.dbConn = sqlite3.connect(dbPath, timeout=20, isolation_level=None)
# These two settings dramatically increase the performance
# at the cost of a small corruption risk in case of OS crash
# It is acceptable though, given the nature of the data
# details here https://www.sqlite.org/pragma.html
c = self.dbConn.cursor()
c.execute("PRAGMA synchronous = OFF")
c.execute("PRAGMA journal_mode = TRUNCATE")
def __dbExecute(self, query, values=False):
"""
Executes a sql statement.
:type query: string
:param query: The query to be executed.
:type values: list or bool
:param values: Optional sequence of values to bind to the query, or False to execute without parameters.
:return: the cursor.
"""
cursor = self.dbConn.cursor() # pylint: disable=no-member
self.log.debug("Executing %s" % query)
executed = False
retry = 0
while not executed and retry < 10:
retry += 1
try:
if values:
cursor.execute(query, values)
else:
cursor.execute(query)
executed = True
except Exception as e:
self.log.exception("Exception executing statement", "query: %s, values: %s" % (query, values))
time.sleep(random.random())
if not executed:
self.log.error("Could not execute query, big mess ahead", "query: %s, values: %s" % (query, values))
return cursor
def __createTables(self):
"""
Creates tables if not already created
"""
self.log.info("Creating tables in db")
try:
filePath = "%s/monitoringSchema.sql" % os.path.dirname(__file__)
fd = open(filePath)
buff = fd.read()
fd.close()
except IOError as e:
DIRAC.abort(1, "Can't read monitoring schema", filePath)
while buff.find(";") > -1:
limit = buff.find(";") + 1
sqlQuery = buff[:limit].replace("\n", "")
buff = buff[limit:]
try:
self.__dbExecute(sqlQuery)
except Exception as e:
DIRAC.abort(1, "Can't create tables", str(e))
def createSchema(self):
"""
Creates all the sql schema if it does not exist
"""
self.__connect()
try:
sqlQuery = "SELECT name FROM sqlite_master WHERE type='table';"
c = self.__dbExecute(sqlQuery)
tablesList = c.fetchall()
if len(tablesList) < 2:
self.__createTables()
except Exception as e:
self.log.fatal("Failed to startup db engine", str(e))
return False
return True
def __delete(self, table, dataDict):
"""
Executes an sql delete.
:type table: string
:param table: name of the table.
:type dataDict: dictionary
:param dataDict: the data dictionary.
"""
query = "DELETE FROM %s" % table
valuesList = []
keysList = []
for key in dataDict:
if isinstance(dataDict[key], list):
orList = []
for keyValue in dataDict[key]:
valuesList.append(keyValue)
orList.append("%s = ?" % key)
keysList.append("( %s )" % " OR ".join(orList))
else:
valuesList.append(dataDict[key])
keysList.append("%s = ?" % key)
if keysList:
query += " WHERE %s" % (" AND ".join(keysList))
self.__dbExecute("%s;" % query, values=valuesList)
def __select(self, fields, table, dataDict, extraCond="", queryEnd=""):
"""
Executes a sql select.
:type fields: string
:param fields: The fields required in a string.
:type table: string
:param table: name of the table.
:type dataDict: dictionary
:param dataDict: the data dictionary.
:return: a list of values.
"""
valuesList = []
keysList = []
for key in dataDict:
if isinstance(dataDict[key], list):
orList = []
for keyValue in dataDict[key]:
valuesList.append(keyValue)
orList.append("%s = ?" % key)
keysList.append("( %s )" % " OR ".join(orList))
else:
valuesList.append(dataDict[key])
keysList.append("%s = ?" % key)
if isinstance(fields, six.string_types):
fields = [fields]
if len(keysList) > 0:
whereCond = "WHERE %s" % (" AND ".join(keysList))
else:
whereCond = ""
if extraCond:
if whereCond:
whereCond += " AND %s" % extraCond
else:
whereCond = "WHERE %s" % extraCond
query = "SELECT %s FROM %s %s %s;" % (",".join(fields), table, whereCond, queryEnd)
c = self.__dbExecute(query, values=valuesList)
return c.fetchall()
def __insert(self, table, specialDict, dataDict):
"""
Executes an sql insert.
:type table: string
:param table: name of the table.
:type specialDict: dictionary
:param specialDict: the special dictionary.
:type dataDict: dictionary
:param dataDict: the data dictionary.
:return: the number of rows inserted.
"""
valuesList = []
        valuePointersList = []
namesList = []
for key in specialDict:
namesList.append(key)
            valuePointersList.append(specialDict[key])
for key in dataDict:
namesList.append(key)
valuePoitersList.append("?")
valuesList.append(dataDict[key])
query = "INSERT INTO %s (%s) VALUES (%s);" % (table, ", ".join(namesList), ",".join(valuePoitersList))
c = self.__dbExecute(query, values=valuesList)
return c.rowcount
def __update(self, newValues, table, dataDict, extraCond=""):
"""
Executes a sql update.
:type table: string
:param table: name of the table.
:type newValues: dictionary
:param newValues: a dictionary with new values.
:type dataDict: dictionary
:param dataDict: the data dictionary.
:return: the number of rows updated.
"""
valuesList = []
keysList = []
updateFields = []
for key in newValues:
updateFields.append("%s = ?" % key)
valuesList.append(newValues[key])
for key in dataDict:
if isinstance(dataDict[key], list):
orList = []
for keyValue in dataDict[key]:
valuesList.append(keyValue)
orList.append("%s = ?" % key)
keysList.append("( %s )" % " OR ".join(orList))
else:
valuesList.append(dataDict[key])
keysList.append("%s = ?" % key)
if len(keysList) > 0:
whereCond = "WHERE %s" % (" AND ".join(keysList))
else:
whereCond = ""
if extraCond:
if whereCond:
whereCond += " AND %s" % extraCond
else:
whereCond = "WHERE %s" % extraCond
query = "UPDATE %s SET %s %s;" % (table, ",".join(updateFields), whereCond)
c = self.__dbExecute(query, values=valuesList)
return c.rowcount
def registerSource(self, sourceDict):
"""
Registers an activity source.
:type sourceDict: dictionary
:param sourceDict: the source dictionary.
:return: a list of values.
"""
retList = self.__select("id", "sources", sourceDict)
if len(retList) > 0:
return retList[0][0]
else:
self.log.info("Registering source", str(sourceDict))
if self.__insert("sources", {"id": "NULL"}, sourceDict) == 0:
return -1
return self.__select("id", "sources", sourceDict)[0][0]
def registerActivity(self, sourceId, acName, acDict):
"""
Register an activity.
:type sourceId: string
:param sourceId: The source id.
:type acName: string
:param acName: name of the activity.
:type acDict: dictionary
:param acDict: The activity dictionary containing information about 'category', 'description', 'bucketLength',
'type', 'unit'.
:return: a list of values.
"""
m = hashlib.md5()
acDict["name"] = acName
acDict["sourceId"] = sourceId
m.update(str(acDict).encode())
retList = self.__select("filename", "activities", acDict)
if len(retList) > 0:
return retList[0][0]
else:
acDict["lastUpdate"] = int(Time.toEpoch() - 86000)
filePath = m.hexdigest()
filePath = "%s/%s.rrd" % (filePath[:2], filePath)
self.log.info("Registering activity", str(acDict))
# This is basically called by the ServiceInterface inside registerActivities method and then all the activity
# information is stored in the sqlite3 db using the __insert method.
if (
self.__insert(
"activities",
{
"id": "NULL",
"filename": "'%s'" % filePath,
},
acDict,
)
== 0
):
return -1
return self.__select("filename", "activities", acDict)[0][0]
def getFilename(self, sourceId, acName):
"""
Gets rrd filename for an activity.
:type sourceId: string
:param sourceId: The source id.
:type acName: string
:param acName: name of the activity.
:return: The filename in a string.
"""
queryDict = {"sourceId": sourceId, "name": acName}
retList = self.__select("filename", "activities", queryDict)
if len(retList) == 0:
return ""
else:
return retList[0][0]
def findActivity(self, sourceId, acName):
"""
Finds activity.
:type sourceId: string
:param sourceId: The source id.
:type acName: string
:param acName: name of the activity.
:return: A list containing all the activity information.
"""
queryDict = {"sourceId": sourceId, "name": acName}
retList = self.__select(
"id, name, category, unit, type, description, filename, bucketLength, lastUpdate", "activities", queryDict
)
if len(retList) == 0:
return False
else:
return retList[0]
def activitiesQuery(self, selDict, sortList, start, limit):
"""
Gets all the sources and activities details in a joined format.
:type selDict: dictionary
:param selDict: The fields inside the select query.
:type sortList: list
:param sortList: A list in sorted order of the data.
:type start: int
        :param start: The row offset from which to start.
        :type limit: int
        :param limit: The maximum number of rows to return.
:return: S_OK with a tuple of the result list and fields list.
"""
fields = [
"sources.id",
"sources.site",
"sources.componentType",
"sources.componentLocation",
"sources.componentName",
"activities.id",
"activities.name",
"activities.category",
"activities.unit",
"activities.type",
"activities.description",
"activities.bucketLength",
"activities.filename",
"activities.lastUpdate",
]
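        # Sort fields are validated against this whitelist because they are
        # interpolated directly into the ORDER BY clause.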
extraSQL = ""
if sortList:
for sorting in sortList:
if sorting[0] not in fields:
return S_ERROR("Sorting field %s is invalid" % sorting[0])
extraSQL = "ORDER BY %s" % ",".join(["%s %s" % sorting for sorting in sortList])
if limit:
if start:
extraSQL += " LIMIT %s OFFSET %s" % (limit, start)
else:
extraSQL += " LIMIT %s" % limit
# This method basically takes in some condition and then based on those performs SQL Join on the
# sources and activities table of the sqlite3 db and returns the corresponding result.
retList = self.__select(
", ".join(fields), "sources, activities", selDict, "sources.id = activities.sourceId", extraSQL
)
return S_OK((retList, fields))
def setLastUpdate(self, sourceId, acName, lastUpdateTime):
"""
Updates the lastUpdate timestamp for a particular activity using the source id.
:type sourceId: string
:param sourceId: The source id.
:type acName: string
:param acName: name of the activity.
:type lastUpdateTime: string
:param lastUpdateTime: The last update time in the proper format.
:return: the number of rows updated.
"""
queryDict = {"sourceId": sourceId, "name": acName}
return self.__update({"lastUpdate": lastUpdateTime}, "activities", queryDict)
def getLastUpdate(self, sourceId, acName):
"""
Gets the lastUpdate timestamp for a particular activity using the source id.
:type sourceId: string
:param sourceId: The source id.
:type acName: string
:param acName: name of the activity.
:return: The last update time in string.
"""
queryDict = {"sourceId": sourceId, "name": acName}
        retList = self.__select("lastUpdate", "activities", queryDict)
if len(retList) == 0:
return False
else:
return retList[0]
def queryField(self, field, definedFields):
"""
Query the values of a field given a set of defined ones.
:type field: string
:param field: The field required in a string.
        :type definedFields: dictionary
:param definedFields: A set of defined fields.
:return: A list of values.
"""
retList = self.__select(field, "sources, activities", definedFields, "sources.id = activities.sourceId")
return retList
def getMatchingActivities(self, condDict):
"""
Gets all activities matching the defined conditions.
:type condDict: dictionary.
:param condDict: A dictionary containing the conditions.
:return: a list of matching activities.
"""
retList = self.queryField(Activity.dbFields, condDict)
acList = []
for acData in retList:
acList.append(Activity(acData))
return acList
def registerView(self, viewName, viewData, varFields):
"""
Registers a new view.
:type viewName: string
:param viewName: Name of the view.
        :type viewData: string
        :param viewData: The serialized view definition to store.
:type varFields: list
:param varFields: A list of variable fields.
:return: S_OK / S_ERROR with the corresponding error message.
"""
retList = self.__select("id", "views", {"name": viewName})
if len(retList) > 0:
return S_ERROR("Name for view name already exists")
retList = self.__select("name", "views", {"definition": viewData})
if len(retList) > 0:
return S_ERROR("View specification already defined with name '%s'" % retList[0][0])
self.__insert(
"views", {"id": "NULL"}, {"name": viewName, "definition": viewData, "variableFields": ", ".join(varFields)}
)
return S_OK()
def getViews(self, onlyStatic):
"""
Gets views.
:type onlyStatic: bool
:param onlyStatic: Whether the views required are static or not.
:return: A list of values.
"""
queryCond = {}
if onlyStatic:
queryCond["variableFields"] = ""
return self.__select("id, name, variableFields", "views", queryCond)
def getViewById(self, viewId):
"""
Gets a view for a given id.
:type viewId: string
:param viewId: The view id.
:return: A list of values.
"""
if isinstance(viewId, six.string_types):
return self.__select("definition, variableFields", "views", {"name": viewId})
else:
return self.__select("definition, variableFields", "views", {"id": viewId})
def deleteView(self, viewId):
"""
Deletes a view for a given id.
:type viewId: string
:param viewId: The view id.
"""
self.__delete("views", {"id": viewId})
    def getSources(self, dbCond, fields=None):
"""
        Gets sources for a given db condition.
:type dbCond: dictionary
:param dbCond: The required database conditions.
:type fields: list
:param fields: A list of required fields.
:return: The list of results after the query is performed.
"""
if not fields:
fields = "id, site, componentType, componentLocation, componentName"
else:
fields = ", ".join(fields)
return self.__select(fields, "sources", dbCond)
def getActivities(self, dbCond):
"""
Gets activities given a db condition.
:type dbCond: dictionary
:param dbCond: The required database conditions.
:return: a list of activities.
"""
return self.__select("id, name, category, unit, type, description, bucketLength", "activities", dbCond)
def deleteActivity(self, sourceId, activityId):
"""
Deletes an activity.
:type sourceId: string
:param sourceId: The source id.
:type activityId: string
:param activityId: The activity id.
:return: S_OK with rrd filename / S_ERROR with a message.
"""
acCond = {"sourceId": sourceId, "id": activityId}
acList = self.__select("filename", "activities", acCond)
if len(acList) == 0:
return S_ERROR("Activity does not exist")
rrdFile = acList[0][0]
self.__delete("activities", acCond)
acList = self.__select("id", "activities", {"sourceId": sourceId})
if len(acList) == 0:
self.__delete("sources", {"id": sourceId})
return S_OK(rrdFile)<|fim▁end|> | from DIRAC.FrameworkSystem.private.monitoring.Activity import Activity
from DIRAC.Core.Utilities import Time
|
<|file_name|>cleanup-shortcircuit.rs<|end_file_name|><|fim▁begin|>// run-pass
// Test that cleanups for the RHS of shortcircuiting operators work.
// pretty-expanded FIXME #23616
#![allow(deref_nullptr)]<|fim▁hole|>pub fn main() {
let args: Vec<String> = env::args().collect();
// Here, the rvalue `"signal".to_string()` requires cleanup. Older versions
    // of the code had a bug where the cleanup scope for this
    // expression was the end of the `if`, and since the `"signal".to_string()`
    // expression was never evaluated, we wound up trying to clean up
    // uninitialized memory.
if args.len() >= 2 && args[1] == "signal" {
// Raise a segfault.
unsafe { *std::ptr::null_mut::<isize>() = 0; }
}
}<|fim▁end|> |
use std::env;
|
<|file_name|>lumina-fm_fi.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.0" language="fi_FI">
<context>
<name>MainUI</name>
<message>
<location filename="../MainUI.ui" line="14"/>
<source>Insight</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="51"/>
<source>Open Multimedia Player</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="54"/>
<location filename="../MainUI.ui" line="67"/>
<location filename="../MainUI.ui" line="80"/>
<location filename="../MainUI.ui" line="136"/>
<location filename="../MainUI.ui" line="143"/>
<location filename="../MainUI.ui" line="215"/>
<location filename="../MainUI.ui" line="225"/>
<location filename="../MainUI.ui" line="271"/>
<location filename="../MainUI.ui" line="281"/>
<location filename="../MainUI.ui" line="309"/>
<location filename="../MainUI.ui" line="337"/>
<source>...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="64"/>
<source>View Slideshow</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="77"/>
<source>Restore File(s)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="106"/>
<source>Playlist</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="116"/>
<location filename="../MainUI.ui" line="268"/>
<source>Go to Next</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="127"/>
<source>TextLabel</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="212"/>
<source>Go to Beginning</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="222"/>
<source>Go to Previous</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="278"/>
<source>Go to End</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="356"/>
<source>Name</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="361"/>
<source>Date Modified</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="366"/>
<source>Size</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="371"/>
<source>Owner</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="381"/>
<source>Restore entire directory</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="384"/>
<source>Restore All</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="394"/>
<source>Restore Selected Item</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="397"/>
<source>Restore Selection</source><|fim▁hole|> </message>
<message>
<location filename="../MainUI.ui" line="420"/>
<source>Overwrite Existing Files</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="443"/>
<source>File</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="451"/>
<source>Edit</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="457"/>
<source>View</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="465"/>
<source>Bookmarks</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="476"/>
<source>toolBar</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="497"/>
<source>New &Tab</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="502"/>
<source>Close Tab</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="507"/>
<source>E&xit</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="512"/>
<source>&Preferences</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="523"/>
<source>Shortcuts</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="534"/>
<source>Music Player</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="545"/>
<source>Image Viewer</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="550"/>
<source>UpDir</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="553"/>
<source>Go up one directory</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="558"/>
<source>Home</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="561"/>
<source>Go to your home directory</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="572"/>
<source>View Hidden Files</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="577"/>
<source>Back</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="580"/>
<source>Back to directory</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="585"/>
<source>Refresh</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="588"/>
<source>Refresh Directory</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="591"/>
<source>F5</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="596"/>
<source>Bookmark</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="599"/>
<source>Bookmark this directory</source>
<translation type="unfinished"></translation>
</message>
</context>
</TS><|fim▁end|> | <translation type="unfinished"></translation> |
<|file_name|>drop.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The Cockroach Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the License for the specific language governing
// permissions and limitations under the License.
//
// Author: Peter Mattis ([email protected])
// This code was derived from https://github.com/youtube/vitess.
//
// Copyright 2012, Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file
package parser
<|fim▁hole|>// DropBehavior represents options for dropping schema elements.
type DropBehavior int
// DropBehavior values.
const (
DropDefault DropBehavior = iota
DropRestrict
DropCascade
)
var dropBehaviorName = [...]string{
DropDefault: "",
DropRestrict: "RESTRICT",
DropCascade: "CASCADE",
}
func (d DropBehavior) String() string {
return dropBehaviorName[d]
}
// DropDatabase represents a DROP DATABASE statement.
type DropDatabase struct {
Name Name
IfExists bool
}
// Format implements the NodeFormatter interface.
func (node *DropDatabase) Format(buf *bytes.Buffer, f FmtFlags) {
buf.WriteString("DROP DATABASE ")
if node.IfExists {
buf.WriteString("IF EXISTS ")
}
FormatNode(buf, f, node.Name)
}
// DropIndex represents a DROP INDEX statement.
type DropIndex struct {
IndexList TableNameWithIndexList
IfExists bool
DropBehavior DropBehavior
}
// Format implements the NodeFormatter interface.
func (node *DropIndex) Format(buf *bytes.Buffer, f FmtFlags) {
buf.WriteString("DROP INDEX ")
if node.IfExists {
buf.WriteString("IF EXISTS ")
}
FormatNode(buf, f, node.IndexList)
if node.DropBehavior != DropDefault {
buf.WriteByte(' ')
buf.WriteString(node.DropBehavior.String())
}
}
// DropTable represents a DROP TABLE statement.
type DropTable struct {
Names QualifiedNames
IfExists bool
DropBehavior DropBehavior
}
// Format implements the NodeFormatter interface.
func (node *DropTable) Format(buf *bytes.Buffer, f FmtFlags) {
buf.WriteString("DROP TABLE ")
if node.IfExists {
buf.WriteString("IF EXISTS ")
}
FormatNode(buf, f, node.Names)
if node.DropBehavior != DropDefault {
buf.WriteByte(' ')
buf.WriteString(node.DropBehavior.String())
}
}<|fim▁end|> | import "bytes"
|
<|file_name|>getJavaScriptCompletions18.ts<|end_file_name|><|fim▁begin|>/// <reference path="fourslash.ts" />
// @allowNonTsExtensions: true
// @Filename: file.js
//// /**
//// * @param {number} a
//// * @param {string} b
//// */
//// exports.foo = function(a, b) {
//// a/*a*/;
//// b/*b*/
//// };
goTo.marker('a');
edit.insert('.');
verify.completionListContains('toFixed', undefined, undefined, 'method');
goTo.marker('b');
<|fim▁hole|>edit.insert('.');
verify.completionListContains('substr', undefined, undefined, 'method');<|fim▁end|> | |
<|file_name|>ApplicationTest.java<|end_file_name|><|fim▁begin|>package be.ipl.mobile.projet.historypub;
import android.app.Application;
import android.test.ApplicationTestCase;
<|fim▁hole|>/**
* <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a>
*/
public class ApplicationTest extends ApplicationTestCase<Application> {
public ApplicationTest() {
super(Application.class);
}
}<|fim▁end|> | |
<|file_name|>01-count-pe1.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from bluesky.callbacks import LiveTable
RE(count([pe1]), LiveTable([pe1]))<|fim▁end|> | from bluesky.plans import count |
<|file_name|>Layer.ts<|end_file_name|><|fim▁begin|>namespace Rocket.Cmd {
import display = Rocket.Display;
export class Layer implements Jhtml.CompHandler {
private _zones: Array<Zone> = new Array<Zone>();
private onNewZoneCallbacks: Array<ZoneCallback>;
private onNewHistoryEntryCallbacks: Array<HistoryCallback>;
private callbackRegistery: Rocket.Util.CallbackRegistry<LayerCallback> = new Rocket.Util.CallbackRegistry<LayerCallback>();
private _visible: boolean = true;
constructor(private jqLayer: JQuery<Element>, private _level: number, private _container: Container,
private _monitor: Jhtml.Monitor) {
this.onNewZoneCallbacks = new Array<ZoneCallback>();
this.onNewHistoryEntryCallbacks = new Array<HistoryCallback>();
var zoneJq = jqLayer.children(".rocket-zone:first");
if (zoneJq.length > 0) {
let url = Jhtml.Url.create(window.location.href);
var zone = new Zone(zoneJq, url, this);
let page = this.monitor.history.currentPage;
page.promise = this.createPromise(zone);
zone.page = page;
this.addZone(zone);
}
this.monitor.history.onChanged(() => this.historyChanged() );
this.monitor.registerCompHandler("rocket-page", this);
this.historyChanged();
}
get jQuery(): JQuery<Element> {
return this.jqLayer;
}
get monitor(): Jhtml.Monitor {
return this._monitor;
}
containsUrl(url: Jhtml.Url): boolean {
for (var i in this._zones) {
if (this._zones[i].containsUrl(url)) return true;
}
return false;
}
public getZoneByUrl(urlExpr: string|Jhtml.Url): Zone {
let url = Jhtml.Url.create(urlExpr);
for (let i in this._zones) {
if (this._zones[i].containsUrl(url)) {
return this._zones[i];
}
}
return null;
}
private historyChanged() {
let currentEntry: Jhtml.History.Entry = this.monitor.history.currentEntry;
if (!currentEntry) return;
let page = currentEntry.page;
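            // Reuse the zone already bound to this url; otherwise create an
            // empty zone so the history entry has something to switch to.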
let zone: Zone = this.getZoneByUrl(page.url);
if (!zone) {
zone = this.createZone(page.url)
zone.clear(true);
}
if (!zone.page) {
zone.page = page;
}
this.switchToZone(zone);
}
public createZone(urlExpr: string|Jhtml.Url): Zone {
let url = Jhtml.Url.create(urlExpr);
if (this.containsUrl(url)) {
throw new Error("Page with url already available: " + url);
}
var jqZone = $("<div />");
this.jqLayer.append(jqZone);
var zone = new Zone(jqZone, url, this);
this.addZone(zone);
return zone;
}
get currentZone(): Zone {
if (this.empty || !this._monitor.history.currentEntry) {
return null;
}
var url = this._monitor.history.currentPage.url;
for (var i in this._zones) {
if (this._zones[i].containsUrl(url)) {
return this._zones[i];
}
}
return null;
}
get container(): Container {
return this._container;
}
get visible(): boolean {
return this._visible;
}
private trigger(eventType: Layer.EventType) {
var layer = this;
this.callbackRegistery.filter(eventType.toString())
.forEach(function (callback: LayerCallback) {
callback(layer);
});
}
on(eventType: Layer.EventType, callback: LayerCallback) {
this.callbackRegistery.register(eventType.toString(), callback);
}
off(eventType: Layer.EventType, callback: LayerCallback) {
this.callbackRegistery.unregister(eventType.toString(), callback);
}
show() {
this._visible = true;
this.jqLayer.show();
this.trigger(Layer.EventType.SHOWED);
}
hide() {
this._visible = false;
this.jqLayer.hide();
this.trigger(Layer.EventType.HIDDEN);
}
get level(): number {
return this._level;
}
get empty(): boolean {
return this._zones.length == 0;
}
get zones(): Array<Zone> {
return this._zones.slice();
}
private addZone(zone: Zone) {
this._zones.push(zone);
var that = this;
zone.on(Zone.EventType.CLOSE, function (zone: Zone) {
for (var i in that._zones) {
if (that._zones[i] !== zone) continue;
that._zones.splice(parseInt(i), 1);
break;
}
});
for (var i in this.onNewZoneCallbacks) {
this.onNewZoneCallbacks[i](zone);
}
}
private scrollPos: number = 0;
set active(active: boolean) {
if (active == this.active) return;
if (this.monitor) {
this.monitor.active = active;
}
if (active) {
this.jqLayer.addClass("rocket-active");
$(window).scrollTop(this.scrollPos);
return;
}
this.scrollPos = $(window).scrollTop();
this.jqLayer.removeClass("rocket-active");
}
get active() {
return this.jqLayer.hasClass("rocket-active");
}
public onNewZone(onNewPageCallback: ZoneCallback) {
this.onNewZoneCallbacks.push(onNewPageCallback);
}
public clear() {
for (var i in this._zones) {
this._zones[i].close();
}
}
public close() {
this.trigger(Layer.EventType.CLOSE);
let zone = null;
while (zone = this._zones.pop()) {
zone.close();
}
this.jqLayer.remove();
}
private switchToZone(zone: Zone) {
for (var i in this._zones) {
if (this._zones[i] === zone) {
zone.show();
} else {
this._zones[i].hide();
}
}
}
attachComp(comp: Jhtml.Comp): boolean {
if (comp.isAttached) return true;
// if (!comp.model.response) {
// throw new Error("model response undefined");
// }
let url = this.monitor.history.currentPage.url;
let zone: Zone = this.getZoneByUrl(url);
if (!zone) {
throw new Error("Zone for url " + url + " does not extist");
}
zone.applyComp(comp);
return true;
}
detachComp(comp: Jhtml.Comp): boolean {
return true;
}
pushHistoryEntry(urlExpr: Jhtml.Url|string) {
let url: Jhtml.Url = Jhtml.Url.create(urlExpr);
let history = this.monitor.history;
let page = history.getPageByUrl(url);
if (page) {
history.push(page);
return;
}
let zone: Zone = this.getZoneByUrl(url);
if (zone) {
page = new Jhtml.Page(url, this.createPromise(zone));
history.push(page);
return;
}
history.push(new Jhtml.Page(url, null));
}
private createPromise(zone: Zone): Promise<Jhtml.Directive> {
return new Promise((resolve: any) => {
resolve({
getAdditionalData(): any {
return null;
},
exec() {
zone.layer.switchToZone(zone);
}
});
});
}
// public currentHistoryIndex(): number {
// return this._currentHistoryIndex;
// }
// public pushHistoryEntry(urlExpr: string|Url) {
// var url: Url = Url.create(urlExpr);
// var context: Page = this.getPageByUrl(url);
// if (context === null) {
// throw new Error("Not context with this url found: " + url);
// }
//
// this._currentHistoryIndex = this.historyUrls.length;
// this.historyUrls.push(url);
// context.activeUrl = url;
//
// for (var i in this.onNewHistoryEntryCallbacks) {
// this.onNewHistoryEntryCallbacks[i](this._currentHistoryIndex, url, context);
// }
//
// this.switchToPage(context);
// }
// get currentHistoryEntryUrl(): Url {
// return this.historyUrls[this._currentHistoryIndex];
// }
//
// public go(historyIndex: number, urlExpr: string|Url) {
// var url = Url.create(urlExpr);
//
// if (this.historyUrls.length < (historyIndex + 1)) {
// throw new Error("Invalid history index: " + historyIndex);
// }
//
// if (this.historyUrls[historyIndex].equals(url)) {
// throw new Error("Url missmatch for history index " + historyIndex + ". Url: " + url + " History url: "
// + this.historyUrls[historyIndex]);
// }
//
// this._currentHistoryIndex = historyIndex;
// var context = this.getPageByUrl(this.historyUrls[historyIndex]);
// if (context === null) return false;
//
// this.switchToPage(context);
// return true;
// }
//
<|fim▁hole|>// }
//
//
//
//
// public onNewHistoryEntry(onNewHistoryEntryCallback: HistoryCallback) {
// this.onNewHistoryEntryCallbacks.push(onNewHistoryEntryCallback);
// }
public static create(jqLayer: JQuery<Element>, _level: number, _container: Container, history: Jhtml.History) {
if (Layer.test(jqLayer)) {
throw new Error("Layer already bound to this element.");
}
jqLayer.addClass("rocket-layer");
jqLayer.data("rocketLayer", this);
}
private static test(jqLayer: JQuery<Element>): Layer {
var layer = jqLayer.data("rocketLayer");
if (layer instanceof Layer) {
return layer;
}
return null;
}
public static of(jqElem: JQuery<Element>): Layer {
            if (!jqElem.hasClass("rocket-layer")) {
jqElem = jqElem.closest(".rocket-layer");
}
var layer = Layer.test(jqElem);
            if (!layer) {
return null;
}
return layer;
}
}
interface HistoryCallback {
(index: number, url: Jhtml.Url, context: Zone): any
}
export interface LayerCallback {
(layer: Layer): any
}
export namespace Layer {
export enum EventType {
SHOWED /*= "show"*/,
HIDDEN /*= "hide"*/,
CLOSE /*= "close"*/
}
}
}<|fim▁end|> | // public getHistoryUrlByIndex(historyIndex: number): Url {
// if (this.historyUrls.length <= historyIndex) return null;
//
// return this.historyUrls[historyIndex];
|
<|file_name|>fooPost.js<|end_file_name|><|fim▁begin|>'use strict'<|fim▁hole|> res.send(200);
next();
};<|fim▁end|> |
module.exports = function fooPost(req, res, next) {
res.header('name', 'foo');
res.header('method', 'post'); |
<|file_name|>HTTP.test.js<|end_file_name|><|fim▁begin|>/**
* HTTP.test
*/
"use strict";
/* Node modules */
/* Third-party modules */
var steeplejack = require("steeplejack");
/* Files */
describe("HTTPError test", function () {
var HTTPError;
beforeEach(function () {
injector(function (_HTTPError_) {
HTTPError = _HTTPError_;
});
});
describe("Instantation tests", function () {
it("should extend the steeplejack Fatal exception", function () {
var obj = new HTTPError("text");
expect(obj).to.be.instanceof(HTTPError)
.to.be.instanceof(steeplejack.Exceptions.Fatal);
expect(obj.type).to.be.equal("HTTPError");
expect(obj.message).to.be.equal("text");
expect(obj.httpCode).to.be.equal(500);
expect(obj.getHttpCode()).to.be.equal(500);
});
it("should set the HTTP code in the first input", function () {
var obj = new HTTPError(401);
expect(obj.httpCode).to.be.equal(401);<|fim▁hole|> });
});<|fim▁end|> | expect(obj.getHttpCode()).to.be.equal(401);
});
|
<|file_name|>data.py<|end_file_name|><|fim▁begin|># coding=UTF-8
'''
Created on 24.09.2017
@author: sysoev
'''
from google.appengine.ext import db
from google.appengine.api import users
import datetime
import time
import logging
from myusers import MyUser
def force_unicode(string):
if type(string) == unicode:
return string
return string.decode('utf-8')
class Project(db.Model):
name = db.StringProperty(multiline=False)
def getProjectsList(user):
return None
<|fim▁hole|> p.name = name
p.put()
def addProject(name):
p = Project()
p.name = name
p.put()
return p.key()
class UserProject(db.Model):
user_key = db.ReferenceProperty(MyUser)
project_key = db.ReferenceProperty(Project)
number = 0
def addUserProject(user_name, project_key_str):
user_query = MyUser.all()
user = user_query.filter('username = ', user_name).get()
if user is None:
return None
true_project_key = Project.get(project_key_str).key()
if check_user_have_project(user, true_project_key):
return False
up = UserProject()
up.user_key = user.key()
up.project_key = true_project_key
up.put()
return True
def check_user_have_project(user, true_project_key):
user_project_keys = [user_proj.project_key.key() for user_proj in
UserProject.all().filter('user_key = ', user.key()).fetch(None)]
return true_project_key in user_project_keys
def deleteUserProject(user_key, project_key):
query = UserProject.all()
query.filter('user_key = ', MyUser.get(user_key)).filter('project_key = ', Project.get(project_key))
user_project = query.get()
if user_project is None:
return None
# project.key().delete()
db.delete(user_project.key())
return True
def getUserProjects(user):
if user is None:
return []
query = UserProject.all().filter('user_key = ', user.key())
return [user_project.project_key for user_project in query]
# return [Project.get(user_project.project_key) for user_project in query]
class Request(db.Model):
    number = db.IntegerProperty()
    name = db.StringProperty()
    description = db.StringProperty(multiline=True)
    state = db.IntegerProperty()
    performer = db.ReferenceProperty()  # reference target still undecided
def addRequests(project_key, name, description):
print("log")
req = Request(parent=project_key)
req.name = name
req.description = description
req.perfomer = ""
req.state = 1
req.number = Request(ancestor = project_key).all().length + 1
req.put()
Project.set(project_key).number += 1
return True
def getRequests(project_key):
if project_key is None:
return []
    query = Request.all().ancestor(project_key)
return query<|fim▁end|> | def updateProject(key, name):
p = Project.get(key)
if not p:
return |
<|file_name|>line.rs<|end_file_name|><|fim▁begin|>//
// imag - the personal information management suite for the commandline
// Copyright (C) 2015, 2016 Matthias Beyer <[email protected]> and contributors
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//
use std::io::stdout;
use std::io::Write;
use lister::Lister;
use error::Result;
use error::ResultExt;
use libimagstore::store::FileLockEntry;
pub struct LineLister<'a> {
unknown_output: &'a str,
}
impl<'a> LineLister<'a> {
pub fn new(unknown_output: &'a str) -> LineLister<'a> {
LineLister {
unknown_output: unknown_output,
}
}
}
impl<'a> Lister for LineLister<'a> {
fn list<'b, I: Iterator<Item = FileLockEntry<'b>>>(&self, entries: I) -> Result<()> {
use error::ListErrorKind as LEK;
for entry in entries {<|fim▁hole|> Ok(())
}
}<|fim▁end|> | let s = entry.get_location().to_str().unwrap_or(String::from(self.unknown_output));
write!(stdout(), "{:?}\n", s).chain_err(|| LEK::FormatError)?
}
|
<|file_name|>typedef.py<|end_file_name|><|fim▁begin|># Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from .code_generator_info import CodeGeneratorInfo
from .composition_parts import WithCodeGeneratorInfo
from .composition_parts import WithComponent
from .composition_parts import WithDebugInfo
from .composition_parts import WithIdentifier
from .ir_map import IRMap
from .make_copy import make_copy
class Typedef(WithIdentifier, WithCodeGeneratorInfo, WithComponent,
WithDebugInfo):
"""https://webidl.spec.whatwg.org/#idl-typedefs"""
class IR(IRMap.IR, WithCodeGeneratorInfo, WithComponent, WithDebugInfo):
def __init__(self,
identifier,
idl_type,
code_generator_info=None,
component=None,
debug_info=None):
IRMap.IR.__init__(
self, identifier=identifier, kind=IRMap.IR.Kind.TYPEDEF)
WithCodeGeneratorInfo.__init__(self, code_generator_info)
WithComponent.__init__(self, component)
WithDebugInfo.__init__(self, debug_info)
self.idl_type = idl_type
def __init__(self, ir):
assert isinstance(ir, Typedef.IR)
ir = make_copy(ir)
WithIdentifier.__init__(self, ir)
WithCodeGeneratorInfo.__init__(self, ir, readonly=True)
WithComponent.__init__(self, ir, readonly=True)<|fim▁hole|>
@property
def idl_type(self):
"""Returns the typedef'ed type."""
return self._idl_type<|fim▁end|> | WithDebugInfo.__init__(self, ir)
self._idl_type = ir.idl_type |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import url
from audiotracks import feeds
from audiotracks import views
urlpatterns = [
url(r"^$", views.index, name="audiotracks"),
url(r"^(?P<page_number>\d+)/?$", views.index, name="audiotracks"),
url(r"^track/(?P<track_slug>.*)$", views.track_detail,
name="track_detail"),
url(r"^upload", views.upload_track, name="upload_track"),
url(r"^edit/(?P<track_id>.+)", views.edit_track, name="edit_track"),
url(r"^confirm_delete/(?P<track_id>\d+)$",
views.confirm_delete_track, name="confirm_delete_track"),
url(r"^delete$", views.delete_track, name="delete_track"),
url(r"^tracks$", views.user_index, name="user_index"),
url(r"^tracks/(?P<page_number>\d)/?$", views.user_index,
name="user_index"),
url(r"^feed/?$", feeds.choose_feed, name="tracks_feed"),
url(r"^player.js$", views.player_script, name="player_script"),<|fim▁hole|><|fim▁end|> | url(r"^m3u/?$", views.m3u, name="m3u"),
] |
<|file_name|>recipe-523034.py<|end_file_name|><|fim▁begin|>try:
from collections import defaultdict
except ImportError:
class defaultdict(dict):
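        # Pure-Python fallback mirroring collections.defaultdict, for old
        # Python versions where the import above fails.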
def __init__(self, default_factory=None, *a, **kw):
if (default_factory is not None and
not hasattr(default_factory, '__call__')):
raise TypeError('first argument must be callable')
dict.__init__(self, *a, **kw)
self.default_factory = default_factory
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
return self.__missing__(key)
def __missing__(self, key):
if self.default_factory is None:
raise KeyError(key)
self[key] = value = self.default_factory()
return value
def __reduce__(self):
if self.default_factory is None:
args = tuple()
else:
args = self.default_factory,
return type(self), args, None, None, self.items()
def copy(self):
return self.__copy__()
def __copy__(self):
return type(self)(self.default_factory, self)
def __deepcopy__(self, memo):<|fim▁hole|> copy.deepcopy(self.items()))
def __repr__(self):
return 'defaultdict(%s, %s)' % (self.default_factory,
dict.__repr__(self))<|fim▁end|> | import copy
return type(self)(self.default_factory, |
<|file_name|>win_iocp_socket_recvfrom_op.hpp<|end_file_name|><|fim▁begin|>//
// detail/win_iocp_socket_recvfrom_op.hpp
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
//
// Copyright (c) 2003-2012 Christopher M. Kohlhoff (chris at kohlhoff dot com)
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.lslboost.org/LICENSE_1_0.txt)
//
#ifndef BOOST_ASIO_DETAIL_WIN_IOCP_SOCKET_RECVFROM_OP_HPP
#define BOOST_ASIO_DETAIL_WIN_IOCP_SOCKET_RECVFROM_OP_HPP
#if defined(_MSC_VER) && (_MSC_VER >= 1200)<|fim▁hole|>#include <lslboost/asio/detail/config.hpp>
#if defined(BOOST_ASIO_HAS_IOCP)
#include <lslboost/utility/addressof.hpp>
#include <lslboost/asio/detail/bind_handler.hpp>
#include <lslboost/asio/detail/buffer_sequence_adapter.hpp>
#include <lslboost/asio/detail/fenced_block.hpp>
#include <lslboost/asio/detail/handler_alloc_helpers.hpp>
#include <lslboost/asio/detail/handler_invoke_helpers.hpp>
#include <lslboost/asio/detail/operation.hpp>
#include <lslboost/asio/detail/socket_ops.hpp>
#include <lslboost/asio/error.hpp>
#include <lslboost/asio/detail/push_options.hpp>
namespace lslboost {
namespace asio {
namespace detail {
template <typename MutableBufferSequence, typename Endpoint, typename Handler>
class win_iocp_socket_recvfrom_op : public operation
{
public:
BOOST_ASIO_DEFINE_HANDLER_PTR(win_iocp_socket_recvfrom_op);
win_iocp_socket_recvfrom_op(Endpoint& endpoint,
socket_ops::weak_cancel_token_type cancel_token,
const MutableBufferSequence& buffers, Handler& handler)
: operation(&win_iocp_socket_recvfrom_op::do_complete),
endpoint_(endpoint),
endpoint_size_(static_cast<int>(endpoint.capacity())),
cancel_token_(cancel_token),
buffers_(buffers),
handler_(BOOST_ASIO_MOVE_CAST(Handler)(handler))
{
}
int& endpoint_size()
{
return endpoint_size_;
}
static void do_complete(io_service_impl* owner, operation* base,
const lslboost::system::error_code& result_ec,
std::size_t bytes_transferred)
{
lslboost::system::error_code ec(result_ec);
// Take ownership of the operation object.
win_iocp_socket_recvfrom_op* o(
static_cast<win_iocp_socket_recvfrom_op*>(base));
ptr p = { lslboost::addressof(o->handler_), o, o };
BOOST_ASIO_HANDLER_COMPLETION((o));
#if defined(BOOST_ASIO_ENABLE_BUFFER_DEBUGGING)
// Check whether buffers are still valid.
if (owner)
{
buffer_sequence_adapter<lslboost::asio::mutable_buffer,
MutableBufferSequence>::validate(o->buffers_);
}
#endif // defined(BOOST_ASIO_ENABLE_BUFFER_DEBUGGING)
socket_ops::complete_iocp_recvfrom(o->cancel_token_, ec);
// Record the size of the endpoint returned by the operation.
o->endpoint_.resize(o->endpoint_size_);
// Make a copy of the handler so that the memory can be deallocated before
// the upcall is made. Even if we're not about to make an upcall, a
// sub-object of the handler may be the true owner of the memory associated
// with the handler. Consequently, a local copy of the handler is required
// to ensure that any owning sub-object remains valid until after we have
// deallocated the memory here.
detail::binder2<Handler, lslboost::system::error_code, std::size_t>
handler(o->handler_, ec, bytes_transferred);
p.h = lslboost::addressof(handler.handler_);
p.reset();
// Make the upcall if required.
if (owner)
{
fenced_block b(fenced_block::half);
BOOST_ASIO_HANDLER_INVOCATION_BEGIN((handler.arg1_, handler.arg2_));
lslboost_asio_handler_invoke_helpers::invoke(handler, handler.handler_);
BOOST_ASIO_HANDLER_INVOCATION_END;
}
}
private:
Endpoint& endpoint_;
int endpoint_size_;
socket_ops::weak_cancel_token_type cancel_token_;
MutableBufferSequence buffers_;
Handler handler_;
};
} // namespace detail
} // namespace asio
} // namespace lslboost
#include <lslboost/asio/detail/pop_options.hpp>
#endif // defined(BOOST_ASIO_HAS_IOCP)
#endif // BOOST_ASIO_DETAIL_WIN_IOCP_SOCKET_RECVFROM_OP_HPP<|fim▁end|> | # pragma once
#endif // defined(_MSC_VER) && (_MSC_VER >= 1200)
|
<|file_name|>fast_sin_cos_2π_benchmark.cpp<|end_file_name|><|fim▁begin|>
// .\Release\x64\benchmarks.exe --benchmark_repetitions=10 --benchmark_min_time=2 --benchmark_filter=FastSinCos // NOLINT(whitespace/line_length)
#include "numerics/fast_sin_cos_2π.hpp"
#include <pmmintrin.h>
#include <random>
#include <vector>
#include "benchmark/benchmark.h"
#include "quantities/numbers.hpp"
namespace principia {
namespace numerics {
namespace {
// Returns a value which has a data dependency on both cos_2πx and sin_2πx.
// The result is (cos_2πx bitand cos_mask) bitxor sin_2πx.
// The latency is between 1 and 2 cycles: at worst this is an and and an xor, at
// best the xor can only be computed given both trigonometric lines.
double MixTrigonometricLines(double cos_2πx,
double sin_2πx,
__m128d const cos_mask) {
__m128d const cos_bits = _mm_and_pd(_mm_set_sd(cos_2πx), cos_mask);
__m128d const sin_all_bits = _mm_set_sd(sin_2πx);
__m128d const mixed_bits = _mm_xor_pd(cos_bits, sin_all_bits);
return _mm_cvtsd_f64(mixed_bits);
}
static const __m128d mantissa_bits =
_mm_castsi128_pd(_mm_cvtsi64_si128(0x000F'FFFF'FFFF'FFFF));
// When iterated, the quadrant of the result is unbiased.
double ThoroughlyMixTrigonometricLines(double cos_2πx, double sin_2πx) {
return MixTrigonometricLines(cos_2πx, sin_2πx, mantissa_bits);
}
static const __m128d mantissa_bits_and_5_bits_of_exponent =
_mm_castsi128_pd(_mm_cvtsi64_si128(0x01FF'FFFF'FFFF'FFFF));
// Same as above, but when iterated, the result is quickly confined to [0, 1/8].
double PoorlyMixTrigonometricLines(double cos_2πx, double sin_2πx) {
return MixTrigonometricLines(
cos_2πx, sin_2πx, mantissa_bits_and_5_bits_of_exponent);
}
} // namespace
void BM_FastSinCos2πPoorlyPredictedLatency(benchmark::State& state) {
for (auto _ : state) {
double sin = π;
double cos = 0.0;
for (int i = 0; i < 1e3; ++i) {
FastSinCos2π(ThoroughlyMixTrigonometricLines(cos, sin), sin, cos);
}
}
}
void BM_FastSinCos2πWellPredictedLatency(benchmark::State& state) {
for (auto _ : state) {
double sin = π;
double cos = 0.0;
for (int i = 0; i < 1e3; ++i) {
FastSinCos2π(PoorlyMixTrigonometricLines(cos, sin), sin, cos);
}
}
}
void BM_FastSinCos2πThroughput(benchmark::State& state) {
std::mt19937_64 random(42);
std::uniform_real_distribution<> distribution(-1.0, 1.0);
std::vector<double> input;
for (int i = 0; i < 1e3; ++i) {
input.push_back(distribution(random));
}<|fim▁hole|> double cos;
for (double const x : input) {
FastSinCos2π(x, sin, cos);
}
benchmark::DoNotOptimize(sin);
benchmark::DoNotOptimize(cos);
}
}
BENCHMARK(BM_FastSinCos2πPoorlyPredictedLatency)->Unit(benchmark::kMicrosecond);
BENCHMARK(BM_FastSinCos2πWellPredictedLatency)->Unit(benchmark::kMicrosecond);
BENCHMARK(BM_FastSinCos2πThroughput)->Unit(benchmark::kMicrosecond);
} // namespace numerics
} // namespace principia<|fim▁end|> |
for (auto _ : state) {
double sin; |
<|file_name|>userdata_test.go<|end_file_name|><|fim▁begin|>// Copyright 2015 Canonical Ltd.
// Copyright 2015 Cloudbase Solutions SRL
// Licensed under the AGPLv3, see LICENCE file for details.
package openstack_test
import (
jc "github.com/juju/testing/checkers"
"github.com/juju/utils"
"github.com/juju/utils/os"
gc "gopkg.in/check.v1"
"github.com/juju/juju/cloudconfig/cloudinit/cloudinittest"
"github.com/juju/juju/cloudconfig/providerinit/renderers"
"github.com/juju/juju/provider/openstack"
"github.com/juju/juju/testing"
)
type UserdataSuite struct {
testing.BaseSuite
}
var _ = gc.Suite(&UserdataSuite{})
func (s *UserdataSuite) TestOpenstackUnix(c *gc.C) {
renderer := openstack.OpenstackRenderer{}
cloudcfg := &cloudinittest.CloudConfig{YAML: []byte("yaml")}
result, err := renderer.Render(cloudcfg, os.Ubuntu)
c.Assert(err, jc.ErrorIsNil)
c.Assert(result, jc.DeepEquals, utils.Gzip(cloudcfg.YAML))
result, err = renderer.Render(cloudcfg, os.CentOS)
c.Assert(err, jc.ErrorIsNil)
c.Assert(result, jc.DeepEquals, utils.Gzip(cloudcfg.YAML))
}
func (s *UserdataSuite) TestOpenstackWindows(c *gc.C) {
renderer := openstack.OpenstackRenderer{}
cloudcfg := &cloudinittest.CloudConfig{YAML: []byte("yaml")}
result, err := renderer.Render(cloudcfg, os.Windows)
c.Assert(err, jc.ErrorIsNil)
c.Assert(result, jc.DeepEquals, renderers.WinEmbedInScript(cloudcfg.YAML))<|fim▁hole|> cloudcfg := &cloudinittest.CloudConfig{}
result, err := renderer.Render(cloudcfg, os.GenericLinux)
c.Assert(result, gc.IsNil)
c.Assert(err, gc.ErrorMatches, "Cannot encode userdata for OS: GenericLinux")
}<|fim▁end|> | }
func (s *UserdataSuite) TestOpenstackUnknownOS(c *gc.C) {
renderer := openstack.OpenstackRenderer{} |
<|file_name|>mobile_device_constant_service.pb.go<|end_file_name|><|fim▁begin|>// Code generated by protoc-gen-go. DO NOT EDIT.
// source: google/ads/googleads/v2/services/mobile_device_constant_service.proto
package services
import (
context "context"
fmt "fmt"
math "math"
proto "github.com/golang/protobuf/proto"
resources "google.golang.org/genproto/googleapis/ads/googleads/v2/resources"
_ "google.golang.org/genproto/googleapis/api/annotations"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
// Request message for [MobileDeviceConstantService.GetMobileDeviceConstant][google.ads.googleads.v2.services.MobileDeviceConstantService.GetMobileDeviceConstant].
type GetMobileDeviceConstantRequest struct {
// Resource name of the mobile device to fetch.
ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *GetMobileDeviceConstantRequest) Reset() { *m = GetMobileDeviceConstantRequest{} }
func (m *GetMobileDeviceConstantRequest) String() string { return proto.CompactTextString(m) }
func (*GetMobileDeviceConstantRequest) ProtoMessage() {}
func (*GetMobileDeviceConstantRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_5b671ef47eb7057e, []int{0}
}
func (m *GetMobileDeviceConstantRequest) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_GetMobileDeviceConstantRequest.Unmarshal(m, b)
}
func (m *GetMobileDeviceConstantRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_GetMobileDeviceConstantRequest.Marshal(b, m, deterministic)
}
func (m *GetMobileDeviceConstantRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_GetMobileDeviceConstantRequest.Merge(m, src)
}
func (m *GetMobileDeviceConstantRequest) XXX_Size() int {
return xxx_messageInfo_GetMobileDeviceConstantRequest.Size(m)
}
func (m *GetMobileDeviceConstantRequest) XXX_DiscardUnknown() {
xxx_messageInfo_GetMobileDeviceConstantRequest.DiscardUnknown(m)
}
var xxx_messageInfo_GetMobileDeviceConstantRequest proto.InternalMessageInfo
func (m *GetMobileDeviceConstantRequest) GetResourceName() string {
if m != nil {
return m.ResourceName
}
return ""
}
func init() {
proto.RegisterType((*GetMobileDeviceConstantRequest)(nil), "google.ads.googleads.v2.services.GetMobileDeviceConstantRequest")
}
func init() {
proto.RegisterFile("google/ads/googleads/v2/services/mobile_device_constant_service.proto", fileDescriptor_5b671ef47eb7057e)
}
var fileDescriptor_5b671ef47eb7057e = []byte{
// 383 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0xcb, 0x4a, 0xeb, 0x40,
0x18, 0x26, 0x39, 0x70, 0xe0, 0x84, 0xe3, 0x26, 0x9b, 0x96, 0x54, 0x24, 0xd4, 0x2e, 0x44, 0x71,
0x06, 0xd2, 0x85, 0x30, 0xa2, 0x98, 0x6a, 0xa9, 0x1b, 0xa5, 0x54, 0xe8, 0x42, 0x02, 0x61, 0x9a,
0x0c, 0x21, 0x90, 0xcc, 0xd4, 0xfc, 0x69, 0x37, 0xe2, 0xc6, 0x85, 0x3e, 0x80, 0x6f, 0xe0, 0xd2,
0x37, 0xb1, 0x5b, 0x5f, 0xc1, 0x95, 0x4f, 0x21, 0xb9, 0x4c, 0xaa, 0xd0, 0xb4, 0xbb, 0x2f, 0xf3,
0x7f, 0x97, 0xff, 0x12, 0xad, 0x1f, 0x08, 0x11, 0x44, 0x0c, 0x53, 0x1f, 0x70, 0x01, 0x33, 0x34,
0xb7, 0x30, 0xb0, 0x64, 0x1e, 0x7a, 0x0c, 0x70, 0x2c, 0x26, 0x61, 0xc4, 0x5c, 0x9f, 0x65, 0x9f,
0xae, 0x27, 0x38, 0xa4, 0x94, 0xa7, 0x6e, 0x59, 0x47, 0xd3, 0x44, 0xa4, 0x42, 0x37, 0x0b, 0x2d,
0xa2, 0x3e, 0xa0, 0xca, 0x06, 0xcd, 0x2d, 0x24, 0x6d, 0x8c, 0xd3, 0xba, 0xa0, 0x84, 0x81, 0x98,
0x25, 0xf5, 0x49, 0x45, 0x82, 0xb1, 0x2d, 0xf5, 0xd3, 0x10, 0x53, 0xce, 0x45, 0x4a, 0xd3, 0x50,
0x70, 0x28, 0xab, 0x8d, 0x1f, 0x55, 0x2f, 0x0a, 0x99, 0x94, 0xb5, 0xfb, 0xda, 0xce, 0x80, 0xa5,
0x57, 0xb9, 0xf3, 0x45, 0x6e, 0x7c, 0x5e, 0xfa, 0x8e, 0xd8, 0xdd, 0x8c, 0x41, 0xaa, 0xef, 0x6a,
0x5b, 0xb2, 0x05, 0x97, 0xd3, 0x98, 0x35, 0x15, 0x53, 0xd9, 0xfb, 0x37, 0xfa, 0x2f, 0x1f, 0xaf,
0x69, 0xcc, 0xac, 0x27, 0x55, 0x6b, 0xad, 0x32, 0xb9, 0x29, 0xc6, 0xd3, 0xdf, 0x15, 0xad, 0x51,
0x93, 0xa3, 0x9f, 0xa1, 0x4d, 0xcb, 0x41, 0xeb, 0x5b, 0x34, 0x8e, 0x6a, 0x1d, 0xaa, 0xe5, 0xa1,
0x55, 0xfa, 0x76, 0xf7, 0xf1, 0xe3, 0xf3, 0x45, 0x3d, 0xd4, 0x0f, 0xb2, 0x45, 0xdf, 0xff, 0x1a,
0xf3, 0x24, 0x5e, 0x21, 0x00, 0xbc, 0xff, 0x60, 0xb4, 0x16, 0x76, 0x73, 0x19, 0x52, 0xa2, 0x69,
0x08, 0xc8, 0x13, 0x71, 0xef, 0x59, 0xd5, 0x3a, 0x9e, 0x88, 0x37, 0x8e, 0xd4, 0x33, 0xd7, 0xac,
0x6b, 0x98, 0x9d, 0x66, 0xa8, 0xdc, 0x5e, 0x96, 0x2e, 0x81, 0x88, 0x28, 0x0f, 0x90, 0x48, 0x02,
0x1c, 0x30, 0x9e, 0x1f, 0x0e, 0x2f, 0x73, 0xeb, 0xff, 0xcd, 0x63, 0x09, 0x5e, 0xd5, 0x3f, 0x03,
0xdb, 0x7e, 0x53, 0xcd, 0x41, 0x61, 0x68, 0xfb, 0x80, 0x0a, 0x98, 0xa1, 0xb1, 0x85, 0xca, 0x60,
0x58, 0x48, 0x8a, 0x63, 0xfb, 0xe0, 0x54, 0x14, 0x67, 0x6c, 0x39, 0x92, 0xf2, 0xa5, 0x76, 0x8a,
0x77, 0x42, 0x6c, 0x1f, 0x08, 0xa9, 0x48, 0x84, 0x8c, 0x2d, 0x42, 0x24, 0x6d, 0xf2, 0x37, 0xef,
0xb3, 0xfb, 0x1d, 0x00, 0x00, 0xff, 0xff, 0xb2, 0x2e, 0x00, 0xeb, 0x42, 0x03, 0x00, 0x00,
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4
// MobileDeviceConstantServiceClient is the client API for MobileDeviceConstantService service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type MobileDeviceConstantServiceClient interface {
// Returns the requested mobile device constant in full detail.
GetMobileDeviceConstant(ctx context.Context, in *GetMobileDeviceConstantRequest, opts ...grpc.CallOption) (*resources.MobileDeviceConstant, error)
}
type mobileDeviceConstantServiceClient struct {
cc *grpc.ClientConn
}
func NewMobileDeviceConstantServiceClient(cc *grpc.ClientConn) MobileDeviceConstantServiceClient {
return &mobileDeviceConstantServiceClient{cc}
}
func (c *mobileDeviceConstantServiceClient) GetMobileDeviceConstant(ctx context.Context, in *GetMobileDeviceConstantRequest, opts ...grpc.CallOption) (*resources.MobileDeviceConstant, error) {
out := new(resources.MobileDeviceConstant)
err := c.cc.Invoke(ctx, "/google.ads.googleads.v2.services.MobileDeviceConstantService/GetMobileDeviceConstant", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// MobileDeviceConstantServiceServer is the server API for MobileDeviceConstantService service.
type MobileDeviceConstantServiceServer interface {
// Returns the requested mobile device constant in full detail.
GetMobileDeviceConstant(context.Context, *GetMobileDeviceConstantRequest) (*resources.MobileDeviceConstant, error)
}
// UnimplementedMobileDeviceConstantServiceServer can be embedded to have forward compatible implementations.
type UnimplementedMobileDeviceConstantServiceServer struct {<|fim▁hole|>}
func RegisterMobileDeviceConstantServiceServer(s *grpc.Server, srv MobileDeviceConstantServiceServer) {
s.RegisterService(&_MobileDeviceConstantService_serviceDesc, srv)
}
func _MobileDeviceConstantService_GetMobileDeviceConstant_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(GetMobileDeviceConstantRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(MobileDeviceConstantServiceServer).GetMobileDeviceConstant(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/google.ads.googleads.v2.services.MobileDeviceConstantService/GetMobileDeviceConstant",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(MobileDeviceConstantServiceServer).GetMobileDeviceConstant(ctx, req.(*GetMobileDeviceConstantRequest))
}
return interceptor(ctx, in, info, handler)
}
var _MobileDeviceConstantService_serviceDesc = grpc.ServiceDesc{
ServiceName: "google.ads.googleads.v2.services.MobileDeviceConstantService",
HandlerType: (*MobileDeviceConstantServiceServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "GetMobileDeviceConstant",
Handler: _MobileDeviceConstantService_GetMobileDeviceConstant_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "google/ads/googleads/v2/services/mobile_device_constant_service.proto",
}<|fim▁end|> | }
func (*UnimplementedMobileDeviceConstantServiceServer) GetMobileDeviceConstant(ctx context.Context, req *GetMobileDeviceConstantRequest) (*resources.MobileDeviceConstant, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetMobileDeviceConstant not implemented") |
<|file_name|>test_event.py<|end_file_name|><|fim▁begin|>from vadvisor.store.event import InMemoryStore
import pytest
from freezegun import freeze_time
from datetime import datetime, timedelta
@pytest.fixture
@freeze_time("2012-01-14 03:00:00")
def expired_store():
store = InMemoryStore(60)
# Insert old data
store.put('old')
store.put('old')
store.put('old')
return store
@pytest.fixture
@freeze_time("2012-01-14 03:01:30")
def new_store(expired_store):
# Insert newer data
expired_store.put('new')
expired_store.put('new')
expired_store.put('new')
return expired_store
@pytest.fixture
@freeze_time("2012-01-14 03:01:50")
def newest_store(new_store):
    # Insert the newest data
new_store.put('newest')<|fim▁hole|>
def test_empty_store():
store = InMemoryStore()
assert store.get() == []
@freeze_time("2012-01-14 03:02:00")
def test_expire_on_get(expired_store):
expired_store.get()
assert expired_store.get() == []
@freeze_time("2012-01-14 03:02:00")
def test_get_all_new(new_store):
assert new_store.get() == ['new', 'new', 'new']
@freeze_time("2012-01-14 03:02:00")
def test_get_two_new(new_store):
assert new_store.get(elements=2) == ['new', 'new']
@freeze_time("2012-01-14 03:02:00")
def test_get_not_older_than(newest_store):
events = newest_store.get(
elements=2,
start_time=datetime.utcnow() - timedelta(seconds=20)
)
assert events == ['newest', 'newest']
@freeze_time("2012-01-14 03:02:00")
def test_get_not_newer_than(newest_store):
events = newest_store.get(
elements=2,
stop_time=datetime.utcnow() - timedelta(seconds=20)
)
assert events == ['new', 'new']<|fim▁end|> | new_store.put('newest')
new_store.put('newest')
return new_store
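# Usage sketch of the store under test (illustrative; InMemoryStore(60) keeps
# events for a 60-second window and prunes expired ones when get() is called):
#
#   store = InMemoryStore(60)
#   store.put('event')
#   recent = store.get(elements=10)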
|
<|file_name|>app.py<|end_file_name|><|fim▁begin|>import gamerocket
from flask import Flask, request, render_template
app = Flask(__name__)
gamerocket.Configuration.configure(gamerocket.Environment.Development,
apiKey = "your_apiKey",
secretKey = "your_secretKey")
<|fim▁hole|>def form():
return render_template("form.html")
@app.route("/create_player", methods=["POST"])
def create_player():
result = gamerocket.Player.create({
"name":request.form["name"],
"locale":request.form["locale"]
})
if result.is_success:
return "<h1>Success! Player ID: " + result.player.id + "</h1>"
else:
return "<h1>Error " + result.error + ": " + result.error_description + "</h1>"
if __name__ == '__main__':
app.run(debug=True)<|fim▁end|> |
@app.route("/") |
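# Minimal sketch of the template assumed by render_template above
# (templates/form.html; the field names must match the request.form lookups):
#
#   <form action="/create_player" method="post">
#     <input name="name"> <input name="locale"> <button>Create</button>
#   </form>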
<|file_name|>actuators_simple.py<|end_file_name|><|fim▁begin|>def print_from_queue(q):
"""
prints values read from queue q to
standard out.
"""
while True:
v = q.get()
if v is None:
# exit loop
return
else:
print (str(v))
class queue_to_file(object):
"""
self.actuate(a) puts values from a queue q
into the file called self.filename
"""
def __init__(self, filename, timeout=0):
self.filename = filename
self.timeout = timeout<|fim▁hole|> with open(self.filename, 'w') as the_file:
while True:
try:
v = q.get(timeout=self.timeout)
                except Exception:
# No more input for this actuator
return
if v is None:
# exit loop
return
else:
the_file.write(str(v) + '\n')<|fim▁end|> |
def actuate(self, q): |
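# Usage sketch (assumes a standard-library queue.Queue; values illustrative):
#
#   import queue
#   q = queue.Queue()
#   q.put('reading 1')
#   q.put(None)  # None tells the consumer to stop
#   queue_to_file('/tmp/out.txt', timeout=1).actuate(q)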
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|>from rest_framework import serializers
from csinterop.models import SharingProposal, Folder, User
class SharingProposalSerializer(serializers.ModelSerializer):
share_id = serializers.RelatedField(source='key')
permission = serializers.CharField(source='get_permission', read_only=True)
folder_name = serializers.RelatedField(source='folder.name')
owner_name = serializers.RelatedField(source='owner.name')
owner_email = serializers.RelatedField(source='owner.email')
protocol_version = serializers.CharField(required=False)
def restore_object(self, attrs, instance=None):
"""
Given a dictionary of deserialized field values, either update
an existing model instance, or create a new model instance.
"""
if instance is not None:
return instance
proposal = SharingProposal(**attrs)<|fim▁hole|> proposal.key = self.context['request'].DATA['share_id']
owner = User()
owner.name = self.context['request'].DATA['owner_name']
owner.email = self.context['request'].DATA['owner_email']
proposal.owner = owner
folder = Folder()
folder.name = self.context['request'].DATA['folder_name']
proposal.folder = folder
        write_access = self.context['request'].DATA['permission'].lower() == 'read-write'
        proposal.write_access = write_access
proposal.status = 'PENDING'
return proposal
class Meta:
model = SharingProposal
fields = (
'share_id', 'recipient', 'resource_url', 'owner_name', 'owner_email', 'folder_name', 'permission',
'callback', 'protocol_version',
'status', 'created_at')<|fim▁end|> | |
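# Illustrative request payload that restore_object() consumes (values are made
# up; 'permission' is matched against 'read-write' to set write access):
#
#   {"share_id": "abc123", "recipient": "[email protected]",
#    "owner_name": "Alice", "owner_email": "[email protected]",
#    "folder_name": "Reports", "permission": "read-write"}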
<|file_name|>log.hpp<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2004, 2005, 2006, 2007, 2008, 2009 Mark Aylett <[email protected]>
This file is part of Aug written by Mark Aylett.
Aug is released under the GPL with the additional exemption that compiling,
linking, and/or using OpenSSL is allowed.
Aug is free software; you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.
Aug is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details.
You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 51
Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#ifndef AUGUTILPP_LOG_HPP
#define AUGUTILPP_LOG_HPP
#include "augutilpp/config.hpp"
#include "augctxpp/exception.hpp"
#include "augutil/log.h"
#include "augctx/defs.h" // AUG_MAXLINE
#include <string>
namespace aug {
inline void
vformatlog(char* buf, size_t& n, clockref clock, unsigned level,
const char* format, va_list args)
{
verify(aug_vformatlog(buf, &n, clock.get(), level, format, args));
}
inline void<|fim▁hole|> va_start(args, format);
aug_result result(aug_vformatlog(buf, &n, clock.get(), level, format,
args));
va_end(args);
verify(result);
}
inline std::string
vformatlog(clockref clock, unsigned level, const char* format,
va_list args)
{
char buf[AUG_MAXLINE];
size_t n(sizeof(buf));
vformatlog(buf, n, clock, level, format, args);
return std::string(buf, n);
}
inline std::string
formatlog(clockref clock, unsigned level, const char* format, ...)
{
char buf[AUG_MAXLINE];
size_t n(sizeof(buf));
va_list args;
va_start(args, format);
aug_result result(aug_vformatlog(buf, &n, clock.get(), level, format,
args));
va_end(args);
verify(result);
return std::string(buf, n);
}
inline logptr
createdaemonlog(mpoolref mpool, clockref clock)
{
return object_attach<aug_log>
(aug_createdaemonlog(mpool.get(), clock.get()));
}
inline void
setdaemonlog(ctxref ctx)
{
verify(aug_setdaemonlog(ctx.get()));
}
}
#endif // AUGUTILPP_LOG_HPP<|fim▁end|> | formatlog(char* buf, size_t& n, clockref clock, unsigned level,
const char* format, ...)
{
va_list args; |
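// Usage sketch (the clock comes from the surrounding aug context; the level
// and format arguments are illustrative):
//
//   std::string line = aug::formatlog(clock, 2, "user=%s id=%d", name, id);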
<|file_name|>software_engineering_sqlite3.py<|end_file_name|><|fim▁begin|>from collections import namedtuple
import sqlite3
# make a basic Link class
Link = namedtuple('Link', ['id', 'submitter_id', 'submitted_time', 'votes',
'title', 'url'])
# list of Links to work with
links = [
Link(0, 60398, 1334014208.0, 109,
"C overtakes Java as the No. 1 programming language in the TIOBE index.",
"http://pixelstech.net/article/index.php?id=1333969280"),
Link(1, 60254, 1333962645.0, 891,
"This explains why technical books are all ridiculously thick and overpriced",
"http://prog21.dadgum.com/65.html"),
Link(23, 62945, 1333894106.0, 351,
"Learn Haskell Fast and Hard",
"http://yannesposito.com/Scratch/en/blog/Haskell-the-Hard-Way/"),<|fim▁hole|> "TIL about the Lisp Curse",
"http://www.winestockwebdesign.com/Essays/Lisp_Curse.html"),
Link(4, 59008, 1334016506.0, 19,
"The Downfall of Imperative Programming. Functional Programming and the Multicore Revolution",
"http://fpcomplete.com/the-downfall-of-imperative-programming/"),
Link(5, 8712, 1333993676.0, 26,
"Open Source - Twitter Stock Market Game - ",
"http://www.twitstreet.com/"),
Link(6, 48626, 1333975127.0, 63,
"First look: Qt 5 makes JavaScript a first-class citizen for app development",
"http://arstechnica.com/business/news/2012/04/an-in-depth-look-at-qt-5-making-javascript-a-first-class-citizen-for-native-cross-platform-developme.ars"),
Link(7, 30172, 1334017294.0, 5,
"Benchmark of Dictionary Structures", "http://lh3lh3.users.sourceforge.net/udb.shtml"),
Link(8, 678, 1334014446.0, 7,
"If It's Not on Prod, It Doesn't Count: The Value of Frequent Releases",
"http://bits.shutterstock.com/?p=165"),
Link(9, 29168, 1334006443.0, 18,
"Language proposal: dave",
"http://davelang.github.com/"),
Link(17, 48626, 1334020271.0, 1,
"LispNYC and EmacsNYC meetup Tuesday Night: Large Scale Development with Elisp ",
"http://www.meetup.com/LispNYC/events/47373722/"),
Link(101, 62443, 1334018620.0, 4,
"research!rsc: Zip Files All The Way Down",
"http://research.swtch.com/zip"),
Link(12, 10262, 1334018169.0, 5,
"The Tyranny of the Diff",
"http://michaelfeathers.typepad.com/michael_feathers_blog/2012/04/the-tyranny-of-the-diff.html"),
Link(13, 20831, 1333996529.0, 14,
"Understanding NIO.2 File Channels in Java 7",
"http://java.dzone.com/articles/understanding-nio2-file"),
Link(15, 62443, 1333900877.0, 1244,
"Why vector icons don't work",
"http://www.pushing-pixels.org/2011/11/04/about-those-vector-icons.html"),
Link(14, 30650, 1334013659.0, 3,
"Python - Getting Data Into Graphite - Code Examples",
"http://coreygoldberg.blogspot.com/2012/04/python-getting-data-into-graphite-code.html"),
Link(16, 15330, 1333985877.0, 9,
"Mozilla: The Web as the Platform and The Kilimanjaro Event",
"https://groups.google.com/forum/?fromgroups#!topic/mozilla.dev.planning/Y9v46wFeejA"),
Link(18, 62443, 1333939389.0, 104,
"github is making me feel stupid(er)",
"http://www.serpentine.com/blog/2012/04/08/github-is-making-me-feel-stupider/"),
Link(19, 6937, 1333949857.0, 39,
"BitC Retrospective: The Issues with Type Classes",
"http://www.bitc-lang.org/pipermail/bitc-dev/2012-April/003315.html"),
Link(20, 51067, 1333974585.0, 14,
"Object Oriented C: Class-like Structures",
"http://cecilsunkure.blogspot.com/2012/04/object-oriented-c-class-like-structures.html"),
Link(10, 23944, 1333943632.0, 188,
"The LOVE game framework version 0.8.0 has been released - with GLSL shader support!",
"https://love2d.org/forums/viewtopic.php?f=3&t=8750"),
Link(22, 39191, 1334005674.0, 11,
"An open letter to language designers: Please kill your sacred cows. (megarant)",
"http://joshondesign.com/2012/03/09/open-letter-language-designers"),
Link(21, 3777, 1333996565.0, 2,
"Developers guide to Garage48 hackatron",
"http://martingryner.com/developers-guide-to-garage48-hackatron/"),
Link(24, 48626, 1333934004.0, 17,
"An R programmer looks at Julia",
"http://www.r-bloggers.com/an-r-programmer-looks-at-julia/")]
# links is a list of Link objects. Links have a handful of properties. For
# example, a Link's number of votes can be accessed by link.votes if "link" is a
# Link.
# make and populate a table
db = sqlite3.connect(':memory:')
db.execute('create table links ' +
'(id integer, submitter_id integer, submitted_time integer, ' +
'votes integer, title text, url text)')
for l in links:
db.execute('insert into links values (?, ?, ?, ?, ?, ?)', l)
# db is an in-memory sqlite database that can respond to sql queries using the
# execute() function.
#
# For example. If you run
#
# c = db.execute("select * from links")
#
# c will be a "cursor" to the results of that query. You can use the fetchmany()
# function on the cursor to convert that cursor into a list of results. These
# results won't be Links; they'll be tuples, but they can be turned into
# a Link.
#
# For example, to print all the votes for all of the links, do this:
#
# c = db.execute("select * from links")
# for link_tuple in c:
# link = Link(*link_tuple)
# print link.votes
#
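# A parameterized form of the same kind of query avoids pasting values into
# the SQL string (the "?" placeholder is standard sqlite3 usage):
#
# c = db.execute("select votes from links where id = ?", (2,))
# (votes,) = c.fetchone()
#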
# QUIZ - make the function query() return the number of votes the link with ID = 2 has
def query():
c = db.execute("select * from links where id==2")
link = Link(*c.fetchone())
return link.votes
def query2():
c = db.execute("select * from links where submitter_id = 62443 and votes > 1000")
link = Link(*c.fetchone())
print link.id
for link_tuple in c:
link = Link(*link_tuple)
print link.id
query2()<|fim▁end|> | Link(2, 6084, 1333996166.0, 81,
"Announcing Yesod 1.0- a robust, developer friendly, high performance web framework for Haskell",
"http://www.yesodweb.com/blog/2012/04/announcing-yesod-1-0"),
Link(3, 30305, 1333968061.0, 270, |
<|file_name|>SbPimplPtr.hpp<|end_file_name|><|fim▁begin|>#ifndef COIN_SBPIMPLPTR_HPP
#define COIN_SBPIMPLPTR_HPP
/**************************************************************************\
*
* This file is part of the Coin 3D visualization library.
* Copyright (C) 1998-2008 by Kongsberg SIM. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* ("GPL") version 2 as published by the Free Software Foundation.
* See the file LICENSE.GPL at the root directory of this source
* distribution for additional information about the GNU GPL.
*
* For using Coin with software that can not be combined with the GNU
* GPL, and for taking advantage of the additional benefits of our
* support services, please contact Kongsberg SIM about acquiring
* a Coin Professional Edition License.
*
* See http://www.coin3d.org/ for more information.
*
* Kongsberg SIM, Postboks 1283, Pirsenteret, 7462 Trondheim, NORWAY.
* http://www.sim.no/ [email protected] [email protected]
*
\**************************************************************************/<|fim▁hole|>#error do not include Inventor/tools/SbPimplPtr.hpp directly, use Inventor/tools/SbPimplPtr.h
#endif // !COIN_SBPIMPLPTR_H
/* ********************************************************************** */
template <typename T>
SbPimplPtr<T>::SbPimplPtr(void)
: ptr(NULL)
{
this->set(this->getNew());
}
template <typename T>
SbPimplPtr<T>::SbPimplPtr(T * initial)
{
this->ptr = initial;
}
template <typename T>
SbPimplPtr<T>::SbPimplPtr(const SbPimplPtr<T> & copy)
  : ptr(NULL)
{
  // ptr must refer to a live object before operator=() dereferences it below.
  this->set(this->getNew());
  *this = copy;
}
template <typename T>
SbPimplPtr<T>::~SbPimplPtr(void)
{
this->set(NULL);
}
template <typename T>
void
SbPimplPtr<T>::set(T * value)
{
if (this->ptr) {
delete this->ptr;
}
this->ptr = value;
}
template <typename T>
T &
SbPimplPtr<T>::get(void) const
{
return *(this->ptr);
}
template <typename T>
T *
SbPimplPtr<T>::getNew(void) const
{
return new T;
}
template <typename T>
SbPimplPtr<T> &
SbPimplPtr<T>::operator = (const SbPimplPtr<T> & copy)
{
this->get() = copy.get();
return *this;
}
template <typename T>
SbBool
SbPimplPtr<T>::operator == (const SbPimplPtr<T> & rhs) const
{
return this->get() == rhs.get();
}
template <typename T>
SbBool
SbPimplPtr<T>::operator != (const SbPimplPtr<T> & rhs) const
{
return !(*this == rhs);
}
template <typename T>
const T *
SbPimplPtr<T>::operator -> (void) const
{
return &(this->get());
}
template <typename T>
T *
SbPimplPtr<T>::operator -> (void)
{
return &(this->get());
}
/* ********************************************************************** */
#endif // !COIN_SBPIMPLPTR_HPP<|fim▁end|> |
#ifndef COIN_SBPIMPLPTR_H |
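// Usage sketch (hypothetical class): the wrapped type is normally an opaque
// implementation class, so the public header stays lightweight:
//
//   class FooP;                            // defined only in the .cpp file
//   class Foo { SbPimplPtr<FooP> pimpl; };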
<|file_name|>pull-to-refresh.js<|end_file_name|><|fim▁begin|>/*======================================================
************ Pull To Refresh ************
======================================================*/
app.initPullToRefresh = function (pageContainer) {
var eventsTarget = $(pageContainer);
if (!eventsTarget.hasClass('pull-to-refresh-content')) {
eventsTarget = eventsTarget.find('.pull-to-refresh-content');
}
if (!eventsTarget || eventsTarget.length === 0) return;
var touchId, isTouched, isMoved, touchesStart = {}, isScrolling, touchesDiff, touchStartTime, container, refresh = false, useTranslate = false, startTranslate = 0, translate, scrollTop, wasScrolled, layer, triggerDistance, dynamicTriggerDistance, pullStarted;
var page = eventsTarget.hasClass('page') ? eventsTarget : eventsTarget.parents('.page');
var hasNavbar = false;
if (page.find('.navbar').length > 0 || page.parents('.navbar-fixed, .navbar-through').length > 0 || page.hasClass('navbar-fixed') || page.hasClass('navbar-through')) hasNavbar = true;
if (page.hasClass('no-navbar')) hasNavbar = false;
if (!hasNavbar) eventsTarget.addClass('pull-to-refresh-no-navbar');
container = eventsTarget;
// Define trigger distance
if (container.attr('data-ptr-distance')) {
dynamicTriggerDistance = true;
}
else {
triggerDistance = 44;
}
function handleTouchStart(e) {
if (isTouched) {
if (app.device.os === 'android') {
if ('targetTouches' in e && e.targetTouches.length > 1) return;
}
else return;
}
/*jshint validthis:true */
container = $(this);
if (container.hasClass('refreshing')) {
return;
}
isMoved = false;
pullStarted = false;
isTouched = true;
isScrolling = undefined;
wasScrolled = undefined;
if (e.type === 'touchstart') touchId = e.targetTouches[0].identifier;
touchesStart.x = e.type === 'touchstart' ? e.targetTouches[0].pageX : e.pageX;
touchesStart.y = e.type === 'touchstart' ? e.targetTouches[0].pageY : e.pageY;
touchStartTime = (new Date()).getTime();
}
function handleTouchMove(e) {
if (!isTouched) return;
var pageX, pageY, touch;
if (e.type === 'touchmove') {
if (touchId && e.touches) {
for (var i = 0; i < e.touches.length; i++) {
if (e.touches[i].identifier === touchId) {
touch = e.touches[i];
}
}
}
if (!touch) touch = e.targetTouches[0];
pageX = touch.pageX;
pageY = touch.pageY;
}
else {
pageX = e.pageX;
pageY = e.pageY;
}
if (!pageX || !pageY) return;
if (typeof isScrolling === 'undefined') {
isScrolling = !!(isScrolling || Math.abs(pageY - touchesStart.y) > Math.abs(pageX - touchesStart.x));
}
if (!isScrolling) {
isTouched = false;
return;
}
scrollTop = container[0].scrollTop;
if (typeof wasScrolled === 'undefined' && scrollTop !== 0) wasScrolled = true;
if (!isMoved) {
/*jshint validthis:true */
container.removeClass('transitioning');
if (scrollTop > container[0].offsetHeight) {
isTouched = false;
return;
}
if (dynamicTriggerDistance) {
triggerDistance = container.attr('data-ptr-distance');
if (triggerDistance.indexOf('%') >= 0) triggerDistance = container[0].offsetHeight * parseInt(triggerDistance, 10) / 100;
}
startTranslate = container.hasClass('refreshing') ? triggerDistance : 0;
if (container[0].scrollHeight === container[0].offsetHeight || app.device.os !== 'ios') {
useTranslate = true;
}
else {
useTranslate = false;
}
}
isMoved = true;
touchesDiff = pageY - touchesStart.y;
if (touchesDiff > 0 && scrollTop <= 0 || scrollTop < 0) {
// iOS 8 fix
if (app.device.os === 'ios' && parseInt(app.device.osVersion.split('.')[0], 10) > 7 && scrollTop === 0 && !wasScrolled) useTranslate = true;
if (useTranslate) {
e.preventDefault();
translate = (Math.pow(touchesDiff, 0.85) + startTranslate);
container.transform('translate3d(0,' + translate + 'px,0)');
}
if ((useTranslate && Math.pow(touchesDiff, 0.85) > triggerDistance) || (!useTranslate && touchesDiff >= triggerDistance * 2)) {
refresh = true;
container.addClass('pull-up').removeClass('pull-down');
}
else {
refresh = false;
container.removeClass('pull-up').addClass('pull-down');
}
if (!pullStarted) {
container.trigger('pullstart');
pullStarted = true;
}
container.trigger('pullmove', {
event: e,
scrollTop: scrollTop,
translate: translate,
touchesDiff: touchesDiff
});
}
else {
pullStarted = false;
container.removeClass('pull-up pull-down');
refresh = false;
return;
}
}
function handleTouchEnd(e) {<|fim▁hole|> if (e.changedTouches[0].identifier !== touchId) return;
}
if (!isTouched || !isMoved) {
isTouched = false;
isMoved = false;
return;
}
if (translate) {
container.addClass('transitioning');
translate = 0;
}
container.transform('');
if (refresh) {
container.addClass('refreshing');
container.trigger('refresh', {
done: function () {
app.pullToRefreshDone(container);
}
});
}
else {
container.removeClass('pull-down');
}
isTouched = false;
isMoved = false;
if (pullStarted) container.trigger('pullend');
}
// Attach Events
var passiveListener = app.touchEvents.start === 'touchstart' && app.support.passiveListener ? {passive: true, capture: false} : false;
eventsTarget.on(app.touchEvents.start, handleTouchStart, passiveListener);
eventsTarget.on(app.touchEvents.move, handleTouchMove);
eventsTarget.on(app.touchEvents.end, handleTouchEnd, passiveListener);
// Detach Events on page remove
if (page.length === 0) return;
function destroyPullToRefresh() {
eventsTarget.off(app.touchEvents.start, handleTouchStart);
eventsTarget.off(app.touchEvents.move, handleTouchMove);
eventsTarget.off(app.touchEvents.end, handleTouchEnd);
}
eventsTarget[0].f7DestroyPullToRefresh = destroyPullToRefresh;
function detachEvents() {
destroyPullToRefresh();
page.off('pageBeforeRemove', detachEvents);
}
page.on('pageBeforeRemove', detachEvents);
};
app.pullToRefreshDone = function (container) {
container = $(container);
if (container.length === 0) container = $('.pull-to-refresh-content.refreshing');
container.removeClass('refreshing').addClass('transitioning');
container.transitionEnd(function () {
container.removeClass('transitioning pull-up pull-down');
container.trigger('refreshdone');
});
};
app.pullToRefreshTrigger = function (container) {
container = $(container);
if (container.length === 0) container = $('.pull-to-refresh-content');
if (container.hasClass('refreshing')) return;
container.addClass('transitioning refreshing');
container.trigger('refresh', {
done: function () {
app.pullToRefreshDone(container);
}
});
};
app.destroyPullToRefresh = function (pageContainer) {
pageContainer = $(pageContainer);
var pullToRefreshContent = pageContainer.hasClass('pull-to-refresh-content') ? pageContainer : pageContainer.find('.pull-to-refresh-content');
if (pullToRefreshContent.length === 0) return;
if (pullToRefreshContent[0].f7DestroyPullToRefresh) pullToRefreshContent[0].f7DestroyPullToRefresh();
};<|fim▁end|> | if (e.type === 'touchend' && e.changedTouches && e.changedTouches.length > 0 && touchId) { |
<|file_name|>EggPlayer.java<|end_file_name|><|fim▁begin|>package com.github.niwaniwa.we.core.player;
import com.github.niwaniwa.we.core.api.callback.Callback;
import com.github.niwaniwa.we.core.twitter.TwitterManager;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Location;
import org.bukkit.entity.Entity;
import org.bukkit.entity.Player;
import org.bukkit.inventory.Inventory;
import org.bukkit.permissions.Permission;
import twitter4j.Status;
import twitter4j.StatusUpdate;
import java.lang.reflect.InvocationTargetException;
import java.net.InetSocketAddress;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
public abstract class EggPlayer implements Twitter, WhitePlayer {
private UUID uuid;
private Player player;
public EggPlayer(Player player) {
this.uuid = player.getUniqueId();
this.player = player;
}
public EggPlayer(UUID uuid){
this.uuid = uuid;
this.player = getPlayer();
}
@Override
public void remove() {
player.remove();
}
@Override
public String getName() {
return player.getName();
}
@Override
public String getFullName() {
return getPrefix() + getName();
}
@Override
public Player getPlayer() {
return Bukkit.getPlayer(uuid);
}
public void update(){
this.player = getPlayer();
}
@Override
public UUID getUniqueId() {
return player.getUniqueId();
}
@Override
public boolean isOp() {
return player.isOp();
}<|fim▁hole|>
@Override
public boolean isOnline() {
return player.isOnline();
}
@Override
public void sendMessage(String message) {
this.sendMessage(message, true);
}
@Override
public void sendMessage(String[] message) {
Arrays.asList(message).forEach(msg -> sendMessage(msg, true));
}
@Override
public void sendMessage(String message, boolean replaceColorCode) {
		player.sendMessage(replaceColorCode ? ChatColor.translateAlternateColorCodes('&', message) : message);
}
@Override
public TwitterManager getTwitterManager() {
throw new UnsupportedOperationException(getClass().getSimpleName());
}
@Override
public boolean hasPermission(String permission) {
return player.hasPermission(permission);
}
@Override
public boolean hasPermission(Permission permission) {
return player.hasPermission(permission);
}
@Override
public InetSocketAddress getAddress() {
return player.getAddress();
}
@Override
public Object getHandle() {
Object object = null;
try {
object = player.getClass().getMethod("getHandle").invoke(player);
} catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
e.printStackTrace();
}
return object;
}
@Override
public Location getLocation() {
return player.getLocation();
}
@Override
public Inventory getInventory() {
return player.getInventory();
}
@Override
public void teleport(Location loc) {
player.teleport(loc);
}
@Override
public void teleport(Entity entity) {
player.teleport(entity);
}
@Override
public void updateStatus(StatusUpdate update) {
throw new UnsupportedOperationException(getClass().getSimpleName());
}
@Override
public void updateStatus(StatusUpdate update, Callback callback) {
throw new UnsupportedOperationException(getClass().getSimpleName());
}
@Override
public void updateStatus(String tweet) {
throw new UnsupportedOperationException(getClass().getSimpleName());
}
@Override
public List<Status> getTimeLine() {
throw new UnsupportedOperationException(getClass().getSimpleName());
}
}<|fim▁end|> | |
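// Usage sketch (hypothetical subclass): EggPlayer is abstract, so a concrete
// player type extends it and implements the remaining methods, e.g. the
// getPrefix() used by getFullName() above:
//
//   public class ConcreteEggPlayer extends EggPlayer {
//       public ConcreteEggPlayer(Player p) { super(p); }
//       public String getPrefix() { return ""; }
//   }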
<|file_name|>alertview.py<|end_file_name|><|fim▁begin|>import kivy
kivy.require('1.9.1')
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.metrics import dp, sp
from kivy.app import Builder
from kivy.properties import StringProperty, ObjectProperty
from kivy.clock import Clock
from iconbutton import IconButton
__all__ = ('alertPopup', 'confirmPopup', 'okPopup', 'editor_popup')
Builder.load_string('''
<ConfirmPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
<OkPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
<EditorPopup>:
id: editor_popup
cols:1
BoxLayout:
id: content
GridLayout:
id: buttons
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
''')
def alertPopup(title, msg):
popup = Popup(title = title,
content=Label(text = msg),
size_hint=(None, None), size=(dp(600), dp(200)))
popup.open()
def confirmPopup(title, msg, answerCallback):
content = ConfirmPopup(text=msg)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class ConfirmPopup(GridLayout):
text = StringProperty()<|fim▁hole|> self.register_event_type('on_answer')
super(ConfirmPopup,self).__init__(**kwargs)
def on_answer(self, *args):
pass
def editor_popup(title, content, answerCallback):
content = EditorPopup(content=content)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(0.7, 0.8),
auto_dismiss= False,
title_size=sp(18))
popup.open()
return popup
class EditorPopup(GridLayout):
content = ObjectProperty(None)
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(EditorPopup,self).__init__(**kwargs)
def on_content(self, instance, value):
Clock.schedule_once(lambda dt: self.ids.content.add_widget(value))
def on_answer(self, *args):
pass
def okPopup(title, msg, answerCallback):
content = OkPopup(text=msg)
content.bind(on_ok=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class OkPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_ok')
super(OkPopup,self).__init__(**kwargs)
def on_ok(self, *args):
pass<|fim▁end|> |
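# Usage sketch (illustrative callback; 'answer' is the boolean dispatched by
# on_answer above):
#
#   def _on_answer(instance, answer):
#       print('confirmed' if answer else 'cancelled')
#       popup.dismiss()
#
#   popup = confirmPopup('Delete?', 'Really delete this item?', _on_answer)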
def __init__(self,**kwargs): |
<|file_name|>kindck-owned-trait-contains-1.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(unknown_features)]
#![feature(box_syntax)]
trait repeat<A> { fn get(&self) -> A; }
impl<A:Clone + 'static> repeat<A> for Box<A> {
fn get(&self) -> A {
(**self).clone()
}
}
fn repeater<A:Clone + 'static>(v: Box<A>) -> Box<repeat<A>+'static> {
box v as Box<repeat<A>+'static> // No
}
<|fim▁hole|> let y = repeater(box x);
assert_eq!(x, y.get());
}<|fim▁end|> | pub fn main() {
let x = 3; |
<|file_name|>exon_utils_tests.py<|end_file_name|><|fim▁begin|>"""Testing for overlap intervals
"""
import unittest
from genda.transcripts.exon_utils import calcOverlap, collideIntervals, \
collapseIntervals
class TestOverlapFunctions(unittest.TestCase):
def setUp(self):
# Simple Overlap
self.simple = [(1,10), (6,15)]
# One interval enclosed in another
self.enclosed = [(100,200), (110,150)]
# Partial overlap
self.partial = [(150,300), (160,300), (170,330)]
# No overlap
self.no = [(150,300), (10,30)]
# Equal
self.equal = [(1,15), (1,5)]
#Complex interval list
self.full = [(7,20), (1,5), (8,11), (18,50), (100,150)]
def test_bpOverlap(self):
# Make sure overlaps are calculated correctly
self.assertEqual(calcOverlap(self.simple), 4)
self.assertEqual(calcOverlap(self.enclosed), 40)
self.assertEqual(calcOverlap(self.partial),400)
def test_collideIntervals(self):
self.assertEqual(collideIntervals(self.simple[0], self.simple[1]),
[(1,15)])
self.assertEqual(collideIntervals(self.enclosed[0], self.enclosed[1]),
[(100,200)])
self.assertEqual(collideIntervals(self.no[0], self.no[1]),self.no)
def test_collapseIntervals(self):
self.assertEqual(collapseIntervals(self.simple), [(1,15)])
print(self.partial)
self.assertEqual(collapseIntervals(self.partial), [(150,330)])
print(self.full)
self.assertEqual(collapseIntervals(self.full), [(1,5),(7,50),(100,150)])
def test_unique_bp(self):
self.assertEqual(sum(map(lambda x \<|fim▁hole|>
if __name__ == '__main__':
unittest.main()<|fim▁end|> | :x[1]-x[0],collapseIntervals(self.partial))) -
calcOverlap(self.partial),330-150) |
<|file_name|>test_httplib.py<|end_file_name|><|fim▁begin|>import errno
from http import client
import io
import os
import array
import socket
import unittest
TestCase = unittest.TestCase
from test import support
here = os.path.dirname(__file__)
# Self-signed cert file for 'localhost'
CERT_localhost = os.path.join(here, 'keycert.pem')
# Self-signed cert file for 'fakehostname'
CERT_fakehostname = os.path.join(here, 'keycert2.pem')
# Root cert file (CA) for svn.python.org's cert
CACERT_svn_python_org = os.path.join(here, 'https_svn_python_org_root.pem')
HOST = support.HOST
class FakeSocket:
def __init__(self, text, fileclass=io.BytesIO):
if isinstance(text, str):
text = text.encode("ascii")
self.text = text
self.fileclass = fileclass
self.data = b''
def sendall(self, data):
self.data += data
def makefile(self, mode, bufsize=None):
if mode != 'r' and mode != 'rb':
raise client.UnimplementedFileMode()
return self.fileclass(self.text)
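# FakeSocket lets a test feed a canned response straight into HTTPResponse:
#
#   sock = FakeSocket('HTTP/1.1 200 OK\r\n\r\nText')
#   resp = client.HTTPResponse(sock)
#   resp.begin()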
class EPipeSocket(FakeSocket):
def __init__(self, text, pipe_trigger):
# When sendall() is called with pipe_trigger, raise EPIPE.
FakeSocket.__init__(self, text)
self.pipe_trigger = pipe_trigger
def sendall(self, data):
if self.pipe_trigger in data:
raise socket.error(errno.EPIPE, "gotcha")
self.data += data
def close(self):
pass
class NoEOFStringIO(io.BytesIO):
"""Like StringIO, but raises AssertionError on EOF.
This is used below to test that http.client doesn't try to read
more from the underlying file than it should.
"""
def read(self, n=-1):
data = io.BytesIO.read(self, n)
if data == b'':
raise AssertionError('caller tried to read past EOF')
return data
def readline(self, length=None):
data = io.BytesIO.readline(self, length)
if data == b'':
raise AssertionError('caller tried to read past EOF')
return data
class HeaderTests(TestCase):
def test_auto_headers(self):
# Some headers are added automatically, but should not be added by
# .request() if they are explicitly set.
class HeaderCountingBuffer(list):
def __init__(self):
self.count = {}
def append(self, item):
kv = item.split(b':')
if len(kv) > 1:
# item is a 'Key: Value' header string
lcKey = kv[0].decode('ascii').lower()
self.count.setdefault(lcKey, 0)
self.count[lcKey] += 1
list.append(self, item)
for explicit_header in True, False:
for header in 'Content-length', 'Host', 'Accept-encoding':
conn = client.HTTPConnection('example.com')
conn.sock = FakeSocket('blahblahblah')
conn._buffer = HeaderCountingBuffer()
body = 'spamspamspam'
headers = {}
if explicit_header:
headers[header] = str(len(body))
conn.request('POST', '/', body, headers)
self.assertEqual(conn._buffer.count[header.lower()], 1)
def test_putheader(self):
conn = client.HTTPConnection('example.com')
conn.sock = FakeSocket(None)
conn.putrequest('GET','/')
conn.putheader('Content-length', 42)
self.assertTrue(b'Content-length: 42' in conn._buffer)
def test_ipv6host_header(self):
# Default host header on IPv6 transaction should wrapped by [] if
# its actual IPv6 address
expected = b'GET /foo HTTP/1.1\r\nHost: [2001::]:81\r\n' \
b'Accept-Encoding: identity\r\n\r\n'
conn = client.HTTPConnection('[2001::]:81')
sock = FakeSocket('')
conn.sock = sock
conn.request('GET', '/foo')
self.assertTrue(sock.data.startswith(expected))
expected = b'GET /foo HTTP/1.1\r\nHost: [2001:102A::]\r\n' \
b'Accept-Encoding: identity\r\n\r\n'
conn = client.HTTPConnection('[2001:102A::]')
sock = FakeSocket('')
conn.sock = sock
conn.request('GET', '/foo')
self.assertTrue(sock.data.startswith(expected))
class BasicTest(TestCase):
def test_status_lines(self):
# Test HTTP status lines
body = "HTTP/1.1 200 Ok\r\n\r\nText"
sock = FakeSocket(body)
resp = client.HTTPResponse(sock)
resp.begin()
self.assertEqual(resp.read(), b"Text")
self.assertTrue(resp.isclosed())
body = "HTTP/1.1 400.100 Not Ok\r\n\r\nText"
sock = FakeSocket(body)
resp = client.HTTPResponse(sock)
self.assertRaises(client.BadStatusLine, resp.begin)
def test_bad_status_repr(self):
exc = client.BadStatusLine('')
self.assertEqual(repr(exc), '''BadStatusLine("\'\'",)''')
def test_partial_reads(self):
        # if we have a length, the system knows when to close itself
        # same behaviour as when we read the whole thing with read()
body = "HTTP/1.1 200 Ok\r\nContent-Length: 4\r\n\r\nText"
sock = FakeSocket(body)
resp = client.HTTPResponse(sock)
resp.begin()
self.assertEqual(resp.read(2), b'Te')
self.assertFalse(resp.isclosed())
self.assertEqual(resp.read(2), b'xt')
self.assertTrue(resp.isclosed())
def test_host_port(self):
# Check invalid host_port
for hp in ("www.python.org:abc", "www.python.org:"):
self.assertRaises(client.InvalidURL, client.HTTPConnection, hp)
for hp, h, p in (("[fe80::207:e9ff:fe9b]:8000",
"fe80::207:e9ff:fe9b", 8000),
("www.python.org:80", "www.python.org", 80),<|fim▁hole|> self.assertEqual(h, c.host)
self.assertEqual(p, c.port)
def test_response_headers(self):
# test response with multiple message headers with the same field name.
text = ('HTTP/1.1 200 OK\r\n'
'Set-Cookie: Customer="WILE_E_COYOTE"; '
'Version="1"; Path="/acme"\r\n'
'Set-Cookie: Part_Number="Rocket_Launcher_0001"; Version="1";'
' Path="/acme"\r\n'
'\r\n'
'No body\r\n')
hdr = ('Customer="WILE_E_COYOTE"; Version="1"; Path="/acme"'
', '
'Part_Number="Rocket_Launcher_0001"; Version="1"; Path="/acme"')
s = FakeSocket(text)
r = client.HTTPResponse(s)
r.begin()
cookies = r.getheader("Set-Cookie")
self.assertEqual(cookies, hdr)
def test_read_head(self):
# Test that the library doesn't attempt to read any data
# from a HEAD request. (Tickles SF bug #622042.)
sock = FakeSocket(
'HTTP/1.1 200 OK\r\n'
'Content-Length: 14432\r\n'
'\r\n',
NoEOFStringIO)
resp = client.HTTPResponse(sock, method="HEAD")
resp.begin()
if resp.read():
self.fail("Did not expect response from HEAD request")
def test_send_file(self):
expected = (b'GET /foo HTTP/1.1\r\nHost: example.com\r\n'
b'Accept-Encoding: identity\r\nContent-Length:')
with open(__file__, 'rb') as body:
conn = client.HTTPConnection('example.com')
sock = FakeSocket(body)
conn.sock = sock
conn.request('GET', '/foo', body)
self.assertTrue(sock.data.startswith(expected), '%r != %r' %
(sock.data[:len(expected)], expected))
def test_send(self):
expected = b'this is a test this is only a test'
conn = client.HTTPConnection('example.com')
sock = FakeSocket(None)
conn.sock = sock
conn.send(expected)
self.assertEqual(expected, sock.data)
sock.data = b''
conn.send(array.array('b', expected))
self.assertEqual(expected, sock.data)
sock.data = b''
conn.send(io.BytesIO(expected))
self.assertEqual(expected, sock.data)
def test_send_iter(self):
expected = b'GET /foo HTTP/1.1\r\nHost: example.com\r\n' \
b'Accept-Encoding: identity\r\nContent-Length: 11\r\n' \
b'\r\nonetwothree'
def body():
yield b"one"
yield b"two"
yield b"three"
conn = client.HTTPConnection('example.com')
sock = FakeSocket("")
conn.sock = sock
conn.request('GET', '/foo', body(), {'Content-Length': '11'})
self.assertEqual(sock.data, expected)
def test_send_type_error(self):
# See: Issue #12676
conn = client.HTTPConnection('example.com')
conn.sock = FakeSocket('')
with self.assertRaises(TypeError):
conn.request('POST', 'test', conn)
def test_chunked(self):
chunked_start = (
'HTTP/1.1 200 OK\r\n'
'Transfer-Encoding: chunked\r\n\r\n'
'a\r\n'
'hello worl\r\n'
'1\r\n'
'd\r\n'
)
sock = FakeSocket(chunked_start + '0\r\n')
resp = client.HTTPResponse(sock, method="GET")
resp.begin()
self.assertEqual(resp.read(), b'hello world')
resp.close()
for x in ('', 'foo\r\n'):
sock = FakeSocket(chunked_start + x)
resp = client.HTTPResponse(sock, method="GET")
resp.begin()
try:
resp.read()
except client.IncompleteRead as i:
self.assertEqual(i.partial, b'hello world')
self.assertEqual(repr(i),'IncompleteRead(11 bytes read)')
self.assertEqual(str(i),'IncompleteRead(11 bytes read)')
else:
self.fail('IncompleteRead expected')
finally:
resp.close()
def test_chunked_head(self):
chunked_start = (
'HTTP/1.1 200 OK\r\n'
'Transfer-Encoding: chunked\r\n\r\n'
'a\r\n'
'hello world\r\n'
'1\r\n'
'd\r\n'
)
sock = FakeSocket(chunked_start + '0\r\n')
resp = client.HTTPResponse(sock, method="HEAD")
resp.begin()
self.assertEqual(resp.read(), b'')
self.assertEqual(resp.status, 200)
self.assertEqual(resp.reason, 'OK')
self.assertTrue(resp.isclosed())
def test_negative_content_length(self):
sock = FakeSocket(
'HTTP/1.1 200 OK\r\nContent-Length: -1\r\n\r\nHello\r\n')
resp = client.HTTPResponse(sock, method="GET")
resp.begin()
self.assertEqual(resp.read(), b'Hello\r\n')
resp.close()
def test_incomplete_read(self):
sock = FakeSocket('HTTP/1.1 200 OK\r\nContent-Length: 10\r\n\r\nHello\r\n')
resp = client.HTTPResponse(sock, method="GET")
resp.begin()
try:
resp.read()
except client.IncompleteRead as i:
self.assertEqual(i.partial, b'Hello\r\n')
self.assertEqual(repr(i),
"IncompleteRead(7 bytes read, 3 more expected)")
self.assertEqual(str(i),
"IncompleteRead(7 bytes read, 3 more expected)")
else:
self.fail('IncompleteRead expected')
finally:
resp.close()
def test_epipe(self):
sock = EPipeSocket(
"HTTP/1.0 401 Authorization Required\r\n"
"Content-type: text/html\r\n"
"WWW-Authenticate: Basic realm=\"example\"\r\n",
b"Content-Length")
conn = client.HTTPConnection("example.com")
conn.sock = sock
self.assertRaises(socket.error,
lambda: conn.request("PUT", "/url", "body"))
resp = conn.getresponse()
self.assertEqual(401, resp.status)
self.assertEqual("Basic realm=\"example\"",
resp.getheader("www-authenticate"))
# Test lines overflowing the max line size (_MAXLINE in http.client)
def test_overflowing_status_line(self):
body = "HTTP/1.1 200 Ok" + "k" * 65536 + "\r\n"
resp = client.HTTPResponse(FakeSocket(body))
self.assertRaises((client.LineTooLong, client.BadStatusLine), resp.begin)
def test_overflowing_header_line(self):
body = (
'HTTP/1.1 200 OK\r\n'
'X-Foo: bar' + 'r' * 65536 + '\r\n\r\n'
)
resp = client.HTTPResponse(FakeSocket(body))
self.assertRaises(client.LineTooLong, resp.begin)
def test_overflowing_chunked_line(self):
body = (
'HTTP/1.1 200 OK\r\n'
'Transfer-Encoding: chunked\r\n\r\n'
+ '0' * 65536 + 'a\r\n'
'hello world\r\n'
'0\r\n'
)
resp = client.HTTPResponse(FakeSocket(body))
resp.begin()
self.assertRaises(client.LineTooLong, resp.read)
class OfflineTest(TestCase):
def test_responses(self):
self.assertEqual(client.responses[client.NOT_FOUND], "Not Found")
class SourceAddressTest(TestCase):
def setUp(self):
self.serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.port = support.bind_port(self.serv)
self.source_port = support.find_unused_port()
self.serv.listen(5)
self.conn = None
def tearDown(self):
if self.conn:
self.conn.close()
self.conn = None
self.serv.close()
self.serv = None
def testHTTPConnectionSourceAddress(self):
self.conn = client.HTTPConnection(HOST, self.port,
source_address=('', self.source_port))
self.conn.connect()
self.assertEqual(self.conn.sock.getsockname()[1], self.source_port)
@unittest.skipIf(not hasattr(client, 'HTTPSConnection'),
'http.client.HTTPSConnection not defined')
def testHTTPSConnectionSourceAddress(self):
self.conn = client.HTTPSConnection(HOST, self.port,
source_address=('', self.source_port))
# We don't test anything here other the constructor not barfing as
# this code doesn't deal with setting up an active running SSL server
# for an ssl_wrapped connect() to actually return from.
class TimeoutTest(TestCase):
PORT = None
def setUp(self):
self.serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
TimeoutTest.PORT = support.bind_port(self.serv)
self.serv.listen(5)
def tearDown(self):
self.serv.close()
self.serv = None
def testTimeoutAttribute(self):
# This will prove that the timeout gets through HTTPConnection
# and into the socket.
# default -- use global socket timeout
self.assertTrue(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(30)
try:
httpConn = client.HTTPConnection(HOST, TimeoutTest.PORT)
httpConn.connect()
finally:
socket.setdefaulttimeout(None)
self.assertEqual(httpConn.sock.gettimeout(), 30)
httpConn.close()
# no timeout -- do not use global socket default
self.assertTrue(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(30)
try:
httpConn = client.HTTPConnection(HOST, TimeoutTest.PORT,
timeout=None)
httpConn.connect()
finally:
socket.setdefaulttimeout(None)
self.assertEqual(httpConn.sock.gettimeout(), None)
httpConn.close()
# a value
httpConn = client.HTTPConnection(HOST, TimeoutTest.PORT, timeout=30)
httpConn.connect()
self.assertEqual(httpConn.sock.gettimeout(), 30)
httpConn.close()
class HTTPSTest(TestCase):
def setUp(self):
if not hasattr(client, 'HTTPSConnection'):
self.skipTest('ssl support required')
def make_server(self, certfile):
from test.ssl_servers import make_https_server
return make_https_server(self, certfile)
def test_attributes(self):
# simple test to check it's storing the timeout
h = client.HTTPSConnection(HOST, TimeoutTest.PORT, timeout=30)
self.assertEqual(h.timeout, 30)
def _check_svn_python_org(self, resp):
# Just a simple check that everything went fine
server_string = resp.getheader('server')
self.assertIn('Apache', server_string)
def test_networked(self):
# Default settings: no cert verification is done
support.requires('network')
with support.transient_internet('svn.python.org'):
h = client.HTTPSConnection('svn.python.org', 443)
h.request('GET', '/')
resp = h.getresponse()
self._check_svn_python_org(resp)
def test_networked_good_cert(self):
# We feed a CA cert that validates the server's cert
import ssl
support.requires('network')
with support.transient_internet('svn.python.org'):
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations(CACERT_svn_python_org)
h = client.HTTPSConnection('svn.python.org', 443, context=context)
h.request('GET', '/')
resp = h.getresponse()
self._check_svn_python_org(resp)
def test_networked_bad_cert(self):
# We feed a "CA" cert that is unrelated to the server's cert
import ssl
support.requires('network')
with support.transient_internet('svn.python.org'):
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations(CERT_localhost)
h = client.HTTPSConnection('svn.python.org', 443, context=context)
with self.assertRaises(ssl.SSLError):
h.request('GET', '/')
def test_local_good_hostname(self):
# The (valid) cert validates the HTTP hostname
import ssl
from test.ssl_servers import make_https_server
server = make_https_server(self, CERT_localhost)
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations(CERT_localhost)
h = client.HTTPSConnection('localhost', server.port, context=context)
h.request('GET', '/nonexistent')
resp = h.getresponse()
self.assertEqual(resp.status, 404)
def test_local_bad_hostname(self):
# The (valid) cert doesn't validate the HTTP hostname
import ssl
from test.ssl_servers import make_https_server
server = make_https_server(self, CERT_fakehostname)
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations(CERT_fakehostname)
h = client.HTTPSConnection('localhost', server.port, context=context)
with self.assertRaises(ssl.CertificateError):
h.request('GET', '/')
# Same with explicit check_hostname=True
h = client.HTTPSConnection('localhost', server.port, context=context,
check_hostname=True)
with self.assertRaises(ssl.CertificateError):
h.request('GET', '/')
# With check_hostname=False, the mismatching is ignored
h = client.HTTPSConnection('localhost', server.port, context=context,
check_hostname=False)
h.request('GET', '/nonexistent')
resp = h.getresponse()
self.assertEqual(resp.status, 404)
class RequestBodyTest(TestCase):
"""Test cases where a request includes a message body."""
def setUp(self):
self.conn = client.HTTPConnection('example.com')
self.conn.sock = self.sock = FakeSocket("")
self.conn.sock = self.sock
def get_headers_and_fp(self):
f = io.BytesIO(self.sock.data)
f.readline() # read the request line
message = client.parse_headers(f)
return message, f
def test_manual_content_length(self):
# Set an incorrect content-length so that we can verify that
# it will not be over-ridden by the library.
self.conn.request("PUT", "/url", "body",
{"Content-Length": "42"})
message, f = self.get_headers_and_fp()
self.assertEqual("42", message.get("content-length"))
self.assertEqual(4, len(f.read()))
def test_ascii_body(self):
self.conn.request("PUT", "/url", "body")
message, f = self.get_headers_and_fp()
self.assertEqual("text/plain", message.get_content_type())
self.assertIsNone(message.get_charset())
self.assertEqual("4", message.get("content-length"))
self.assertEqual(b'body', f.read())
def test_latin1_body(self):
self.conn.request("PUT", "/url", "body\xc1")
message, f = self.get_headers_and_fp()
self.assertEqual("text/plain", message.get_content_type())
self.assertIsNone(message.get_charset())
self.assertEqual("5", message.get("content-length"))
self.assertEqual(b'body\xc1', f.read())
def test_bytes_body(self):
self.conn.request("PUT", "/url", b"body\xc1")
message, f = self.get_headers_and_fp()
self.assertEqual("text/plain", message.get_content_type())
self.assertIsNone(message.get_charset())
self.assertEqual("5", message.get("content-length"))
self.assertEqual(b'body\xc1', f.read())
def test_file_body(self):
with open(support.TESTFN, "w") as f:
f.write("body")
with open(support.TESTFN) as f:
self.conn.request("PUT", "/url", f)
message, f = self.get_headers_and_fp()
self.assertEqual("text/plain", message.get_content_type())
self.assertIsNone(message.get_charset())
self.assertEqual("4", message.get("content-length"))
self.assertEqual(b'body', f.read())
def test_binary_file_body(self):
with open(support.TESTFN, "wb") as f:
f.write(b"body\xc1")
with open(support.TESTFN, "rb") as f:
self.conn.request("PUT", "/url", f)
message, f = self.get_headers_and_fp()
self.assertEqual("text/plain", message.get_content_type())
self.assertIsNone(message.get_charset())
self.assertEqual("5", message.get("content-length"))
self.assertEqual(b'body\xc1', f.read())
class HTTPResponseTest(TestCase):
def setUp(self):
body = "HTTP/1.1 200 Ok\r\nMy-Header: first-value\r\nMy-Header: \
second-value\r\n\r\nText"
sock = FakeSocket(body)
self.resp = client.HTTPResponse(sock)
self.resp.begin()
def test_getting_header(self):
header = self.resp.getheader('My-Header')
self.assertEqual(header, 'first-value, second-value')
header = self.resp.getheader('My-Header', 'some default')
self.assertEqual(header, 'first-value, second-value')
def test_getting_nonexistent_header_with_string_default(self):
header = self.resp.getheader('No-Such-Header', 'default-value')
self.assertEqual(header, 'default-value')
def test_getting_nonexistent_header_with_iterable_default(self):
header = self.resp.getheader('No-Such-Header', ['default', 'values'])
self.assertEqual(header, 'default, values')
header = self.resp.getheader('No-Such-Header', ('default', 'values'))
self.assertEqual(header, 'default, values')
def test_getting_nonexistent_header_without_default(self):
header = self.resp.getheader('No-Such-Header')
self.assertEqual(header, None)
def test_getting_header_defaultint(self):
header = self.resp.getheader('No-Such-Header',default=42)
self.assertEqual(header, 42)
def test_main(verbose=None):
support.run_unittest(HeaderTests, OfflineTest, BasicTest, TimeoutTest,
HTTPSTest, RequestBodyTest, SourceAddressTest,
HTTPResponseTest)
if __name__ == '__main__':
test_main()<|fim▁end|> | ("www.python.org", "www.python.org", 80),
("[fe80::207:e9ff:fe9b]", "fe80::207:e9ff:fe9b", 80)):
c = client.HTTPConnection(hp) |
<|file_name|>filesmonster.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# ------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Connector for filesmonster
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
# ------------------------------------------------------------
import re
from core import logger
from core import scrapertools
def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("( page_url='%s')")
video_urls = []
itemlist = []
data1 = ''
data2 = ''
url = ''
alerta = '[filesmonster premium]'
enlace = "no"
post2 = "username=" + user + "&password=" + password
login_url = "http://filesmonster.com/api/public/login"
data1 = scrapertools.cache_page(login_url, post=post2)
partes1 = data1.split('"')
estado = partes1[3]
if estado != 'success': alerta = "[error de filesmonster premium]: " + estado
id = page_url
id = id.replace("http://filesmonster.com/download.php", "")
post = id.replace("?", "")
url = 'http://filesmonster.com/api/public/premiumDownload'
data2 = scrapertools.cache_page(url, post=post)
partes = data2.split('"')
url = partes[7]
filename = scrapertools.get_filename_from_url(url)[-4:]
alerta = filename + " " + alerta
if "http" not in url: alerta = "[error de filesmonster premium]: " + url
video_urls.append([alerta, url])
return video_urls
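# get_video_url() returns [label, direct_url] pairs, e.g. (illustrative):
#   [['.mp4 [filesmonster premium]', 'http://filesmonster.com/dl/...']]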
# Finds this server's videos in the supplied text
def find_videos(data):
encontrados = set()
devuelve = []
    # http://filesmonster.com/download.php?...
patronvideos = '"filesmonster.com/download(.*?)"'
logger.info("#" + patronvideos + "#")
matches = re.compile(patronvideos, re.DOTALL).findall(data)
for match in matches:
titulo = "[filesmonster]"
url = "http://filesmonster.com/download" + match
if url not in encontrados:<|fim▁hole|> devuelve.append([titulo, url, 'filemonster'])
encontrados.add(url)
else:
logger.info(" url duplicada=" + url)
return devuelve<|fim▁end|> | logger.info(" url=" + url) |
<|file_name|>color-contrast.js<|end_file_name|><|fim▁begin|>/**
* @license Copyright 2017 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
'use strict';
/**
* @fileoverview Ensures the contrast between foreground and background colors meets<|fim▁hole|> */
const AxeAudit = require('./axe-audit');
class ColorContrast extends AxeAudit {
/**
* @return {LH.Audit.Meta}
*/
static get meta() {
return {
id: 'color-contrast',
title: 'Background and foreground colors have a sufficient contrast ratio',
failureTitle: 'Background and foreground colors do not have a ' +
'sufficient contrast ratio.',
description: 'Low-contrast text is difficult or impossible for many users to read. ' +
'[Learn more](https://dequeuniversity.com/rules/axe/2.2/color-contrast?application=lighthouse).',
requiredArtifacts: ['Accessibility'],
};
}
}
module.exports = ColorContrast;<|fim▁end|> | * WCAG 2 AA contrast ratio thresholds.
* See base class in axe-audit.js for audit() implementation. |
<|file_name|>S11.13.2_A4.1_T2.6.js<|end_file_name|><|fim▁begin|>// Copyright 2009 the Sputnik authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/**
* The production x *= y is the same as the production x = x * y
*
* @path ch11/11.13/11.13.2/S11.13.2_A4.1_T2.6.js
* @description Type(x) is different from Type(y) and both types vary between primitive String (primitive or object) and Undefined
*/
//CHECK#1
x = "1";
x *= undefined;
if (isNaN(x) !== true) {
$ERROR('#1: x = "1"; x *= undefined; x === Not-a-Number. Actual: ' + (x));
}
//CHECK#2
x = undefined;
x *= "1";<|fim▁hole|>//CHECK#3
x = new String("1");
x *= undefined;
if (isNaN(x) !== true) {
$ERROR('#3: x = new String("1"); x *= undefined; x === Not-a-Number. Actual: ' + (x));
}
//CHECK#4
x = undefined;
x *= new String("1");
if (isNaN(x) !== true) {
$ERROR('#4: x = undefined; x *= new String("1"); x === Not-a-Number. Actual: ' + (x));
}<|fim▁end|> | if (isNaN(x) !== true) {
$ERROR('#2: x = undefined; x *= "1"; x === Not-a-Number. Actual: ' + (x));
}
|
<|file_name|>plot_pattern_diagram_markers.py<|end_file_name|><|fim▁begin|>import matplotlib.pyplot as plt
import matplotlib.colors as clr
import matplotlib
import warnings
from skill_metrics import add_legend
def plot_pattern_diagram_markers(X,Y,option):
'''
Plots color markers on a pattern diagram.
Plots color markers on a target diagram according their (X,Y)
locations. The symbols and colors are chosen automatically with a
limit of 70 symbol & color combinations.
The color bar is titled using the content of option['titleColorBar']
(if non-empty string).
INPUTS:
    X : x-coordinates of markers
    Y : y-coordinates of markers
option : dictionary containing option values. (Refer to
GET_TARGET_DIAGRAM_OPTIONS function for more information.)
option['axismax'] : maximum for the X & Y values. Used to limit
maximum distance from origin to display markers
option['markerlabel'] : labels for markers
OUTPUTS:
None
Created on Nov 30, 2016
Revised on Jan 6, 2019
Author: Peter A. Rochford
Symplectic, LLC
www.thesymplectic.com
[email protected]
'''
# Set face color transparency
alpha = option['alpha']
# Set font and marker size
fontSize = matplotlib.rcParams.get('font.size') - 2
markerSize = option['markersize']
if option['markerlegend'] == 'on':
# Check that marker labels have been provided
if option['markerlabel'] == '':
raise ValueError('No marker labels provided.')
# Plot markers of different color and shapes with labels
# displayed in a legend
# Define markers
kind = ['+','o','x','s','d','^','v','p','h','*']<|fim▁hole|>
if len(X) <= len(kind):
# Define markers with specified color
marker = []
markercolor = []
for color in colorm:
for symbol in kind:
marker.append(symbol + option['markercolor'])
rgba = clr.to_rgb(option['markercolor']) + (alpha,)
markercolor.append(rgba)
else:
# Define markers and colors using predefined list
marker = []
markercolor = [] #Bug Fix: missing array initialization
for color in colorm:
for symbol in kind:
marker.append(symbol + color)
rgba = clr.to_rgb(color) + (alpha,)
markercolor.append(rgba)
# Plot markers at data points
limit = option['axismax']
hp = ()
markerlabel = []
for i, xval in enumerate(X):
if abs(X[i]) <= limit and abs(Y[i]) <= limit:
h = plt.plot(X[i],Y[i],marker[i], markersize = markerSize,
markerfacecolor = markercolor[i],
markeredgecolor = marker[i][1],
markeredgewidth = 2)
hp += tuple(h)
markerlabel.append(option['markerlabel'][i])
# Add legend
if len(markerlabel) == 0:
warnings.warn('No markers within axis limit ranges.')
else:
add_legend(markerlabel, option, rgba, markerSize, fontSize, hp)
else:
# Plot markers as dots of a single color with accompanying labels
# and no legend
# Plot markers at data points
limit = option['axismax']
rgba = clr.to_rgb(option['markercolor']) + (alpha,)
for i,xval in enumerate(X):
if abs(X[i]) <= limit and abs(Y[i]) <= limit:
# Plot marker
marker = option['markersymbol']
plt.plot(X[i],Y[i],marker, markersize = markerSize,
markerfacecolor = rgba,
markeredgecolor = option['markercolor'])
# Check if marker labels provided
if type(option['markerlabel']) is list:
# Label marker
xtextpos = X[i]
ytextpos = Y[i]
plt.text(xtextpos,ytextpos,option['markerlabel'][i],
color = option['markerlabelcolor'],
verticalalignment = 'bottom',
horizontalalignment = 'right',
fontsize = fontSize)
# Add legend if labels provided as dictionary
markerlabel = option['markerlabel']
if type(markerlabel) is dict:
add_legend(markerlabel, option, rgba, markerSize, fontSize)
def _disp(text):
print(text)<|fim▁end|> | colorm = ['b','r','g','c','m','y','k']
if len(X) > 70:
_disp('You must introduce new markers to plot more than 70 cases.')
            _disp("The 'marker' character array needs to be extended inside the code.")
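For reference, a minimal usage sketch for the function above; the option keys mirror the ones the code reads, while the import path and the sample data are assumptions:

import matplotlib.pyplot as plt
from plot_pattern_diagram_markers import plot_pattern_diagram_markers  # assumed module path

# Hypothetical options; 'markerlegend' off selects the single-color branch,
# so 'markersymbol', 'markerlabel' (as a list), and 'markerlabelcolor' are used.
option = {'alpha': 0.7, 'markersize': 10, 'axismax': 2.0,
          'markerlegend': 'off', 'markercolor': 'r', 'markersymbol': 'o',
          'markerlabel': ['A', 'B'], 'markerlabelcolor': 'k'}
plot_pattern_diagram_markers([0.5, -1.2], [0.8, 0.3], option)
plt.show()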
<|file_name|>DeserializeReshape.cpp<|end_file_name|><|fim▁begin|>//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include "ParserFlatbuffersSerializeFixture.hpp"
#include <armnnDeserializer/IDeserializer.hpp>
#include <string>
TEST_SUITE("Deserializer_Reshape")
{
struct ReshapeFixture : public ParserFlatbuffersSerializeFixture
{
explicit ReshapeFixture(const std::string &inputShape,
const std::string &targetShape,
const std::string &outputShape,
const std::string &dataType)
{
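        // Serialize a three-layer network (Input -> Reshape -> Output) as FlatBuffers JSON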
m_JsonString = R"(
{
inputIds: [0],
outputIds: [2],
layers: [
{
layer_type: "InputLayer",<|fim▁hole|> index: 0,
layerName: "InputLayer",
layerType: "Input",
inputSlots: [{
index: 0,
connection: {sourceLayerIndex:0, outputSlotIndex:0 },
}],
outputSlots: [ {
index: 0,
tensorInfo: {
dimensions: )" + inputShape + R"(,
dataType: )" + dataType + R"(
}}]
}
}}},
{
layer_type: "ReshapeLayer",
layer: {
base: {
index: 1,
layerName: "ReshapeLayer",
layerType: "Reshape",
inputSlots: [{
index: 0,
connection: {sourceLayerIndex:0, outputSlotIndex:0 },
}],
outputSlots: [ {
index: 0,
tensorInfo: {
dimensions: )" + inputShape + R"(,
dataType: )" + dataType + R"(
}}]},
descriptor: {
targetShape: )" + targetShape + R"(,
}
}},
{
layer_type: "OutputLayer",
layer: {
base:{
layerBindingId: 2,
base: {
index: 2,
layerName: "OutputLayer",
layerType: "Output",
inputSlots: [{
index: 0,
connection: {sourceLayerIndex:0, outputSlotIndex:0 },
}],
outputSlots: [ {
index: 0,
tensorInfo: {
dimensions: )" + outputShape + R"(,
dataType: )" + dataType + R"(
},
}],
}}},
}]
}
)";
SetupSingleInputSingleOutput("InputLayer", "OutputLayer");
}
};
struct SimpleReshapeFixture : ReshapeFixture
{
SimpleReshapeFixture() : ReshapeFixture("[ 1, 9 ]", "[ 3, 3 ]", "[ 3, 3 ]",
"QuantisedAsymm8") {}
};
struct SimpleReshapeFixture2 : ReshapeFixture
{
SimpleReshapeFixture2() : ReshapeFixture("[ 2, 2, 1, 1 ]",
"[ 2, 2, 1, 1 ]",
"[ 2, 2, 1, 1 ]",
"Float32") {}
};
TEST_CASE_FIXTURE(SimpleReshapeFixture, "ReshapeQuantisedAsymm8")
{
RunTest<2, armnn::DataType::QAsymmU8>(0,
{ 1, 2, 3, 4, 5, 6, 7, 8, 9 },
{ 1, 2, 3, 4, 5, 6, 7, 8, 9 });
}
TEST_CASE_FIXTURE(SimpleReshapeFixture2, "ReshapeFloat32")
{
RunTest<4, armnn::DataType::Float32>(0,
{ 111, 85, 226, 3 },
{ 111, 85, 226, 3 });
}
}<|fim▁end|> | layer: {
base: {
layerBindingId: 0,
base: { |
<|file_name|>test_feature.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import TestCase
<|fim▁hole|>from polycommon.options.option import NAMESPACE_DB_OPTION_MARKER, OptionStores
class DummyFeature(Feature):
pass
class TestFeature(TestCase):
def test_feature_default_store(self):
assert DummyFeature.store == OptionStores(settings.STORE_OPTION)
def test_feature_marker(self):
assert DummyFeature.get_marker() == NAMESPACE_DB_OPTION_MARKER
    def test_parse_key_wrong_namespace(self):
DummyFeature.key = "FOO"
with self.assertRaises(OptionException):
DummyFeature.parse_key()
DummyFeature.key = "FOO:BAR"
with self.assertRaises(OptionException):
DummyFeature.parse_key()
def test_parse_key_without_namespace(self):
DummyFeature.key = "FEATURES:FOO"
assert DummyFeature.parse_key() == (None, "FOO")
def test_parse_key_with_namespace(self):
DummyFeature.key = "FEATURES:FOO:BAR"
assert DummyFeature.parse_key() == ("FOO", "BAR")<|fim▁end|> | from django.conf import settings
from polycommon.options.exceptions import OptionException
from polycommon.options.feature import Feature |
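As a sketch of the key convention these tests pin down (the subclass names below are hypothetical):

class NotebooksFeature(Feature):
    key = "FEATURES:NOTEBOOKS"     # parse_key() -> (None, "NOTEBOOKS")

class KanikoBuildFeature(Feature):
    key = "FEATURES:BUILD:KANIKO"  # parse_key() -> ("BUILD", "KANIKO")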
<|file_name|>request.java<|end_file_name|><|fim▁begin|>package org.ocbc.utils;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.HashMap;
public class request {
public static JSONObject get(String url, HashMap<String, String> headers) {
BufferedReader in = null;
StringBuffer response = null;
try {
URL obj = new URL(url);
HttpURLConnection con = (HttpURLConnection) obj.openConnection();
con.setRequestMethod("GET");
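            // Forward any caller-supplied headers (e.g. Authorization) onto the request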
for (String key : headers.keySet()) {
con.setRequestProperty(key, headers.get(key));
}<|fim▁hole|>
String inputLine;
while ((inputLine = in.readLine()) != null) {
response.append(inputLine);
}
in.close();
} catch(Exception exception) {
System.out.println(exception);
return null;
}
return new JSONObject(response.toString());
}
}<|fim▁end|> |
in = new BufferedReader(new InputStreamReader(con.getInputStream()));
response = new StringBuffer(); |
<|file_name|>mouse.cpp<|end_file_name|><|fim▁begin|>#include "mouse.h"
//////////////////////////////////////////////////////////////////////////
// RAY //
//////////////////////////////////////////////////////////////////////////
RAY::RAY()
{
org = dir = D3DXVECTOR3(0.0f, 0.0f, 0.0f);
}
RAY::RAY(D3DXVECTOR3 o, D3DXVECTOR3 d)
{
org = o;
dir = d;
}
float RAY::Intersect(MESHINSTANCE iMesh)
{
if(iMesh.m_pMesh == NULL)return -1.0f;
return Intersect(iMesh.m_pMesh->m_pMesh);
}
float RAY::Intersect(BBOX bBox)
{
if(D3DXBoxBoundProbe(&bBox.min, &bBox.max, &org, &dir))
return D3DXVec3Length(&(((bBox.min + bBox.max) / 2.0f) - org));
else return -1.0f;
}
float RAY::Intersect(BSPHERE bSphere)
{
if(D3DXSphereBoundProbe(&bSphere.center, bSphere.radius, &org, &dir))
return D3DXVec3Length(&(bSphere.center - org));
else return -1.0f;
}
float RAY::Intersect(ID3DXMesh* mesh)
{
if(mesh == NULL)return -1.0f;
// Collect only the closest intersection
BOOL hit;
DWORD dwFace;
float hitU, hitV, dist;
D3DXIntersect(mesh, &org, &dir, &hit, &dwFace, &hitU, &hitV, &dist, NULL, NULL);
if(hit) return dist;
else return -1.0f;
}
//////////////////////////////////////////////////////////////////////////
// MOUSE //
//////////////////////////////////////////////////////////////////////////
MOUSE::MOUSE()
{
m_type = 0;
m_pMouseTexture = NULL;
m_pMouseDevice = NULL;
m_speed = 1.5f;
}
MOUSE::~MOUSE()
{
if(m_pMouseDevice != NULL)
m_pMouseDevice->Release();
if(m_pMouseTexture != NULL)
m_pMouseTexture->Release();
}
void MOUSE::InitMouse(IDirect3DDevice9* Dev, HWND wnd)
{
try
{
m_pDevice = Dev;
//Load texture and init sprite
D3DXCreateTextureFromFile(m_pDevice, "cursor/cursor.dds", &m_pMouseTexture);
D3DXCreateSprite(m_pDevice, &m_pSprite);
//Get directinput object
LPDIRECTINPUT8 directInput = NULL;
DirectInput8Create(GetModuleHandle(NULL), // Retrieves the application handle
0x0800, // v.8.0
IID_IDirectInput8, // Interface ID
(VOID**)&directInput, // Our DirectInput Object
NULL);
//Acquire Default System mouse
directInput->CreateDevice(GUID_SysMouse, &m_pMouseDevice, NULL);
m_pMouseDevice->SetDataFormat(&c_dfDIMouse);
m_pMouseDevice->SetCooperativeLevel(wnd, DISCL_EXCLUSIVE | DISCL_FOREGROUND);
m_pMouseDevice->Acquire();
//Get m_viewport size and init mouse location
D3DVIEWPORT9 v;
m_pDevice->GetViewport(&v);
m_viewport.left = v.X;
m_viewport.top = v.Y;
m_viewport.right = v.X + v.Width;
m_viewport.bottom = v.Y + v.Height;
x = v.X + v.Width / 2;
y = v.Y + v.Height / 2;
//Release Direct Input Object
directInput->Release();
//Create Mouse Sphere
D3DXCreateSphere(m_pDevice, 0.2f, 5, 5, &m_pSphereMesh, NULL);
//Create Ball Material
m_ballMtrl.Diffuse = D3DXCOLOR(1.0f, 1.0f, 0.0f, 1.0f);
m_ballMtrl.Specular = m_ballMtrl.Ambient = m_ballMtrl.Emissive = D3DXCOLOR(0.0f, 0.0f, 0.0f, 1.0f);
}
catch(...)
{
debug.Print("Error in MOUSE::InitMouse()");
}
}
bool MOUSE::ClickLeft()
{
return m_mouseState.rgbButtons[0] != 0;
}
bool MOUSE::ClickRight()
{
return m_mouseState.rgbButtons[1] != 0;
}
bool MOUSE::WheelUp()
{
return m_mouseState.lZ > 0.0f;
}
bool MOUSE::WheelDown()
{
return m_mouseState.lZ < 0.0f;
}
bool MOUSE::Over(RECT dest)
{
if(x < dest.left || x > dest.right)return false;
if(y < dest.top || y > dest.bottom)return false;
return true;
}
bool MOUSE::PressInRect(RECT dest)
{
return ClickLeft() && Over(dest);
}
void MOUSE::Update(TERRAIN &terrain)
{
//Retrieve mouse state
ZeroMemory(&m_mouseState, sizeof(DIMOUSESTATE));
m_pMouseDevice->GetDeviceState(sizeof(DIMOUSESTATE), &m_mouseState);
<|fim▁hole|> x += (int)(m_mouseState.lX * m_speed);
y += (int)(m_mouseState.lY * m_speed);
//Keep mouse pointer within window
if(x < m_viewport.left) x = m_viewport.left;
if(y < m_viewport.top) y = m_viewport.top;
if(x > m_viewport.right) x = m_viewport.right;
if(y > m_viewport.bottom) y = m_viewport.bottom;
CalculateMappos(terrain);
}
void MOUSE::Paint()
{
//Draw ball
D3DXMATRIX world;
D3DXMatrixTranslation(&world, m_ballPos.x, m_ballPos.y, m_ballPos.z);
m_pDevice->SetTransform(D3DTS_WORLD, &world);
m_pDevice->SetMaterial(&m_ballMtrl);
m_pDevice->SetTexture(0, NULL);
m_pSphereMesh->DrawSubset(0);
if(m_pMouseTexture != NULL)
{
RECT src[] = {{0,0,20,20}, {0,20,20,40}, {20,20,40,40}, {0,40,20,60}, {20,40,40,60}};
m_pSprite->Begin(D3DXSPRITE_ALPHABLEND);
m_pSprite->Draw(m_pMouseTexture, &src[m_type], NULL, &D3DXVECTOR3((float)x, (float)y, 0.0f), 0xffffffff);
m_pSprite->End();
}
}
RAY MOUSE::GetRay()
{
try
{
D3DXMATRIX projectionMatrix, viewMatrix, worldViewInverse, worldMatrix;
m_pDevice->GetTransform(D3DTS_PROJECTION, &projectionMatrix);
m_pDevice->GetTransform(D3DTS_VIEW, &viewMatrix);
m_pDevice->GetTransform(D3DTS_WORLD, &worldMatrix);
float width = (float)(m_viewport.right - m_viewport.left);
float height = (float)(m_viewport.bottom - m_viewport.top);
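		// Map the cursor position to normalized device coordinates, then divide by
		// the projection scale factors to get the ray direction in view space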
float angle_x = (((2.0f * x) / width) - 1.0f) / projectionMatrix(0,0);
float angle_y = (((-2.0f * y) / height) + 1.0f) / projectionMatrix(1,1);
RAY ray;
ray.org = D3DXVECTOR3(0.0f, 0.0f, 0.0f);
ray.dir = D3DXVECTOR3(angle_x, angle_y, 1.0f);
D3DXMATRIX m = worldMatrix * viewMatrix;
D3DXMatrixInverse(&worldViewInverse, 0, &m);
D3DXVec3TransformCoord(&ray.org, &ray.org, &worldViewInverse);
D3DXVec3TransformNormal(&ray.dir, &ray.dir, &worldViewInverse);
D3DXVec3Normalize(&ray.dir, &ray.dir);
return ray;
}
catch(...)
{
debug.Print("Error in MOUSE::GetRay()");
}
return RAY();
}
void MOUSE::CalculateMappos(TERRAIN &terrain)
{
//Get Mouse Ray
D3DXMATRIX world;
D3DXMatrixIdentity(&world);
m_pDevice->SetTransform(D3DTS_WORLD, &world);
RAY mRay = GetRay();
float minDistance = 10000.0f;
for(int i=0;i<(int)terrain.m_patches.size();i++)
{
if(mRay.Intersect(terrain.m_patches[i]->m_BBox) > 0.0f)
{
// Collect only the closest intersection
BOOL hit;
DWORD dwFace;
float hitU, hitV, dist;
D3DXIntersect(terrain.m_patches[i]->m_pMesh, &mRay.org, &mRay.dir, &hit, &dwFace, &hitU, &hitV, &dist, NULL, NULL);
if(hit && dist < minDistance)
{
minDistance = dist;
int tiles = dwFace / 2; //Two faces to each map tile
int tilesPerRow = terrain.m_patches[i]->m_mapRect.right - terrain.m_patches[i]->m_mapRect.left;
int y = tiles / tilesPerRow, x = tiles - y * tilesPerRow;
if(dwFace % 2 == 0) //Hit upper left face
{
if(hitU > 0.5f)x++;
else if(hitV > 0.5f)y++;
}
else //Hit lower right face
{
if(hitU + hitV < 0.5f)y++;
else if(hitU > 0.5f)x++;
else {x++;y++;}
}
//Set mouse map position
m_mappos.Set(terrain.m_patches[i]->m_mapRect.left + x, terrain.m_patches[i]->m_mapRect.top + y);
m_ballPos = terrain.GetWorldPos(m_mappos);
m_uv = D3DXVECTOR2(hitU, hitV);
}
}
}
}<|fim▁end|> | //Update pointer |
<|file_name|>poke-catalog.component.ts<|end_file_name|><|fim▁begin|>import { OnInit, Component, Input, EventEmitter } from "@angular/core";
import { Pokemon } from "./pokemon.model";
import { ExternalImageURLPipe } from "./../pipes/external-image-url.pipe";
import { PokeCatalogService } from "./poke-catalog.service";
<|fim▁hole|>@Component({
selector: "poke-catalog",
pipes: [ExternalImageURLPipe],
providers: [PokeCatalogService],
templateUrl: "app/poke-catalog/poke-catalog.component.html",
styleUrls: ["app/poke-catalog/poke-catalog.component.css"]
})
export class PokeCatalogComponent implements OnInit {
originalPokemonList: Array<Pokemon> = [];
pokemonList: Array<Pokemon> = [];
searchValue: string = "";
@Input() set searchValueInput(value: string) {
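    // Re-run the filter whenever the parent pushes a new search value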
this.searchValue = value;
this.filterPokemon();
};
constructor(private _pokeCatalogService: PokeCatalogService) {
console.log(">> PokeCatalogComponent()");
}
filterPokemon() {
    this.pokemonList = this.originalPokemonList.filter((pokemon) => {
      // Keep this pokemon when the search value is a substring of its name
return pokemon.name.includes(this.searchValue);
});
}
public ngOnInit() {
this._pokeCatalogService.getPokemonList().then((pokemonList: Pokemon[]) => {
this.pokemonList = pokemonList;
this.originalPokemonList = pokemonList;
});
}
}<|fim▁end|> |