prompt | completion |
---|---|
<|file_name|>history.go<|end_file_name|><|fim▁begin|>// mauIRC - The original mauIRC web frontend
// Copyright (C) 2016 Tulir Asokan
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
<|fim▁hole|>
// Package ui contains UI-related functions
package ui
import (
"encoding/json"
"fmt"
"github.com/gopherjs/gopherjs/js"
"github.com/gopherjs/jquery"
"maunium.net/go/mauirc-common/messages"
"maunium.net/go/mauirc/data"
"maunium.net/go/mauirc/templates"
"maunium.net/go/mauirc/util/console"
)
// GetHistory gets n messages of the history of channel @ network
func GetHistory(network, channel string, n int) {
data.MustGetChannel(network, channel).FetchingHistory = true
GetChannel(network, channel).SetHtml("<div class='loader-center-wrapper'><div class='loader'/></div>")
jquery.Ajax(map[string]interface{}{
"type": "GET",
"url": fmt.Sprintf("/history/%s/%s/?n=%d", network, js.Global.Call("encodeURIComponent", channel).String(), n),
jquery.SUCCESS: func(rawData string) {
GetChannel(network, channel).Empty()
var histData = make([]messages.Message, 0)
json.Unmarshal([]byte(rawData), &histData)
for i := len(histData) - 1; i >= 0; i-- {
Receive(histData[i], false)
}
chanData := data.MustGetChannel(network, channel)
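// Drain any messages cached while the history fetch was in flight; the
// default case makes the select non-blocking, so the labeled break
// fires as soon as the channel is empty.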
Loop:
for {
select {
case obj := <-chanData.MessageCache:
Receive(obj, true)
default:
break Loop
}
}
chanData.FetchingHistory = false
chanData.HistoryFetched = true
ScrollDown()
},
jquery.ERROR: func(info map[string]interface{}, textStatus, errorThrown string) {
console.Error("Failed to fetch history: HTTP", info["status"])
console.Error(info)
data.MustGetChannel(network, channel).FetchingHistory = false
if len(GetActiveNetwork()) == 0 || len(GetActiveChannel()) == 0 {
return
}
templates.AppendObj("error", GetActiveChannelObj(), fmt.Sprintln("Failed to fetch history:", textStatus, errorThrown))
ScrollDown()
},
})
}<|fim▁end|> | // You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>. |
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>"""
WSGI config for api project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
<|fim▁hole|>
from django.core.wsgi import get_wsgi_application
# This allows easy placement of apps within the interior
# api directory.
app_path = os.path.dirname(os.path.abspath(__file__)).replace('/config', '')
sys.path.append(os.path.join(app_path, 'api'))
if os.environ.get('DJANGO_SETTINGS_MODULE') == 'config.settings.production':
from raven.contrib.django.raven_compat.middleware.wsgi import Sentry
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
application = get_wsgi_application()
if os.environ.get('DJANGO_SETTINGS_MODULE') == 'config.settings.production':
application = Sentry(application)
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)<|fim▁end|> | """
import os
import sys |
<|file_name|>control_slider.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/home/yeison/Documentos/python/developing/pinguino/pinguino-ide/qtgui/gide/bloques/widgets/control_slider.ui'
#
# Created: Wed Mar 4 01:39:58 2015
# by: pyside-uic 0.2.15 running on PySide 1.2.2
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_Frame(object):
def setupUi(self, Frame):
Frame.setObjectName("Frame")
Frame.resize(237, 36)
Frame.setWindowTitle("")
self.gridLayout = QtGui.QGridLayout(Frame)
self.gridLayout.setSpacing(0)
self.gridLayout.setContentsMargins(0, 0, 0, 0)
self.gridLayout.setObjectName("gridLayout")
self.lineEdit_2 = QtGui.QLineEdit(Frame)
self.lineEdit_2.setMaximumSize(QtCore.QSize(46, 16777215))
font = QtGui.QFont()
font.setFamily("Ubuntu Mono")
font.setPointSize(15)
font.setWeight(75)
font.setBold(True)
self.lineEdit_2.setFont(font)
self.lineEdit_2.setStyleSheet("color: rgb(255, 255, 255);\n"
"background-color: rgba(255, 255, 255, 0);")
self.lineEdit_2.setText("0000")
self.lineEdit_2.setFrame(False)
self.lineEdit_2.setReadOnly(True)
self.lineEdit_2.setObjectName("lineEdit_2")
self.gridLayout.addWidget(self.lineEdit_2, 0, 1, 1, 1)
self.horizontalSlider = QtGui.QSlider(Frame)
self.horizontalSlider.setCursor(QtCore.Qt.PointingHandCursor)
self.horizontalSlider.setFocusPolicy(QtCore.Qt.NoFocus)
self.horizontalSlider.setMaximum(1023)
self.horizontalSlider.setOrientation(QtCore.Qt.Horizontal)
self.horizontalSlider.setInvertedAppearance(False)<|fim▁hole|> self.gridLayout.addWidget(self.horizontalSlider, 0, 2, 1, 1)
self.retranslateUi(Frame)
QtCore.QMetaObject.connectSlotsByName(Frame)
def retranslateUi(self, Frame):
pass<|fim▁end|> | self.horizontalSlider.setTickPosition(QtGui.QSlider.NoTicks)
self.horizontalSlider.setTickInterval(128)
self.horizontalSlider.setObjectName("horizontalSlider") |
<|file_name|>test_mechanism_fslsdn.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Freescale, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from oslo.config import cfg
from neutron.extensions import portbindings
from neutron.plugins.ml2.drivers.freescale import mechanism_fslsdn
from neutron.tests import base
from neutron.tests.unit import test_db_plugin
"""Unit testing for Freescale SDN mechanism driver."""
class TestFslSdnMechDriverV2(test_db_plugin.NeutronDbPluginV2TestCase):
"""Testing mechanism driver with ML2 plugin."""
def setUp(self):
cfg.CONF.set_override('mechanism_drivers', ['fslsdn'], 'ml2')
def mocked_fslsdn_init(self):
# Mock CRD client, since it requires CRD service running.
self._crdclient = mock.Mock()
with mock.patch.object(mechanism_fslsdn.FslsdnMechanismDriver,
'initialize', new=mocked_fslsdn_init):
super(TestFslSdnMechDriverV2, self).setUp()
class TestFslSdnMechDriverNetworksV2(test_db_plugin.TestNetworksV2,
TestFslSdnMechDriverV2):
pass
class TestFslSdnMechDriverPortsV2(test_db_plugin.TestPortsV2,
TestFslSdnMechDriverV2):
VIF_TYPE = portbindings.VIF_TYPE_OVS
CAP_PORT_FILTER = True
class TestFslSdnMechDriverSubnetsV2(test_db_plugin.TestSubnetsV2,
TestFslSdnMechDriverV2):
pass
class TestFslSdnMechanismDriver(base.BaseTestCase):
"""Testing FSL SDN Mechanism driver."""
def setUp(self):
super(TestFslSdnMechanismDriver, self).setUp()
cfg.CONF.set_override('mechanism_drivers', ['fslsdn'], 'ml2')
self.driver = mechanism_fslsdn.FslsdnMechanismDriver()
self.driver.initialize()
self.client = self.driver._crdclient = mock.Mock()
def test_create_update_delete_network_postcommit(self):
"""Testing create/update/delete network postcommit operations."""
tenant_id = 'test'
network_id = '123'
segmentation_id = 456
expected_seg = [{'segmentation_id': segmentation_id}]
expected_crd_network = {'network':
{'network_id': network_id,
'tenant_id': tenant_id,
'name': 'FakeNetwork',
'status': 'ACTIVE',
'admin_state_up': True,
'segments': expected_seg}}
network_context = self._get_network_context(tenant_id, network_id,
segmentation_id)
network = network_context.current
segments = network_context.network_segments
net_id = network['id']
req = self.driver._prepare_crd_network(network, segments)
# test crd network dict
self.assertEqual(expected_crd_network, req)
# test create_network.
self.driver.create_network_postcommit(network_context)
self.client.create_network.assert_called_once_with(body=req)
# test update_network.
self.driver.update_network_postcommit(network_context)
self.client.update_network.assert_called_once_with(net_id, body=req)
# test delete_network.
self.driver.delete_network_postcommit(network_context)
self.client.delete_network.assert_called_once_with(net_id)
def test_create_update_delete_subnet_postcommit(self):
"""Testing create/update/delete subnet postcommit operations."""
tenant_id = 'test'<|fim▁hole|> subnet_id = '122'
cidr = '192.0.0.0/8'
gateway_ip = '192.0.0.1'
expected_crd_subnet = {'subnet':
{'subnet_id': subnet_id, 'tenant_id': tenant_id,
'name': 'FakeSubnet', 'network_id': network_id,
'ip_version': 4, 'cidr': cidr,
'gateway_ip': gateway_ip,
'dns_nameservers': '',
'allocation_pools': '',
'host_routes': ''}}
subnet_context = self._get_subnet_context(tenant_id, network_id,
subnet_id, cidr, gateway_ip)
subnet = subnet_context.current
subnet_id = subnet['id']
req = self.driver._prepare_crd_subnet(subnet)
# test crd subnet dict
self.assertEqual(expected_crd_subnet, req)
# test create_subnet.
self.driver.create_subnet_postcommit(subnet_context)
self.client.create_subnet.assert_called_once_with(body=req)
# test update_subnet.
self.driver.update_subnet_postcommit(subnet_context)
self.client.update_subnet.assert_called_once_with(subnet_id, body=req)
# test delete_subnet.
self.driver.delete_subnet_postcommit(subnet_context)
self.client.delete_subnet.assert_called_once_with(subnet_id)
def test_create_delete_port_postcommit(self):
"""Testing create/delete port postcommit operations."""
tenant_id = 'test'
network_id = '123'
port_id = '453'
expected_crd_port = {'port':
{'port_id': port_id, 'tenant_id': tenant_id,
'name': 'FakePort', 'network_id': network_id,
'subnet_id': '', 'mac_address': 'aabb',
'device_id': '1234', 'ip_address': '',
'admin_state_up': True, 'status': 'ACTIVE',
'device_owner': 'compute',
'security_groups': ''}}
# Test with empty fixed IP
port_context = self._get_port_context(tenant_id, network_id, port_id)
port = port_context.current
req = self.driver._prepare_crd_port(port)
# Test crd port dict
self.assertEqual(expected_crd_port, req)
# test create_port.
self.driver.create_port_postcommit(port_context)
self.client.create_port.assert_called_once_with(body=req)
# Test delete_port
self.driver.delete_port_postcommit(port_context)
self.client.delete_port.assert_called_once_with(port['id'])
def test_prepare_port_with_single_fixed_ip(self):
"""Test _prepare_crd_port with single fixed_ip."""
tenant_id = 'test'
network_id = '123'
port_id = '453'
fips = [{"subnet_id": "sub-1", "ip_address": "10.0.0.1"}]
expected_crd_port = {'port':
{'port_id': port_id, 'tenant_id': tenant_id,
'name': 'FakePort', 'network_id': network_id,
'subnet_id': '', 'mac_address': 'aabb',
'device_id': '1234', 'ip_address': '',
'admin_state_up': True, 'status': 'ACTIVE',
'device_owner': 'compute',
'security_groups': ''}}
port_context = self._get_port_context(tenant_id, network_id, port_id,
fips)
port = port_context.current
req = self.driver._prepare_crd_port(port)
expected_crd_port['port']['subnet_id'] = 'sub-1'
expected_crd_port['port']['ip_address'] = '10.0.0.1'
self.assertEqual(expected_crd_port, req)
def test_prepare_port_with_multiple_fixed_ips(self):
"""Test _prepare_crd_port with multiple fixed_ips."""
tenant_id = 'test'
network_id = '123'
port_id = '453'
multiple_fips = [{"subnet_id": "sub-1", "ip_address": "10.0.0.1"},
{"subnet_id": "sub-1", "ip_address": "10.0.0.4"}]
expected_crd_port = {'port':
{'port_id': port_id, 'tenant_id': tenant_id,
'name': 'FakePort', 'network_id': network_id,
'subnet_id': '', 'mac_address': 'aabb',
'device_id': '1234', 'ip_address': '',
'admin_state_up': True, 'status': 'ACTIVE',
'device_owner': 'compute',
'security_groups': ''}}
port_context = self._get_port_context(tenant_id, network_id, port_id,
multiple_fips)
port = port_context.current
req = self.driver._prepare_crd_port(port)
expected_crd_port['port']['subnet_id'] = 'sub-1'
expected_crd_port['port']['ip_address'] = '10.0.0.1'
self.assertEqual(expected_crd_port, req)
def _get_subnet_context(self, tenant_id, net_id, subnet_id, cidr,
gateway_ip):
# sample data for testing purpose only.
subnet = {'tenant_id': tenant_id,
'network_id': net_id,
'id': subnet_id,
'cidr': cidr,
'name': 'FakeSubnet',
'ip_version': 4,
'gateway_ip': gateway_ip,
}
return FakeContext(subnet)
def _get_port_context(self, tenant_id, net_id, port_id,
fixed_ips=[]):
# sample data for testing purpose only
port = {'device_id': '1234',
'name': 'FakePort',
'mac_address': 'aabb',
'device_owner': 'compute',
'tenant_id': tenant_id,
'id': port_id,
'fixed_ips': fixed_ips,
'admin_state_up': True,
'status': 'ACTIVE',
'network_id': net_id}
return FakeContext(port)
def _get_network_context(self, tenant_id, net_id, seg_id):
# sample data for testing purpose only.
network = {'id': net_id,
'tenant_id': tenant_id,
'admin_state_up': True,
'status': 'ACTIVE',
'name': 'FakeNetwork', }
segments = [{'segmentation_id': seg_id}]
return FakeNetworkContext(network, segments)
class FakeNetworkContext(object):
"""To generate network context for testing purposes only."""
def __init__(self, network, segments):
self._network = network
self._segments = segments
@property
def current(self):
return self._network
@property
def network_segments(self):
return self._segments
class FakeContext(object):
"""To generate context for testing purposes only."""
def __init__(self, record):
self._record = record
@property
def current(self):
return self._record<|fim▁end|> | network_id = '123' |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![allow(improper_ctypes)]
#![allow(missing_copy_implementations)]
#![allow(non_upper_case_globals)]
//! OpenCL bindings for Rust.
extern crate libc;
#[macro_use]
extern crate log;
#[link(name = "OpenCL", kind = "framework")]
#[cfg(target_os = "macos")]
extern { }
#[link(name = "OpenCL")]
#[cfg(target_os = "linux")]
extern { }
#[link(name = "OpenCL")]
#[cfg(target_os = "windows")]
extern { }<|fim▁hole|>/// Low-level OpenCL bindings. These should primarily be used by the
/// higher level features in this library.
pub mod cl;
/// OpenCL extensions
pub mod ext;
pub mod error;
pub mod hl;
pub mod util;
pub mod mem;
pub mod array;<|fim▁end|> | |
<|file_name|>decompression.rs<|end_file_name|><|fim▁begin|>/* Copyright (C) 2021 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
use crate::core::STREAM_TOCLIENT;
use brotli;
use flate2::read::GzDecoder;
use std;
use std::io;
use std::io::{Cursor, Read, Write};
pub const HTTP2_DECOMPRESSION_CHUNK_SIZE: usize = 0x1000; // 4096
#[repr(u8)]
#[derive(Copy, Clone, PartialOrd, PartialEq, Debug)]
pub enum HTTP2ContentEncoding {
HTTP2ContentEncodingUnknown = 0,
HTTP2ContentEncodingGzip = 1,
HTTP2ContentEncodingBr = 2,
HTTP2ContentEncodingUnrecognized = 3,
}
//a cursor turning EOF into blocking errors
pub struct HTTP2cursor {
pub cursor: Cursor<Vec<u8>>,
}
impl HTTP2cursor {
pub fn new() -> HTTP2cursor {
HTTP2cursor {
cursor: Cursor::new(Vec::new()),
}
}
pub fn set_position(&mut self, pos: u64) {
return self.cursor.set_position(pos);
}
}
// we need to implement Read here, as the flate2 and brotli crates
// will read from this object
impl Read for HTTP2cursor {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
//use the cursor, except it turns EOF into a blocking error
let r = self.cursor.read(buf);
match r {
Err(ref err) => {
if err.kind() == io::ErrorKind::UnexpectedEof {
return Err(io::ErrorKind::WouldBlock.into());
}
}
Ok(0) => {
//regular EOF turned into blocking error
return Err(io::ErrorKind::WouldBlock.into());
}
Ok(_n) => {}
}
return r;
}
}
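// Illustrative sketch (not part of the original source): reading from an
// exhausted cursor yields WouldBlock rather than Ok(0), which keeps the
// streaming decoders waiting for more input instead of treating the
// buffer boundary as end-of-stream.
//   let mut cur = HTTP2cursor::new();
//   let mut buf = [0u8; 4];
//   assert_eq!(cur.read(&mut buf).unwrap_err().kind(),
//              io::ErrorKind::WouldBlock);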
pub enum HTTP2Decompresser {
UNASSIGNED,
GZIP(GzDecoder<HTTP2cursor>),
BROTLI(brotli::Decompressor<HTTP2cursor>),
}
struct HTTP2DecoderHalf {
encoding: HTTP2ContentEncoding,
decoder: HTTP2Decompresser,
}
pub trait GetMutCursor {
fn get_mut(&mut self) -> &mut HTTP2cursor;
}
impl GetMutCursor for GzDecoder<HTTP2cursor> {
fn get_mut(&mut self) -> &mut HTTP2cursor {
return self.get_mut();
}
}
impl GetMutCursor for brotli::Decompressor<HTTP2cursor> {
fn get_mut(&mut self) -> &mut HTTP2cursor {
return self.get_mut();
}
}
fn http2_decompress<'a>(
decoder: &mut (impl Read + GetMutCursor), input: &'a [u8], output: &'a mut Vec<u8>,
) -> io::Result<&'a [u8]> {
match decoder.get_mut().cursor.write_all(input) {
Ok(()) => {}
Err(e) => {
return Err(e);
}
}
let mut offset = 0;
decoder.get_mut().set_position(0);
output.resize(HTTP2_DECOMPRESSION_CHUNK_SIZE, 0);
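// Decompress in fixed-size chunks, growing the output buffer whenever a
// chunk fills up completely; a WouldBlock error from the cursor means
// "no more buffered input for now" and ends the loop without failing.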
loop {
match decoder.read(&mut output[offset..]) {
Ok(0) => {
break;
}
Ok(n) => {
offset += n;
if offset == output.len() {
output.resize(output.len() + HTTP2_DECOMPRESSION_CHUNK_SIZE, 0);
}
}
Err(e) => {
if e.kind() == io::ErrorKind::WouldBlock {
break;
}
return Err(e);
}
}
}
//brotli does not consume all of its input once it reaches the end of the stream
<|fim▁hole|>impl HTTP2DecoderHalf {
pub fn new() -> HTTP2DecoderHalf {
HTTP2DecoderHalf {
encoding: HTTP2ContentEncoding::HTTP2ContentEncodingUnknown,
decoder: HTTP2Decompresser::UNASSIGNED,
}
}
pub fn http2_encoding_fromvec(&mut self, input: &Vec<u8>) {
//use first encoding...
if self.encoding == HTTP2ContentEncoding::HTTP2ContentEncodingUnknown {
if *input == "gzip".as_bytes().to_vec() {
self.encoding = HTTP2ContentEncoding::HTTP2ContentEncodingGzip;
self.decoder = HTTP2Decompresser::GZIP(GzDecoder::new(HTTP2cursor::new()));
} else if *input == "br".as_bytes().to_vec() {
self.encoding = HTTP2ContentEncoding::HTTP2ContentEncodingBr;
self.decoder = HTTP2Decompresser::BROTLI(brotli::Decompressor::new(
HTTP2cursor::new(),
HTTP2_DECOMPRESSION_CHUNK_SIZE,
));
} else {
self.encoding = HTTP2ContentEncoding::HTTP2ContentEncodingUnrecognized;
}
}
}
pub fn decompress<'a>(
&'a mut self, input: &'a [u8], output: &'a mut Vec<u8>,
) -> io::Result<&'a [u8]> {
match self.decoder {
HTTP2Decompresser::GZIP(ref mut gzip_decoder) => {
let r = http2_decompress(gzip_decoder, input, output);
match r {
Err(_) => {
self.decoder = HTTP2Decompresser::UNASSIGNED;
}
_ => {}
}
return r;
}
HTTP2Decompresser::BROTLI(ref mut br_decoder) => {
let r = http2_decompress(br_decoder, input, output);
match r {
Err(_) => {
self.decoder = HTTP2Decompresser::UNASSIGNED;
}
_ => {}
}
return r;
}
_ => {}
}
return Ok(input);
}
}
pub struct HTTP2Decoder {
decoder_tc: HTTP2DecoderHalf,
decoder_ts: HTTP2DecoderHalf,
}
impl HTTP2Decoder {
pub fn new() -> HTTP2Decoder {
HTTP2Decoder {
decoder_tc: HTTP2DecoderHalf::new(),
decoder_ts: HTTP2DecoderHalf::new(),
}
}
pub fn http2_encoding_fromvec(&mut self, input: &Vec<u8>, dir: u8) {
if dir == STREAM_TOCLIENT {
self.decoder_tc.http2_encoding_fromvec(input);
} else {
self.decoder_ts.http2_encoding_fromvec(input);
}
}
pub fn decompress<'a>(
&'a mut self, input: &'a [u8], output: &'a mut Vec<u8>, dir: u8,
) -> io::Result<&'a [u8]> {
if dir == STREAM_TOCLIENT {
return self.decoder_tc.decompress(input, output);
} else {
return self.decoder_ts.decompress(input, output);
}
}
}<|fim▁end|> | decoder.get_mut().set_position(0);
return Ok(&output[..offset]);
}
|
<|file_name|>clothes_detection_dummy_node.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
import rospy
import roslib
roslib.load_manifest('clothing_type_classification')
import actionlib
import clothing_type_classification.msg
import std_msgs
from sensor_msgs.msg import Image
from clothing_type_classification.msg import ClothesArray, Clothes
# Specify the target centroid points and area of each ClothesObject here: [x, y, z, area]
result_clothes = [[0.5, 0.0, 0.7, 50]]
class ClothesDetectionDummy(object):
def __init__(self, name):
self._action_name = name
self._as = actionlib.SimpleActionServer(self._action_name,
clothing_type_classification.msg.FindClothesAction,
self.execute_cb, False)
self._feedback = clothing_type_classification.msg.FindClothesFeedback()
self._result = clothing_type_classification.msg.FindClothesResult()
self._as.start()
print "Current Clothes: "
index = 0
for i in result_clothes:
print "clothes[" + str(index) + "] = " + str(i)
index += 1
print "-------------Complete Initialization------------------"
<|fim▁hole|> global result_clothes
rospy.loginfo("-------Start Execution-----")
ca = ClothesArray()
ca.header.frame_id = "base_link"
ca.header.stamp = rospy.Time.now()
for i in result_clothes:
ca.array.append(self.create_clothes(i))
print "array => " + str(ca)
self._result.result = ca
print "result : " + str(type(self._result.result))
print str(self._result.result)
#self._result.result = ClothesArray()
self._as.set_succeeded(self._result)
def create_clothes(self, centroid_and_area):
tmp = Clothes()
tmp.type = "Unknown"
tmp.image = Image()
tmp.centroid.x = centroid_and_area[0]
tmp.centroid.y = centroid_and_area[1]
tmp.centroid.z = centroid_and_area[2]
tmp.area = centroid_and_area[3]
return tmp
if __name__ == '__main__':
rospy.init_node('clothes_detection_node')
ClothesDetectionDummy(rospy.get_name())
rospy.spin()<|fim▁end|> | def execute_cb(self, goal): |
<|file_name|>recentdocsmenu.cpp<|end_file_name|><|fim▁begin|>/*****************************************************************
Copyright (c) 1996-2000 the kicker authors. See file AUTHORS.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
******************************************************************/
#include <qdragobject.h>
#include <qstring.h>
#include <qstringlist.h>
#include <kglobal.h>
#include <kiconloader.h>
#include <kmimetype.h>
#include <klocale.h>
#include <kdesktopfile.h>
#include <kglobalsettings.h>
#include <kapplication.h>
#include <kurldrag.h>
#include <krecentdocument.h>
#include "recentdocsmenu.h"
K_EXPORT_KICKER_MENUEXT(recentdocs, RecentDocsMenu)
RecentDocsMenu::RecentDocsMenu(QWidget *parent, const char *name,
const QStringList &/*args*/)
: KPanelMenu(KRecentDocument::recentDocumentDirectory(), parent, name)
{
}
RecentDocsMenu::~RecentDocsMenu()
{
}
void RecentDocsMenu::initialize() {
if (initialized()) clear();
insertItem(SmallIconSet("history_clear"), i18n("Clear History"), this, SLOT(slotClearHistory()));
insertSeparator();
_fileList = KRecentDocument::recentDocuments();
if (_fileList.isEmpty()) {
insertItem(i18n("No Entries"), 0);
setItemEnabled(0, false);
return;
}
int id = 0;
char alreadyPresentInMenu;
QStringList previousEntries;
for (QStringList::ConstIterator it = _fileList.begin(); it != _fileList.end(); ++it) {
KDesktopFile f(*it, true /* read only */);
// Make sure this entry is not already present in the menu
alreadyPresentInMenu = 0;
for ( QStringList::Iterator previt = previousEntries.begin(); previt != previousEntries.end(); ++previt ) {
if (QString::localeAwareCompare(*previt, f.readName().replace('&', QString::fromAscii("&&") )) == 0) {
alreadyPresentInMenu = 1;
}
}
if (alreadyPresentInMenu == 0) {
// Add item to menu
insertItem(SmallIconSet(f.readIcon()), f.readName().replace('&', QString::fromAscii("&&") ), id++);
// Append to duplicate checking list
previousEntries.append(f.readName().replace('&', QString::fromAscii("&&") ));
}
}
setInitialized(true);
}
void RecentDocsMenu::slotClearHistory() {
KRecentDocument::clear();
reinitialize();
}
void RecentDocsMenu::slotExec(int id) {
if (id >= 0) {
kapp->propagateSessionManager();
KURL u;
u.setPath(_fileList[id]);
KDEDesktopMimeType::run(u, true);
}
}
void RecentDocsMenu::mousePressEvent(QMouseEvent* e) {
_mouseDown = e->pos();
QPopupMenu::mousePressEvent(e);
}
void RecentDocsMenu::mouseMoveEvent(QMouseEvent* e) {
KPanelMenu::mouseMoveEvent(e);
if (!(e->state() & LeftButton))
return;
if (!rect().contains(_mouseDown))
return;
int dragLength = (e->pos() - _mouseDown).manhattanLength();
if (dragLength <= KGlobalSettings::dndEventDelay())
return; // ignore it
int id = idAt(_mouseDown);
// Don't drag 'manual' items.
if (id < 0)
return;
KDesktopFile f(_fileList[id], true /* read only */);
<|fim▁hole|> return;
KURL::List lst;
lst.append(url);
KURLDrag* d = new KURLDrag(lst, this);
d->setPixmap(SmallIcon(f.readIcon()));
d->dragCopy();
close();
}
void RecentDocsMenu::slotAboutToShow()
{
reinitialize();
KPanelMenu::slotAboutToShow();
}
#include "recentdocsmenu.moc"<|fim▁end|> | KURL url ( f.readURL() );
if (url.isEmpty()) // What are we to do ? |
<|file_name|>context.spec.ts<|end_file_name|><|fim▁begin|>import { Context } from '../../../src/context/Context';
describe('Context', () => {
describe('Response', () => {
it('should return the response object', () => {
const res: any = 1;
const context = new Context(undefined, res, undefined, undefined);
expect(context.Response).toBe(1);
});
});
describe('Request', () => {
it('should return the request object', () => {
const req: any = 1;
const context = new Context(req, undefined, undefined, undefined);
expect(context.Request).toBe(1);
});
});
describe('Services', () => {
it('should return the services object', () => {
const services: any = 1;
const context = new Context(undefined, undefined, undefined, services);
expect(context.Services).toBe(1);
});
});
describe('DataLoaders', () => {
it('should return the dataLoaders object', () => {
const dataLoaders: any = 1;
const context = new Context(undefined, undefined, dataLoaders, undefined);
expect(context.DataLoaders).toBe(1);
});
});
describe('hasUserRoles', () => {
it('should return the dataLoaders object', () => {
const req: any = {
acceptsLanguages: () => 'de'
};
const context = new Context(req, undefined, undefined, undefined);
expect(context.getLanguage()).toBe('de');
});
});
describe('hasUserRoles', () => {<|fim▁hole|> const context = new Context(undefined, undefined, undefined, undefined);
expect(context.hasUserRoles([])).toBeTruthy();
});
});
});<|fim▁end|> | it('should return the request object', () => { |
<|file_name|>0003_auto_20150816_2148.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django_extensions.db.fields
class Migration(migrations.Migration):
dependencies = [
('talks', '0002_auto_20150808_2108'),
]
operations = [
migrations.AlterField(
model_name='speaker',
name='slug',
field=django_extensions.db.fields.AutoSlugField(verbose_name='Slug', blank=True, populate_from='get_name', editable=False),<|fim▁hole|> name='speaker',
field=models.ForeignKey(null=True, blank=True, related_name='talks', to='talks.Speaker'),
),
]<|fim▁end|> | ),
migrations.AlterField(
model_name='talk', |
<|file_name|>queue-cave-reinstall.ts<|end_file_name|><|fim▁begin|>import { actions } from "common/actions";
import { messages } from "common/butlerd";
import { DownloadReason } from "common/butlerd/messages";
import { Watcher } from "common/util/watcher";
import { mcall } from "main/butlerd/mcall";
export default function (watcher: Watcher) {
watcher.on(actions.queueCaveReinstall, async (store, action) => {
const { caveId } = action.payload;
await mcall(messages.InstallQueue, {
caveId,<|fim▁hole|> store.dispatch(actions.downloadQueued({}));
});
}<|fim▁end|> | reason: DownloadReason.Reinstall,
queueDownload: true,
fastQueue: true,
}); |
<|file_name|>viz.go<|end_file_name|><|fim▁begin|>package graph<|fim▁hole|> "github.com/docker/docker/engine"
"github.com/docker/docker/image"
)
func (s *TagStore) CmdViz(job *engine.Job) engine.Status {
images, _ := s.graph.Map()
if images == nil {
return engine.StatusOK
}
job.Stdout.Write([]byte("digraph docker {\n"))
var (
parentImage *image.Image
err error
)
for _, image := range images {
parentImage, err = image.GetParent()
if err != nil {
return job.Errorf("Error while getting parent image: %v", err)
}
if parentImage != nil {
job.Stdout.Write([]byte(" \"" + parentImage.ID + "\" -> \"" + image.ID + "\"\n"))
} else {
job.Stdout.Write([]byte(" base -> \"" + image.ID + "\" [style=invis]\n"))
}
}
for id, repos := range s.GetRepoRefs() {
job.Stdout.Write([]byte(" \"" + id + "\" [label=\"" + id + "\\n" + strings.Join(repos, "\\n") + "\",shape=box,fillcolor=\"paleturquoise\",style=\"filled,rounded\"];\n"))
}
job.Stdout.Write([]byte(" base [style=invisible]\n}\n"))
return engine.StatusOK
}<|fim▁end|> |
import (
"strings"
|
<|file_name|>cs_matrix.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>use na::{Matrix4x5, Matrix5x4, CsMatrix};
#[test]
fn cs_transpose() {
let m = Matrix4x5::new(
4.0, 1.0, 4.0, 0.0, 9.0,
5.0, 6.0, 0.0, 8.0, 10.0,
9.0, 10.0, 11.0, 12.0, 0.0,
0.0, 0.0, 1.0, 0.0, 10.0
);
let cs: CsMatrix<_, _, _> = m.into();
assert!(cs.is_sorted());
let cs_transposed = cs.transpose();
assert!(cs_transposed.is_sorted());
let cs_transposed_mat: Matrix5x4<_> = cs_transposed.into();
assert_eq!(cs_transposed_mat, m.transpose())
}<|fim▁end|> | #![cfg_attr(rustfmt, rustfmt_skip)]
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>fullname = _('Cosigner Pool')
description = ' '.join([
_("This plugin facilitates the use of multi-signatures wallets."),
_("It sends and receives partially signed transactions from/to your cosigner wallet."),
_("Transactions are encrypted and stored on a remote server.")
])
#requires_wallet_type = ['2of2', '2of3']
available_for = ['qt', 'vtc']<|fim▁end|> | from electrum_vtc.i18n import _ |
<|file_name|>ipblock.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import sys
def ip2str(ip):
l = [
(ip >> (3*8)) & 0xFF,
(ip >> (2*8)) & 0xFF,
(ip >> (1*8)) & 0xFF,
(ip >> (0*8)) & 0xFF,
]
return '.'.join([str(i) for i in l])
def str2ip(line):
a, b, c, d = [int(s) for s in line.split('.')]
ip = 0
ip += (a << (3*8))
ip += (b << (2*8))
ip += (c << (1*8))
ip += (d << (0*8))
return ip
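# Worked example (for illustration only): str2ip('10.0.0.1') returns
# 0x0A000001, i.e. (10 << 24) + (0 << 16) + (0 << 8) + 1, and
# ip2str(0x0A000001) gives back '10.0.0.1'.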
blockip = str2ip(sys.stdin.readline())
hostmask = 1<|fim▁hole|> ip = str2ip(line.strip())
except:
print 'Ignored line:', line,
continue
while (blockip & (~hostmask)) != (ip & (~hostmask)):
hostmask = (hostmask << 1) | 1
bitcount += 1
print ip2str(blockip & (~hostmask)) + '/' + str(bitcount), 'hostmask =', ip2str(hostmask)
print 'wrong way around'<|fim▁end|> | bitcount = 1
for line in sys.stdin.readlines():
try: |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-<|fim▁hole|>
from setuptools import setup
setup(name='scalegrease',
version='1',
url='https://github.com/spotify/scalegrease',
description='A tool chain for executing batch processing jobs',
packages=['scalegrease'],
data_files=[('/etc', ['conf/scalegrease.json'])],
scripts=[
'bin/greaserun',
'bin/greaseworker'
]
)<|fim▁end|> | |
<|file_name|>org_writer.go<|end_file_name|><|fim▁begin|>package org
import (
"fmt"
"strings"
"unicode"
"unicode/utf8"
)
// OrgWriter export an org document into pretty printed org document.
type OrgWriter struct {
ExtendingWriter Writer
TagsColumn int
strings.Builder
indent string
}
var emphasisOrgBorders = map[string][]string{
"_": []string{"_", "_"},
"*": []string{"*", "*"},
"/": []string{"/", "/"},
"+": []string{"+", "+"},
"~": []string{"~", "~"},
"=": []string{"=", "="},
"_{}": []string{"_{", "}"},
"^{}": []string{"^{", "}"},
}
func NewOrgWriter() *OrgWriter {
return &OrgWriter{
TagsColumn: 77,
}
}
func (w *OrgWriter) WriterWithExtensions() Writer {
if w.ExtendingWriter != nil {
return w.ExtendingWriter
}
return w
}
func (w *OrgWriter) Before(d *Document) {}
func (w *OrgWriter) After(d *Document) {}
func (w *OrgWriter) WriteNodesAsString(nodes ...Node) string {
builder := w.Builder
w.Builder = strings.Builder{}
WriteNodes(w, nodes...)
out := w.String()
w.Builder = builder
return out
}
func (w *OrgWriter) WriteHeadline(h Headline) {
start := w.Len()
w.WriteString(strings.Repeat("*", h.Lvl))
if h.Status != "" {
w.WriteString(" " + h.Status)
}
if h.Priority != "" {
w.WriteString(" [#" + h.Priority + "]")
}
w.WriteString(" ")
WriteNodes(w, h.Title...)
if len(h.Tags) != 0 {
tString := ":" + strings.Join(h.Tags, ":") + ":"
if n := w.TagsColumn - len(tString) - (w.Len() - start); n > 0 {
w.WriteString(strings.Repeat(" ", n) + tString)
} else {
w.WriteString(" " + tString)
}
}
w.WriteString("\n")
if len(h.Children) != 0 {
w.WriteString(w.indent)
}
if h.Properties != nil {
WriteNodes(w, *h.Properties)
}
WriteNodes(w, h.Children...)
}
func (w *OrgWriter) WriteBlock(b Block) {
w.WriteString(w.indent + "#+BEGIN_" + b.Name)
if len(b.Parameters) != 0 {
w.WriteString(" " + strings.Join(b.Parameters, " "))
}
w.WriteString("\n")
if isRawTextBlock(b.Name) {
w.WriteString(w.indent)
}
WriteNodes(w, b.Children...)
if !isRawTextBlock(b.Name) {
w.WriteString(w.indent)
}
w.WriteString("#+END_" + b.Name + "\n")
}
func (w *OrgWriter) WriteDrawer(d Drawer) {
w.WriteString(w.indent + ":" + d.Name + ":\n")
WriteNodes(w, d.Children...)
w.WriteString(w.indent + ":END:\n")
}
func (w *OrgWriter) WritePropertyDrawer(d PropertyDrawer) {
w.WriteString(":PROPERTIES:\n")
for _, kvPair := range d.Properties {
k, v := kvPair[0], kvPair[1]
if v != "" {
v = " " + v
}
w.WriteString(fmt.Sprintf(":%s:%s\n", k, v))
}
w.WriteString(":END:\n")
}
func (w *OrgWriter) WriteFootnoteDefinition(f FootnoteDefinition) {
w.WriteString(fmt.Sprintf("[fn:%s]", f.Name))<|fim▁hole|> content := w.WriteNodesAsString(f.Children...)
if content != "" && !unicode.IsSpace(rune(content[0])) {
w.WriteString(" ")
}
w.WriteString(content)
}
func (w *OrgWriter) WriteParagraph(p Paragraph) {
content := w.WriteNodesAsString(p.Children...)
if len(content) > 0 && content[0] != '\n' {
w.WriteString(w.indent)
}
w.WriteString(content + "\n")
}
func (w *OrgWriter) WriteExample(e Example) {
for _, n := range e.Children {
w.WriteString(w.indent + ":")
if content := w.WriteNodesAsString(n); content != "" {
w.WriteString(" " + content)
}
w.WriteString("\n")
}
}
func (w *OrgWriter) WriteKeyword(k Keyword) {
w.WriteString(w.indent + "#+" + k.Key + ":")
if k.Value != "" {
w.WriteString(" " + k.Value)
}
w.WriteString("\n")
}
func (w *OrgWriter) WriteInclude(i Include) {
w.WriteKeyword(i.Keyword)
}
func (w *OrgWriter) WriteNodeWithMeta(n NodeWithMeta) {
for _, ns := range n.Meta.Caption {
w.WriteString("#+CAPTION: ")
WriteNodes(w, ns...)
w.WriteString("\n")
}
for _, attributes := range n.Meta.HTMLAttributes {
w.WriteString("#+ATTR_HTML: ")
w.WriteString(strings.Join(attributes, " ") + "\n")
}
WriteNodes(w, n.Node)
}
func (w *OrgWriter) WriteNodeWithName(n NodeWithName) {
w.WriteString(fmt.Sprintf("#+NAME: %s\n", n.Name))
WriteNodes(w, n.Node)
}
func (w *OrgWriter) WriteComment(c Comment) {
w.WriteString(w.indent + "#" + c.Content + "\n")
}
func (w *OrgWriter) WriteList(l List) { WriteNodes(w, l.Items...) }
func (w *OrgWriter) WriteListItem(li ListItem) {
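// Render the item body into a scratch builder at a deeper indent, then
// restore the original builder before emitting the bullet itself.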
originalBuilder, originalIndent := w.Builder, w.indent
w.Builder, w.indent = strings.Builder{}, w.indent+strings.Repeat(" ", len(li.Bullet)+1)
WriteNodes(w, li.Children...)
content := strings.TrimPrefix(w.String(), w.indent)
w.Builder, w.indent = originalBuilder, originalIndent
w.WriteString(w.indent + li.Bullet)
if li.Status != "" {
w.WriteString(fmt.Sprintf(" [%s]", li.Status))
}
if len(content) > 0 && content[0] == '\n' {
w.WriteString(content)
} else {
w.WriteString(" " + content)
}
}
func (w *OrgWriter) WriteDescriptiveListItem(di DescriptiveListItem) {
w.WriteString(w.indent + di.Bullet)
if di.Status != "" {
w.WriteString(fmt.Sprintf(" [%s]", di.Status))
}
indent := w.indent + strings.Repeat(" ", len(di.Bullet)+1)
if len(di.Term) != 0 {
term := w.WriteNodesAsString(di.Term...)
w.WriteString(" " + term + " ::")
indent = indent + strings.Repeat(" ", len(term)+4)
}
originalBuilder, originalIndent := w.Builder, w.indent
w.Builder, w.indent = strings.Builder{}, indent
WriteNodes(w, di.Details...)
details := strings.TrimPrefix(w.String(), w.indent)
w.Builder, w.indent = originalBuilder, originalIndent
if len(details) > 0 && details[0] == '\n' {
w.WriteString(details)
} else {
w.WriteString(" " + details)
}
}
func (w *OrgWriter) WriteTable(t Table) {
for _, row := range t.Rows {
w.WriteString(w.indent)
if len(row.Columns) == 0 {
w.WriteString(`|`)
for i := 0; i < len(t.ColumnInfos); i++ {
w.WriteString(strings.Repeat("-", t.ColumnInfos[i].Len+2))
if i < len(t.ColumnInfos)-1 {
w.WriteString("+")
}
}
w.WriteString(`|`)
} else {
w.WriteString(`|`)
for _, column := range row.Columns {
w.WriteString(` `)
content := w.WriteNodesAsString(column.Children...)
if content == "" {
content = " "
}
n := column.Len - utf8.RuneCountInString(content)
if n < 0 {
n = 0
}
if column.Align == "center" {
if n%2 != 0 {
w.WriteString(" ")
}
w.WriteString(strings.Repeat(" ", n/2) + content + strings.Repeat(" ", n/2))
} else if column.Align == "right" {
w.WriteString(strings.Repeat(" ", n) + content)
} else {
w.WriteString(content + strings.Repeat(" ", n))
}
w.WriteString(` |`)
}
}
w.WriteString("\n")
}
}
func (w *OrgWriter) WriteHorizontalRule(hr HorizontalRule) {
w.WriteString(w.indent + "-----\n")
}
func (w *OrgWriter) WriteText(t Text) { w.WriteString(t.Content) }
func (w *OrgWriter) WriteEmphasis(e Emphasis) {
borders, ok := emphasisOrgBorders[e.Kind]
if !ok {
panic(fmt.Sprintf("bad emphasis %#v", e))
}
w.WriteString(borders[0])
WriteNodes(w, e.Content...)
w.WriteString(borders[1])
}
func (w *OrgWriter) WriteLatexFragment(l LatexFragment) {
w.WriteString(l.OpeningPair)
WriteNodes(w, l.Content...)
w.WriteString(l.ClosingPair)
}
func (w *OrgWriter) WriteStatisticToken(s StatisticToken) {
w.WriteString(fmt.Sprintf("[%s]", s.Content))
}
func (w *OrgWriter) WriteLineBreak(l LineBreak) {
w.WriteString(strings.Repeat("\n"+w.indent, l.Count))
}
func (w *OrgWriter) WriteExplicitLineBreak(l ExplicitLineBreak) {
w.WriteString(`\\` + "\n" + w.indent)
}
func (w *OrgWriter) WriteTimestamp(t Timestamp) {
w.WriteString("<")
if t.IsDate {
w.WriteString(t.Time.Format(datestampFormat))
} else {
w.WriteString(t.Time.Format(timestampFormat))
}
if t.Interval != "" {
w.WriteString(" " + t.Interval)
}
w.WriteString(">")
}
func (w *OrgWriter) WriteFootnoteLink(l FootnoteLink) {
w.WriteString("[fn:" + l.Name)
if l.Definition != nil {
w.WriteString(":")
WriteNodes(w, l.Definition.Children[0].(Paragraph).Children...)
}
w.WriteString("]")
}
func (w *OrgWriter) WriteRegularLink(l RegularLink) {
if l.AutoLink {
w.WriteString(l.URL)
} else if l.Description == nil {
w.WriteString(fmt.Sprintf("[[%s]]", l.URL))
} else {
w.WriteString(fmt.Sprintf("[[%s][%s]]", l.URL, w.WriteNodesAsString(l.Description...)))
}
}<|fim▁end|> | |
<|file_name|>html.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![allow(unrooted_must_root)]
use dom::bindings::codegen::Bindings::HTMLTemplateElementBinding::HTMLTemplateElementMethods;
use dom::bindings::inheritance::{Castable, CharacterDataTypeId, NodeTypeId};
use dom::bindings::js::{JS, Root};
use dom::bindings::trace::JSTraceable;
use dom::characterdata::CharacterData;
use dom::document::Document;
use dom::documenttype::DocumentType;
use dom::element::Element;
use dom::htmlscriptelement::HTMLScriptElement;
use dom::htmltemplateelement::HTMLTemplateElement;
use dom::node::Node;
use dom::processinginstruction::ProcessingInstruction;
use dom::servoparser::Sink;
use html5ever::QualName;
use html5ever::buffer_queue::BufferQueue;
use html5ever::serialize::{AttrRef, Serialize, Serializer};
use html5ever::serialize::TraversalScope;
use html5ever::serialize::TraversalScope::{ChildrenOnly, IncludeNode};
use html5ever::tokenizer::{Tokenizer as HtmlTokenizer, TokenizerOpts, TokenizerResult};
use html5ever::tree_builder::{Tracer as HtmlTracer, TreeBuilder, TreeBuilderOpts};
use js::jsapi::JSTracer;
use servo_url::ServoUrl;
use std::io;
#[derive(HeapSizeOf, JSTraceable)]
#[must_root]
pub struct Tokenizer {
#[ignore_heap_size_of = "Defined in html5ever"]
inner: HtmlTokenizer<TreeBuilder<JS<Node>, Sink>>,
}
impl Tokenizer {
pub fn new(
document: &Document,
url: ServoUrl,
fragment_context: Option<super::FragmentContext>)
-> Self {
let sink = Sink {
base_url: url,
document: JS::from_ref(document),
current_line: 1,
script: Default::default(),
};
let options = TreeBuilderOpts {
ignore_missing_rules: true,
.. Default::default()
};
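// Fragment parsing: when a context element is supplied, seed the tree
// builder and the tokenizer state from it; otherwise construct a
// full-document parser with default tokenizer options.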
let inner = if let Some(fc) = fragment_context {
let tb = TreeBuilder::new_for_fragment(
sink,
JS::from_ref(fc.context_elem),
fc.form_elem.map(|n| JS::from_ref(n)),
options);
let tok_options = TokenizerOpts {
initial_state: Some(tb.tokenizer_state_for_context_elem()),
.. Default::default()
};
HtmlTokenizer::new(tb, tok_options)
} else {
HtmlTokenizer::new(TreeBuilder::new(sink, options), Default::default())
};
Tokenizer {
inner: inner,
}
}
pub fn feed(&mut self, input: &mut BufferQueue) -> Result<(), Root<HTMLScriptElement>> {
match self.inner.feed(input) {
TokenizerResult::Done => Ok(()),
TokenizerResult::Script(script) => Err(Root::from_ref(script.downcast().unwrap())),
}
}
pub fn end(&mut self) {
self.inner.end();
}
pub fn url(&self) -> &ServoUrl {
&self.inner.sink.sink.base_url
}
pub fn set_plaintext_state(&mut self) {
self.inner.set_plaintext_state();
}
}
#[allow(unsafe_code)]
unsafe impl JSTraceable for HtmlTokenizer<TreeBuilder<JS<Node>, Sink>> {
unsafe fn trace(&self, trc: *mut JSTracer) {
struct Tracer(*mut JSTracer);
let tracer = Tracer(trc);
impl HtmlTracer for Tracer {
type Handle = JS<Node>;
#[allow(unrooted_must_root)]
fn trace_handle(&self, node: &JS<Node>) {
unsafe { node.trace(self.0); }
}<|fim▁hole|> tree_builder.sink.trace(trc);
}
}
impl<'a> Serialize for &'a Node {
fn serialize<S: Serializer>(&self, serializer: &mut S,
traversal_scope: TraversalScope) -> io::Result<()> {
let node = *self;
match (traversal_scope, node.type_id()) {
(_, NodeTypeId::Element(..)) => {
let elem = node.downcast::<Element>().unwrap();
let name = QualName::new(None, elem.namespace().clone(),
elem.local_name().clone());
if traversal_scope == IncludeNode {
let attrs = elem.attrs().iter().map(|attr| {
let qname = QualName::new(None, attr.namespace().clone(),
attr.local_name().clone());
let value = attr.value().clone();
(qname, value)
}).collect::<Vec<_>>();
let attr_refs = attrs.iter().map(|&(ref qname, ref value)| {
let ar: AttrRef = (&qname, &**value);
ar
});
serializer.start_elem(name.clone(), attr_refs)?;
}
let children = if let Some(tpl) = node.downcast::<HTMLTemplateElement>() {
// https://github.com/w3c/DOM-Parsing/issues/1
tpl.Content().upcast::<Node>().children()
} else {
node.children()
};
for handle in children {
(&*handle).serialize(serializer, IncludeNode)?;
}
if traversal_scope == IncludeNode {
serializer.end_elem(name.clone())?;
}
Ok(())
},
(ChildrenOnly, NodeTypeId::Document(_)) => {
for handle in node.children() {
(&*handle).serialize(serializer, IncludeNode)?;
}
Ok(())
},
(ChildrenOnly, _) => Ok(()),
(IncludeNode, NodeTypeId::DocumentType) => {
let doctype = node.downcast::<DocumentType>().unwrap();
serializer.write_doctype(&doctype.name())
},
(IncludeNode, NodeTypeId::CharacterData(CharacterDataTypeId::Text)) => {
let cdata = node.downcast::<CharacterData>().unwrap();
serializer.write_text(&cdata.data())
},
(IncludeNode, NodeTypeId::CharacterData(CharacterDataTypeId::Comment)) => {
let cdata = node.downcast::<CharacterData>().unwrap();
serializer.write_comment(&cdata.data())
},
(IncludeNode, NodeTypeId::CharacterData(CharacterDataTypeId::ProcessingInstruction)) => {
let pi = node.downcast::<ProcessingInstruction>().unwrap();
let data = pi.upcast::<CharacterData>().data();
serializer.write_processing_instruction(&pi.target(), &data)
},
(IncludeNode, NodeTypeId::DocumentFragment) => Ok(()),
(IncludeNode, NodeTypeId::Document(_)) => panic!("Can't serialize Document node itself"),
}
}
}<|fim▁end|> | }
let tree_builder = &self.sink;
tree_builder.trace_handles(&tracer); |
<|file_name|>dstr-async-gen-meth-dflt-ary-init-iter-close.js<|end_file_name|><|fim▁begin|>// This file was procedurally generated from the following sources:
// - src/dstr-binding/ary-init-iter-close.case
// - src/dstr-binding/default/cls-decl-async-gen-meth-dflt.template
/*---<|fim▁hole|>features: [Symbol.iterator, async-iteration]
flags: [generated, async]
info: |
ClassDeclaration : class BindingIdentifier ClassTail
1. Let className be StringValue of BindingIdentifier.
2. Let value be the result of ClassDefinitionEvaluation of ClassTail with
argument className.
[...]
14.5.14 Runtime Semantics: ClassDefinitionEvaluation
21. For each ClassElement m in order from methods
a. If IsStatic of m is false, then
i. Let status be the result of performing
PropertyDefinitionEvaluation for m with arguments proto and
false.
[...]
Runtime Semantics: PropertyDefinitionEvaluation
AsyncGeneratorMethod :
async [no LineTerminator here] * PropertyName ( UniqueFormalParameters )
{ AsyncGeneratorBody }
1. Let propKey be the result of evaluating PropertyName.
2. ReturnIfAbrupt(propKey).
3. If the function code for this AsyncGeneratorMethod is strict mode code, let strict be true.
Otherwise let strict be false.
4. Let scope be the running execution context's LexicalEnvironment.
5. Let closure be ! AsyncGeneratorFunctionCreate(Method, UniqueFormalParameters,
AsyncGeneratorBody, scope, strict).
[...]
13.3.3.5 Runtime Semantics: BindingInitialization
BindingPattern : ArrayBindingPattern
[...]
4. If iteratorRecord.[[done]] is false, return ? IteratorClose(iterator,
result).
[...]
---*/
var doneCallCount = 0;
var iter = {};
iter[Symbol.iterator] = function() {
return {
next: function() {
return { value: null, done: false };
},
return: function() {
doneCallCount += 1;
return {};
}
};
};
var callCount = 0;
class C {
async *method([x] = iter) {
assert.sameValue(doneCallCount, 1);
callCount = callCount + 1;
}
};
new C().method().next().then(() => {
assert.sameValue(callCount, 1, 'invoked exactly once');
}).then($DONE, $DONE);<|fim▁end|> | description: Iterator is closed when not exhausted by pattern evaluation (class expression async generator method (default parameters))
esid: sec-class-definitions-runtime-semantics-evaluation |
<|file_name|>repo_branch.go<|end_file_name|><|fim▁begin|>// Copyright 2016 The Gogs Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package gitea
import (
"fmt"
"code.gitea.io/gitea/modules/structs"
)
// Branch is equal to structs.Branch
type Branch = structs.Branch
// ListRepoBranches list all the branches of one repository
func (c *Client) ListRepoBranches(user, repo string) ([]*Branch, error) {<|fim▁hole|>// GetRepoBranch get one branch's information of one repository
func (c *Client) GetRepoBranch(user, repo, branch string) (*Branch, error) {
b := new(Branch)
return b, c.getParsedResponse("GET", fmt.Sprintf("/repos/%s/%s/branches/%s", user, repo, branch), nil, nil, &b)
}<|fim▁end|> | branches := make([]*Branch, 0, 10)
return branches, c.getParsedResponse("GET", fmt.Sprintf("/repos/%s/%s/branches", user, repo), nil, nil, &branches)
}
|
<|file_name|>router.js<|end_file_name|><|fim▁begin|>import Ember from 'ember';
import config from './config/environment';
import googlePageview from './mixins/google-pageview';
const Router = Ember.Router.extend(googlePageview, {
location: config.locationType
});
Router.map(function() {<|fim▁hole|> this.route('loading');
});
this.route('stops', function(){
this.route('by-frequency');
this.route('walkshed');
this.route('transitshed');
this.route('stop', { path: "/:stop-id" });
});
this.route('operators', function(){
this.route('service-areas');
this.route('operator', { path: "/:operator-id" });
});
this.route('route-stop-patterns', function(){
});
this.route('error', { path: "*path" });
this.route('isochrones');
this.route('map-matching');
});
export default Router;<|fim▁end|> | this.route('routes', function(){
this.route('route', { path: "/:route-id" }); |
<|file_name|>Program.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (C) 2015-2021 Martin Glueck All rights reserved
# Neugasse 2, A--2244 Spannberg, Austria. [email protected]
# #*** <License> ************************************************************#
# This module is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of<|fim▁hole|>#
# You should have received a copy of the GNU General Public License
# along with this module. If not, see <http://www.gnu.org/licenses/>.
# #*** </License> ***********************************************************#
#
#++
# Name
# STG.Program
#
# Purpose
# A application program used by a device
#
#--
from Once_Property import Once_Property
from STG._Object_ import _STG_Object_
from STG._Program_Object_ import _Program_Object_
from STG.Parameter import Parameter, Parameter_Ref, Parameter_Type, Absolute_Segment
from STG.Language import Language
import os
from collections import defaultdict
class Static (_STG_Object_) :
"""Find a static reference"""
def __init__ (self, xml) :
super ().__init__ ()
self.xml = xml
self.memories = dict ()
# end def __init__
def find (self, id, tag, cls = None, * args, ** kw) :
result = super ().get \
(self.xml, "//E:%s[@Id='%s']" % (tag, id))
if cls :
result = cls (xml = result, * args, ** kw)
return result
# end def find
def get (self, id, tag, cls = None, * args, ** kw) :
if id not in cls.Table :
cls.Table [id] = self.find (id, tag, cls, * args, ** kw)
return cls.Table [id]
# end def get
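# Illustration (hypothetical id): lookups are memoized per class, so
#   static.Parameter_Type ("PT-1") is static.Parameter_Type ("PT-1")
# holds once the first call has populated Parameter_Type.Table.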
def Parameter_Ref (self, id, parent, program) :
return self.get \
( id, "ParameterRef", Parameter_Ref
, static = self, parent = parent, program = program
)
# end def Parameter_Ref
def Parameter (self, id) :
return self.get \
(id, "Parameter", Parameter, static = self)
# end def Parameter
def Parameter_Type (self, id) :
return self.get \
(id, "ParameterType", Parameter_Type, static = self)
# end def Parameter_Type
def Memory (self, id) :
result = self.get \
(id, "AbsoluteSegment", Absolute_Segment, static = self)
self.memories [id] = result
return result
# end def Code_Segment
# end class Static
class Program (_Program_Object_) :
"""An application program used by an EIB device"""
def __init__ (self, xml) :
super ().__init__ ()
self.xml = xml
self.id = xml.get ("Id")
self.mask = xml.get ("MaskVersion")
self.raw_name = xml.get ("Name")
self.manu_id = int (self.id[2:6], 16)
self.app_number = int (xml.get ("ApplicationNumber"))
self.app_version = int (xml.get ("ApplicationVersion"))
prop_load = self.xpath (xml, "//E:LdCtrlCompareProp[@PropId=78]")
if prop_load :
idata = prop_load [0].get ("InlineData")
data = []
for i in range (len (idata) // 2) :
data.append ("0x" + idata [i*2:i*2+2])
data = ", ".join (data)
else :
data = "-"
self.load_compare = data
self.parameter_refs = dict ()
self.com_object_refs = dict ()
static = Static (self.get (xml, "E:Static"))
for abse in self.xpath (xml, "//E:AbsoluteSegment") :
static.Memory (abse.get ("Id"))
self._visit_element (self, self.get (xml, "E:Dynamic"), static)
self._setup_tables (static)
self._finalize ()
# end def __init__
def _finalize (self) :
self.memory_segments = \
[ m for m in sorted ( Absolute_Segment.Table.values ()
, key = lambda m : m.address
)
]
ram_section = \
[ m for m in self.memory_segments
if (m.size > 1) and m.data is None
]
if ram_section :
self.com_ram_memory = ram_section [0]
self.com_objects_by_number = defaultdict (list)
for cor in self.com_object_refs.values () :
self.com_objects_by_number [cor.number].append (cor)
# end def _finalize
def as_html (self, template = "parameter_overview-grid.jnj") :
from jinja2 import Environment, FileSystemLoader
path = os.path.dirname (__file__)
env = Environment \
(loader = FileSystemLoader (os.path.join (path, "jinja")))
template = env.get_template (template)
return template.render (dict (device = self))
# end def as_html
def eeprom_as_html (self, reference_address = 0) :
p_refs = sorted \
( ( pr for pr in self.parameter_refs.values ()
if pr.parameter.address
)
, key = lambda pr : (pr.parameter.address, pr.parameter.mask)
)
from jinja2 import Environment, FileSystemLoader
path = os.path.dirname (__file__)
env = Environment \
(loader = FileSystemLoader (os.path.join (path, "jinja")))
template = env.get_template ("eeprom_layout.jnj")
return template.render \
( dict ( p_refs = p_refs
, program = self
, ref_addr = reference_address
)
)
# end def eeprom_as_html
@Once_Property
def name (self) :
return Language.Translation (self.id, "Name", self.raw_name)
# end def name
def _setup_tables (self, static) :
adr_tab = self.get (self.xml, "//E:AddressTable")
aso_tab = self.get (self.xml, "//E:AssociationTable")
com_tab = self.get (self.xml, "//E:ComObjectTable")
self.address_table = \
( int (adr_tab.get ("Offset"))
, int (adr_tab.get ("MaxEntries"))
, static.Memory (adr_tab.get ("CodeSegment"))
)
self.assoc_table = \
( int (aso_tab.get ("Offset"))
, int (aso_tab.get ("MaxEntries"))
, static.Memory (aso_tab.get ("CodeSegment"))
)
self.com_table = \
( int (com_tab.get ("Offset"))
, static.Memory (com_tab.get ("CodeSegment"))
)
# end def _setup_tables
### pickle interfaces
Value_Attributes = ("id", "mask", "app_number", "app_version", "load_compare")
@property
def pickle_cargo (self) :
result = super ().pickle_cargo
for attr in "address_table", "assoc_table", "com_table" :
value = getattr (self, attr)
value = value [:-1] + (value [-1].id, )
result [attr] = value
return result
# end def pickle_cargo
@classmethod
def From_Pickle (cls, dump) :
for attr in "address_table", "assoc_table", "com_table" :
value = dump [attr]
value = value [:-1] + (Absolute_Segment.Table [value [-1]], )
dump [attr] = value
result = super (Program, cls).From_Pickle (None, dump)
result._finalize ()
return result
# end def From_Pickle
# end class Program
if __name__ == "__main__" :
from STG._Object_ import _STG_Object_
from STG.Language import Language
from STG.Datapoint import Datapoint
import sys
if len (sys.argv) > 2 :
master = Datapoint.Parse (sys.argv [2])
Datapoint.From_Master (master)
root = _STG_Object_.Parse (sys.argv [1])
Language.add (root)
Language.set ("de-DE")
if 1 :
prg = Program (Program.get (root, "//E:ApplicationProgram"))
if len (sys.argv) > 3 :
file = open (sys.argv [3], "w", encoding = "utf-8")
else :
file = sys.stdout
file.write (prg.as_html ())
if len (sys.argv) > 3 :
file.close ()
print (prg.name)
### __END__ STG.Program<|fim▁end|> | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details. |
<|file_name|>test_smt.py<|end_file_name|><|fim▁begin|>import numpy as np
import amnet
import z3
from numpy.linalg import norm
import sys
import unittest
import itertools
VISUALIZE = True # output graphviz drawings
if VISUALIZE:
import amnet.vis
class TestSmt(unittest.TestCase):
@classmethod
def setUpClass(cls):
print 'Setting up test floats.'
cls.floatvals = np.concatenate(
(np.linspace(-5., 5., 11), np.linspace(-5., 5., 10)),
axis=0
)
cls.floatvals2 = np.concatenate(
(np.linspace(-5., 5., 3), np.linspace(-.5, .5, 2)),
axis=0
)
cls.floatvals3 = np.linspace(-5., 5., 3)
cls.FPTOL = 1e-8
# set up global z3 parameters
# parameters from https://stackoverflow.com/a/12516269
#z3.set_param('auto_config', False)
#z3.set_param('smt.case_split', 5)
#z3.set_param('smt.relevancy', 2)
def validate_outputs(self, phi, onvals, true_f=None, verbose=False):
# encode phi using default context and solver
enc = amnet.smt.SmtEncoder(phi=phi, solver=None)
# tap the input and output vars
invar = enc.var_of_input()
outvar = enc.var_of(phi)
# check dimensions
self.assertEqual(phi.indim, len(invar))
self.assertEqual(phi.outdim, len(outvar))
# go through inputs
for val in onvals:
# get a new value
fpval = np.array(val)
self.assertEqual(len(fpval), phi.indim)
# evaluate using the Amn tree
fpeval = phi.eval(fpval)
self.assertEqual(len(fpeval), phi.outdim)
if verbose:
print 'inp:', fpval
print 'fpeval: ', fpeval
# compare to true floating point function, if it's provided
if true_f is not None:
true_eval = true_f(fpval)
if verbose: print 'true_eval: ', true_eval
self.assertAlmostEqual(norm(true_eval - fpeval), 0)
# set the z3 input
enc.solver.push()
for i in range(len(invar)):
enc.solver.add(invar[i] == fpval[i])
# run z3 to check for satisfiability
result = enc.solver.check()
#if verbose: print enc.solver
self.assertTrue(result == z3.sat)
# extract the output
model = enc.solver.model()
smteval = np.zeros(len(outvar))
for i in range(len(outvar)):
smteval[i] = amnet.util.mfp(model, outvar[i])
# check that the outputs match
if verbose: print 'smteval: ', smteval
self.assertAlmostEqual(norm(smteval - fpeval), 0)
enc.solver.pop()
def donot_test_SmtEncoder_mu_big(self):
xyz = amnet.Variable(3, name='xyz')
x = amnet.atoms.select(xyz, 0)
y = amnet.atoms.select(xyz, 1)
z = amnet.atoms.select(xyz, 2)
w = amnet.Mu(x, y, z)
def true_mu(fpin):
x, y, z = fpin
return x if z <= 0 else y
self.validate_outputs(
phi=w,
onvals=itertools.product(self.floatvals, repeat=w.indim),
true_f=true_mu
)
def test_SmtEncoder_mu_small(self):
xyz = amnet.Variable(3, name='xyz')
x = amnet.atoms.select(xyz, 0)
y = amnet.atoms.select(xyz, 1)
z = amnet.atoms.select(xyz, 2)
w = amnet.Mu(x, y, z)
def true_mu(fpin):
x, y, z = fpin
return x if z <= 0 else y
self.validate_outputs(
phi=w,
onvals=itertools.product(self.floatvals2, repeat=w.indim),
true_f=true_mu
)
if VISUALIZE: amnet.vis.quick_vis(phi=w, title='mu')
def test_SmtEncoder_max_all_2(self):
xy = amnet.Variable(2, name='xy')
phi_max2 = amnet.atoms.max_all(xy)
self.assertEqual(phi_max2.indim, 2)
def true_max2(fpin):
x, y = fpin
return max(x, y)
self.validate_outputs(
phi=phi_max2,
onvals=itertools.product(self.floatvals, repeat=phi_max2.indim),
true_f=true_max2
)
def test_SmtEncoder_min_all_2(self):
xy = amnet.Variable(2, name='xy')
phi_min2 = amnet.atoms.min_all(xy)
self.assertEqual(phi_min2.indim, 2)
def true_min2(fpin):
x, y = fpin
return min(x, y)
self.validate_outputs(
phi=phi_min2,
onvals=itertools.product(self.floatvals, repeat=phi_min2.indim),
true_f=true_min2
)
def test_SmtEncoder_max_all_3_small(self):
        xyz = amnet.Variable(3, name='xyz')
phi_max3 = amnet.atoms.max_all(xyz)
self.assertEqual(phi_max3.indim, 3)
def true_max3(fpin):
x, y, z = fpin
return max(x, y, z)
self.validate_outputs(
phi=phi_max3,
onvals=itertools.product(self.floatvals2, repeat=phi_max3.indim),
true_f=true_max3
)
def test_SmtEncoder_min_all_3_small(self):
        xyz = amnet.Variable(3, name='xyz')
phi_min3 = amnet.atoms.min_all(xyz)
self.assertEqual(phi_min3.indim, 3)
def true_min3(fpin):
x, y, z = fpin
return min(x, y, z)
self.validate_outputs(
phi=phi_min3,
onvals=itertools.product(self.floatvals2, repeat=phi_min3.indim),
true_f=true_min3
)
def test_SmtEncoder_add_all(self):
xyz = amnet.Variable(3, name='xyz')
phi_add = amnet.atoms.add_all(xyz)
self.assertEqual(phi_add.outdim, 1)
self.assertEqual(phi_add.indim, 3)
def true_add(fpin):
return sum(fpin)
self.validate_outputs(
phi=phi_add,
onvals=itertools.product(self.floatvals2, repeat=phi_add.indim),
true_f=true_add
)
def test_SmtEncoder_add_list(self):
xyz = amnet.Variable(2+2+2, name='xyz')
x = amnet.Linear(np.eye(2, 6, 0), xyz)
y = amnet.Linear(np.eye(2, 6, 2), xyz)
z = amnet.Linear(np.eye(2, 6, 4), xyz)
phi_add_list = amnet.atoms.add_list([x, y, z])
self.assertEqual(x.outdim, 2)
self.assertEqual(y.outdim, 2)
self.assertEqual(z.outdim, 2)
self.assertEqual(phi_add_list.outdim, 2)
self.assertEqual(phi_add_list.indim, 6)
def true_add(fpin):
x, y, z = fpin[0:2], fpin[2:4], fpin[4:6]
return x + y + z
self.validate_outputs(
phi=phi_add_list,
onvals=itertools.product(self.floatvals3, repeat=phi_add_list.indim),
true_f=true_add
)
def test_SmtEncoder_triplexer(self):
np.random.seed(1)
TOTAL_RUNS=5
#print ""
for iter in range(TOTAL_RUNS):
#print "Testing random triplexer [%d/%d]..." % (iter+1, TOTAL_RUNS),
# create a random triplexer
x = amnet.Variable(1, name='x')
a = 3 * (2 * np.random.rand(4) - 1)
b = 3 * (2 * np.random.rand(4) - 1)
c = 3 * (2 * np.random.rand(4) - 1)
d = 3 * (2 * np.random.rand(4) - 1)
e = 3 * (2 * np.random.rand(4) - 1)
f = 3 * (2 * np.random.rand(4) - 1)
phi_tri = amnet.atoms.triplexer(x, a, b, c, d, e, f)
def true_tri(fpin):
return amnet.atoms.fp_triplexer(fpin, a, b, c, d, e, f)
xvals = 50 * (2 * np.random.rand(100) - 1)
onvals = itertools.product(xvals, repeat=1)
self.validate_outputs(
phi=phi_tri,
onvals=onvals,
true_f=true_tri
)
#print "done!"
def test_SmtEncoder_max_aff(self):
np.random.seed(1)
m = 10
n = 4
A = np.random.randint(-5, 6, m*n).reshape((m, n))
b = np.random.randint(-5, 6, m).reshape((m,))
b[np.random.randint(0, n)] = 0 # make sure there is a Linear term
x = amnet.Variable(n, name='x')
y = amnet.atoms.max_aff(A, x, b)
self.assertEqual(y.indim, n)
self.assertEqual(y.outdim, 1)
def true_max_aff(fpin):
vals = np.dot(A, fpin) + b
assert len(vals) == m
return np.max(vals)
self.validate_outputs(
phi=y,
onvals=itertools.product(self.floatvals3, repeat=y.indim),
true_f=true_max_aff
)
# visualize max_aff
if VISUALIZE: amnet.vis.quick_vis(y, title='max_aff')
def test_SmtEncoder_min_aff(self):
np.random.seed(1)
m = 10
n = 4
A = np.random.randint(-5, 6, m*n).reshape((m, n))
b = np.random.randint(-5, 6, m).reshape((m,))
b[np.random.randint(0, n)] = 0 # make sure there is a Linear term
x = amnet.Variable(n, name='x')
y = amnet.atoms.min_aff(A, x, b)
self.assertEqual(y.indim, n)
self.assertEqual(y.outdim, 1)
def true_min_aff(fpin):
vals = np.dot(A, fpin) + b
assert len(vals) == m
return np.min(vals)
self.validate_outputs(<|fim▁hole|> )
# visualize min_aff
if VISUALIZE: amnet.vis.quick_vis(y, title='min_aff')
def test_SmtEncoder_dag(self):
xyz = amnet.Variable(3, name='xyz')
x = amnet.atoms.select(xyz, 0)
yz = amnet.Linear(
np.array([[0, 1, 0], [0, 0, 1]]),
xyz
)
maxyz = amnet.atoms.max_all(yz)
twoxp1 = amnet.Affine(
np.array([[2]]),
x,
np.array([1])
)
twox = amnet.atoms.add2(x, x)
threex = amnet.atoms.add2(x, twox)
fivexp1 = amnet.atoms.add2(twoxp1, threex)
phi = amnet.atoms.add2(fivexp1, maxyz)
def true_dag(fpin):
x, y, z = fpin
return 5*x + 1 + max(y, z)
self.validate_outputs(
phi=phi,
onvals=itertools.product(self.floatvals2, repeat=3),
true_f=true_dag
)
# visualize dag
if VISUALIZE: amnet.vis.quick_vis(phi, title='dag')
def test_SmtEncoder_relu_1(self):
x = amnet.Variable(1, name='x')
y = amnet.atoms.relu(x)
def true_relu(fpin):
return max(fpin[0], 0)
self.validate_outputs(
phi=y,
onvals=itertools.product(self.floatvals, repeat=y.indim),
true_f=true_relu
)
def test_SmtEncoder_relu_2(self):
x = amnet.Variable(3, name='x')
y = amnet.atoms.relu(x)
def true_relu(fpin):
return np.maximum(fpin, 0)
self.validate_outputs(
phi=y,
onvals=itertools.product(self.floatvals2, repeat=y.indim),
true_f=true_relu
)
# visualize relu
if VISUALIZE: amnet.vis.quick_vis(y, title='relu_2')
def test_SmtEncoder_relu_old(self):
x = amnet.Variable(3, name='x')
y = amnet.atoms.relu_old(x)
def true_relu(fpin):
return np.maximum(fpin, 0)
self.validate_outputs(
phi=y,
onvals=itertools.product(self.floatvals2, repeat=y.indim),
true_f=true_relu
)
# visualize relu_old
if VISUALIZE: amnet.vis.quick_vis(y, title='relu_old')
def test_SmtEncoder_gates(self):
xy_z1z2 = amnet.Variable(2+2+1+1, name='xyz1z2')
x = amnet.Linear(
np.eye(2, 6, 0),
xy_z1z2
)
y = amnet.Linear(
np.eye(2, 6, 2),
xy_z1z2
)
z1 = amnet.atoms.select(xy_z1z2, 4)
z2 = amnet.atoms.select(xy_z1z2, 5)
phi_and = amnet.atoms.gate_and(x, y, z1, z2)
phi_or = amnet.atoms.gate_or(x, y, z1, z2)
phi_xor = amnet.atoms.gate_xor(x, y, z1, z2)
phi_not = amnet.atoms.gate_not(x, y, z1)
# check dimensions
self.assertEqual(xy_z1z2.outdim, 6)
self.assertEqual(x.outdim, 2)
self.assertEqual(y.outdim, 2)
self.assertEqual(z1.outdim, 1)
self.assertEqual(z2.outdim, 1)
self.assertEqual(phi_and.outdim, 2)
self.assertEqual(phi_or.outdim, 2)
self.assertEqual(phi_xor.outdim, 2)
self.assertEqual(phi_not.outdim, 2)
# true gate functions
def true_and(fpin):
return fpin[0:2] if (fpin[4] <= 0 and fpin[5] <= 0) else fpin[2:4]
def true_or(fpin):
return fpin[0:2] if (fpin[4] <= 0 or fpin[5] <= 0) else fpin[2:4]
def true_xor(fpin):
return fpin[0:2] if ((fpin[4] <= 0) != (fpin[5] <= 0)) else fpin[2:4]
def true_not(fpin): # ignores last input
return fpin[2:4] if (fpin[4] <= 0) else fpin[0:2]
# evaluate
vals = np.array([1, -2, -3, 4])
sels = itertools.product([-1, 0, 1], repeat=2)
onvals = [np.concatenate((vals, sel), axis=0) for sel in sels]
self.validate_outputs(phi=phi_and, onvals=onvals, true_f=true_and)
self.validate_outputs(phi=phi_or, onvals=onvals, true_f=true_or)
self.validate_outputs(phi=phi_xor, onvals=onvals, true_f=true_xor)
self.validate_outputs(phi=phi_not, onvals=onvals, true_f=true_not)
def test_SmtEncoder_cmp(self):
xyz = amnet.Variable(2+2+1, name='xyz')
x = amnet.Linear(
np.eye(2, 5, 0),
xyz
)
y = amnet.Linear(
np.eye(2, 5, 2),
xyz
)
z = amnet.atoms.select(xyz, 4)
phi_eq = amnet.atoms.cmp_eq(x, y, z)
phi_neq = amnet.atoms.cmp_neq(x, y, z)
phi_ge = amnet.atoms.cmp_ge(x, y, z)
phi_gt = amnet.atoms.cmp_gt(x, y, z)
phi_le = amnet.atoms.cmp_le(x, y, z)
phi_lt = amnet.atoms.cmp_lt(x, y, z)
# check dimensions
self.assertEqual(xyz.outdim, 5)
self.assertEqual(x.outdim, 2)
self.assertEqual(y.outdim, 2)
self.assertEqual(z.outdim, 1)
self.assertEqual(phi_eq.outdim, 2)
self.assertEqual(phi_neq.outdim, 2)
self.assertEqual(phi_ge.outdim, 2)
self.assertEqual(phi_gt.outdim, 2)
self.assertEqual(phi_le.outdim, 2)
self.assertEqual(phi_lt.outdim, 2)
# true cmp functions
def true_eq(fpin):
x, y, z = fpin[0:2], fpin[2:4], fpin[4]
return x if z == 0 else y
def true_neq(fpin):
x, y, z = fpin[0:2], fpin[2:4], fpin[4]
return x if z != 0 else y
def true_ge(fpin):
x, y, z = fpin[0:2], fpin[2:4], fpin[4]
return x if z >= 0 else y
def true_gt(fpin):
x, y, z = fpin[0:2], fpin[2:4], fpin[4]
return x if z > 0 else y
def true_le(fpin):
x, y, z = fpin[0:2], fpin[2:4], fpin[4]
return x if z <= 0 else y
def true_lt(fpin):
x, y, z = fpin[0:2], fpin[2:4], fpin[4]
return x if z < 0 else y
# evaluate
vals = np.array([1, -2, -3, 4])
sels = itertools.product([-1.1, -0.5, 0, 0.0, 0.01, 1, 12.0], repeat=1)
onvals = [np.concatenate((vals, sel), axis=0) for sel in sels]
self.validate_outputs(phi=phi_eq, onvals=onvals, true_f=true_eq)
self.validate_outputs(phi=phi_neq, onvals=onvals, true_f=true_neq)
self.validate_outputs(phi=phi_ge, onvals=onvals, true_f=true_ge)
self.validate_outputs(phi=phi_gt, onvals=onvals, true_f=true_gt)
self.validate_outputs(phi=phi_le, onvals=onvals, true_f=true_le)
self.validate_outputs(phi=phi_lt, onvals=onvals, true_f=true_lt)
def test_SmtEncoder_identity(self):
x = amnet.Variable(2, name='x')
w = np.array([[1, 2], [3, 4]])
b = np.array([-1, -1])
y = amnet.Affine(w, x, b)
z = amnet.atoms.identity(y)
self.assertEqual(y.outdim, 2)
self.assertEqual(z.outdim, 2)
self.assertEqual(z.indim, 2)
def true_z(fpin):
return np.dot(w, fpin) + b
self.validate_outputs(
phi=z,
onvals=itertools.product(self.floatvals, repeat=z.indim),
true_f=true_z
)
def test_SmtEncoder_absval1(self):
x = amnet.Variable(1, name='x')
y = amnet.atoms.absval(x)
self.assertEqual(y.outdim, 1)
self.assertEqual(y.indim, 1)
def true_absval(fpin):
return abs(fpin)
self.validate_outputs(
phi=y,
onvals=itertools.product(self.floatvals, repeat=y.indim),
true_f = true_absval
)
# visualize absval1
if VISUALIZE: amnet.vis.quick_vis(y, title='absval1')
def test_SmtEncoder_absval3(self):
x = amnet.Variable(3, name='x')
y = amnet.atoms.absval(x)
self.assertEqual(y.outdim, 3)
self.assertEqual(y.indim, 3)
def true_absval(fpin):
x1, x2, x3 = fpin
return np.array([abs(x1), abs(x2), abs(x3)])
self.validate_outputs(
phi=y,
onvals=itertools.product(self.floatvals2, repeat=y.indim),
true_f=true_absval
)
# visualize absval3
if VISUALIZE: amnet.vis.quick_vis(y, title='absval3')
def test_SmtEncoder_sat1(self):
x = amnet.Variable(1, name='x')
y1 = amnet.atoms.sat(x)
y2 = amnet.atoms.sat(x, lo=-3, hi=3)
y3 = amnet.atoms.sat(x, lo=-2, hi=1.5)
self.assertEqual(y1.outdim, 1)
self.assertEqual(y1.indim, 1)
self.assertEqual(y2.outdim, 1)
self.assertEqual(y2.indim, 1)
self.assertEqual(y3.outdim, 1)
self.assertEqual(y3.indim, 1)
# manual tests
self.assertAlmostEqual(norm(y1.eval(np.array([-2])) - np.array([-1])), 0)
self.assertAlmostEqual(norm(y1.eval(np.array([-0.5])) - np.array([-0.5])), 0)
self.assertAlmostEqual(norm(y1.eval(np.array([0])) - np.array([0.0])), 0)
self.assertAlmostEqual(norm(y1.eval(np.array([0.6])) - np.array([0.6])), 0)
self.assertAlmostEqual(norm(y1.eval(np.array([1.6])) - np.array([1.0])), 0)
# automatic tests
def true_sat1(fpval, lo, hi):
x = fpval
if lo <= x <= hi:
return x
elif x < lo:
return lo
else:
return hi
self.validate_outputs(
phi=y1,
onvals=itertools.product(self.floatvals, repeat=y1.indim),
true_f=lambda z: true_sat1(z, -1, 1)
)
self.validate_outputs(
phi=y2,
onvals=itertools.product(self.floatvals, repeat=y2.indim),
true_f=lambda z: true_sat1(z, -3, 3)
)
self.validate_outputs(
phi=y3,
onvals=itertools.product(self.floatvals, repeat=y3.indim),
true_f=lambda z: true_sat1(z, -2, 1.5)
)
# visualize sat1
if VISUALIZE: amnet.vis.quick_vis(y1, title='sat1')
def test_SmtEncoder_sat3(self):
x = amnet.Variable(3, name='x')
y1 = amnet.atoms.sat(x)
y2 = amnet.atoms.sat(x, lo=-3, hi=3)
y3 = amnet.atoms.sat(x, lo=-2, hi=1.5)
self.assertEqual(y1.outdim, 3)
self.assertEqual(y1.indim, 3)
self.assertEqual(y2.outdim, 3)
self.assertEqual(y2.indim, 3)
self.assertEqual(y3.outdim, 3)
self.assertEqual(y3.indim, 3)
# manual tests
self.assertAlmostEqual(norm(y1.eval(np.array([-2, 1.6, 0.5])) - np.array([-1, 1, 0.5])), 0)
self.assertAlmostEqual(norm(y2.eval(np.array([-2, 1.6, 0.5])) - np.array([-2, 1.6, 0.5])), 0)
self.assertAlmostEqual(norm(y3.eval(np.array([-2, 1.6, 0.5])) - np.array([-2, 1.5, 0.5])), 0)
# visualize sat3
if VISUALIZE: amnet.vis.quick_vis(y1, title='sat3')
# automatic tests
def true_sat3(fpin, lo, hi):
return np.clip(fpin, lo, hi)
self.validate_outputs(
phi=y1,
onvals=itertools.product(self.floatvals2, repeat=y1.indim),
true_f=lambda z: true_sat3(z, -1, 1)
)
self.validate_outputs(
phi=y2,
onvals=itertools.product(self.floatvals2, repeat=y2.indim),
true_f=lambda z: true_sat3(z, -3, 3)
)
self.validate_outputs(
phi=y3,
onvals=itertools.product(self.floatvals2, repeat=y3.indim),
true_f=lambda z: true_sat3(z, -2, 1.5)
)
def test_SmtEncoder_dz1(self):
x = amnet.Variable(1, name='x')
y1 = amnet.atoms.dz(x)
y2 = amnet.atoms.dz(x, lo=-3, hi=3)
y3 = amnet.atoms.dz(x, lo=-2, hi=1.5)
self.assertEqual(y1.outdim, 1)
self.assertEqual(y1.indim, 1)
self.assertEqual(y2.outdim, 1)
self.assertEqual(y2.indim, 1)
self.assertEqual(y3.outdim, 1)
self.assertEqual(y3.indim, 1)
# manual tests
self.assertAlmostEqual(norm(y1.eval(np.array([-2])) - np.array([-1])), 0)
self.assertAlmostEqual(norm(y1.eval(np.array([-0.5])) - np.array([0])), 0)
self.assertAlmostEqual(norm(y1.eval(np.array([0])) - np.array([0])), 0)
self.assertAlmostEqual(norm(y1.eval(np.array([0.6])) - np.array([0])), 0)
self.assertAlmostEqual(norm(y1.eval(np.array([1.6])) - np.array([0.6])), 0)
# automatic tests
def true_dz1(fpval, lo, hi):
x = fpval
if lo <= x <= hi:
return 0
elif x < lo:
return x-lo
else:
return x-hi
self.validate_outputs(
phi=y1,
onvals=itertools.product(self.floatvals, repeat=y1.indim),
true_f=lambda z: true_dz1(z, -1, 1)
)
self.validate_outputs(
phi=y2,
onvals=itertools.product(self.floatvals, repeat=y2.indim),
true_f=lambda z: true_dz1(z, -3, 3)
)
self.validate_outputs(
phi=y3,
onvals=itertools.product(self.floatvals, repeat=y3.indim),
true_f=lambda z: true_dz1(z, -2, 1.5)
)
# visualize dz1
if VISUALIZE: amnet.vis.quick_vis(y1, title='dz1')
def test_SmtEncoder_dz3(self):
x = amnet.Variable(3, name='x')
y1 = amnet.atoms.dz(x)
y2 = amnet.atoms.dz(x, lo=-3, hi=3)
y3 = amnet.atoms.dz(x, lo=-2, hi=1.5)
self.assertEqual(y1.outdim, 3)
self.assertEqual(y1.indim, 3)
self.assertEqual(y2.outdim, 3)
self.assertEqual(y2.indim, 3)
self.assertEqual(y3.outdim, 3)
self.assertEqual(y3.indim, 3)
# manual tests
self.assertAlmostEqual(norm(y1.eval(np.array([-2, 1.6, 0.5])) - np.array([-1, 0.6, 0])), 0)
self.assertAlmostEqual(norm(y2.eval(np.array([-2, 1.6, 0.5])) - np.array([0, 0, 0])), 0)
self.assertAlmostEqual(norm(y3.eval(np.array([-2, 1.6, 0.5])) - np.array([0, 0.1, 0])), 0)
# visualize dz3
if VISUALIZE: amnet.vis.quick_vis(y1, title='dz3')
# automatic tests
def true_dz3(fpin, lo, hi):
retv = np.array(fpin)
retv[(retv >= lo) & (retv <= hi)] = 0
retv[retv > hi] -= hi
retv[retv < lo] -= lo
return retv
self.validate_outputs(
phi=y1,
onvals=itertools.product(self.floatvals2, repeat=y1.indim),
true_f=lambda z: true_dz3(z, -1, 1)
)
self.validate_outputs(
phi=y2,
onvals=itertools.product(self.floatvals2, repeat=y2.indim),
true_f=lambda z: true_dz3(z, -3, 3)
)
self.validate_outputs(
phi=y3,
onvals=itertools.product(self.floatvals2, repeat=y3.indim),
true_f=lambda z: true_dz3(z, -2, 1.5)
)
def test_SmtEncoder_norminf1(self):
x = amnet.Variable(1, name='x')
y = amnet.atoms.norminf(x)
self.assertEqual(y.indim, 1)
self.assertEqual(y.outdim, 1)
# visualize norminf1
if VISUALIZE: amnet.vis.quick_vis(y, title='norminf1')
# automatic tests
def true_norminf(fpin):
self.assertEqual(len(fpin), 1)
return norm(fpin, ord=np.inf)
self.validate_outputs(
phi=y,
onvals=itertools.product(self.floatvals, repeat=y.indim),
true_f=true_norminf
)
def test_SmtEncoder_norminf3(self):
x = amnet.Variable(3, name='x')
y = amnet.atoms.norminf(x)
self.assertEqual(y.indim, 3)
self.assertEqual(y.outdim, 1)
# visualize norminf3
if VISUALIZE: amnet.vis.quick_vis(y, title='norminf3')
# automatic tests
def true_norminf(fpin):
self.assertEqual(len(fpin), 3)
return norm(fpin, ord=np.inf)
self.validate_outputs(
phi=y,
onvals=itertools.product(self.floatvals2, repeat=y.indim),
true_f=true_norminf
)
def test_SmtEncoder_norm11(self):
x = amnet.Variable(1, name='x')
y = amnet.atoms.norm1(x)
self.assertEqual(y.indim, 1)
self.assertEqual(y.outdim, 1)
# visualize norm11
if VISUALIZE: amnet.vis.quick_vis(y, title='norm11')
# automatic tests
def true_norm1(fpin):
self.assertEqual(len(fpin), 1)
return norm(fpin, ord=1)
self.validate_outputs(
phi=y,
onvals=itertools.product(self.floatvals, repeat=y.indim),
true_f=true_norm1
)
def test_SmtEncoder_norm13(self):
x = amnet.Variable(3, name='x')
y = amnet.atoms.norm1(x)
self.assertEqual(y.indim, 3)
self.assertEqual(y.outdim, 1)
# visualize norm13
if VISUALIZE: amnet.vis.quick_vis(y, title='norm13')
# automatic tests
def true_norm1(fpin):
self.assertEqual(len(fpin), 3)
return norm(fpin, ord=1)
self.validate_outputs(
phi=y,
onvals=itertools.product(self.floatvals2, repeat=y.indim),
true_f=true_norm1
)
def test_SmtEncoder_phase_vgc(self):
alpha1 = 1.5
alpha2 = -0.7
x = amnet.Variable(2, name='x')
e = amnet.atoms.select(x, 0)
edot = amnet.atoms.select(x, 1)
phi_vgc1 = amnet.atoms.phase_vgc(e, edot, alpha=alpha1)
phi_vgc2 = amnet.atoms.phase_vgc(e, edot, alpha=alpha2)
self.assertEqual(phi_vgc1.indim, 2)
self.assertEqual(phi_vgc1.outdim, 1)
self.assertEqual(phi_vgc2.indim, 2)
self.assertEqual(phi_vgc2.outdim, 1)
# visualize vgc
if VISUALIZE:
ctx = amnet.smt.NamingContext(phi_vgc1)
ctx.rename(e, 'e')
ctx.rename(edot, 'edot')
ctx.rename(phi_vgc1, 'phi_vgc1')
amnet.vis.quick_vis(phi_vgc1, title='phase_vgc', ctx=ctx)
# manual tests
self.assertAlmostEqual(norm(phi_vgc1.eval(np.array([1.1, 1.2])) - np.array([alpha1 * 1.1])), 0)
self.assertAlmostEqual(norm(phi_vgc1.eval(np.array([1.1, -1.2])) - np.array([0])), 0)
self.assertAlmostEqual(norm(phi_vgc1.eval(np.array([-1.1, -1.2])) - np.array([alpha1 * (-1.1)])), 0)
self.assertAlmostEqual(norm(phi_vgc1.eval(np.array([-1.1, 1.2])) - np.array([0])), 0)
self.assertAlmostEqual(norm(phi_vgc1.eval(np.array([1.1, 0])) - np.array([0])), 0)
self.assertAlmostEqual(norm(phi_vgc1.eval(np.array([0, 1.2])) - np.array([0])), 0)
self.assertAlmostEqual(norm(phi_vgc1.eval(np.array([-1.1, 0])) - np.array([0])), 0)
self.assertAlmostEqual(norm(phi_vgc1.eval(np.array([0, -1.2])) - np.array([0])), 0)
self.assertAlmostEqual(norm(phi_vgc1.eval(np.array([0, 0])) - np.array([0])), 0)
# automatic tests
def true_phase_vgc(fpin, alpha):
x1, x2 = fpin
return alpha*x1 if x1*x2 > 0 else 0
self.validate_outputs(
phi=phi_vgc1,
onvals=itertools.product(self.floatvals2, repeat=phi_vgc1.indim),
true_f=lambda xi: true_phase_vgc(xi, alpha=alpha1)
)
self.validate_outputs(
phi=phi_vgc2,
onvals=itertools.product(self.floatvals2, repeat=phi_vgc2.indim),
true_f=lambda xi: true_phase_vgc(xi, alpha=alpha2)
)
def test_NamingContext_multiple_contexts_for(self):
x = amnet.Variable(2, name='x')
y = amnet.Variable(3, name='y')
phi_x = amnet.atoms.max_all(x)
phi_y = amnet.atoms.max_all(y)
# multiple context names
ctx_list = amnet.smt.NamingContext.multiple_contexts_for([phi_x, phi_y])
self.assertEqual(len(ctx_list), 2)
# make sure all names are unique
names = []
for ctx in ctx_list:
names.extend(ctx.symbols.keys())
self.assertEqual(len(names), len(set(names)))
if VISUALIZE:
amnet.vis.quick_vis(phi_x, title='multiple_contexts_phi_x', ctx=ctx_list[0])
amnet.vis.quick_vis(phi_y, title='multiple_contexts_phi_y', ctx=ctx_list[1])
def test_SmtEncoder_multiple_encode(self):
x = amnet.Variable(2, name='x')
y = amnet.Variable(3, name='y')
z = amnet.Variable(2, name='z')
phi_x = amnet.atoms.max_all(x)
phi_y = amnet.atoms.max_all(y)
phi_z = amnet.atoms.max_all(z)
# encode the AMNs
enc_x, enc_y, enc_z = amnet.smt.SmtEncoder.multiple_encode(phi_x, phi_y, phi_z)
solver = enc_x.solver
if VISUALIZE:
amnet.vis.quick_vis(phi_x, title='multiple_encode_phi_x', ctx=enc_x.ctx)
amnet.vis.quick_vis(phi_y, title='multiple_encode_phi_y', ctx=enc_y.ctx)
amnet.vis.quick_vis(phi_z, title='multiple_encode_phi_z', ctx=enc_z.ctx)
# make sure solver object is the same
self.assertTrue(enc_x.solver is solver)
self.assertTrue(enc_y.solver is solver)
self.assertTrue(enc_z.solver is solver)
# link the outputs of x and y to the inputs of z
phi_x_out = enc_x.var_of(phi_x)
phi_y_out = enc_y.var_of(phi_y)
z_in = enc_z.var_of_input()
self.assertEqual(len(phi_x_out), 1)
self.assertEqual(len(phi_y_out), 1)
self.assertEqual(len(z_in), 2)
# solver.add(z_in[0] == phi_x_out[0])
# solver.add(z_in[1] == phi_y_out[0])
amnet.util.eqv_z3(solver, z_in, [phi_x_out[0], phi_y_out[0]])
#print "Linked solver:", solver
# input variables to the linked network
x_in = enc_x.var_of_input()
y_in = enc_y.var_of_input()
phi_z_out = enc_z.var_of(phi_z)
self.assertEqual(len(x_in), 2)
self.assertEqual(len(y_in), 3)
self.assertEqual(len(phi_z_out), 1)
# do some test cases
def do_testcase(xf, yf, fpeval):
solver.push()
#print "Pre-input solver:", solver
amnet.util.eqv_z3(solver, x_in, xf)
amnet.util.eqv_z3(solver, y_in, yf)
#print "Post-input solver:", solver
# check for sat
result = solver.check()
self.assertTrue(result == z3.sat)
self.assertFalse(result == z3.unsat)
# extract the output
model = solver.model()
smteval = amnet.util.mfpv(model, phi_z_out)
#print smteval
# check that the outputs match
self.assertAlmostEqual(norm(smteval - fpeval), 0)
solver.pop()
do_testcase(
xf=np.array([1, 0]),
yf=np.array([-1, -4, 0]),
fpeval=np.array([1])
)
do_testcase(
xf=np.array([1, 4.1]),
yf=np.array([-1, 4.1, 0]),
fpeval=np.array([4.1])
)
do_testcase(
xf = np.array([-1, 0]),
yf = np.array([3, -4, 5]),
fpeval = np.array([5])
)
do_testcase(
xf=np.array([-1, 0]),
yf=np.array([3, 20, 5]),
fpeval=np.array([20])
)
do_testcase(
xf=np.array([-1, -17.1]),
yf=np.array([0, -4, -5]),
fpeval=np.array([0])
)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestSmt)
result = unittest.TextTestRunner(verbosity=2).run(suite)
sys.exit(not result.wasSuccessful())<|fim▁end|> | phi=y,
onvals=itertools.product(self.floatvals3, repeat=y.indim),
true_f=true_min_aff |
<|file_name|>redis_hash_layer.py<|end_file_name|><|fim▁begin|>import redis
from hashlib import sha1
class RedisHashLayer(object):
"""
A more memory-efficient way to store many small values in redis using hashes.
See http://antirez.com/post/redis-weekly-update-7.html
Note: add these config value to redis:
hash-max-zipmap-entries 512
hash-max-zipmap-value 512
"""
def __init__(self,connection,name):
self.connection = connection
self.name = name
def _get_hashname(self,key):
field = sha1(str(key)).hexdigest()
hashkey = "%s:%s" % (self.name, field[:4])
return (hashkey,field)
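    # Illustrative walk-through (digest value invented for the example):
    #   name = "seen", key = "user:42"
    #   field   = sha1("user:42").hexdigest()   # 40 hex chars, e.g. "9a03..."
    #   hashkey = "seen:9a03"                   # bucket = first 4 hex chars
    # This caps the bucket space at 16**4 = 65536 redis hashes, each small
    # enough to stay under the hash-max-zipmap-* limits quoted in the
    # class docstring.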
def __contains__(self,key):
hashkey,field = self._get_hashname(key)
res = self.connection.hget(hashkey,field)
if res:
return True
return False
def add(self,key):
hashkey,field = self._get_hashname(key)
self.connection.hset(hashkey,field,field)
return
def delete(self,key):
hashkey,field = self._get_hashname(key)
        self.connection.hdel(hashkey,field)
return
def clear(self):<|fim▁hole|> pipeline.delete(k)
pipeline.execute()
return<|fim▁end|> | pipeline = self.connection.pipeline()
keys = self.connection.keys(self.name+"*")
for k in keys: |
<|file_name|>test_decorators.py<|end_file_name|><|fim▁begin|>import numpy as np
from numpy.testing import *
from numpy.testing.noseclasses import KnownFailureTest
import nose
def test_slow():
@dec.slow
def slow_func(x,y,z):
pass
assert_(slow_func.slow)
def test_setastest():
@dec.setastest()
def f_default(a):
pass
@dec.setastest(True)
def f_istest(a):
pass
@dec.setastest(False)
def f_isnottest(a):
pass
assert_(f_default.__test__)
assert_(f_istest.__test__)
assert_(not f_isnottest.__test__)
class DidntSkipException(Exception):
pass
def test_skip_functions_hardcoded():
@dec.skipif(True)
def f1(x):
raise DidntSkipException
try:
f1('a')
except DidntSkipException:
raise Exception('Failed to skip')
except nose.SkipTest:
pass
@dec.skipif(False)
def f2(x):
raise DidntSkipException
try:
f2('a')
except DidntSkipException:
pass
except nose.SkipTest:
raise Exception('Skipped when not expected to')
def test_skip_functions_callable():
def skip_tester():
return skip_flag == 'skip me!'
@dec.skipif(skip_tester)
def f1(x):
raise DidntSkipException
try:
skip_flag = 'skip me!'
f1('a')
except DidntSkipException:
raise Exception('Failed to skip')
except nose.SkipTest:
pass
@dec.skipif(skip_tester)
def f2(x):
raise DidntSkipException
try:
skip_flag = 'five is right out!'
f2('a')
except DidntSkipException:
pass
except nose.SkipTest:
raise Exception('Skipped when not expected to')
def test_skip_generators_hardcoded():
@dec.knownfailureif(True, "This test is known to fail")
def g1(x):
for i in xrange(x):
yield i
try:<|fim▁hole|> pass
else:
raise Exception('Failed to mark as known failure')
@dec.knownfailureif(False, "This test is NOT known to fail")
def g2(x):
for i in xrange(x):
yield i
raise DidntSkipException('FAIL')
try:
for j in g2(10):
pass
except KnownFailureTest:
        raise Exception('Marked incorrectly as known failure')
except DidntSkipException:
pass
def test_skip_generators_callable():
def skip_tester():
return skip_flag == 'skip me!'
@dec.knownfailureif(skip_tester, "This test is known to fail")
def g1(x):
for i in xrange(x):
yield i
try:
skip_flag = 'skip me!'
for j in g1(10):
pass
except KnownFailureTest:
pass
else:
raise Exception('Failed to mark as known failure')
@dec.knownfailureif(skip_tester, "This test is NOT known to fail")
def g2(x):
for i in xrange(x):
yield i
raise DidntSkipException('FAIL')
try:
skip_flag = 'do not skip'
for j in g2(10):
pass
except KnownFailureTest:
        raise Exception('Marked incorrectly as known failure')
except DidntSkipException:
pass
def test_deprecated():
@dec.deprecated(True)
def non_deprecated_func():
pass
@dec.deprecated()
def deprecated_func():
import warnings
warnings.warn("TEST: deprecated func", DeprecationWarning)
@dec.deprecated()
def deprecated_func2():
import warnings
warnings.warn("AHHHH")
raise ValueError
@dec.deprecated()
def deprecated_func3():
import warnings
warnings.warn("AHHHH")
# marked as deprecated, but does not raise DeprecationWarning
assert_raises(AssertionError, non_deprecated_func)
# should be silent
deprecated_func()
# fails if deprecated decorator just disables test. See #1453.
assert_raises(ValueError, deprecated_func2)
# first warnings is not a DeprecationWarning
assert_raises(AssertionError, deprecated_func3)
if __name__ == '__main__':
run_module_suite()<|fim▁end|> | for j in g1(10):
pass
except KnownFailureTest: |
<|file_name|>GridNearestNeighbor.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
***************************************************************************
GridNearestNeighbor.py
---------------------
Date : October 2013
Copyright : (C) 2013 by Alexander Bruy
Email : alexander dot bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Alexander Bruy'
__date__ = 'October 2013'
__copyright__ = '(C) 2013, Alexander Bruy'
import os
from qgis.PyQt.QtGui import QIcon
from qgis.core import (QgsRasterFileWriter,
QgsProcessing,
QgsProcessingParameterDefinition,
QgsProcessingParameterFeatureSource,
QgsProcessingParameterEnum,
QgsProcessingParameterField,
QgsProcessingParameterNumber,
QgsProcessingParameterString,
QgsProcessingParameterRasterDestination)
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.algs.gdal.GdalUtils import GdalUtils
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class GridNearestNeighbor(GdalAlgorithm):
INPUT = 'INPUT'
Z_FIELD = 'Z_FIELD'
RADIUS_1 = 'RADIUS_1'
RADIUS_2 = 'RADIUS_2'
ANGLE = 'ANGLE'
NODATA = 'NODATA'
OPTIONS = 'OPTIONS'
EXTRA = 'EXTRA'
DATA_TYPE = 'DATA_TYPE'
OUTPUT = 'OUTPUT'
TYPES = ['Byte', 'Int16', 'UInt16', 'UInt32', 'Int32', 'Float32', 'Float64', 'CInt16', 'CInt32', 'CFloat32', 'CFloat64']
def __init__(self):
super().__init__()
def initAlgorithm(self, config=None):
self.addParameter(QgsProcessingParameterFeatureSource(self.INPUT,
self.tr('Point layer'),
[QgsProcessing.TypeVectorPoint]))
z_field_param = QgsProcessingParameterField(self.Z_FIELD,
self.tr('Z value from field'),
None,
self.INPUT,<|fim▁hole|> optional=True)
z_field_param.setFlags(z_field_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
self.addParameter(z_field_param)
self.addParameter(QgsProcessingParameterNumber(self.RADIUS_1,
self.tr('The first radius of search ellipse'),
type=QgsProcessingParameterNumber.Double,
minValue=0.0,
defaultValue=0.0))
self.addParameter(QgsProcessingParameterNumber(self.RADIUS_2,
self.tr('The second radius of search ellipse'),
type=QgsProcessingParameterNumber.Double,
minValue=0.0,
defaultValue=0.0))
self.addParameter(QgsProcessingParameterNumber(self.ANGLE,
self.tr('Angle of search ellipse rotation in degrees (counter clockwise)'),
type=QgsProcessingParameterNumber.Double,
minValue=0.0,
maxValue=360.0,
defaultValue=0.0))
self.addParameter(QgsProcessingParameterNumber(self.NODATA,
self.tr('NODATA marker to fill empty points'),
type=QgsProcessingParameterNumber.Double,
defaultValue=0.0))
options_param = QgsProcessingParameterString(self.OPTIONS,
self.tr('Additional creation options'),
defaultValue='',
optional=True)
options_param.setFlags(options_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
options_param.setMetadata({
'widget_wrapper': {
'class': 'processing.algs.gdal.ui.RasterOptionsWidget.RasterOptionsWidgetWrapper'}})
self.addParameter(options_param)
extra_param = QgsProcessingParameterString(self.EXTRA,
self.tr('Additional command-line parameters'),
defaultValue=None,
optional=True)
extra_param.setFlags(extra_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
self.addParameter(extra_param)
dataType_param = QgsProcessingParameterEnum(self.DATA_TYPE,
self.tr('Output data type'),
self.TYPES,
allowMultiple=False,
defaultValue=5)
dataType_param.setFlags(dataType_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
self.addParameter(dataType_param)
self.addParameter(QgsProcessingParameterRasterDestination(self.OUTPUT,
self.tr('Interpolated (Nearest neighbor)')))
def name(self):
return 'gridnearestneighbor'
def displayName(self):
return self.tr('Grid (Nearest neighbor)')
def group(self):
return self.tr('Raster analysis')
def groupId(self):
return 'rasteranalysis'
def icon(self):
return QIcon(os.path.join(pluginPath, 'images', 'gdaltools', 'grid.png'))
def commandName(self):
return 'gdal_grid'
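    # Rough shape of the assembled invocation (paths and values illustrative,
    # not taken from a real run):
    #   gdal_grid -l points -zfield elev \
    #     -a nearest:radius1=100.0:radius2=100.0:angle=0.0:nodata=0.0 \
    #     -ot Float32 -of GTiff points.shp interpolated.tif
    # getConsoleCommands() below builds an argument list of this shape and
    # returns it as [command name, escaped argument string].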
def getConsoleCommands(self, parameters, context, feedback, executing=True):
ogrLayer, layerName = self.getOgrCompatibleSource(self.INPUT, parameters, context, feedback, executing)
arguments = ['-l']
arguments.append(layerName)
fieldName = self.parameterAsString(parameters, self.Z_FIELD, context)
if fieldName:
arguments.append('-zfield')
arguments.append(fieldName)
params = 'nearest'
params += ':radius1={}'.format(self.parameterAsDouble(parameters, self.RADIUS_1, context))
params += ':radius2={}'.format(self.parameterAsDouble(parameters, self.RADIUS_2, context))
params += ':angle={}'.format(self.parameterAsDouble(parameters, self.ANGLE, context))
params += ':nodata={}'.format(self.parameterAsDouble(parameters, self.NODATA, context))
arguments.append('-a')
arguments.append(params)
arguments.append('-ot')
arguments.append(self.TYPES[self.parameterAsEnum(parameters, self.DATA_TYPE, context)])
out = self.parameterAsOutputLayer(parameters, self.OUTPUT, context)
self.setOutputValue(self.OUTPUT, out)
arguments.append('-of')
arguments.append(QgsRasterFileWriter.driverForExtension(os.path.splitext(out)[1]))
options = self.parameterAsString(parameters, self.OPTIONS, context)
if options:
arguments.extend(GdalUtils.parseCreationOptions(options))
if self.EXTRA in parameters and parameters[self.EXTRA] not in (None, ''):
extra = self.parameterAsString(parameters, self.EXTRA, context)
arguments.append(extra)
arguments.append(ogrLayer)
arguments.append(out)
return [self.commandName(), GdalUtils.escapeAndJoin(arguments)]<|fim▁end|> | QgsProcessingParameterField.Numeric, |
<|file_name|>cmd_request.py<|end_file_name|><|fim▁begin|>import click
from json import dumps, load
import os
import sys
from tabulate import tabulate
@click.group('request')
@click.pass_context
def cli(ctx):
pass
@cli.command(name='unpause', help='Unpause a request')
@click.argument('request-id')
@click.pass_context
def request_unpause(ctx, request_id):
res = ctx.obj['client'].unpause_request(request_id)
if 'error' in res:
click.echo('error during unpause request {0}: {1}'.format(request_id, res['error']))
else:
click.echo('unpaused request {0}'.format(request_id))
@cli.command(name='run', help='Run a on-demand request now')
@click.argument('request-id')
@click.pass_context
def request_run(ctx, request_id):
res = ctx.obj['client'].run_request(request_id)
if 'error' in res:
click.echo('error during running request {0}: {1}'.format(request_id, res['error']))
else:
click.echo('running request {0}'.format(request_id))
@cli.command(name='pause', help='Pause a request')
@click.option('--kill-tasks', '-k', is_flag=True, default=False, help='Kill tasks when paused')
@click.argument('request-id')
@click.pass_context
def request_pause(ctx, request_id, kill_tasks):
res = ctx.obj['client'].pause_request(request_id, kill_tasks)
if 'error' in res:
click.echo('error during pause request {0}: {1}'.format(request_id, res['error']))
else:
click.echo('paused request {0} with killTasks={1}'.format(request_id, kill_tasks))
@cli.command(name='scale', help='Scale a request up/down')
@click.argument('request-id')
@click.argument('instances', type=click.INT)
@click.pass_context
def request_scale(ctx, request_id, instances):
res = ctx.obj['client'].scale_request(request_id, instances)
if 'error' in res:
click.echo('error during set instances for request {0}: {1}'.format(request_id, res['error']))
else:
click.echo('setting instances to {0} for request {1}'.format(instances, request_id))
@cli.command(name='bounce', help='Restart a request tasks')
@click.argument('request-id')
@click.pass_context
def request_bounce(ctx, request_id):
res = ctx.obj['client'].bounce_request(request_id)
if 'error' in res:
click.echo('error during set instances for request {0}: {1}'.format(request_id, res['error']))
else:
click.echo('bounced request {0}'.format(request_id))
@cli.command(name='get', help='Get the state of a request')
@click.argument('request-id')
@click.option('--json', '-j', is_flag=True, help='Enable json output')
@click.pass_context
def request_get(ctx, request_id, json):
if request_id:
res = ctx.obj['client'].get_request(request_id)
if json:
click.echo(dumps(res, indent=2))
else:
output_request(res)
@cli.command(name='delete', help='Remove a request')
@click.argument('request-id')
@click.pass_context
def request_delete(ctx, request_id):
res = ctx.obj['client'].delete_request(request_id)
if 'error' in res:
click.echo('error during delete request {0}: {1}'.format(request_id, res['error']))
else:
click.echo('deleted request {0}'.format(request_id))
@cli.command(name='list', help='Get a list of requests')
@click.option('--type', '-t', default='all', type=click.Choice(['pending', 'cleanup', 'paused', 'finished', 'cooldown', 'active', 'all']), help='Request type to get')
@click.option('--json', '-j', is_flag=True, help='Enable json output')
@click.pass_context
def request_list(ctx, type, json):
res = ctx.obj['client'].get_requests(type)
if json:
click.echo(dumps(res, indent=2))
else:
output_requests(res)
@cli.command(name='sync', help='Sync one or more requests/deploys')
@click.option('--file', '-f', type=click.File('r'), help='JSON request/deploy file to sync')
@click.option('--dir', '-d', type=click.Path(), help='Directory of JSON request/deploy files to sync')
@click.pass_context
def request_sync(ctx, file, dir):
had_error = False
client = ctx.obj['client']
if file:
file_request = None
try:
file_request = load(file)
except ValueError as e:
click.echo('json parse error: {0} in {1}'.format(e, file.name))
had_error = True
if file_request:
sync_request(client, file_request)
elif dir:
for filename in os.listdir(dir):
if filename.endswith('json'):
with open(os.path.join(dir, filename)) as file:
file_request = None
try:
file_request = load(file)
except ValueError as e:
click.echo('json parse error: {0} in {1}'.format(e, filename))
had_error = True
if file_request:
sync_request(client, file_request)
else:
click.echo('Either --file or --dir is required')
if had_error:
sys.exit(1)
def sync_request(client, request):
requested_instances = request['request'].get('instances', 1)
if requested_instances == 0:
singularity_request = client.pause_request(request['request']['id'], kill_tasks=True)
if 'error' in singularity_request:
click.echo('error during sync request: {0}'.format(singularity_request['error']))
else:
            click.echo('synchronized request {0}'.format(request['request']['id']))
else:
isPaused = False
singularity_request = client.get_request(request['request']['id'])
if 'error' in singularity_request and singularity_request['status_code'] == 404:
pass # request didn't exist before
else:
if singularity_request and singularity_request['state'] == 'PAUSED' and requested_instances > 0:
isPaused = True
if not request.get('deploy', {}).get('pauseBeforeDeploy', False):
client.unpause_request(request['request']['id'])
isPaused = False
singularity_request = client.upsert_request(request['request'])
if 'error' in singularity_request:
click.echo('error during sync request: {0}'.format(singularity_request['error']))
else:
            click.echo('synchronized request {0}'.format(request['request']['id']))
if 'deploy' in request:
file_deploy_id = request['deploy'].get('id', None)
# always set deploy.requestId to request.id from json file
request['deploy']['requestId'] = request['request']['id']
if 'activeDeploy' in singularity_request:
singularity_deploy_id = singularity_request['activeDeploy'].get('id', None)
if file_deploy_id != singularity_deploy_id:
sync_deploy(client, request['deploy'], isPaused)
else:
sync_deploy(client, request['deploy'], isPaused)
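# Shape of a JSON file consumed by sync_request above (field names inferred
# from the code, not from a documented schema):
#   {
#     "request": {"id": "my-service", "instances": 2},
#     "deploy":  {"id": "deploy-1", "pauseBeforeDeploy": false}
#   }
# instances == 0 pauses the request and kills its tasks; a deploy.id that
# differs from the active deploy triggers sync_deploy below.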
def sync_deploy(client, deploy, isPaused):
unpauseOnSuccessfulDeploy = False
if deploy.get('pauseBeforeDeploy', False):
del deploy['pauseBeforeDeploy']
unpauseOnSuccessfulDeploy = True
if not isPaused:
pause_deploy_and_wait(client, deploy)
res = client.create_deploy(deploy, unpauseOnSuccessfulDeploy)
if 'error' in res:
click.echo('error during sync deploy: {0}'.format(res['error']))
else:
        click.echo('synchronized deploy {0} for request {1}'.format(deploy['id'], deploy['requestId']))
return res
def pause_deploy_and_wait(client, deploy):
singularity_request = client.pause_request(deploy['requestId'], kill_tasks=True)
if 'error' in singularity_request:
click.echo('error during pause request: {0}'.format(singularity_request['error']))
else:
click.echo('pausing request {0} before deploy'.format(deploy['requestId']))
if singularity_request:
active_deploy_id = singularity_request.get('activeDeploy', {}).get('id', None)
if active_deploy_id:
no_tasks = False
while not no_tasks:
tasks = client.get_active_deploy_tasks(deploy['requestId'], active_deploy_id)
if len(tasks) == 0:
no_tasks = True
click.echo('killed all tasks for request {0}'.format(deploy['requestId']))
@cli.command(name='clean', help='Remove requests not in the specified directory')
@click.argument('dir', type=click.Path())
@click.pass_context
def request_clean(ctx, dir):
client = ctx.obj['client']
requests = client.get_requests('all')
for request in requests:
request_id = request['request']['id']
if not os.path.isfile(os.path.join(dir, '{0}.json'.format(request_id))):
res = client.delete_request(request_id)
if 'error' in res:
click.echo('error during delete request {0}: {1}'.format(request_id, res['error']))
else:
click.echo('deleted request {0}'.format(request_id))
def output_request(request):
request_output = [
['Id', request.get('request', {}).get('id', 'unknown')],
['State', request.get('state')],
['Type', request.get('request', {}).get('requestType', 'unknown')],
['Instances', request.get('request', {}).get('instances', 1)],
['Rack Sensitive', request.get('request', {}).get('rackSensitive', 'unknown')],
['Load Balanced', request.get('request', {}).get('loadBalanced', 'unknown')],
['Owners', request.get('request', {}).get('owners', 'unknown')],
['Deploy Id', request.get('activeDeploy', {}).get('id', 'unknown')],
]
click.echo(tabulate(request_output))
def output_requests(requests):
requests_output = [['Id', 'State', 'Type', 'Instances', 'Deploy Id']]
for request in requests:
requests_output.append([
request.get('request', {}).get('id', 'unknown'),<|fim▁hole|> request.get('state', 'unknown'),
request.get('request', {}).get('requestType', 'unknown'),
request.get('request', {}).get('instances', 1),
request.get('requestDeployState', {}).get('activeDeploy', {}).get('deployId', 'none')
])
click.echo(tabulate(requests_output, headers="firstrow"))<|fim▁end|> | |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup<|fim▁hole|>setup(
name='MKS',
version='0.1.0',
description="A unit system based on meter, kilo, and second",
author='Roderic Day',
author_email='[email protected]',
url='www.permanentsignal.com',
license='MIT',
)<|fim▁end|> | |
<|file_name|>SettingsExtensionDocTest.java<|end_file_name|><|fim▁begin|>/**
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.extension;
//#imports
import akka.actor.Extension;
import akka.actor.AbstractExtensionId;
import akka.actor.ExtensionIdProvider;
import akka.actor.ActorSystem;
import akka.actor.ExtendedActorSystem;
import scala.concurrent.duration.Duration;
import com.typesafe.config.Config;
import java.util.concurrent.TimeUnit;
//#imports
import akka.actor.UntypedActor;
import org.junit.Test;
public class SettingsExtensionDocTest {
static
//#extension
public class SettingsImpl implements Extension {
public final String DB_URI;
public final Duration CIRCUIT_BREAKER_TIMEOUT;
public SettingsImpl(Config config) {
DB_URI = config.getString("myapp.db.uri");
CIRCUIT_BREAKER_TIMEOUT =
Duration.create(config.getDuration("myapp.circuit-breaker.timeout",
TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS);
}
}
//#extension
static
//#extensionid
public class Settings extends AbstractExtensionId<SettingsImpl>
implements ExtensionIdProvider {
public final static Settings SettingsProvider = new Settings();
<|fim▁hole|> private Settings() {}
public Settings lookup() {
return Settings.SettingsProvider;
}
public SettingsImpl createExtension(ExtendedActorSystem system) {
return new SettingsImpl(system.settings().config());
}
}
//#extensionid
static
//#extension-usage-actor
public class MyActor extends UntypedActor {
// typically you would use static import of the Settings.SettingsProvider field
final SettingsImpl settings =
Settings.SettingsProvider.get(getContext().system());
Connection connection =
connect(settings.DB_URI, settings.CIRCUIT_BREAKER_TIMEOUT);
//#extension-usage-actor
public Connection connect(String dbUri, Duration circuitBreakerTimeout) {
return new Connection();
}
public void onReceive(Object msg) {
}
//#extension-usage-actor
}
//#extension-usage-actor
public static class Connection {
}
@Test
public void demonstrateHowToCreateAndUseAnAkkaExtensionInJava() {
final ActorSystem system = null;
try {
//#extension-usage
// typically you would use static import of the Settings.SettingsProvider field
String dbUri = Settings.SettingsProvider.get(system).DB_URI;
//#extension-usage
} catch (Exception e) {
//do nothing
}
}
}<|fim▁end|> | |
<|file_name|>__test.py<|end_file_name|><|fim▁begin|>import unittest
import doctest
import spiralx.fileproc
# -----------------------------------------------------------------------------
def load_tests(loader, tests, ignore):
fileprocTestSuite = unittest.TestSuite()
fileprocTestSuite.addTests(doctest.DocTestSuite(spiralx.fileproc))
return fileprocTestSuite
<|fim▁hole|><|fim▁end|> | #suite = get_tests()
#suite.run() |
<|file_name|>SpyUtils.js<|end_file_name|><|fim▁begin|>import assert from './assert'
import { isFunction } from './TestUtils'
function noop() {}
let spies = []
export function createSpy(fn, restore=noop) {
if (fn == null)
fn = noop
assert(
isFunction(fn),
'createSpy needs a function'
)
<|fim▁hole|>
const spy = function () {
spy.calls.push({
context: this,
arguments: Array.prototype.slice.call(arguments, 0)
})
if (targetFn)
return targetFn.apply(this, arguments)
if (thrownValue)
throw thrownValue
return returnValue
}
spy.calls = []
spy.andCall = function (fn) {
targetFn = fn
return spy
}
spy.andCallThrough = function () {
return spy.andCall(fn)
}
spy.andThrow = function (object) {
thrownValue = object
return spy
}
spy.andReturn = function (value) {
returnValue = value
return spy
}
spy.getLastCall = function () {
return spy.calls[spy.calls.length - 1]
}
spy.reset = function () {
spy.calls = []
}
spy.restore = spy.destroy = restore
spy.__isSpy = true
spies.push(spy)
return spy
}
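// Hedged usage sketch of the spy API defined above:
//   const spy = createSpy().andReturn(42)
//   spy.call({ some: 'context' }, 'first')  // records { context, arguments }
//   spy.calls.length                        // => 1
//   spy.getLastCall().arguments             // => [ 'first' ]
//   restoreSpies()                          // undoes every spyOn() patch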
export function spyOn(object, methodName) {
const original = object[methodName]
if (!isSpy(original)) {
assert(
isFunction(original),
'Cannot spyOn the %s property; it is not a function',
methodName
)
object[methodName] = createSpy(original, function () {
object[methodName] = original
})
}
return object[methodName]
}
export function isSpy(object) {
return object && object.__isSpy === true
}
export function restoreSpies() {
for (let i = spies.length - 1; i >= 0; i--)
spies[i].restore()
spies = []
}<|fim▁end|> | let targetFn, thrownValue, returnValue |
<|file_name|>bitcoin_af_ZA.ts<|end_file_name|><|fim▁begin|><TS language="af_ZA" version="2.1">
<context>
<name>AddressBookPage</name>
<message>
<source>Create a new address</source>
<translation>Skep 'n nuwe adres</translation>
</message>
<message>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Maak 'n kopie van die huidige adres na die stelsel klipbord</translation>
</message>
<message>
<source>&Delete</source>
<translation>&Verwyder</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<source>Passphrase Dialog</source>
<translation>Wagfrase Dialoog</translation>
</message>
<message>
<source>Enter passphrase</source>
<translation>Tik wagfrase in</translation>
</message>
<message>
<source>New passphrase</source>
<translation>Nuwe wagfrase</translation>
</message>
<message>
<source>Repeat new passphrase</source>
<translation>Herhaal nuwe wagfrase</translation>
</message>
</context>
<context>
<name>BanTableModel</name>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<source>Synchronizing with network...</source>
<translation>Sinchroniseer met die netwerk ...</translation>
</message>
<message>
<source>&Overview</source>
<translation>&Oorsig</translation>
</message>
<message>
<source>Show general overview of wallet</source>
<translation>Wys algemene oorsig van die beursie</translation>
</message>
<message>
<source>&Transactions</source>
<translation>&Transaksies</translation>
</message>
<message>
<source>Browse transaction history</source>
<translation>Besoek transaksie geskiedenis</translation>
</message>
<message>
<source>E&xit</source>
<translation>S&luit af</translation>
</message>
<message>
<source>Quit application</source>
<translation>Sluit af</translation>
</message>
<message>
<source>Show information about Qt</source>
<translation>Wys inligting oor Qt</translation>
</message>
<message>
<source>&Options...</source>
<translation>&Opsies</translation>
</message>
<message>
<source>e-Gulden</source>
<translation>e-Gulden</translation>
</message>
<message>
<source>Wallet</source>
<translation>Beursie</translation>
</message>
<message>
<source>&File</source>
<translation>&Lêer</translation>
</message>
<message>
<source>&Settings</source>
<translation>&Instellings</translation>
</message>
<message>
<source>&Help</source>
<translation>&Hulp</translation>
</message>
<message>
<source>Tabs toolbar</source>
<translation>Blad nutsbalk</translation>
</message>
<message>
<source>%1 behind</source>
<translation>%1 agter</translation>
</message>
<message>
<source>Last received block was generated %1 ago.</source>
<translation>Ontvangs van laaste blok is %1 terug.</translation>
</message>
<message>
<source>Error</source>
<translation>Fout</translation>
</message>
<message>
<source>Information</source>
<translation>Informasie</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<source>Amount:</source>
<translation>Bedrag:</translation>
</message>
<message>
<source>Amount</source>
<translation>Bedrag</translation>
</message>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<source>&Label</source>
<translation>&Etiket</translation>
</message>
<message>
<source>&Address</source>
<translation>&Adres</translation>
</message>
</context>
<context>
<name>FreespaceChecker</name>
</context>
<context>
<name>HelpMessageDialog</name>
<message>
<source>Usage:</source>
<translation>Gebruik:</translation>
</message>
</context>
<context>
<name>Intro</name>
<message>
<source>Error</source>
<translation>Fout</translation>
</message>
</context>
<context>
<name>OpenURIDialog</name>
</context>
<context>
<name>OptionsDialog</name>
<message>
<source>Options</source>
<translation>Opsies</translation>
</message>
<message>
<source>W&allet</source>
<translation>&Beursie</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<source>Form</source>
<translation>Vorm</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
</context>
<context>
<name>PeerTableModel</name>
</context>
<context>
<name>QObject</name>
<message>
<source>Amount</source>
<translation>Bedrag</translation>
</message>
</context>
<context>
<name>QRImageWidget</name>
</context>
<context>
<name>RPCConsole</name>
<message>
<source>&Information</source>
<translation>Informasie</translation>
</message>
</context>
<context>
<name>ReceiveCoinsDialog</name>
<message>
<source>&Amount:</source>
<translation>&Bedrag:</translation>
</message>
<message>
<source>&Message:</source>
<translation>&Boodskap:</translation>
</message>
</context>
<context>
<name>ReceiveRequestDialog</name>
</context>
<context>
<name>RecentRequestsTableModel</name>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<source>Send Coins</source>
<translation>Stuur Munstukke</translation>
</message>
<message>
<source>Insufficient funds!</source>
<translation>Onvoldoende fondse</translation>
</message>
<message>
<source>Amount:</source>
<translation>Bedrag:</translation>
</message>
<message>
<source>Transaction Fee:</source>
<translation>Transaksie fooi:</translation>
</message>
<message>
<source>Send to multiple recipients at once</source>
<translation>Stuur aan vele ontvangers op eens</translation>
</message>
<message>
<source>Balance:</source>
<translation>Balans:</translation>
</message>
<message>
<source>S&end</source>
<translation>S&tuur</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<source>A&mount:</source>
<translation>&Bedrag:</translation>
</message>
<message>
<source>Message:</source>
<translation>Boodskap:</translation>
</message>
</context>
<context>
<name>SendConfirmationDialog</name>
</context>
<context>
<name>ShutdownWindow</name>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<source>&Sign Message</source>
<translation>&Teken boodskap</translation>
</message>
<message>
<source>Signature</source>
<translation>Handtekening</translation>
</message>
<message>
<source>Sign &Message</source>
<translation>Teken &Boodskap</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
</context>
<context>
<name>TrafficGraphWidget</name>
</context>
<context>
<name>TransactionDesc</name>
</context>
<context>
<name>TransactionDescDialog</name>
</context>
<context>
<name>TransactionTableModel</name>
</context>
<context>
<name>TransactionView</name>
</context>
<context><|fim▁hole|> </context>
<context>
<name>WalletFrame</name>
</context>
<context>
<name>WalletModel</name>
</context>
<context>
<name>WalletView</name>
</context>
<context>
<name>bitcoin-core</name>
<message>
<source>Options:</source>
<translation>Opsies:</translation>
</message>
<message>
<source>Error: Disk space is low!</source>
<translation>Fout: Hardeskyf spasie is baie laag!</translation>
</message>
<message>
<source>Information</source>
<translation>Informasie</translation>
</message>
<message>
<source>Loading addresses...</source>
<translation>Laai adresse...</translation>
</message>
<message>
<source>Insufficient funds</source>
<translation>Onvoldoende fondse</translation>
</message>
<message>
<source>Loading block index...</source>
<translation>Laai blok indeks...</translation>
</message>
<message>
<source>Loading wallet...</source>
<translation>Laai beursie...</translation>
</message>
<message>
<source>Done loading</source>
<translation>Klaar gelaai</translation>
</message>
<message>
<source>Error</source>
<translation>Fout</translation>
</message>
</context>
</TS><|fim▁end|> | <name>UnitDisplayStatusBarControl</name> |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import sys
import os
import re
def setup_python3():
# Taken from "distribute" setup.py
from distutils.filelist import FileList
from distutils import dir_util, file_util, util, log
from os.path import join, exists
tmp_src = join("build", "src")
# Not covered by "setup.py clean --all", so explicit deletion required.
if exists(tmp_src):
dir_util.remove_tree(tmp_src)
log.set_verbosity(1)
fl = FileList()
for line in open("MANIFEST.in"):
if not line.strip():
continue
fl.process_template_line(line)
dir_util.create_tree(tmp_src, fl.files)
outfiles_2to3 = []
for f in fl.files:
outf, copied = file_util.copy_file(f, join(tmp_src, f), update=1)
if copied and outf.endswith(".py"):
outfiles_2to3.append(outf)
util.run_2to3(outfiles_2to3)
# arrange setup to use the copy
sys.path.insert(0, tmp_src)
return tmp_src
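# setup_python3() leaves the 2to3-converted copy at the front of sys.path, so
# on Python 3 the rest of this script transparently builds the converted sources.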
kwargs = {}
if sys.version_info[0] >= 3:
from setuptools import setup
kwargs['use_2to3'] = True
kwargs['install_requires'] = ['isodate', 'pyparsing']
kwargs['tests_require'] = ['html5lib']
kwargs['requires'] = [
'isodate', 'pyparsing',
'SPARQLWrapper']
kwargs['src_root'] = setup_python3()
assert setup
else:
try:
from setuptools import setup
assert setup
kwargs['test_suite'] = "nose.collector"
kwargs['install_requires'] = [
'isodate',
'pyparsing', 'SPARQLWrapper']
        if sys.version_info[1] < 7:  # Python 2.6
            kwargs['install_requires'].append('ordereddict')
        if sys.version_info[1] < 6:  # Python 2.5
            kwargs['install_requires'].append('pyparsing<=1.5.7')
kwargs['install_requires'].append('simplejson')
kwargs['install_requires'].append('html5lib==0.95')
else:
kwargs['install_requires'].append('html5lib')
except ImportError:
from distutils.core import setup
# Find version. We have to do this because we can't import it in Python 3 until
# it has been automatically converted by 2to3 during the setup process.
def find_version(filename):
_version_re = re.compile(r'__version__ = "(.*)"')
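    # Matches a line like: __version__ = "4.0.1"  (illustrative version string)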
for line in open(filename):
version_match = _version_re.match(line)
if version_match:
return version_match.group(1)
version = find_version('rdflib/__init__.py')
packages = ['rdflib',
'rdflib/extras',
'rdflib/plugins',
'rdflib/plugins/parsers',
'rdflib/plugins/parsers/pyRdfa',
'rdflib/plugins/parsers/pyRdfa/transform',
'rdflib/plugins/parsers/pyRdfa/extras',
'rdflib/plugins/parsers/pyRdfa/host',
'rdflib/plugins/parsers/pyRdfa/rdfs',
'rdflib/plugins/parsers/pyMicrodata',
'rdflib/plugins/serializers',
'rdflib/plugins/sparql',
'rdflib/plugins/sparql/results',
'rdflib/plugins/stores',
'rdflib/tools'
]
if os.environ.get('READTHEDOCS', None):
# if building docs for RTD
# install examples, to get docstrings
packages.append("examples")
setup(
name='rdflib',
version=version,
description="RDFLib is a Python library for working with RDF, a " + \
"simple yet powerful language for representing information.",
author="Daniel 'eikeon' Krech",
author_email="[email protected]",
maintainer="RDFLib Team",
maintainer_email="[email protected]",
url="https://github.com/RDFLib/rdflib",
license="https://raw.github.com/RDFLib/rdflib/master/LICENSE",
platforms=["any"],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"License :: OSI Approved :: BSD License",
"Topic :: Software Development :: Libraries :: Python Modules",
"Operating System :: OS Independent",
"Natural Language :: English",
],
long_description="""\
RDFLib is a Python library for working with
RDF, a simple yet powerful language for representing information.
The library contains parsers and serializers for RDF/XML, N3,
NTriples, Turtle, TriX, RDFa and Microdata. The library presents
a Graph interface which can be backed by any one of a number of
Store implementations. The core rdflib includes store
implementations for in memory storage, persistent storage on top
of the Berkeley DB, and a wrapper for remote SPARQL endpoints.
A SPARQL 1.1 engine is also included.
If you have recently reported a bug marked as fixed, or have a craving for
the very latest, you may want the development version instead:
easy_install https://github.com/RDFLib/rdflib/tarball/master
Read the docs at:
http://rdflib.readthedocs.org
""",
packages = packages,
entry_points = {
'console_scripts': [
'rdfpipe = rdflib.tools.rdfpipe:main',
'csv2rdf = rdflib.tools.csv2rdf:main',
'rdf2dot = rdflib.tools.rdf2dot:main',
'rdfs2dot = rdflib.tools.rdfs2dot:main',
'rdfgraphisomorphism = rdflib.tools.graphisomorphism:main',
],
},
<|fim▁hole|><|fim▁end|> | **kwargs
) |
<|file_name|>runtests.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import logging
import os
import shutil
import subprocess
import sys
import tempfile
import warnings
from argparse import ArgumentParser
import django
from django.apps import apps
from django.conf import settings
from django.db import connection
from django.test import TestCase, TransactionTestCase
from django.test.utils import get_runner
from django.utils import six
from django.utils._os import upath
from django.utils.deprecation import (
RemovedInDjango20Warning, RemovedInDjango110Warning,
)
from django.utils.log import DEFAULT_LOGGING
warnings.simplefilter("error", RemovedInDjango110Warning)
warnings.simplefilter("error", RemovedInDjango20Warning)
RUNTESTS_DIR = os.path.abspath(os.path.dirname(upath(__file__)))
TEMPLATE_DIR = os.path.join(RUNTESTS_DIR, 'templates')
# Create a specific subdirectory for the duration of the test suite.
TMPDIR = tempfile.mkdtemp(prefix='django_')
# Set the TMPDIR environment variable in addition to tempfile.tempdir
# so that children processes inherit it.
tempfile.tempdir = os.environ['TMPDIR'] = TMPDIR
SUBDIRS_TO_SKIP = [
'data',
'import_error_package',
'test_discovery_sample',
'test_discovery_sample2',
'test_runner_deprecation_app',
]
ALWAYS_INSTALLED_APPS = [
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sites',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.admin.apps.SimpleAdminConfig',
'django.contrib.staticfiles',
]
ALWAYS_MIDDLEWARE_CLASSES = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
]
# Need to add the associated contrib app to INSTALLED_APPS in some cases to
# avoid "RuntimeError: Model class X doesn't declare an explicit app_label
# and either isn't in an application in INSTALLED_APPS or else was imported
# before its application was loaded."
CONTRIB_TESTS_TO_APPS = {
'flatpages_tests': 'django.contrib.flatpages',
'redirects_tests': 'django.contrib.redirects',
}
def get_test_modules():
modules = []
discovery_paths = [
(None, RUNTESTS_DIR),
# GIS tests are in nested apps
('gis_tests', os.path.join(RUNTESTS_DIR, 'gis_tests')),
]
for modpath, dirpath in discovery_paths:
for f in os.listdir(dirpath):
if ('.' in f or
os.path.basename(f) in SUBDIRS_TO_SKIP or
os.path.isfile(f) or
not os.path.exists(os.path.join(dirpath, f, '__init__.py'))):
continue
modules.append((modpath, f))
return modules
def get_installed():
return [app_config.name for app_config in apps.get_app_configs()]
def setup(verbosity, test_labels):
if verbosity >= 1:
print("Testing against Django installed in '%s'" % os.path.dirname(django.__file__))
# Force declaring available_apps in TransactionTestCase for faster tests.
def no_available_apps(self):
raise Exception("Please define available_apps in TransactionTestCase "
"and its subclasses.")
TransactionTestCase.available_apps = property(no_available_apps)
TestCase.available_apps = None
state = {
'INSTALLED_APPS': settings.INSTALLED_APPS,
'ROOT_URLCONF': getattr(settings, "ROOT_URLCONF", ""),
# Remove the following line in Django 1.10.
'TEMPLATE_DIRS': settings.TEMPLATE_DIRS,
'TEMPLATES': settings.TEMPLATES,
'LANGUAGE_CODE': settings.LANGUAGE_CODE,
'STATIC_URL': settings.STATIC_URL,
'STATIC_ROOT': settings.STATIC_ROOT,
'MIDDLEWARE_CLASSES': settings.MIDDLEWARE_CLASSES,
}
# Redirect some settings for the duration of these tests.
settings.INSTALLED_APPS = ALWAYS_INSTALLED_APPS
settings.ROOT_URLCONF = 'urls'
settings.STATIC_URL = '/static/'
settings.STATIC_ROOT = os.path.join(TMPDIR, 'static')
# Remove the following line in Django 1.10.
settings.TEMPLATE_DIRS = [TEMPLATE_DIR]
settings.TEMPLATES = [{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [TEMPLATE_DIR],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
}]
settings.LANGUAGE_CODE = 'en'
settings.SITE_ID = 1
settings.MIDDLEWARE_CLASSES = ALWAYS_MIDDLEWARE_CLASSES
settings.MIGRATION_MODULES = {
# these 'tests.migrations' modules don't actually exist, but this lets
# us skip creating migrations for the test models.
'auth': 'django.contrib.auth.tests.migrations',
'contenttypes': 'contenttypes_tests.migrations',
}
log_config = DEFAULT_LOGGING
# Filter out non-error logging so we don't have to capture it in lots of
# tests.
log_config['loggers']['django']['level'] = 'ERROR'
settings.LOGGING = log_config
if verbosity > 0:
# Ensure any warnings captured to logging are piped through a verbose
# logging handler. If any -W options were passed explicitly on command
# line, warnings are not captured, and this has no effect.
logger = logging.getLogger('py.warnings')
handler = logging.StreamHandler()
logger.addHandler(handler)
warnings.filterwarnings(
'ignore',
'django.contrib.webdesign will be removed in Django 1.10.',
RemovedInDjango110Warning
)
warnings.filterwarnings(
'ignore',
'The GeoManager class is deprecated.',
RemovedInDjango20Warning
)
# Load all the ALWAYS_INSTALLED_APPS.
django.setup()
# Load all the test model apps.
test_modules = get_test_modules()
# Reduce given test labels to just the app module path
test_labels_set = set()
for label in test_labels:
bits = label.split('.')[:1]
test_labels_set.add('.'.join(bits))
installed_app_names = set(get_installed())
for modpath, module_name in test_modules:
if modpath:
module_label = '.'.join([modpath, module_name])
else:
module_label = module_name
# if the module (or an ancestor) was named on the command line, or
# no modules were named (i.e., run all), import
# this module and add it to INSTALLED_APPS.
if not test_labels:
module_found_in_labels = True
else:
module_found_in_labels = any(
# exact match or ancestor match
module_label == label or module_label.startswith(label + '.')
for label in test_labels_set)
if module_name in CONTRIB_TESTS_TO_APPS and module_found_in_labels:
settings.INSTALLED_APPS.append(CONTRIB_TESTS_TO_APPS[module_name])
if module_found_in_labels and module_label not in installed_app_names:
if verbosity >= 2:
print("Importing application %s" % module_name)
settings.INSTALLED_APPS.append(module_label)
# Add contrib.gis to INSTALLED_APPS if needed (rather than requiring
    # @override_settings(INSTALLED_APPS=...) on all test cases).
gis = 'django.contrib.gis'
if connection.features.gis_enabled and gis not in settings.INSTALLED_APPS:
if verbosity >= 2:
print("Importing application %s" % gis)
settings.INSTALLED_APPS.append(gis)
apps.set_installed_apps(settings.INSTALLED_APPS)
return state
def teardown(state):
try:
# Removing the temporary TMPDIR. Ensure we pass in unicode
# so that it will successfully remove temp trees containing
# non-ASCII filenames on Windows. (We're assuming the temp dir
# name itself does not contain non-ASCII characters.)
shutil.rmtree(six.text_type(TMPDIR))
except OSError:
print('Failed to remove temp directory: %s' % TMPDIR)
# Restore the old settings.
for key, value in state.items():
setattr(settings, key, value)
def django_tests(verbosity, interactive, failfast, keepdb, reverse, test_labels, debug_sql):
state = setup(verbosity, test_labels)
extra_tests = []
# Run the test suite, including the extra validation tests.
if not hasattr(settings, 'TEST_RUNNER'):
settings.TEST_RUNNER = 'django.test.runner.DiscoverRunner'
TestRunner = get_runner(settings)
test_runner = TestRunner(
verbosity=verbosity,
interactive=interactive,
failfast=failfast,
keepdb=keepdb,
reverse=reverse,
debug_sql=debug_sql,
)
failures = test_runner.run_tests(
test_labels or get_installed(),
extra_tests=extra_tests,
)
teardown(state)
return failures
def bisect_tests(bisection_label, options, test_labels):
state = setup(options.verbosity, test_labels)
test_labels = test_labels or get_installed()
print('***** Bisecting test suite: %s' % ' '.join(test_labels))
# Make sure the bisection point isn't in the test list
# Also remove tests that need to be run in specific combinations
for label in [bisection_label, 'model_inheritance_same_model_name']:
try:
test_labels.remove(label)
except ValueError:
pass
subprocess_args = [
sys.executable, upath(__file__), '--settings=%s' % options.settings]
if options.failfast:
subprocess_args.append('--failfast')
if options.verbosity:
subprocess_args.append('--verbosity=%s' % options.verbosity)
if not options.interactive:
subprocess_args.append('--noinput')
iteration = 1
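    # Binary-search for the interfering test: run each half of the remaining
    # labels together with the suspect label, and keep bisecting whichever half
    # still fails until a single test label remains.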
while len(test_labels) > 1:
midpoint = len(test_labels) // 2
test_labels_a = test_labels[:midpoint] + [bisection_label]
test_labels_b = test_labels[midpoint:] + [bisection_label]
print('***** Pass %da: Running the first half of the test suite' % iteration)
print('***** Test labels: %s' % ' '.join(test_labels_a))
failures_a = subprocess.call(subprocess_args + test_labels_a)
print('***** Pass %db: Running the second half of the test suite' % iteration)
print('***** Test labels: %s' % ' '.join(test_labels_b))
print('')
failures_b = subprocess.call(subprocess_args + test_labels_b)
if failures_a and not failures_b:
print("***** Problem found in first half. Bisecting again...")
iteration = iteration + 1
test_labels = test_labels_a[:-1]
elif failures_b and not failures_a:
print("***** Problem found in second half. Bisecting again...")
iteration = iteration + 1
test_labels = test_labels_b[:-1]
elif failures_a and failures_b:
print("***** Multiple sources of failure found")
break
else:
print("***** No source of failure found... try pair execution (--pair)")
break
if len(test_labels) == 1:
print("***** Source of error: %s" % test_labels[0])
teardown(state)
def paired_tests(paired_test, options, test_labels):
state = setup(options.verbosity, test_labels)
test_labels = test_labels or get_installed()
print('***** Trying paired execution')
# Make sure the constant member of the pair isn't in the test list
# Also remove tests that need to be run in specific combinations
for label in [paired_test, 'model_inheritance_same_model_name']:
try:
test_labels.remove(label)
except ValueError:
pass
subprocess_args = [
sys.executable, upath(__file__), '--settings=%s' % options.settings]
if options.failfast:
subprocess_args.append('--failfast')
if options.verbosity:
subprocess_args.append('--verbosity=%s' % options.verbosity)
if not options.interactive:
subprocess_args.append('--noinput')
for i, label in enumerate(test_labels):
print('***** %d of %d: Check test pairing with %s' % (
i + 1, len(test_labels), label))
failures = subprocess.call(subprocess_args + [label, paired_test])
if failures:
print('***** Found problem pair with %s' % label)
return
print('***** No problem pair found')
teardown(state)
if __name__ == "__main__":
parser = ArgumentParser(description="Run the Django test suite.")
parser.add_argument('modules', nargs='*', metavar='module',
help='Optional path(s) to test modules; e.g. "i18n" or '
'"i18n.tests.TranslationTests.test_lazy_objects".')
parser.add_argument(
'-v', '--verbosity', default=1, type=int, choices=[0, 1, 2, 3],
help='Verbosity level; 0=minimal output, 1=normal output, 2=all output')
parser.add_argument(
'--noinput', action='store_false', dest='interactive', default=True,
help='Tells Django to NOT prompt the user for input of any kind.')
parser.add_argument(
'--failfast', action='store_true', dest='failfast', default=False,
help='Tells Django to stop running the test suite after first failed '
'test.')
parser.add_argument(
'-k', '--keepdb', action='store_true', dest='keepdb', default=False,
help='Tells Django to preserve the test database between runs.')
parser.add_argument(
'--settings',
help='Python path to settings module, e.g. "myproject.settings". If '
'this isn\'t provided, either the DJANGO_SETTINGS_MODULE '
'environment variable or "test_sqlite" will be used.')
parser.add_argument('--bisect',
help='Bisect the test suite to discover a test that causes a test '
'failure when combined with the named test.')
parser.add_argument('--pair',
help='Run the test suite in pairs with the named test to find problem '
'pairs.')
parser.add_argument('--reverse', action='store_true', default=False,
help='Sort test suites and test cases in opposite order to debug '
'test side effects not apparent with normal execution lineup.')
parser.add_argument('--liveserver',
help='Overrides the default address where the live server (used with '
'LiveServerTestCase) is expected to run from. The default value '
'is localhost:8081.')
parser.add_argument(
'--selenium', action='store_true', dest='selenium', default=False,
help='Run the Selenium tests as well (if Selenium is installed)')
parser.add_argument(
'--debug-sql', action='store_true', dest='debug_sql', default=False,
help='Turn on the SQL query logger within tests')
options = parser.parse_args()
# mock is a required dependency
try:
from django.test import mock # NOQA
except ImportError:
print(
"Please install test dependencies first: \n"
"$ pip install -r requirements/py%s.txt" % sys.version_info.major
)
sys.exit(1)
# Allow including a trailing slash on app_labels for tab completion convenience
options.modules = [os.path.normpath(labels) for labels in options.modules]
if options.settings:
os.environ['DJANGO_SETTINGS_MODULE'] = options.settings
else:<|fim▁hole|> if "DJANGO_SETTINGS_MODULE" not in os.environ:
os.environ['DJANGO_SETTINGS_MODULE'] = 'test_sqlite'
options.settings = os.environ['DJANGO_SETTINGS_MODULE']
if options.liveserver is not None:
os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = options.liveserver
if options.selenium:
os.environ['DJANGO_SELENIUM_TESTS'] = '1'
if options.bisect:
bisect_tests(options.bisect, options, options.modules)
elif options.pair:
paired_tests(options.pair, options, options.modules)
else:
failures = django_tests(options.verbosity, options.interactive,
options.failfast, options.keepdb,
options.reverse, options.modules,
options.debug_sql)
if failures:
sys.exit(bool(failures))<|fim▁end|> | |
<|file_name|>ServiceTaskSpringDelegationTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2010-2020 Alfresco Software, Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.spring.test.servicetask;
import static org.assertj.core.api.Assertions.assertThat;
import java.util.List;
import org.activiti.engine.impl.test.JobTestHelper;
import org.activiti.engine.runtime.ProcessInstance;
import org.activiti.engine.test.Deployment;
import org.activiti.spring.impl.test.SpringActivitiTestCase;
import org.springframework.test.context.ContextConfiguration;
/**
*/
@ContextConfiguration("classpath:org/activiti/spring/test/servicetask/servicetaskSpringTest-context.xml")
public class ServiceTaskSpringDelegationTest extends SpringActivitiTestCase {
private void cleanUp() {
List<org.activiti.engine.repository.Deployment> deployments = repositoryService.createDeploymentQuery().list();
for (org.activiti.engine.repository.Deployment deployment : deployments) {
repositoryService.deleteDeployment(deployment.getId(), true);
}
}
@Override
public void tearDown() {
cleanUp();
}
@Deployment
public void testDelegateExpression() {
ProcessInstance procInst = runtimeService.startProcessInstanceByKey("delegateExpressionToSpringBean");
assertThat(runtimeService.getVariable(procInst.getId(), "myVar")).isEqualTo("Activiti BPMN 2.0 process engine");
assertThat(runtimeService.getVariable(procInst.getId(), "fieldInjection")).isEqualTo("fieldInjectionWorking");
}
@Deployment
public void testAsyncDelegateExpression() throws Exception {
ProcessInstance procInst = runtimeService.startProcessInstanceByKey("delegateExpressionToSpringBean");<|fim▁hole|> assertThat(runtimeService.getVariable(procInst.getId(), "fieldInjection")).isEqualTo("fieldInjectionWorking");
}
@Deployment
public void testMethodExpressionOnSpringBean() {
ProcessInstance procInst = runtimeService.startProcessInstanceByKey("methodExpressionOnSpringBean");
assertThat(runtimeService.getVariable(procInst.getId(), "myVar")).isEqualTo("ACTIVITI BPMN 2.0 PROCESS ENGINE");
}
@Deployment
public void testAsyncMethodExpressionOnSpringBean() {
ProcessInstance procInst = runtimeService.startProcessInstanceByKey("methodExpressionOnSpringBean");
assertThat(JobTestHelper.areJobsAvailable(managementService)).isTrue();
waitForJobExecutorToProcessAllJobs(5000, 500);
assertThat(runtimeService.getVariable(procInst.getId(), "myVar")).isEqualTo("ACTIVITI BPMN 2.0 PROCESS ENGINE");
}
@Deployment
public void testExecutionAndTaskListenerDelegationExpression() {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("executionAndTaskListenerDelegation");
assertThat(runtimeService.getVariable(processInstance.getId(), "executionListenerVar")).isEqualTo("working");
assertThat(runtimeService.getVariable(processInstance.getId(), "taskListenerVar")).isEqualTo("working");
assertThat(runtimeService.getVariable(processInstance.getId(), "executionListenerField")).isEqualTo("executionListenerInjection");
assertThat(runtimeService.getVariable(processInstance.getId(), "taskListenerField")).isEqualTo("taskListenerInjection");
}
}<|fim▁end|> | assertThat(JobTestHelper.areJobsAvailable(managementService)).isTrue();
waitForJobExecutorToProcessAllJobs(5000, 500);
Thread.sleep(1000);
assertThat(runtimeService.getVariable(procInst.getId(), "myVar")).isEqualTo("Activiti BPMN 2.0 process engine"); |
<|file_name|>120_simplify_transaction_log.py<|end_file_name|><|fim▁begin|>"""simplify transaction log
Revision ID: 8c2406df6f8
Revises: 58732bb5d14b
Create Date: 2014-08-08 01:57:17.144405
"""
# revision identifiers, used by Alembic.
revision = '8c2406df6f8'<|fim▁hole|>
def upgrade():
conn = op.get_bind()
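    # A single raw ALTER TABLE lets MySQL rewrite the (potentially large)
    # transaction table once, instead of once per column change.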
conn.execute(text('''
ALTER TABLE transaction
CHANGE public_snapshot snapshot LONGTEXT,
CHANGE table_name object_type VARCHAR(20),
DROP COLUMN private_snapshot,
DROP COLUMN delta,
ADD INDEX `ix_transaction_object_public_id` (`object_public_id`)
'''))
def downgrade():
raise Exception()<|fim▁end|> | down_revision = '58732bb5d14b'
from alembic import op
from sqlalchemy.sql import text |
<|file_name|>HttpStatusEncoder.java<|end_file_name|><|fim▁begin|>/**
* Copyright 2007-2015, Kaazing Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaazing.k3po.driver.internal.behavior.handler.codec.http;
import static java.lang.String.format;
import static java.nio.charset.StandardCharsets.US_ASCII;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.handler.codec.http.HttpResponseStatus;
import org.kaazing.k3po.driver.internal.behavior.handler.codec.ConfigEncoder;
import org.kaazing.k3po.driver.internal.behavior.handler.codec.MessageEncoder;
import org.kaazing.k3po.driver.internal.netty.bootstrap.http.HttpChannelConfig;
public class HttpStatusEncoder implements ConfigEncoder {
private final MessageEncoder codeEncoder;
private final MessageEncoder reasonEncoder;
public HttpStatusEncoder(MessageEncoder codeEncoder, MessageEncoder reasonEncoder) {
this.codeEncoder = codeEncoder;
this.reasonEncoder = reasonEncoder;
}
@Override
public void encode(Channel channel) throws Exception {
HttpChannelConfig httpConfig = (HttpChannelConfig) channel.getConfig();
int code = Integer.parseInt(codeEncoder.encode().toString(US_ASCII));
String reason = reasonEncoder.encode().toString(US_ASCII);
HttpResponseStatus status = new HttpResponseStatus(code, reason);
httpConfig.setStatus(status);<|fim▁hole|> public String toString() {
return format("http:status %s %s", codeEncoder, reasonEncoder);
}
}<|fim▁end|> | }
@Override |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>import os
from django.core.urlresolvers import reverse
from django.db import models
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from easy_thumbnails.fields import ThumbnailerImageField
class VisibilityModel(models.Model):
is_active = models.BooleanField(_('is active'), default=True)
is_public = models.BooleanField(_('is public'), default=False)
class Meta:
abstract = True
class AlbumManager(models.Manager):
def active(self):
return self.filter(is_active=True)
def public(self):
return self.filter(is_active=True, is_public=True)
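# Illustrative manager usage (not part of the original module):
#   Album.objects.public()  -> active albums that are also public
#   Album.objects.active()  -> all active albums, public or not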
class Album(VisibilityModel):
created_on = models.DateTimeField(_('created on'), default=timezone.now)
date = models.DateField(_('date'), default=timezone.now)
title = models.CharField(_('title'), max_length=200)
slug = models.SlugField(_('slug'), max_length=200, unique=True)
objects = AlbumManager()
class Meta:
get_latest_by = 'date'
ordering = ['-date']
verbose_name = _('album')
verbose_name_plural = _('albums')
def __unicode__(self):
return self.title
def get_absolute_url(self):
return reverse('chet_album_detail', kwargs={
'year': self.date.strftime('%Y'),
'slug': self.slug,
})
class PhotoManager(models.Manager):
def active(self):
return self.filter(
is_active=True,
album__is_active=True,
)
def public(self):
return self.filter(
is_active=True,
album__is_active=True,<|fim▁hole|> is_public=True,
album__is_public=True,
)
class Photo(VisibilityModel):
created_on = models.DateTimeField(_('created on'), default=timezone.now)
file = ThumbnailerImageField(_('file'), upload_to='chet/photos/%Y/%m/')
album = models.ForeignKey(
Album, verbose_name=_('album'), related_name='photos')
shot_on = models.DateTimeField(_('shot on'), default=timezone.now)
title = models.CharField(_('title'), max_length=200, blank=True)
is_dark = models.BooleanField(
_('is dark'), default=False,
help_text=_('Dark images are shown on a light background.'))
objects = PhotoManager()
class Meta:
get_latest_by = 'shot_on'
ordering = ['shot_on']
verbose_name = _('photo')
verbose_name_plural = _('photos')
def __unicode__(self):
return self.title or os.path.basename(self.file.name)
def get_absolute_url(self):
return reverse('chet_photo_detail', kwargs={
'year': self.album.date.strftime('%Y'),
'slug': self.album.slug,
'photo': self.pk,
})<|fim▁end|> | |
<|file_name|>wjquery.calendar.js<|end_file_name|><|fim▁begin|>/*
* wjquery.calendar 0.1.1
* by composite ([email protected])
* http://www.wonchu.net
 * This project is licensed under the MIT License.
 0.1.0 : initial release
 0.1.1 : code cleanup
*/
(function ($) {
const WCALENDAR_SV = {
ns: "wcalendar",
dateFormat: "YYYYMMDD",
lang: {
ko: {
week: ["일", "월", "화", "수", "목", "금", "토"]
}
}
};
$.fn.wcalendar = function (method) {
let result, _arguments = arguments;
this.each(function (i, element) {
const $element = $(element);
let $container,
_option = {};
if ($element.prop("tagName").toLowerCase() == "input") {
if ($element.attr("data-wrap-id")) {
$container = $("#" + $element.attr("data-wrap-id"));
} else {
const _id = "wcalendar_" + new Date().getTime();
$element.after("<div id=\"" + _id + "\" />");
_option.element = $element;
$element.attr("data-wrap-id", _id);
$container = $("#" + _id);
}
} else {
$container = $element;
}
const plugin = $container.data(WCALENDAR_SV.ns);
if (plugin && typeof method === 'string') {
if (plugin[method]) {
result = plugin[method].apply(this, Array.prototype.slice.call(_arguments, 1));
} else {
alert('Method ' + method + ' does not exist on jQuery.wcalendar');
}
} else if (!plugin && (typeof method === 'object' || !method)) {
let wcalendar = new WCALENDAR();
$container.data(WCALENDAR_SV.ns, wcalendar);
wcalendar.init($container, $.extend(_option, $.fn.wcalendar.defaultSettings, method || {}));
}
});
return result ? result : $(this);
};
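    // Illustrative usage (assumed element id, sketch only):
    //   $('#date').wcalendar({ locale: 'ko', callback: function (d) { console.log(d); } });
    //   $('#date').wcalendar('next'); // advance the visible month by one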
$.fn.wcalendar.defaultSettings = {
width: "200px",
locale: "ko",
dateFormat: "YYYY.MM.DD",
showPrevNextDays: true,
dateIconClass: "wcalendar-dateicon",
mdHoliday: {
"0101": {
ko: "신정"
},
"0505": {
ko: "어린이날"
}
},
holiday: {
"20210519": {
ko: "부처님오신날"
}
}
};
function WCALENDAR() {
let $container, options;
function init(_container, _options) {
$container = _container;
options = _options;
if (options.selectDate) {
options.selectDate = moment(options.selectDate, options.dateFormat);
} else {
if (options.element && options.element.val() != "") {
options.selectDate = moment(options.element.val(), options.dateFormat);
} else {
options.selectDate = moment();
}
}
options.targetDate = options.selectDate.clone();
_WCALENDAR.init($container, options);
}
function draw() {
_WCALENDAR.draw($container, options);
}
function prev() {
options.targetDate = options.targetDate.add(-1, "months");
_WCALENDAR.draw($container, options);
}
function next() {
options.targetDate = options.targetDate.add(1, "months");
_WCALENDAR.draw($container, options);
}
function set(dt) {
options.targetDate = moment(dt, options.dateFormat);
_WCALENDAR.draw($container, options);
}
function select() {
options.targetDate = moment($(".wcalendar-month .title-year", $container).val() + $.pad($(".wcalendar-month .title-month", $container).val(), 2) + "01", "YYYYMMDD");
_WCALENDAR.draw($container, options);
}
function click() {
const _index = $(".wcalendar-undock").index($container);
$(".wcalendar-undock").each(function () {
if ($(".wcalendar-undock").index(this) != _index && $(this).is(":visible")) {
$(this).hide();
}
});
if ($container.is(":visible")) {
$container.hide();
} else {
if (options.element && options.element.val() != "") {
options.selectDate = moment(options.element.val(), options.dateFormat);
options.hasVal = "Y";
} else {
options.selectDate = moment();
options.hasVal = "N";
}
<|fim▁hole|> }
}
function destory() {
if (options.element) {
options.element.removeClass("wcalendar-input");
if (options.element.next("." + options.dateIconClass)) {
options.element.next("." + options.dateIconClass).remove();
}
}
$container.remove();
}
return {
init: init,
draw: draw,
prev: prev,
next: next,
set: set,
select: select,
click: click,
destory: destory
};
}
var _WCALENDAR = {
init: function ($container, options) {
if (options.element) {
options.element.addClass("wcalendar-input");
$container.addClass("wcalendar-undock").css({
"top": options.element.position().top + options.element.outerHeight(),
"left": options.element.position().left,
"width": options.width
});
const $icon = $("<span class=\"" + options.dateIconClass + "\" />");
options.element.after($icon);
$icon.click(function () {
$container.wcalendar("click");
});
options.element.click(function () {
$container.wcalendar("click");
});
$(document).on("click.wcalendar-undock", function (event) {
if ($(event.target).closest(".wcalendar-wrap, .wcalendar-input, ." + options.dateIconClass).length === 0) {
$container.hide();
}
});
}
$container.html(
"<div class=\"wcalendar-wrap\">" +
" <div class=\"wcalendar-month\">" +
" <ul>" +
" <li class=\"prev\"><a href=\"javascript:;\"><span>❮</span></a></li>" +
" <li class=\"next\"><a href=\"javascript:;\"><span>❯</span></a></li>" +
" <li><select class=\"title-year\"></select> <select class=\"title-month\"></select></li>" +
" </ul>" +
" </div>" +
" <ul class=\"wcalendar-weekdays\"></ul>" +
" <ul class=\"wcalendar-days\"></ul>" +
"</div>"
);
this.draw($container, options);
$(".wcalendar-month li>a", $container).click(function () {
$container.wcalendar($(this).parent().attr("class"));
});
$container.find(".wcalendar-days").on("click", "a", function () {
var $t = $(this);
$t.parent().siblings().find("a.active").removeClass("active");
$t.addClass("active");
if (options.callback) {
options.callback($(this).attr("data-val"));
} else if (options.element) {
options.element.val($(this).attr("data-val"));
$container.hide();
}
});
},
draw: function ($container, options) {
const curentDate = moment(),
selectDate = options.selectDate,
targetDate = options.targetDate,
firstDate = targetDate.clone().startOf("month"),
lastDate = targetDate.clone().endOf("month");
let _prevDate, _targetDate, _nextDate;
this.makeSelectOption($(".wcalendar-month .title-year", $container), targetDate.year() - 10, targetDate.year() + 10, targetDate.year());
this.makeSelectOption($(".wcalendar-month .title-month", $container), 1, 12, options.targetDate.month() + 1);
$(".wcalendar-month .title-month, .wcalendar-month .title-year", $container).off("change").on("change", function () {
$container.wcalendar("select");
});
let _weekdays = [];
for (let n = 0; n < 7; n++) {
_weekdays.push("<li>" + WCALENDAR_SV.lang[options.locale].week[n] + "</li>");
}
$container.find(".wcalendar-weekdays").empty().append(_weekdays.join(""));
let _days = [];
for (let i = firstDate.day(); i > 0; i--) {
if (options.showPrevNextDays) {
_prevDate = firstDate.clone().add(-i, "days");
_days.push(this.makeItem(options, "prev", _prevDate, curentDate, selectDate));
} else {
_days.push("<li> </li>");
}
}
for (let j = 0; j < lastDate.date(); j++) {
_targetDate = firstDate.clone().add(j, "days");
_days.push(this.makeItem(options, "target", _targetDate, curentDate, selectDate));
}
for (let k = 1; k <= (6 - lastDate.day()); k++) {
if (options.showPrevNextDays) {
_nextDate = lastDate.clone().add(k, "days");
_days.push(this.makeItem(options, "next", _nextDate, curentDate, selectDate));
} else {
_days.push("<li> </li>");
}
}
$container.find(".wcalendar-days").empty().append(_days.join(""));
},
makeItem: function (options, mode, dt, dt2, dt3) {
let classNames = [],
titles = [],
_classNames = "",
_titles = "";
const dtf = dt.format(WCALENDAR_SV.dateFormat),
dtfmd = dt.format("MMDD"),
dtf2 = dt2.format(WCALENDAR_SV.dateFormat),
dtf3 = dt3.format(WCALENDAR_SV.dateFormat);
classNames.push(mode);
if (dtf2 == dtf) {
classNames.push("today");
}
if (dtf3 == dtf ) {
if(options.hasVal && options.hasVal=="N"){
//nothing
}else{
classNames.push("active");
}
}
if (options.mdHoliday && options.mdHoliday[dtfmd]) {
classNames.push("md-holiday");
titles.push(options.mdHoliday[dtfmd][options.locale]);
}
if (options.holiday && options.holiday[dtf]) {
classNames.push("holiday");
titles.push(options.holiday[dtf][options.locale]);
}
if (classNames.length > 0) {
_classNames = " class=\"" + (classNames.join(" ")) + "\"";
}
if (titles.length > 0) {
_titles = " title=\"" + (titles.join(" ")) + "\"";
}
return "<li>" +
" <a href=\"javascript:;\" data-val=\"" + dt.format(options.dateFormat) + "\"" + _titles + _classNames + ">" + dt.date() + "</a>" +
"</li>";
},
makeSelectOption: function ($t, start, end, v) {
let _options = [];
for (let i = start; i <= end; i++) {
_options.push("<option value=\"" + i + "\"" + (i == v ? " selected=\"selected\"" : "") + ">" + i + "</option>");
}
$t.empty().append(_options.join(""));
}
}
})(jQuery);<|fim▁end|> | options.targetDate = options.selectDate.clone();
_WCALENDAR.draw($container, options);
$container.show();
|
<|file_name|>perfect.rs<|end_file_name|><|fim▁begin|>use super::lookups;
use cards::card::{Card};
use super::{HandRank};
use super::utils::{card_to_deck_number};
fn simulate_32bit_precision(u: usize) -> usize {
let mask = 0xffffffff;
u & mask
}
// don't use this.
pub fn find_fast(something: usize) -> usize {
let mut u = simulate_32bit_precision(something);
//well, this is awkward. The logic in this function relies on arithmetic overflows
u = simulate_32bit_precision(u + 0xe91aaa35);
u = simulate_32bit_precision(u ^ (u >> 16));
u = simulate_32bit_precision(u + (u << 8));
u = simulate_32bit_precision(u ^ (u >> 4));
let b = simulate_32bit_precision((u >> 8) & 0x1ff);
let a = simulate_32bit_precision((u + (u << 2)) >> 19);
simulate_32bit_precision(a ^ (lookups::HASH_ADJUST[b] as usize))
}
pub fn eval_5cards(cards: [&Card; 5]) -> HandRank {
let c1 = card_to_deck_number(cards[0]);
let c2 = card_to_deck_number(cards[1]);
let c3 = card_to_deck_number(cards[2]);
let c4 = card_to_deck_number(cards[3]);
let c5 = card_to_deck_number(cards[4]);
let q : usize = ((c1 | c2 | c3 | c4 | c5) as usize) >> 16;
if (c1 & c2 & c3 & c4 & c5 & 0xf000) != 0 {
return lookups::FLUSHES[q] as HandRank;
}
let s = lookups::UNIQUE_5[q] as HandRank;
if s != 0 {
return s;
}
//TODO: FIXME
// version: perfect hash. Not working currently
let lookup = find_fast(
((c1 & 0xff) * (c2 & 0xff) * (c3 & 0xff) * (c4 & 0xff) * (c5 & 0xff)) as usize
);
HAND_RANK_COUNT - (lookups::HASH_VALUES[lookup] as HandRank)
}
// don't use this.
pub fn eval_7cards(cards: [&Card; 7]) -> HandRank {
let mut tmp;
let mut best = 0;
for ids in lookups::PERM_7.iter() {
let subhand : [&Card; 5] = [
cards[ids[0] as usize],
cards[ids[1] as usize],
cards[ids[2] as usize],
cards[ids[3] as usize],
cards[ids[4] as usize]
];
tmp = eval_5cards(subhand);
if tmp > best {
best = tmp;
}
}
best
}
// these two guys only work by accident
/*
#[test]
fn get_rank_of_5_perfect() {<|fim▁hole|> let c4 = Card(Value::Two, Suit::Clubs);
let c5 = Card(Value::Three, Suit::Hearts);
let cards = [&c1, &c2, &c3, &c4, &c5];
let rank = perfect::eval_5cards(cards);
assert_eq!(hand_rank(rank), HandRankClass::FourOfAKind);
}
#[test]
fn get_rank_of_7_perfect() {
let c1 = Card(Value::Two, Suit::Spades);
let c2 = Card(Value::Two, Suit::Hearts);
let c3 = Card(Value::Two, Suit::Diamonds);
let c4 = Card(Value::Two, Suit::Clubs);
let c5 = Card(Value::Three, Suit::Hearts);
let c6 = Card(Value::Three, Suit::Diamonds);
let c7 = Card(Value::Three, Suit::Clubs);
let cards = [&c1, &c2, &c3, &c4, &c5, &c6, &c7];
let rank = perfect::eval_7cards(cards);
assert_eq!(hand_rank(rank), HandRankClass::FourOfAKind);
}
*/<|fim▁end|> | let c1 = Card(Value::Two, Suit::Spades);
let c2 = Card(Value::Two, Suit::Hearts);
let c3 = Card(Value::Two, Suit::Diamonds); |
<|file_name|>sudoku.go<|end_file_name|><|fim▁begin|>package sudoku
const (
n = 3
N = 3 * 3
)
var (
resolved bool
)
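// solveSudoku backtracks over empty cells, keeping per-row, per-column and
// per-box digit counters so that checking whether a digit fits a cell is O(1).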
func solveSudoku(board [][]byte) [][]byte {
// box size 3
row := make([][]int, N)
columns := make([][]int, N)
box := make([][]int, N)
res := make([][]byte, N)
for i := 0; i < N; i++ {
row[i] = make([]int, N+1)
columns[i] = make([]int, N+1)
box[i] = make([]int, N+1)
res[i] = make([]byte, N)
copy(res[i], board[i])
}
for i := 0; i < N; i++ {
for j := 0; j < N; j++ {
if board[i][j] != '.' {
placeNumberAtPos(res, i, j, int(board[i][j]-'0'), row, columns, box)
}
}
}
permute(res, 0, 0, row, columns, box)
return res
}
func placeNumberAtPos(board [][]byte, i, j, num int, row, columns, box [][]int) {
boxIdx := (i/n)*n + j/n
(row)[i][num]++
(columns)[j][num]++
(box)[boxIdx][num]++
(board)[i][j] = byte('0' + num)
}
func removeNumberAtPos(board [][]byte, i, j, num int, row, columns, box [][]int) {
boxIdx := (i/n)*n + j/n
	// Undo the counters recorded by placeNumberAtPos.
	row[i][num]--
	columns[j][num]--
	box[boxIdx][num]--
board[i][j] = '.'
}
func isValidPosForNum(i, j, num int, row, columns, box [][]int) bool {
boxIdx := (i/n)*n + j/n
return row[i][num]+columns[j][num]+box[boxIdx][num] == 0
}<|fim▁hole|> if board[i][j] == '.' {
for k := 1; k <= N; k++ {
if isValidPosForNum(i, j, k, row, column, box) {
placeNumberAtPos(board, i, j, k, row, column, box)
				placeNext(board, i, j, row, column, box)
if !resolved {
removeNumberAtPos(board, i, j, k, row, column, box)
}
}
}
} else {
placeNext(board, i, j, row, column, box)
}
}
func placeNext(board [][]byte, i, j int, row, column, box [][]int) {
	if i == N-1 && j == N-1 {
		// The last cell has been filled: the board is solved.
		resolved = true
		return
	}
	if j == N-1 {
		permute(board, i+1, 0, row, column, box)
	} else {
		permute(board, i, j+1, row, column, box)
	}
}
func solveSudoku2(board [][]byte) [][]byte {
if len(board) == 0 {
return board
}
solve(board)
return board
}
func solve(board [][]byte) bool {
var c byte
for i := 0; i < len(board); i++ {
for j := 0; j < len(board[0]); j++ {
if board[i][j] == '.' {
for c = '1'; c <= '9'; c++ {
if isValid(board, i, j, c) {
board[i][j] = c
if solve(board) {
return true
} else {
board[i][j] = '.'
}
}
}
return false
}
}
}
return true
}
func isValid(board [][]byte, row int, col int, c byte) bool {
for i := 0; i < 9; i++ {
if board[i][col] != '.' && board[i][col] == c {
return false
}
if board[row][i] != '.' && board[row][i] == c {
return false
}
if board[3*(row/3)+i/3][3*(col/3)+i%3] != '.' && board[3*(row/3)+i/3][3*(col/3)+i%3] == c {
return false
}
}
return true
}<|fim▁end|> |
func permute(board [][]byte, i, j int, row, column, box [][]int) { |
<|file_name|>p62.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
corto = 10
largo = long(corto)
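# Note: long() exists only in Python 2; it was merged into int in Python 3.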
<|fim▁hole|><|fim▁end|> | print type(corto)
print type(largo) |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>var fs = require('fs')
// Script directories
var settings = require('./settings.json'); // The settings file
var scriptDir = settings.scriptDir + '/'; // The directory where dota scripts are placed
var scriptDirOut = settings.scriptDirOut; // The directory where our files are outputted
var resourcePath = settings.dotaDir + 'game/dota/resource/'; // The directory to read resource files from
var customDir = settings.customDir; // The directory where our mods are read from, to be merged in
var customAbilitiesLocation = '../src/scripts/npc/npc_abilities_custom.txt'
var langDir = '../src/localization/';
// Code needed to do multipliers
var spellMult = require('./spellMult.json');
// Create the output folder
if(!fs.existsSync(scriptDirOut)) fs.mkdirSync(scriptDirOut);
// Create the output folder
//if(!fs.existsSync(scriptDirOut)) fs.mkdirSync(scriptDirOut);
// Store for our custom stuff
var customAbilities = {};
var customUnits = {};
//var customItems = {};
//var items = {};
var abilities = {};
/*<|fim▁hole|>*/
var langs = ['english', 'schinese'];
var langIn = {};
var langOut = {};
var specialChar; // Special character (UTF-16 BOM) needed for dota encoding
// theString is the string we search for and use as a key to store in
// if theString can't be found, search using altString
// search the actual language first; fall back to english; if that also fails, log the failure
function generateLanguage(theString, altString, appendOnEnd) {
// Grab a reference to english
var english = langIn.english;
if(appendOnEnd == null) appendOnEnd = '';
for(var i=0; i<langs.length; ++i) {
// Grab a language
var lang = langs[i];
var langFile = langIn[lang];
var storeTo = langOut[lang];
if(langFile[theString]) {
storeTo[theString] = langFile[theString] + appendOnEnd;
} else if(langFile[altString]) {
storeTo[theString] = langFile[altString] + appendOnEnd;
} else if(english[theString]) {
storeTo[theString] = english[theString] + appendOnEnd;
} else if(english[altString]) {
storeTo[theString] = english[altString] + appendOnEnd;
} else if(storeTo[altString]) {
storeTo[theString] = storeTo[altString] + appendOnEnd;
} else {
console.log('Failed to find ' + theString);
}
if(!langFile[theString]) langFile[theString] = storeTo[theString];
}
}
var generateAfter = [];
function generateLanguageAfter(theString, altString, appendOnEnd) {
generateAfter.push([theString, altString, appendOnEnd]);
}
function clearGenerateAfter() {
generateAfter = [];
}
// Read in our language files
function prepareLanguageFiles(next) {
var ourData = ''+fs.readFileSync(langDir + 'addon_english.txt');
var english = parseKV(ourData).addon;
specialChar = fs.readFileSync(resourcePath + 'dota_english.txt', 'utf16le').substring(0, 1);
for(var i=0; i<langs.length; ++i) {
// Grab a language
var lang = langs[i];
var data = fs.readFileSync(resourcePath + 'dota_' + lang + '.txt', 'utf16le').substring(1);
// Load her up
langIn[lang] = parseKV(data).lang.Tokens;
langOut[lang] = {};
var toUse;
if(fs.existsSync(langDir + 'addon_' + lang + '.txt')) {
var ourData
if(lang == 'english') {
ourData = ''+fs.readFileSync(langDir + 'addon_' + lang + '.txt');
} else {
ourData = ''+fs.readFileSync(langDir + 'addon_' + lang + '.txt', 'utf16le').substring(1);
}
toUse = parseKV(ourData).addon;
} else {
toUse = english;
}
for(var key in english) {
if(toUse[key]) {
langOut[lang][key] = toUse[key];
} else {
langOut[lang][key] = english[key];
}
}
for(var key in toUse) {
if(!langIn[lang][key]) {
langIn[lang][key] = toUse[key];
}
}
}
console.log('Done loading languages!');
// Run the next step if there is one
if(next) next();
}
/*
Precache generator
*/
function generatePrecacheData(next) {
// Precache generator
fs.readFile(scriptDir+'npc_heroes.txt', function(err, rawHeroes) {
console.log('Loading heroes...');
var rootHeroes = parseKV(''+rawHeroes);
var newKV = {};
// List of heroes to ignore differs based on s1 and s2
// In s2, no bots are supported, so we can just strip every hero
var ignoreHeroes = {
npc_dota_hero_techies: true,
npc_dota_hero_gyrocopter: true,
npc_dota_hero_riki: true
};
var heroes = rootHeroes.DOTAHeroes;
for(var name in heroes) {
if(name == 'Version') continue;
if(name == 'npc_dota_hero_base') continue;
var data = heroes[name];
if(!ignoreHeroes[name]) {
newKV[name+'_lod'] = {
override_hero: name,
AbilityLayout: 6
}
if(data.BotImplemented != 1) {
//newKV[name+'_lod'].Ability1 = 'attribute_bonus';
for(var i=1;i<=32;++i) {
var txt = heroes[name]['Ability' + i];
if(txt) {
if(txt[0].indexOf('special_bonus_') != -1) {
//newKV[name+'_lod']['Ability' + i] = txt;
} else {
newKV[name+'_lod']['Ability' + i] = '';
}
}
}
}
}
// Check if they are melee
if(data.AttackCapabilities == 'DOTA_UNIT_CAP_MELEE_ATTACK') {
if(!newKV[name+'_lod']) {
newKV[name+'_lod'] = {
override_hero: name
}
}
// Give them projectile speed + model
newKV[name+'_lod'].ProjectileSpeed = 1000
newKV[name+'_lod'].ProjectileModel = 'luna_base_attack'
}
// Add ability layout = 6
if(!newKV[name+'_lod']) {
newKV[name+'_lod'] = {
override_hero: name
}
}
newKV[name+'_lod'].AbilityLayout = 6;
// Source2 precache
customUnits['npc_precache_'+name] = {
BaseClass: 'npc_dota_creep',
precache: {
particle_folder: data.particle_folder,
soundfile: data.GameSoundsFile
}
}
// Extra precache stuff
if(data.precache) {
for(var key in data.precache) {
customUnits['npc_precache_'+name].precache[key] = data.precache[key];
}
}
}
		// Techies override precaching
customUnits.npc_precache_npc_dota_hero_techies.precache.model = 'models/heroes/techies/fx_techiesfx_mine.vmdl';
// Store the hero data
fs.writeFile(scriptDirOut+'npc_heroes_custom.txt', toKV(newKV, 'DOTAHeroes'), function(err) {
if (err) throw err;
console.log('Done saving custom heroes!');
// Continue, if there is something else to run
if(next) next();
});
});
}
/*
Custom file mergers
*/
/*function loadItems(next) {
	// Simply read in the file, and store into our variable
fs.readFile(scriptDir+'items.txt', function(err, rawItems) {
console.log('Loading items...');
items = parseKV(''+rawItems).DOTAAbilities;
// Continue, if there is something else to run
if(next) next();
});
}*/
function loadAbilities(next) {
	// Simply read in the file, and store into our variable
fs.readFile(scriptDir+'npc_abilities.txt', function(err, rawAbs) {
console.log('Loading abilities...');
abilities = parseKV(''+rawAbs).DOTAAbilities;
// Continue, if there is something else to run
if(next) next();
});
}
function loadCustomUnits(next) {
	// Simply read in the file, and store into our variable
fs.readFile(customDir+'npc_units_custom.txt', function(err, rawCustomUnits) {
console.log('Loading custom units...');
customUnits = parseKV(''+rawCustomUnits).DOTAUnits;
// Continue, if there is something else to run
if(next) next();
});
}
function loadCustomAbilities(next) {
	// Simply read in the file, and store into our variable
fs.readFile(customAbilitiesLocation, function(err, rawCustomAbilities) {
console.log('Loading custom abilities...');
customAbilities = parseKV(''+rawCustomAbilities).DOTAAbilities;
// Continue, if there is something else to run
if(next) next();
});
}
/*
Process Skill Warnings
*/
function generateSkillWarnings(next) {
// Grab a reference to english
var english = langIn.english;
for(var word in english) {
if(word.indexOf('warning_') == 0) {
var value = english[word];
var abilityName = word.replace('warning_', '');
for(var i=0; i<langs.length; ++i) {
// Grab a language
var lang = langs[i];
var langFile = langIn[lang];
var storeTo = langOut[lang];
var storeValue = value;
// Does this language have a different translation of the word?
if(langFile[word]) {
storeValue = langFile[word];
}
// Do we have anything to change?
var searchKey = 'DOTA_Tooltip_ability_' + abilityName+ '_Description';
if(langFile[searchKey]) {
storeValue = langFile[searchKey] + '<br><br>' + storeValue + '<br>';
}
// Store it
storeTo[searchKey] = storeValue;
}
}
}
// Continue
next();
}
/*
Helper functions
*/
// Round to places decimal places
function r(value, places) {
for(var i=0; i<places; i++) {
value *= 10;
}
value = Math.round(value);
for(var i=0; i<places; i++) {
value /= 10;
}
return value;
}
function clone(x) {
if (x === null || x === undefined)
return x;
if (x.clone)
return x.clone();
if (x.constructor == Array)
{
var r = [];
for (var i=0,n=x.length; i<n; i++)
r.push(clone(x[i]));
return r;
}
if(typeof(x) == 'object') {
var y = {};
for(var key in x) {
y[key] = clone(x[key]);
}
return y;
}
return x;
}
/*
Parses most of a KV file
Mostly copied from here:
https://github.com/Matheus28/KeyValue/blob/master/m28/keyvalue/KeyValue.hx
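	Illustrative behaviour (assumed from this implementation):
	  parseKV('"Root" { "key" "value" }')  ->  { Root: { key: ["value"] } }
	(string values are collected into arrays so repeated keys are preserved)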
*/
var TYPE_BLOCK = 0;
function parseKV(data) {
// Make sure we have some data to work with
if(!data) return null;
var tree = [{}];
var treeType = [TYPE_BLOCK];
var keys = [null];
var i = 0;
var line = 1;
while(i < data.length) {
var chr = data.charAt(i);
if(chr == ' ' || chr == '\t') {
// Ignore white space
} else if(chr == '\n') {
// We moved onto the next line
line++;
if(data.charAt(i+1) == '\r') i++;
} else if(chr == '\r') {
// We moved onto the next line
line++;
if(data.charAt(i+1) == '\n') i++;
} else if(chr == '/') {
if(data.charAt(i+1) == '/') {
// We found a comment, ignore rest of the line
while(++i < data.length) {
chr = data.charAt(i);
// Check for new line
if(chr == '\n') {
if(data.charAt(i+1) == '\r') ++i;
break;
}
if(chr == '\r') {
if(data.charAt(i+1) == '\n') ++i;
break;
}
}
// We are on a new line
line++;
}
} else if(chr == '"') {
var resultString = '';
i++;
while(i < data.length) {
chr = data.charAt(i);
if(chr == '"') break;
if(chr == '\n') {
// We moved onto the next line
line++;
if(data.charAt(i+1) == '\r') i++;
} else if(chr == '\r') {
// We moved onto the next line
line++;
if(data.charAt(i+1) == '\n') i++;
} else if(chr == '\\') {
i++;
					// Grab the next character
chr = data.charAt(i);
// Check for escaped characters
switch(chr) {
case '\\':chr = '\\'; break;
case '"': chr = '"'; break;
case '\'': chr = '\''; break;
case 'n': chr = '\n'; break;
case 'r': chr = '\r'; break;
default:
chr = '\\';
i--;
break;
}
}
resultString += chr;
i++;
}
if (i == data.length || chr == '\n' || chr == '\r') throw new Error("Unterminated string at line " + line);
if(treeType[treeType.length - 1] == TYPE_BLOCK){
if (keys[keys.length - 1] == null) {
keys[keys.length - 1] = resultString;
}else {
if(tree[tree.length - 1][keys[keys.length - 1]] == null) {
tree[tree.length - 1][keys[keys.length - 1]] = [];
}
tree[tree.length - 1][keys[keys.length - 1]].push(resultString);
keys[keys.length - 1] = null;
}
}
// Check if we need to reparse the character that ended this string
if(chr != '"') --i;
} else if(chr == '{') {
if(treeType[treeType.length - 1] == TYPE_BLOCK){
if (keys[keys.length - 1] == null) {
throw new Error("A block needs a key at line " + line + " (offset " + i + ")");
}
}
tree.push({});
treeType.push(TYPE_BLOCK);
keys.push(null);
} else if (chr == '}') {
if (tree.length == 1) {
throw new Error("Mismatching bracket at line " + line + " (offset " + i + ")");
}
if (treeType.pop() != TYPE_BLOCK) {
throw new Error("Mismatching brackets at line " + line + " (offset " + i + ")");
}
keys.pop();
var obj = tree.pop();
if(treeType[treeType.length - 1] == TYPE_BLOCK){
tree[tree.length - 1][keys[keys.length - 1]] = obj;
keys[keys.length - 1] = null;
}else {
tree[tree.length - 1].push(obj);
}
} else {
console.log("Unexpected character \"" + chr + "\" at line " + line + " (offset " + i + ")");
// Skip to next line
while(++i < data.length) {
chr = data.charAt(i);
// Check for new line
if(chr == '\n') {
if(data.charAt(i+1) == '\r') ++i;
break;
}
if(chr == '\r') {
if(data.charAt(i+1) == '\n') ++i;
break;
}
}
// We are on a new line
line++;
// Move onto the next char
i++;
}
i++;
}
if (tree.length != 1) {
throw new Error("Missing brackets");
}
return tree[0];
}
function escapeString(str) {
return str.replace(/\\/gm, '\\\\').replace(/\"/gm, '\\"').replace(/(\r\n|\n|\r|\n\r)/gm, '\\n');
}
function toKV(obj, key) {
var myStr = '';
if(obj == null) {
// Nothing to return
return '';
} else if (typeof obj == 'number') {
return '"' + escapeString(key) + '""' + obj + '"';
} else if (typeof obj == 'boolean') {
return '"' + escapeString(key) + '""' + obj + '"';
} else if (typeof obj == 'string') {
return '"' + escapeString(key) + '""' + escapeString(obj) + '"';
} else if(obj instanceof Array) {
// An array of strings
for(var i=0; i<obj.length; i++) {
myStr = myStr + '"' + escapeString(key) + '"\n"' + escapeString(obj[i]) + '"';
}
return myStr;
} else {
// An object
for(var entry in obj) {
myStr += toKV(obj[entry], entry)
}
if(key != null) {
return '"' + escapeString(key) + '"{\n' + myStr + '}';
} else {
return myStr;
}
}
}
/*
Run everything
*/
// Prepare the language files
prepareLanguageFiles(function() {
// Load our custom units
loadCustomUnits(function() {
// Load abilities
loadAbilities(function() {
// Load items
//loadItems(function() {
// Load our custom items
//loadCustomItems(function() {
// Load our custom abilities
loadCustomAbilities(function() {
// Generate the custom item abilities
//generateAbilityItems(function() {
// Generate our precache data
generatePrecacheData(function() {
//doCSP(function() {
//doLvl1Ults(function() {
generateSkillWarnings(function() {
// Output language files
for(var i=0; i<langs.length; ++i) {
(function(lang) {
fs.writeFile(scriptDirOut+'addon_' + lang + '_token.txt', specialChar + toKV({Tokens: langOut[lang]}, 'lang'), 'utf16le', function(err) {
if (err) throw err;
console.log('Finished saving ' + lang + '!');
});
fs.writeFile(scriptDirOut+'addon_' + lang + '.txt', specialChar + toKV(langOut[lang], 'addon'), 'utf16le', function(err) {
if (err) throw err;
console.log('Finished saving ' + lang + '!');
});
})(langs[i]);
}
// Output custom files
fs.writeFile(scriptDirOut+'npc_units_custom.txt', toKV(customUnits, 'DOTAUnits'), function(err) {
if (err) throw err;
console.log('Done saving custom units file!');
});
});
//});
//});
});
//});
});
//});
//});
});
});
});<|fim▁end|> | Prepare language files |
<|file_name|>TeraSdk.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Tera Python SDK. It needs a libtera_c.so
TODO(taocipian) __init__.py
"""
from ctypes import CFUNCTYPE, POINTER
from ctypes import byref, cdll, string_at
from ctypes import c_bool, c_char_p, c_void_p
from ctypes import c_uint32, c_int32, c_int64, c_ubyte, c_uint64
class Status(object):
""" status code """
# C++ tera.h ErrorCode
OK = 0
NotFound = 1
BadParam = 2
System = 3
Timeout = 4
Busy = 5
NoQuota = 6
NoAuth = 7
Unknown = 8
NotImpl = 9
reason_list_ = ["ok", "not found", "bad parameter",
"unknown error", "request timeout", "busy",
"no quota", "operation not permitted", "unknown error",
"not implemented"]
def __init__(self, c):
""" init """
self.c_ = c
if c < 0 or c > len(Status.reason_list_) - 1:
self.reason_ = "bad status code"
else:
self.reason_ = Status.reason_list_[c]
def GetReasonString(self):
"""
Returns:
(string) status string
"""
return Status.reason_list_[self.c_]
def GetReasonNumber(self):
"""
Returns:
(long) status code
"""
return self.c_
class ScanDescriptor(object):
    """ Descriptor of a scan operation
    Scans all data in the range [start_key, end_key); by default the
    latest version of each cell is returned
    """
    def __init__(self, start_key):
        """
        Args:
            start_key(string): start position of the scan; the scan
                               result includes start_key
        """
        self.desc = lib.tera_scan_descriptor(start_key,
                                             c_uint64(len(start_key)))
    def Destroy(self):
        """
        Destroy this scan_descriptor and release the underlying resources;
        this object must not be used afterwards
        """
        lib.tera_scan_descriptor_destroy(self.desc)
    def SetEnd(self, end_key):
        """
        If this method is not called, end_key is treated as "infinity"
        Args:
            end_key(string): end position of the scan; the scan result
                             does not include end_key
        """
        lib.tera_scan_descriptor_set_end(self.desc, end_key,
                                         c_uint64(len(end_key)))
    def SetMaxVersions(self, versions):
        """
        If this method is not called, only the latest version of each cell
        is scanned by default
        Args:
            versions(long): maximum number of versions per cell in the scan
        """
        lib.tera_scan_descriptor_set_max_versions(self.desc, versions)
    def SetBufferSize(self, buffer_size):
        """
        The server accumulates scanned data into a buffer and returns once
        the buffer reaches buffer_size; it may also return before the
        buffer is full because of a timeout or because the scan reached
        its end. The default value is 64 * 1024.
        This option has a very noticeable impact on scan performance.
        Our tests show that 1024*1024 (1MB) performs well in many
        scenarios; tuning it for your own workload is recommended.
        Args:
            buffer_size: size of the scan buffer, in bytes
        """
        lib.tera_scan_descriptor_set_buffer_size(self.desc, buffer_size)
    def SetPackInterval(self, interval):
        """
        Set the timeout of a scan round, in ms
        The server returns results to the client as soon as possible once
        the scan has run for about <interval> milliseconds
        Args:
            interval(long): timeout of one scan round, in ms
        """
        lib.tera_scan_descriptor_set_pack_interval(self.desc, interval)
    def AddColumn(self, cf, qu):
        """
        Select a Column (ColumnFamily + Qualifier) for the scan; all other
        columns are filtered out and not returned to the client
        Args:
            cf(string): name of the wanted ColumnFamily
            qu(string): name of the wanted Qualifier
        """
        lib.tera_scan_descriptor_add_column(self.desc, cf,
                                            qu, c_uint64(len(qu)))
    def AddColumnFamily(self, cf):
        """
        Same as AddColumn, but selects a whole ColumnFamily
        Args:
            cf(string): name of the wanted ColumnFamily
        """
        lib.tera_scan_descriptor_add_column_family(self.desc, cf)
    def SetTimeRange(self, start, end):
        """
        Set the time range of the returned versions
        Note for C++ API users: the C++ version of this interface takes the
        start and end parameters in the opposite order!
        Args:
            start(long): start timestamp (inclusive),
                         Epoch (00:00:00 UTC, January 1, 1970), measured in us
            end(long): end timestamp (inclusive),
                       Epoch (00:00:00 UTC, January 1, 1970), measured in us
        """
        lib.tera_scan_descriptor_set_time_range(self.desc, start, end)
class ResultStream(object):
    """ Output stream returned by a scan operation
    """
    def __init__(self, stream):
        """ init """
        self.stream = stream
    def Destroy(self):
        """
        Destroy this result_stream and release the underlying resources;
        this object must not be used afterwards
        """
        lib.tera_result_stream_destroy(self.stream)
    def Done(self):
        """ Whether this stream has been fully consumed
        Returns:
            (bool) True if fully consumed, otherwise False.
        """
        err = c_char_p()
        return lib.tera_result_stream_done(self.stream, byref(err))
    def Next(self):
        """ Advance to the next cell
        """
        lib.tera_result_stream_next(self.stream)
    def RowName(self):
        """
        Returns:
            (string) Rowkey of the current cell
        """
        value = POINTER(c_ubyte)()
        vallen = c_uint64()
        lib.tera_result_stream_row_name(self.stream,
                                        byref(value), byref(vallen))
        return copy_string_to_user(value, long(vallen.value))
    def Family(self):
        """
        Returns:
            (string) ColumnFamily of the current cell
        """
        value = POINTER(c_ubyte)()
        vallen = c_uint64()
        lib.tera_result_stream_family(self.stream, byref(value), byref(vallen))
        return copy_string_to_user(value, long(vallen.value))
    def Qualifier(self):
        """
        Returns:
            (string) Qualifier of the current cell
        """
        value = POINTER(c_ubyte)()
        vallen = c_uint64()
        lib.tera_result_stream_qualifier(self.stream,
                                         byref(value), byref(vallen))
        return copy_string_to_user(value, long(vallen.value))
    def ColumnName(self):
        """
        Returns:
            (string) ColumnName (i.e. ColumnFamily:Qualifier) of the
            current cell
        """
        value = POINTER(c_ubyte)()
        vallen = c_uint64()
        lib.tera_result_stream_column_name(self.stream,
                                           byref(value), byref(vallen))
        return copy_string_to_user(value, long(vallen.value))
    def Value(self):
        """
        Returns:
            (string) value of the current cell
        """
        value = POINTER(c_ubyte)()
        vallen = c_uint64()
        lib.tera_result_stream_value(self.stream, byref(value), byref(vallen))
        return copy_string_to_user(value, long(vallen.value))
    def ValueInt64(self):
        """
        Returns:
            (long) value of the current cell as an int64 counter
            Calling this method on a cell that is not an int64 counter
            is undefined behavior
        """
        return lib.tera_result_stream_value_int64(self.stream)
    def Timestamp(self):
        """
        Returns:
            (long) timestamp of the current cell,
            Epoch (00:00:00 UTC, January 1, 1970), measured in us
        """
        return lib.tera_result_stream_timestamp(self.stream)
class Client(object):
    """ A Client object is used to access one tera cluster
    Usage advice: one Client per cluster is enough; to access multiple
    clusters, create multiple Clients
    """
    def __init__(self, conf_path, log_prefix):
        """
        Raises:
            TeraSdkException: failed to create the Client object
        """
        err = c_char_p()
        self.client = lib.tera_client_open(conf_path, log_prefix, byref(err))
        if self.client is None:
            raise TeraSdkException("open client failed:" + str(err.value))
    def Close(self):
        """
        Destroy this client and release the underlying resources;
        this object must not be used afterwards
        """
        lib.tera_client_close(self.client)
    def OpenTable(self, name):
        """ Open the table named <name>
        Args:
            name(string): table name
        Returns:
            (Table) pointer to the opened Table
        Raises:
            TeraSdkException: failed to open the table
        """
        err = c_char_p()
        table_ptr = lib.tera_table_open(self.client, name, byref(err))
        if table_ptr is None:
            raise TeraSdkException("open table failed:" + err.value)
        return Table(table_ptr)
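# Illustrative sketch (not part of the SDK): opening a client and a
# table. The flag-file path, log prefix and table name below are
# hypothetical.
def _example_open_table():
    client = Client("./tera.flag", "example_log_prefix")
    table = client.OpenTable("example_table")
    return client, table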
MUTATION_CALLBACK = CFUNCTYPE(None, c_void_p)
class RowMutation(object):
    """ Mutation of a single row
    None of the operations of a RowMutation (such as Put/DeleteColumn)
    take effect until Table.ApplyMutation() is called
    """
    def __init__(self, mutation):
        """ init """
        self.mutation = mutation
    def PutKV(self, value, ttl):
        """ Write (update) the value to <value>
        Args:
            value(string): value of the cell
            ttl: expiration time of the value
        """
        lib.tera_row_mutation_put_kv(self.mutation, value,
                                     c_uint64(len(value)), c_int32(ttl))
    def Put(self, cf, qu, value):
        """ Write (update) the cell of this row with
        ColumnFamily <cf> and Qualifier <qu> to <value>
        Args:
            cf(string): ColumnFamily name
            qu(string): Qualifier name
            value(string): value of the cell
        """
        lib.tera_row_mutation_put(self.mutation, cf,
                                  qu, c_uint64(len(qu)),
                                  value, c_uint64(len(value)))
    def PutWithTimestamp(self, cf, qu, timestamp, value):
        """ Write (update) the cell of this row with
        ColumnFamily <cf> and Qualifier <qu> to <value>,
        using <timestamp> as the version (timestamp)
        Args:
            cf(string): ColumnFamily name
            qu(string): Qualifier name
            timestamp(long): version number / timestamp
            value(string): value of the cell
        """
        lib.tera_row_mutation_put_with_timestamp(self.mutation, cf,
                                                 qu, c_uint64(len(qu)),
                                                 timestamp,
                                                 value, c_uint64(len(value)))
    def DeleteColumnAllVersions(self, cf, qu):
        """ Delete all versions of the cell of this row with
        ColumnFamily <cf> and Qualifier <qu>
        If multi-versioning is not used or the column stores only one
        version (the default), deleting the column with
        `DeleteColumnAllVersions` instead of `DeleteColumnWithVersion`
        is more convenient, since no timestamp has to be given as the
        version number.
        Args:
            cf(string): ColumnFamily name
            qu(string): Qualifier name
        """
        lib.tera_row_mutation_delete_column_all_versions(self.mutation, cf,
                                                         qu, c_uint64(len(qu)))
    def DeleteColumnWithVersion(self, cf, qu, ts):
        """ Delete the version with timestamp <ts> of the cell of this
        row with ColumnFamily <cf> and Qualifier <qu>
        Args:
            cf(string): ColumnFamily name
            qu(string): Qualifier name
            ts(long): timestamp (version number)
        """
        lib.tera_row_mutation_delete_column_with_version(self.mutation, cf,
                                                         qu, c_uint64(len(qu)),
                                                         ts)
    def DeleteFamily(self, cf):
        """ Delete all versions of all columns under the ColumnFamily
        Args:
            cf(string): ColumnFamily name
        """
        lib.tera_row_mutation_delete_family(self.mutation, cf)
    def DeleteRow(self):
        """ Delete the whole row
        """
        lib.tera_row_mutation_delete_row(self.mutation)
    def RowKey(self):
        """
        Returns:
            (string): rowkey of this RowMutation object,
                      e.g. usable inside a callback
        """
        value = POINTER(c_ubyte)()
        vallen = c_uint64()
        lib.tera_row_mutation_rowkey(self.mutation,
                                     byref(value), byref(vallen))
        return copy_string_to_user(value, long(vallen.value))
    def SetCallback(self, callback):
        """ Set the callback
        If this method is called, the mutation becomes asynchronous
        (Table.ApplyMutation() returns immediately); otherwise it is
        synchronous (Table.ApplyMutation() blocks until the write is done).
        Args:
            callback(MUTATION_CALLBACK): user callback; it is always
                                         invoked eventually, no matter what
        """
        lib.tera_row_mutation_set_callback(self.mutation, callback)
    def GetStatus(self):
        """
        Return the result status of this mutation
        Returns:
            (class Status) result status, telling success or failure
            and, on failure, the specific reason
        """
        return Status(lib.tera_row_mutation_get_status_code(self.mutation))
    def Destroy(self):
        """
        Destroy this mutation and release the underlying resources;
        this object must not be used afterwards
        """
        lib.tera_row_mutation_destroy(self.mutation)
    # Deprecated
    def DeleteColumn(self, cf, qu):
        """ Delete the cell of this row with
        ColumnFamily <cf> and Qualifier <qu>
        Args:
            cf(string): ColumnFamily name
            qu(string): Qualifier name
        """
        lib.tera_row_mutation_delete_column(self.mutation, cf,
                                            qu, c_uint64(len(qu)))
    # Deprecated
    def PutInt64(self, cf, qu, value):
        """ Write (update) the cell of this row with
        ColumnFamily <cf> and Qualifier <qu> to <value>
        Args:
            cf(string): ColumnFamily name
            qu(string): Qualifier name
            value(long): value of the cell
        """
        lib.tera_row_mutation_put_int64(self.mutation, cf,
                                        qu, c_uint64(len(qu)), value)
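# Illustrative sketch (not part of the SDK): a synchronous single-row
# write through RowMutation; "table" is a Table object obtained
# elsewhere, and the row key / column names are hypothetical.
def _example_apply_mutation(table):
    mutation = table.NewRowMutation("row1")
    mutation.Put("cf0", "qu0", "value")  # stage the write; nothing is sent yet
    table.ApplyMutation(mutation)        # no callback set, so this blocks
    status = mutation.GetStatus()
    if status.GetReasonNumber() != Status.OK:
        print("mutation failed: " + status.GetReasonString())
    mutation.Destroy()                   # release the underlying C object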
class Table(object):
    """ All reads, writes and deletes on a table are issued through this class
    Get a Table object via Client.OpenTable()
    """
    def __init__(self, table):
        """ init """
        self.table = table
    def Close(self):
        """
        Destroy this table and release the underlying resources;
        this object must not be used afterwards
        """
        lib.tera_table_close(self.table)
    def NewRowMutation(self, rowkey):
        """ Create a RowMutation object for <rowkey> (mutates one row)
        The operations of one RowMutation on a row (e.g. updating
        several columns) are atomic
        Args:
            rowkey(string): rowkey to mutate
        Returns:
            (class RowMutation): the RowMutation object
        """
        return RowMutation(lib.tera_row_mutation(self.table, rowkey,
                                                 c_uint64(len(rowkey))))
    def ApplyMutation(self, mutation):
        """ Apply a mutation;
        the call is asynchronous if SetCallback() was called before,
        otherwise it is synchronous
        Args:
            mutation(class RowMutation): the RowMutation object
        """
        lib.tera_table_apply_mutation(self.table, mutation.mutation)
    def NewRowReader(self, rowkey):
        """ Create a RowReader object for <rowkey> (reads one row)
        The operations of one RowReader on a row (e.g. reading several
        columns) are atomic
        Args:
            rowkey(string): rowkey to read
        Returns:
            (class RowReader): the RowReader object
        """
        return RowReader(lib.tera_row_reader(self.table, rowkey,
                                             c_uint64(len(rowkey))))
    def ApplyReader(self, reader):
        """ Apply a read;
        the call is asynchronous if SetCallback() was called before,
        otherwise it is synchronous
        Args:
            reader(class RowReader): the RowReader object
        """
        lib.tera_table_apply_reader(self.table, reader.reader)
    def IsPutFinished(self):
        """ Whether *all* asynchronous writes on this table are finished
        Returns:
            (bool) True if all are finished, otherwise False.
        """
        return lib.tera_table_is_put_finished(self.table)
    def IsGetFinished(self):
        """ Whether *all* asynchronous reads on this table are finished
        Returns:
            (bool) True if all are finished, otherwise False.
        """
        return lib.tera_table_is_get_finished(self.table)
    def BatchGet(self, row_reader_list):
        """ Batch get
        Used similarly to ApplyReader
        Args:
            row_reader_list(RowReader): list of pre-built RowReaders
        The read result of each row is stored in the corresponding
        RowReader in row_reader_list.
        If a row was read successfully (i.e. the status code is OK),
        the result can be accessed via e.g. RowReader.Value();
        otherwise the read failed and the status code tells the reason.
        See sample.py for detailed usage
        """
        num = len(row_reader_list)
        r = list()
        for i in row_reader_list:
            r.append(i.reader)
        reader_array = (c_void_p * num)(*r)
        lib.tera_table_apply_reader_batch(self.table, reader_array, num)
    def Get(self, rowkey, cf, qu, snapshot=0):
        """ Synchronously get the value of one cell
        Args:
            rowkey(string): value of the Rowkey
            cf(string): ColumnFamily name
            qu(string): Qualifier name
            snapshot(long): snapshot; users who don't care set it to 0
        Returns:
            (string) value of the cell
        Raises:
            TeraSdkException: the read failed
        """
        err = c_char_p()
        value = POINTER(c_ubyte)()
        vallen = c_uint64()
        result = lib.tera_table_get(
            self.table, rowkey, c_uint64(len(rowkey)), cf,
            qu, c_uint64(len(qu)), byref(value), byref(vallen), byref(err),
            c_uint64(snapshot)
        )
        if not result:
            raise TeraSdkException("get record failed:" + err.value)
        return copy_string_to_user(value, long(vallen.value))
    def GetInt64(self, rowkey, cf, qu, snapshot):
        """ Same as Get(), except that the cell content is returned as
        an int64 counter
        Calling this method on a cell that is not an int64 counter is
        undefined behavior
        Args:
            rowkey(string): value of the Rowkey
            cf(string): ColumnFamily name
            qu(string): Qualifier name
            snapshot(long): snapshot; users who don't care set it to 0
        Returns:
            (long) numeric value of the cell
        Raises:
            TeraSdkException: the read failed
        """
        err = c_char_p()
        value = c_int64()
        result = lib.tera_table_getint64(
            self.table, rowkey, c_uint64(len(rowkey)), cf,
            qu, c_uint64(len(qu)), byref(value), byref(err),
            c_uint64(snapshot)
        )
        if not result:
            raise TeraSdkException("get record failed:" + err.value)
        return long(value.value)
    def Put(self, rowkey, cf, qu, value):
        """ Synchronously put the value of one cell
        Args:
            rowkey(string): value of the Rowkey
            cf(string): ColumnFamily name
            qu(string): Qualifier name
            value(string): value of the cell
        Raises:
            TeraSdkException: the write failed
        """
        err = c_char_p()
        result = lib.tera_table_put(
            self.table, rowkey, c_uint64(len(rowkey)), cf,
            qu, c_uint64(len(qu)), value, c_uint64(len(value)), byref(err)
        )
        if not result:
            raise TeraSdkException("put record failed:" + err.value)
    def BatchPut(self, row_mutation_list):
        """ Batch put
        Used similarly to ApplyMutation
        Args:
            row_mutation_list(RowMutation): list of pre-built RowMutations
        The result status of each row write is stored in the
        corresponding RowMutation in row_mutation_list;
        if a write failed, the status code tells the reason.
        See sample.py for detailed usage
        """
        num = len(row_mutation_list)
        r = list()
        for i in row_mutation_list:
            r.append(i.mutation)
        mutation_array = (c_void_p * num)(*r)
        lib.tera_table_apply_mutation_batch(self.table, mutation_array, num)
    def PutInt64(self, rowkey, cf, qu, value):
        """ Same as Put(), except that the parameter value here may be
        a number (representable as int64) used as a counter
        Args:
            rowkey(string): value of the Rowkey
            cf(string): ColumnFamily name
            qu(string): Qualifier name
            value(long): numeric value of the cell, representable as int64
        Raises:
            TeraSdkException: the write failed
        """
        err = c_char_p()
        result = lib.tera_table_putint64(
            self.table, rowkey, c_uint64(len(rowkey)), cf,
            qu, c_uint64(len(qu)), value, byref(err)
        )
        if not result:
            raise TeraSdkException("put record failed:" + err.value)
    def Delete(self, rowkey, cf, qu):
        """ Synchronously delete one cell
        Args:
            rowkey(string): value of the Rowkey
            cf(string): ColumnFamily name
            qu(string): Qualifier name
        """
        lib.tera_table_delete(
            self.table, rowkey, c_uint64(len(rowkey)),
            cf, qu, c_uint64(len(qu))
        )
    def Scan(self, desc):
        """ Start a scan operation
        Args:
            desc(ScanDescriptor): descriptor of the scan operation
        Raises:
            TeraSdkException: the scan failed
        """
        err = c_char_p()
        stream = lib.tera_table_scan(
            self.table,
            desc.desc,
            byref(err)
        )
        if stream is None:
            raise TeraSdkException("scan failed:" + err.value)
        return ResultStream(stream)
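# Illustrative sketch (not part of the SDK): scanning a key range and
# iterating over the ResultStream; "table" is a Table object obtained
# elsewhere, and the key range is hypothetical.
def _example_scan(table):
    desc = ScanDescriptor("row_start")
    desc.SetEnd("row_end")
    desc.SetBufferSize(1024 * 1024)  # 1MB, see the SetBufferSize() notes
    stream = table.Scan(desc)
    while not stream.Done():
        print(stream.RowName() + " " + stream.ColumnName()
              + " " + stream.Value())
        stream.Next()
    desc.Destroy()
    stream.Destroy()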
READER_CALLBACK = CFUNCTYPE(None, c_void_p)
class RowReader(object):
    """ Provides random reads of a single row
    """
    def __init__(self, reader):
        """ init """
        self.reader = reader
    def AddColumnFamily(self, cf):
        """ Add a ColumnFamily that should be read
        By default all ColumnFamilies of the row are read
        Args:
            cf(string): ColumnFamily to read
        """
        lib.tera_row_reader_add_column_family(self.reader, cf)
    def AddColumn(self, cf, qu):
        """ Add a Column that should be read
        By default all Columns (ColumnFamily + Qualifier) of the row
        are read
        Args:
            cf(string): ColumnFamily to read
            qu(string): Qualifier to read
        """
        lib.tera_row_reader_add_column(self.reader, cf, qu, c_uint64(len(qu)))
    def SetCallback(self, callback):
        """ Set the callback
        If this method is called, the random read becomes asynchronous
        (Table.ApplyReader() returns immediately); otherwise it is
        synchronous (Table.ApplyReader() blocks until the read is done).
        Done() and Next() may be used inside the callback to iterate
        over the returned results
        Args:
            callback(READER_CALLBACK): user callback; it is always
                                       invoked eventually, no matter what
        """
        lib.tera_row_reader_set_callback(self.reader, callback)
    def SetTimestamp(self, ts):
        """ set timestamp """
        lib.tera_row_reader_set_timestamp(self.reader, ts)
    def SetTimeRange(self, start, end):
        """ set time range """
        lib.tera_row_reader_set_time_range(self.reader, start, end)
    def SetSnapshot(self, snapshot):
        """ set snapshot """
        lib.tera_row_reader_set_snapshot(self.reader, snapshot)
    def SetMaxVersions(self, versions):
        """ set max versions """
        lib.tera_row_reader_set_max_versions(self.reader, versions)
    def SetTimeout(self, timeout):
        """ set timeout """
        lib.tera_row_reader_set_timeout(self.reader, timeout)
    def Done(self):
        """ Whether the results have been fully consumed
        Returns:
            (bool) True if fully consumed, otherwise False.
        """
        return lib.tera_row_reader_done(self.reader)
    def Next(self):
        """ Advance to the next cell
        """
        lib.tera_row_reader_next(self.reader)
    def RowKey(self):
        """
        Returns:
            (string) rowkey of the current cell
        """
        value = POINTER(c_ubyte)()
        vallen = c_uint64()
        lib.tera_row_reader_rowkey(self.reader,
                                   byref(value), byref(vallen))
        return copy_string_to_user(value, long(vallen.value))
    def Value(self):
        """
        Returns:
            (string) value of the current cell
        """
        value = POINTER(c_ubyte)()
        vallen = c_uint64()
        lib.tera_row_reader_value(self.reader, byref(value), byref(vallen))
        return copy_string_to_user(value, long(vallen.value))
    def ValueInt64(self):
        """
        Returns:
            (long) value of the current cell
        """
        return long(lib.tera_row_reader_value_int64(self.reader))
    def Family(self):
        """
        Returns:
            (string) ColumnFamily of the current cell
        """
        value = POINTER(c_ubyte)()
        vallen = c_uint64()
        lib.tera_row_reader_family(self.reader, byref(value), byref(vallen))
        return copy_string_to_user(value, long(vallen.value))
    def Qualifier(self):
        """
        Returns:
            (string) Qualifier of the current cell
        """
        value = POINTER(c_ubyte)()
        vallen = c_uint64()
        lib.tera_row_reader_qualifier(self.reader, byref(value), byref(vallen))
        return copy_string_to_user(value, long(vallen.value))
    def Timestamp(self):
        """
        Returns:
            (long) timestamp of the current cell, Unix time
        """
        return lib.tera_row_reader_timestamp(self.reader)
    def GetStatus(self):
        """
        Return the result status of this RowReader read
        Returns:
            (class Status) result status, telling success or failure
            and, on failure, the specific reason
        """
        return Status(lib.tera_row_reader_get_status_code(self.reader))
    def Destroy(self):
        """
        Destroy this reader and release the underlying resources;
        this object must not be used afterwards
        """
        lib.tera_row_reader_destroy(self.reader)
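# Illustrative sketch (not part of the SDK): a synchronous single-row
# read through RowReader; "table" is a Table object obtained elsewhere,
# and the row key / column names are hypothetical.
def _example_apply_reader(table):
    reader = table.NewRowReader("row1")
    reader.AddColumn("cf0", "qu0")  # restrict the read to one column
    table.ApplyReader(reader)       # no callback set, so this blocks
    if reader.GetStatus().GetReasonNumber() == Status.OK:
        while not reader.Done():
            print(reader.Qualifier() + "=" + reader.Value())
            reader.Next()
    reader.Destroy()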
class TeraSdkException(Exception):
""" exception """
def __init__(self, reason):
""" init """
self.reason = reason
def __str__(self):
""" str """
return self.reason
####################################################
# Users do not need to care about the code below   #
####################################################
def init_function_prototype_for_scan():
""" scan """
######################
# scan result stream #
######################
lib.tera_result_stream_done.argtypes = [c_void_p,
POINTER(c_char_p)]
lib.tera_result_stream_done.restype = c_bool
lib.tera_result_stream_destroy.argtypes = [c_void_p]
lib.tera_result_stream_destroy.restype = None
lib.tera_result_stream_timestamp.argtypes = [c_void_p]
lib.tera_result_stream_timestamp.restype = c_int64
lib.tera_result_stream_column_name.argtypes = [c_void_p,
POINTER(POINTER(c_ubyte)),
POINTER(c_uint64)]
lib.tera_result_stream_column_name.restype = None
lib.tera_result_stream_family.argtypes = [c_void_p,
POINTER(POINTER(c_ubyte)),
POINTER(c_uint64)]
lib.tera_result_stream_family.restype = None
lib.tera_result_stream_next.argtypes = [c_void_p]
lib.tera_result_stream_next.restype = None
lib.tera_result_stream_qualifier.argtypes = [c_void_p,
POINTER(POINTER(c_ubyte)),
POINTER(c_uint64)]
lib.tera_result_stream_qualifier.restype = None
lib.tera_result_stream_row_name.argtypes = [c_void_p,
POINTER(POINTER(c_ubyte)),
POINTER(c_uint64)]
lib.tera_result_stream_row_name.restype = None
lib.tera_result_stream_value.argtypes = [c_void_p,
POINTER(POINTER(c_ubyte)),
POINTER(c_uint64)]
lib.tera_result_stream_value.restype = None
lib.tera_result_stream_value_int64.argtypes = [c_void_p]
lib.tera_result_stream_value_int64.restype = c_int64
###################
# scan descriptor #
###################
lib.tera_scan_descriptor.argtypes = [c_char_p, c_uint64]
lib.tera_scan_descriptor.restype = c_void_p
lib.tera_scan_descriptor_destroy.argtypes = [c_void_p]
lib.tera_scan_descriptor_destroy.restype = None
lib.tera_scan_descriptor_add_column.argtypes = [c_void_p, c_char_p,
c_char_p, c_uint64]
lib.tera_scan_descriptor_add_column.restype = None
lib.tera_scan_descriptor_add_column_family.argtypes = [c_void_p, c_char_p]
lib.tera_scan_descriptor_add_column_family.restype = None
lib.tera_scan_descriptor_set_buffer_size.argtypes = [c_void_p, c_int64]
lib.tera_scan_descriptor_set_buffer_size.restype = None
lib.tera_scan_descriptor_set_end.argtypes = [c_void_p, c_char_p, c_uint64]
lib.tera_scan_descriptor_set_end.restype = None
lib.tera_scan_descriptor_set_pack_interval.argtypes = [c_char_p, c_int64]
lib.tera_scan_descriptor_set_pack_interval.restype = None
lib.tera_scan_descriptor_set_max_versions.argtypes = [c_void_p, c_int32]
lib.tera_scan_descriptor_set_max_versions.restype = None
lib.tera_scan_descriptor_set_snapshot.argtypes = [c_void_p, c_uint64]
lib.tera_scan_descriptor_set_snapshot.restype = None
lib.tera_scan_descriptor_set_time_range.argtypes = [c_void_p,
c_int64, c_int64]
lib.tera_scan_descriptor_set_time_range.restype = None
def init_function_prototype_for_client():
""" client """
lib.tera_client_open.argtypes = [c_char_p, c_char_p, POINTER(c_char_p)]
lib.tera_client_open.restype = c_void_p
lib.tera_client_close.argtypes = [c_void_p]
lib.tera_client_close.restype = None
lib.tera_table_open.argtypes = [c_void_p, c_char_p, POINTER(c_char_p)]
lib.tera_table_open.restype = c_void_p
lib.tera_table_close.argtypes = [c_void_p]
lib.tera_table_close.restype = None
def init_function_prototype_for_table():
""" table """
lib.tera_table_get.argtypes = [c_void_p, c_char_p, c_uint64,
c_char_p, c_char_p, c_uint64,
POINTER(POINTER(c_ubyte)),
POINTER(c_uint64),
POINTER(c_char_p), c_uint64]
lib.tera_table_get.restype = c_bool
lib.tera_table_getint64.argtypes = [c_void_p, c_char_p, c_uint64,
c_char_p, c_char_p, c_uint64,
POINTER(c_int64), POINTER(c_char_p),
c_uint64]
lib.tera_table_getint64.restype = c_bool
lib.tera_table_put.argtypes = [c_void_p, c_char_p, c_uint64, c_char_p,
c_char_p, c_uint64, c_char_p, c_uint64,
POINTER(c_char_p)]
lib.tera_table_put.restype = c_bool
lib.tera_table_put_kv.argtypes = [c_void_p, c_char_p, c_uint64,
c_char_p, c_uint64, c_int32,
POINTER(c_char_p)]
lib.tera_table_put_kv.restype = c_bool
lib.tera_table_putint64.argtypes = [c_void_p, c_char_p, c_uint64, c_char_p,
c_char_p, c_uint64, c_int64,
POINTER(c_char_p)]
lib.tera_table_putint64.restype = c_bool
lib.tera_table_scan.argtypes = [c_void_p, c_void_p, POINTER(c_char_p)]
lib.tera_table_scan.restype = c_void_p
lib.tera_table_delete.argtypes = [c_void_p, c_char_p, c_uint64,
c_char_p, c_char_p, c_uint64]
lib.tera_table_delete.restype = c_bool
lib.tera_table_apply_mutation.argtypes = [c_void_p, c_void_p]
lib.tera_table_apply_mutation.restype = None
lib.tera_table_apply_mutation_batch.argtypes = [c_void_p,
c_void_p,
c_int64]
lib.tera_table_apply_mutation_batch.restype = None
lib.tera_table_is_put_finished.argtypes = [c_void_p]
lib.tera_table_is_put_finished.restype = c_bool
lib.tera_table_apply_reader.argtypes = [c_void_p, c_void_p]
lib.tera_table_apply_reader.restype = None
lib.tera_table_apply_reader_batch.argtypes = [c_void_p, c_void_p, c_int64]
lib.tera_table_apply_reader_batch.restype = None
lib.tera_table_is_get_finished.argtypes = [c_void_p]
lib.tera_table_is_get_finished.restype = c_bool
lib.tera_row_mutation.argtypes = [c_void_p, c_char_p, c_uint64]
lib.tera_row_mutation.restype = c_void_p
lib.tera_row_mutation_get_status_code.argtypes = [c_void_p]
lib.tera_row_mutation_get_status_code.restype = c_int64
lib.tera_row_mutation_destroy.argtypes = [c_void_p]
lib.tera_row_mutation_destroy.restype = None
def init_function_prototype_for_row_mutation():
""" row_mutation"""
lib.tera_row_mutation_put_kv.argtypes = [c_void_p, c_char_p,
c_uint64, c_int32]
lib.tera_row_mutation_put_kv.restype = None
lib.tera_row_mutation_put.argtypes = [c_void_p, c_char_p,
c_char_p, c_uint64,
c_char_p, c_uint64]
lib.tera_row_mutation_put.restype = None
lib.tera_row_mutation_put_with_timestamp.argtypes = [c_void_p, c_char_p,
c_char_p, c_uint64,
c_int64,
c_void_p, c_uint64]
lib.tera_row_mutation_put_with_timestamp.restype = None
lib.tera_row_mutation_put_int64.argtypes = [c_void_p, c_char_p,
c_char_p, c_uint64, c_int64]
lib.tera_row_mutation_put_int64.restype = None
lib.tera_row_mutation_set_callback.argtypes = [c_void_p, MUTATION_CALLBACK]
lib.tera_row_mutation_set_callback.restype = None
lib.tera_row_mutation_delete_column.argtypes = [c_void_p, c_char_p,
c_char_p, c_uint64]
lib.tera_row_mutation_delete_column.restype = None
lib.tera_row_mutation_delete_family.argtypes = [c_void_p, c_char_p]
lib.tera_row_mutation_delete_family.restype = None
lib.tera_row_mutation_delete_row.argtypes = [c_void_p]
lib.tera_row_mutation_delete_row.restype = None
lib.tera_row_mutation_rowkey.argtypes = [c_void_p,
POINTER(POINTER(c_ubyte)),
POINTER(c_uint64)]
lib.tera_row_mutation_rowkey.restype = None
lib.tera_row_mutation_delete_column_all_versions.argtypes =\
[c_void_p, c_char_p, c_char_p, c_uint64]
lib.tera_row_mutation_delete_column_all_versions.restype = None
lib.tera_row_mutation_delete_column_with_version.argtypes =\
[c_void_p, c_char_p, c_char_p, c_uint64, c_int64]
lib.tera_row_mutation_delete_column_with_version.restype = None
def init_function_prototype_for_row_reader():
""" row_reader """
lib.tera_row_reader.argtypes = [c_void_p, c_char_p, c_uint64]
lib.tera_row_reader.restype = c_void_p
lib.tera_row_reader_add_column_family.argtypes = [c_void_p, c_char_p]
lib.tera_row_reader_add_column_family.restype = None
lib.tera_row_reader_add_column.argtypes = [c_void_p, c_char_p,
c_char_p, c_uint64]
lib.tera_row_reader_add_column.restype = None
lib.tera_row_reader_set_callback.argtypes = [c_void_p, READER_CALLBACK]
lib.tera_row_reader_set_callback.restype = None
lib.tera_row_reader_set_timestamp.argtypes = [c_void_p, c_int64]
lib.tera_row_reader_set_timestamp.restype = None
lib.tera_row_reader_set_time_range.argtypes = [c_void_p, c_int64, c_int64]
lib.tera_row_reader_set_time_range.restype = None
lib.tera_row_reader_set_snapshot.argtypes = [c_void_p, c_uint64]
lib.tera_row_reader_set_snapshot.restype = None
lib.tera_row_reader_set_max_versions.argtypes = [c_void_p, c_uint32]
lib.tera_row_reader_set_max_versions.restype = None
lib.tera_row_reader_set_timeout.argtypes = [c_void_p, c_int64]
lib.tera_row_reader_set_timeout.restype = None
lib.tera_row_reader_done.argtypes = [c_void_p]
lib.tera_row_reader_done.restype = c_bool
lib.tera_row_reader_next.argtypes = [c_void_p]
lib.tera_row_reader_next.restype = None
lib.tera_row_reader_rowkey.argtypes = [c_void_p,
POINTER(POINTER(c_ubyte)),
POINTER(c_uint64)]
lib.tera_row_reader_rowkey.restype = None
lib.tera_row_reader_value.argtypes = [c_void_p,
POINTER(POINTER(c_ubyte)),
POINTER(c_uint64)]
lib.tera_row_reader_value.restype = None
lib.tera_row_reader_value_int64.argtypes = [c_void_p]
lib.tera_row_reader_value_int64.restype = c_int64
lib.tera_row_reader_family.argtypes = [c_void_p,<|fim▁hole|> lib.tera_row_reader_family.restype = None
lib.tera_row_reader_qualifier.argtypes = [c_void_p,
POINTER(POINTER(c_ubyte)),
POINTER(c_uint64)]
lib.tera_row_reader_qualifier.restype = None
lib.tera_row_reader_timestamp.argtypes = [c_void_p]
lib.tera_row_reader_timestamp.restype = c_int64
lib.tera_row_reader_get_status_code.argtypes = [c_void_p]
lib.tera_row_reader_get_status_code.restype = c_int64
lib.tera_row_reader_destroy.argtypes = [c_void_p]
lib.tera_row_reader_destroy.restype = None
def init_function_prototype():
""" init function prototype """
init_function_prototype_for_client()
init_function_prototype_for_table()
init_function_prototype_for_row_reader()
init_function_prototype_for_row_mutation()
init_function_prototype_for_scan()
libc.free.argtypes = [c_void_p]
libc.free.restype = None
def copy_string_to_user(value, size):
""" copy string """
result = string_at(value, size)
libc.free(value)
return result
try:
lib = cdll.LoadLibrary('./libtera_c.so')
except OSError:
lib = cdll.LoadLibrary('libtera_c.so')
libc = cdll.LoadLibrary('libc.so.6')
init_function_prototype()<|fim▁end|> | POINTER(POINTER(c_ubyte)),
POINTER(c_uint64)] |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
##############################################################################
#<|fim▁hole|>#
# ThinkOpen Solutions Brasil
# Copyright (C) Thinkopen Solutions <http://www.tkobr.com>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import purchase<|fim▁end|> | # OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). |
<|file_name|>XMLWriter.java<|end_file_name|><|fim▁begin|>/*
* #%L
* episodesmanager-services
*
* $Id$
* $HeadURL$
* %%
* Copyright (C) 2009 - 2010 Jean Couteau
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-3.0.html>.
* #L%
*/
package org.kootox.episodesmanager.services.importExport;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.kootox.episodesmanager.entities.Episode;
import org.kootox.episodesmanager.entities.Season;
import org.kootox.episodesmanager.entities.Show;
import org.kootox.episodesmanager.services.EpisodesManagerService;
import org.kootox.episodesmanager.services.ServiceContext;
import org.kootox.episodesmanager.services.shows.EpisodesService;
import org.kootox.episodesmanager.services.shows.SeasonsService;
import org.kootox.episodesmanager.services.shows.ShowsService;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.List;
/**
*
* Class that can export different elements of database into XML
*
* User: couteau
* Date: 12 mars 2010
*/
public class XMLWriter implements EpisodesManagerService {
private static Log log = LogFactory.getLog(XMLWriter.class);
protected ServiceContext serviceContext;
public XMLWriter(ServiceContext serviceContext) {
this.serviceContext = serviceContext;
}
public void setServiceContext(ServiceContext serviceContext) {
this.serviceContext = serviceContext;
}
/**
* Export an episode into XML
*
* @param episode the episode to export into XML
* @return the episode XML code
*/
private String exportEpisode(Episode episode) {
StringBuilder builder = new StringBuilder();
builder.append(" <episode>\n");
//title
builder.append(" <title>");
builder.append(escape(episode.getTitle()));
builder.append("</title>\n");
//airing date
builder.append(" <airdate>");
builder.append(episode.getAiringDate());
builder.append("</airdate>\n");
//acquired
builder.append(" <acquired>");
builder.append(episode.getAcquired());
builder.append("</acquired>\n");
//viewed
builder.append(" <watched>");
builder.append(episode.getViewed());
builder.append("</watched>\n");
//number
builder.append(" <number>");
builder.append(episode.getNumber());
builder.append("</number>\n");
//summary
builder.append(" <summary>");
builder.append(escape(episode.getSummary()));
builder.append("</summary>\n");
builder.append(" </episode>\n");
return builder.toString() ;
}
private String exportSeason(Season season){
EpisodesService episodesService = serviceContext.newService(EpisodesService.class);
StringBuilder builder = new StringBuilder();
builder.append(" <season>\n");
//number
builder.append(" <number>");
builder.append(season.getNumber());
builder.append("</number>\n");
builder.append(" <episodes>\n");
List<Episode> episodes = episodesService.getAllEpisodes(season);
for(Episode episode:episodes){
builder.append(exportEpisode(episode));
}
builder.append(" </episodes>\n");
builder.append(" </season>\n");
return builder.toString();
}
private String exportShow(Show show){
SeasonsService service = serviceContext.newService(SeasonsService.class);
StringBuilder builder = new StringBuilder();
builder.append("<show>\n");
//title
builder.append(" <name>");
builder.append(escape(show.getTitle()));
builder.append("</name>\n");
//tvrage id
builder.append(" <tvrageID>");
builder.append(show.getTvrageId());
builder.append("</tvrageID>\n");
//over
builder.append(" <status>");
builder.append(show.getOver());
builder.append("</status>\n");
//origin country
builder.append(" <origin_country>");
builder.append(escape(show.getOriginCountry()));
builder.append("</origin_country>\n");
//runtime
builder.append(" <runtime>");
builder.append(show.getRuntime());
builder.append("</runtime>\n");
//network
builder.append(" <network>");
builder.append(escape(show.getNetwork()));
builder.append("</network>\n");
//airtime
builder.append(" <airtime>");
builder.append(show.getAirtime());
builder.append("</airtime>\n");
//timezone
builder.append(" <timezone>");
builder.append(escape(show.getTimeZone()));
builder.append("</timezone>\n");
builder.append(" <seasons>\n");
List<Season> seasons = service.getAllSeasons(show);
for(Season season:seasons){
builder.append(exportSeason(season));
}
builder.append(" </seasons>\n");
builder.append("</show>\n");
return builder.toString();
}
public void exportToXML(File file) {
ShowsService service = serviceContext.newService(ShowsService.class);
OutputStreamWriter xmlFile = null;
try {
xmlFile = new FileWriter(file);
List<Show> shows = service.getAllShows();
StringBuilder builder = new StringBuilder();
builder.append("<?xml version=\"1.0\"?>\n<shows>\n");
for(Show show:shows) {
builder.append(exportShow(show));
}
builder.append("</shows>\n");
xmlFile.write(builder.toString());
xmlFile.close();
} catch (Exception ex) {
log.error("an error occurred", ex);
} finally {
try {
xmlFile.close();<|fim▁hole|> log.error("Error trying to close file");
} catch (NullPointerException eee) {
log.debug("Stream was not existing");
}
}
}
private String escape(String toEscape){
return StringEscapeUtils.escapeXml(toEscape);
}
}<|fim▁end|> | } catch (IOException eee) { |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>var UI = require('ui');
var ajax = require('ajax');
var Vector2 = require('vector2');
var webserver = decodeURIComponent(localStorage.getItem('webserver') ? localStorage.getItem('webserver') : 'webserver');
var qvserver = localStorage.getItem('qvserver') ? localStorage.getItem('qvserver') : 'qvserver';
var files = localStorage.getItem('files') ? localStorage.getItem('files') : 'files';
// Show splash screen while waiting for data
var splashWindow = new UI.Window();
// Text element to inform user
var text = new UI.Text({
position: new Vector2(0, 0),
size: new Vector2(144, 168),
text:'Downloading data...',
font:'GOTHIC_28_BOLD',
color:'black',
textOverflow:'wrap',
textAlign:'center',
backgroundColor:'white'
});
// Add to splashWindow and show
splashWindow.add(text);
splashWindow.show();
// Make request to the nodejs app //, webserver: test
ajax(
{
url: webserver + '/getdata',
method: 'post',
data: {server: qvserver, files: files, webserver: webserver},
crossDomain: true
},
function(data) {
try {
var items = [];
data = JSON.parse(data);<|fim▁hole|> var areas = data.data;
for(var i = 0; i < areas.length; i++) {
items.push({
title:areas[i].name //,subtitle:time
});
}
var resultsMenu = new UI.Menu({
sections: [{
title: 'Areas',
items: items
}]
});
resultsMenu.on('select', function(e) {
var innerData = data.data[e.itemIndex].areadata;
var title = data.data[e.itemIndex].name;
var details = [];
for(var d = 0; d < innerData.length; d++) {
details.push({
title: innerData[d].category,
subtitle: innerData[d].value
});
}
var detailsMenu = new UI.Menu({
sections: [{
title: title,
items: details}]
});
detailsMenu.show();
});
// Show the Menu, hide the splash
resultsMenu.show();
splashWindow.hide();
} catch (err) {
var text = new UI.Text({
position: new Vector2(0, 0),
size: new Vector2(144, 168),
text:'Error parsing the data',
font:'GOTHIC_28_BOLD',
color:'black',
textOverflow:'wrap',
textAlign:'center',
backgroundColor:'white'
});
// Add to splashWindow and show
splashWindow.add(text);
splashWindow.show();
}
},
function(error) {
var text = new UI.Text({
position: new Vector2(0, 0),
size: new Vector2(144, 168),
text:'Download failed :(',
font:'GOTHIC_28_BOLD',
color:'black',
textOverflow:'wrap',
textAlign:'center',
backgroundColor:'white'
});
// Add to splashWindow and show
splashWindow.add(text);
splashWindow.show();
});
Pebble.addEventListener('showConfiguration', function(e) {
console.log("Showing configuration");
Pebble.openURL('https://googledrive.com/host/0BxjGsOE_3VoOU2RPQ3BjTlBfX0E');
});
Pebble.addEventListener('webviewclosed', function(e) {
var options = JSON.parse(decodeURIComponent(e.response));
qvserver = encodeURIComponent(options.qvserver);
webserver = encodeURIComponent(options.webserver);
files = encodeURIComponent(options.files);
if(qvserver == 'undefined') {
qvserver = 'http://localhost:4799/QMS/Service';
}
localStorage.setItem('qvserver', qvserver);
localStorage.setItem('webserver', webserver);
localStorage.setItem('files', files);
//console.log("Configuration window returned: ", JSON.stringify(options));
});
//Send a string to Pebble
var dict = {
QVSERVER : qvserver,
WEBSERVER: webserver,
FILES: files
};
Pebble.sendAppMessage(dict, function(e) {
console.log("Send successful.");
}, function(e) {
console.log("Send failed!");
});<|fim▁end|> | |
<|file_name|>Platform.py<|end_file_name|><|fim▁begin|>"""
Copyright 2008-2016 Free Software Foundation, Inc.
This file is part of GNU Radio
GNU Radio Companion is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
GNU Radio Companion is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
import os
import sys
from . import ParseXML, Messages, Constants
from .Config import Config
from .Element import Element
from .generator import Generator
from .FlowGraph import FlowGraph
from .Connection import Connection
from .Block import Block
from .Port import Port
from .Param import Param
from .utils import odict, extract_docs
class Platform(Element):
Config = Config
Generator = Generator
FlowGraph = FlowGraph
Connection = Connection
Block = Block
Port = Port
Param = Param
is_platform = True
def __init__(self, *args, **kwargs):
""" Make a platform for GNU Radio """
Element.__init__(self)
self.config = self.Config(*args, **kwargs)
self.block_docstrings = {}
self.block_docstrings_loaded_callback = lambda: None # dummy to be replaced by BlockTreeWindow
self._docstring_extractor = extract_docs.SubprocessLoader(
callback_query_result=self._save_docstring_extraction_result,
callback_finished=lambda: self.block_docstrings_loaded_callback()
)
# Create a dummy flow graph for the blocks
self._flow_graph = Element(self)
self._flow_graph.connections = []
self.blocks = odict()
self._blocks_n = odict()
self._block_categories = {}
self.domains = {}
self.connection_templates = {}
self._auto_hier_block_generate_chain = set()
self.build_block_library()
def __str__(self):
return 'Platform - {}({})'.format(self.config.key, self.config.name)
@staticmethod
def find_file_in_paths(filename, paths, cwd):
"""Checks the provided paths relative to cwd for a certain filename"""
if not os.path.isdir(cwd):
cwd = os.path.dirname(cwd)
if isinstance(paths, str):
paths = (p for p in paths.split(':') if p)
for path in paths:
path = os.path.expanduser(path)
if not os.path.isabs(path):
path = os.path.normpath(os.path.join(cwd, path))
file_path = os.path.join(path, filename)
if os.path.exists(os.path.normpath(file_path)):
return file_path
def load_and_generate_flow_graph(self, file_path):
"""Loads a flow graph from file and generates it"""
Messages.set_indent(len(self._auto_hier_block_generate_chain))
Messages.send('>>> Loading: %r\n' % file_path)
if file_path in self._auto_hier_block_generate_chain:
Messages.send(' >>> Warning: cyclic hier_block dependency\n')
return False
self._auto_hier_block_generate_chain.add(file_path)
try:
flow_graph = self.get_new_flow_graph()
flow_graph.grc_file_path = file_path
# Other, nested hier_blocks might be auto-loaded here
flow_graph.import_data(self.parse_flow_graph(file_path))
flow_graph.rewrite()
flow_graph.validate()
if not flow_graph.is_valid():
raise Exception('Flowgraph invalid')
if not flow_graph.get_option('generate_options').startswith('hb'):
raise Exception('Not a hier block')
except Exception as e:
Messages.send('>>> Load Error: {}: {}\n'.format(file_path, str(e)))
return False
finally:
self._auto_hier_block_generate_chain.discard(file_path)
Messages.set_indent(len(self._auto_hier_block_generate_chain))
try:
Messages.send('>>> Generating: {}\n'.format(file_path))
generator = self.Generator(flow_graph, file_path)
generator.write()
except Exception as e:
Messages.send('>>> Generate Error: {}: {}\n'.format(file_path, str(e)))
return False
self.load_block_xml(generator.get_file_path_xml())
return True
def build_block_library(self):
"""load the blocks and block tree from the search paths"""
self._docstring_extractor.start()
# Reset
self.blocks.clear()
self._blocks_n.clear()
self._block_categories.clear()
self.domains.clear()
self.connection_templates.clear()
ParseXML.xml_failures.clear()
# Try to parse and load blocks
for xml_file in self.iter_xml_files():
try:
if xml_file.endswith("block_tree.xml"):
self.load_category_tree_xml(xml_file)
elif xml_file.endswith('domain.xml'):
self.load_domain_xml(xml_file)
else:
self.load_block_xml(xml_file)
except ParseXML.XMLSyntaxError as e:
# print >> sys.stderr, 'Warning: Block validation failed:\n\t%s\n\tIgnoring: %s' % (e, xml_file)
pass
except Exception as e:
print >> sys.stderr, 'Warning: XML parsing failed:\n\t%r\n\tIgnoring: %s' % (e, xml_file)
# Add blocks to block tree
for key, block in self.blocks.iteritems():
category = self._block_categories.get(key, block.category)
# Blocks with empty categories are hidden
if not category:
continue
root = category[0]
if root.startswith('[') and root.endswith(']'):
category[0] = root[1:-1]
else:
category.insert(0, Constants.DEFAULT_BLOCK_MODULE_NAME)
block.category = category
self._docstring_extractor.finish()
# self._docstring_extractor.wait()
def iter_xml_files(self):
"""Iterator for block descriptions and category trees"""
for block_path in self.config.block_paths:
if os.path.isfile(block_path):
yield block_path
elif os.path.isdir(block_path):
for dirpath, dirnames, filenames in os.walk(block_path):
for filename in sorted(filter(lambda f: f.endswith('.xml'), filenames)):
yield os.path.join(dirpath, filename)
def load_block_xml(self, xml_file):
"""Load block description from xml file"""
# Validate and import
ParseXML.validate_dtd(xml_file, Constants.BLOCK_DTD)
n = ParseXML.from_file(xml_file).find('block')
n['block_wrapper_path'] = xml_file # inject block wrapper path
# Get block instance and add it to the list of blocks
block = self.Block(self._flow_graph, n)
key = block.get_key()
if key in self.blocks:
print >> sys.stderr, 'Warning: Block with key "{}" already exists.\n\tIgnoring: {}'.format(key, xml_file)
else: # Store the block
self.blocks[key] = block
self._blocks_n[key] = n
self._docstring_extractor.query(
block.get_key(),
block.get_imports(raw=True),
block.get_make(raw=True)
)
def load_category_tree_xml(self, xml_file):
"""Validate and parse category tree file and add it to list"""
ParseXML.validate_dtd(xml_file, Constants.BLOCK_TREE_DTD)
xml = ParseXML.from_file(xml_file)
path = []
def load_category(cat_n):
path.append(cat_n.find('name').strip())
for block_key in cat_n.findall('block'):
if block_key not in self._block_categories:
self._block_categories[block_key] = list(path)
for sub_cat_n in cat_n.findall('cat'):
load_category(sub_cat_n)
path.pop()
load_category(xml.find('cat'))
def load_domain_xml(self, xml_file):
"""Load a domain properties and connection templates from XML"""
ParseXML.validate_dtd(xml_file, Constants.DOMAIN_DTD)
n = ParseXML.from_file(xml_file).find('domain')
key = n.find('key')
if not key:
print >> sys.stderr, 'Warning: Domain with empty key.\n\tIgnoring: {}'.format(xml_file)
return
if key in self.domains: # test against repeated keys
print >> sys.stderr, 'Warning: Domain with key "{}" already exists.\n\tIgnoring: {}'.format(key, xml_file)
return
#to_bool = lambda s, d: d if s is None else s.lower() not in ('false', 'off', '0', '')
def to_bool(s, d):
if s is not None:
return s.lower() not in ('false', 'off', '0', '')
return d
color = n.find('color') or ''
try:
import gtk # ugly but handy
gtk.gdk.color_parse(color)
except (ValueError, ImportError):
if color: # no color is okay, default set in GUI
print >> sys.stderr, 'Warning: Can\'t parse color code "{}" for domain "{}" '.format(color, key)
color = None
self.domains[key] = dict(
name=n.find('name') or key,
multiple_sinks=to_bool(n.find('multiple_sinks'), True),
multiple_sources=to_bool(n.find('multiple_sources'), False),
color=color
)
for connection_n in n.findall('connection'):
key = (connection_n.find('source_domain'), connection_n.find('sink_domain'))
if not all(key):
print >> sys.stderr, 'Warning: Empty domain key(s) in connection template.\n\t{}'.format(xml_file)
elif key in self.connection_templates:
print >> sys.stderr, 'Warning: Connection template "{}" already exists.\n\t{}'.format(key, xml_file)
else:
self.connection_templates[key] = connection_n.find('make') or ''
def _save_docstring_extraction_result(self, key, docstrings):
docs = {}
for match, docstring in docstrings.iteritems():
if not docstring or match.endswith('_sptr'):
continue
docstring = docstring.replace('\n\n', '\n').strip()
docs[match] = docstring
self.block_docstrings[key] = docs
##############################################
# Access
##############################################
def parse_flow_graph(self, flow_graph_file):
"""
Parse a saved flow graph file.
Ensure that the file exists, and passes the dtd check.
Args:
flow_graph_file: the flow graph file
Returns:
nested data
@throws exception if the validation fails
"""
flow_graph_file = flow_graph_file or self.config.default_flow_graph
open(flow_graph_file, 'r').close() # Test open
ParseXML.validate_dtd(flow_graph_file, Constants.FLOW_GRAPH_DTD)
return ParseXML.from_file(flow_graph_file)
def get_new_flow_graph(self):
return self.FlowGraph(platform=self)
def get_blocks(self):
return self.blocks.values()
def get_new_block(self, flow_graph, key):
return self.Block(flow_graph, n=self._blocks_n[key])<|fim▁hole|> return [(name, color) for name, key, sizeof, color in Constants.CORE_TYPES]<|fim▁end|> |
def get_colors(self): |
<|file_name|>instructions.rs<|end_file_name|><|fim▁begin|>use crate::ast::AST;
pub use crate::values::ArgumentsType;
pub type Bytecode = Vec<Instruction>;
#[derive(Debug, PartialEq, Clone)]
pub enum Instruction {
Apply,
Argument,
Assignment(String),
Close {
args: Vec<String>,
args_type: ArgumentsType,
body: Bytecode,
},
Frame,<|fim▁hole|> LoadReference(String),
LoadUnspecified,
JumpOnFalse(usize),
JumpOnTrue(usize),
Jump(usize),
Replace(String),
}<|fim▁end|> | LoadConstant(AST), |
<|file_name|>delete_data_for_sample.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# noinspection PyUnresolvedReferences
import init_django
from django.db import transaction
from common.utils import utcnow
from main.archive import DataArchiver
from main.delete import DataDeleter
from main.models import Ranking
from main.purge import purge_player_data
from tasks.base import Command
class Main(Command):
def __init__(self):
super().__init__("Delete ranking and all cache data and ranking data linked to it, used for broken "
"rankings.",
pid_file=True, stoppable=False)
self.add_argument('--delete', dest="delete", action='store_true', default=False,
help="If this is not set, deletes a dry run will be performed instead.")
self.add_argument('--keep-rankings', '-r', dest="keep_rankings", default=None,
help="Comma separated list of rankings to keep.")
def run(self, args, logger):
keep_ids = (int(id) for id in args.keep_rankings.split(","))
with transaction.atomic():
remove_ids = [r.id for r in Ranking.objects.exclude(id__in=keep_ids)]
data_deleter = DataDeleter(dry_run=not args.delete)
data_archiver = DataArchiver(utcnow(), remove=True)
# Remove rankings.
for remove_id in remove_ids:
data_deleter.delete_ranking(remove_id)
# Archive all rankings except the last.
if args.delete:
rankings = Ranking.objects.order_by("-id")[1:]
for ranking in rankings:
logger.info(f"archiving ranking {ranking.id}")
data_archiver.archive_ranking(ranking, self.check_stop)
else:
logger.info("DRY RUN no archiving of rankings")
# Delete ladders that are no longer needed.
keep_season_ids = {r.season_id for r in Ranking.objects.all()}
data_deleter.delete_ladders(tuple(keep_season_ids))
# Delete cache data that is unused.
data_deleter.agressive_delete_cache_data()
# Purge players and teams.
if args.delete:
purge_player_data(check_stop=self.check_stop)
else:
logger.info("DRY RUN no purge player data")
<|fim▁hole|>if __name__ == '__main__':
Main()()<|fim▁end|> | return 0
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.encoding import (
force_text, python_2_unicode_compatible)
from django.utils.translation import ugettext_lazy as _
from select_multiple_field.models import SelectMultipleField<|fim▁hole|>
@python_2_unicode_compatible
class ChickenBalls(models.Model):
"""ChickenBalls is used for South migration testing"""
SUICIDE = 's'
HOT = 'h'
HOME_STYLE = 'H'
CAJUN = 'c'
JERK = 'j'
GATOR = 'g'
FLAVOUR_CHOICES = (
(_('Hot & Spicy'), (
(SUICIDE, _('Suicide hot')),
(HOT, _('Hot hot sauce')),
(CAJUN, _('Cajun sauce')),
(JERK, _('Jerk sauce')))),
(_('Traditional'), (
(HOME_STYLE, _('Homestyle')),
(GATOR, _('Gator flavour')))),
)
flavour = SelectMultipleField(
blank=True,
include_blank=False,
max_length=5,
max_choices=2,
choices=FLAVOUR_CHOICES
)
RANCH = 'r'
HONEY_MUSTARD = 'h'
BBQ = 'b'
DIP_CHOICES = (
(RANCH, _('Ranch')),
(HONEY_MUSTARD, _('Honey mustard')),
(BBQ, _('BBQ')),
)
dips = SelectMultipleField(
blank=True,
default='',
include_blank=False,
max_length=6,
max_choices=3,
choices=DIP_CHOICES
)
def __str__(self):
return "pk=%s" % force_text(self.pk)
def get_absolute_url(self):
return reverse('ftw:detail', args=[self.pk])<|fim▁end|> | |
<|file_name|>issue-43106-gating-of-builtin-attrs.rs<|end_file_name|><|fim▁begin|>//~ NOTE not a function
//~^ NOTE not a foreign function or static
//~^^ NOTE not a function or static
// This test enumerates as many compiler-builtin ungated attributes as
// possible (that is, all the mutually compatible ones), and checks
// that we get "expected" (*) warnings for each in the various weird
// places that users might put them in the syntax.
//
// (*): The word "expected" is in quotes above because the cases where
// warnings are and are not emitted might not match a user's intuition
// nor the rustc developers' intent. I am really just trying to
// capture today's behavior in a test, not so that it become enshrined
// as the absolute behavior going forward, but rather so that we do
// not change the behavior in the future without even being *aware* of
// the change when it happens.
//
// At the time of authoring, the attributes here are listed in the
// order that they occur in `librustc_feature`.
//
// Any builtin attributes that:
//
// - are not stable, or
//
// - could not be included here covering the same cases as the other
// attributes without raising an *error* from rustc (note though
// that warnings are of course expected)
//
// have their own test case referenced by filename in an inline
// comment.
//
// The test feeds numeric inputs to each attribute that accepts them
// without error. We do this for two reasons: (1.) to exercise how
// inputs are handled by each, and (2.) to ease searching for related
// occurrences in the source text.
// check-pass
#![feature(test)]
#![warn(unused_attributes, unknown_lints)]
//~^ NOTE the lint level is defined here
//~| NOTE the lint level is defined here
// UNGATED WHITE-LISTED BUILT-IN ATTRIBUTES
#![warn(x5400)] //~ WARN unknown lint: `x5400`
#![allow(x5300)] //~ WARN unknown lint: `x5300`
#![forbid(x5200)] //~ WARN unknown lint: `x5200`
#![deny(x5100)] //~ WARN unknown lint: `x5100`
#![macro_use] // (allowed if no argument; see issue-43160-gating-of-macro_use.rs)
// skipping testing of cfg
// skipping testing of cfg_attr
#![should_panic] //~ WARN `#[should_panic]` only has an effect
#![ignore] //~ WARN `#[ignore]` only has an effect on functions
#![no_implicit_prelude]
#![reexport_test_harness_main = "2900"]
// see gated-link-args.rs
// see issue-43106-gating-of-macro_escape.rs for crate-level; but non crate-level is below at "2700"
// (cannot easily test gating of crate-level #[no_std]; but non crate-level is below at "2600")
#![proc_macro_derive()] //~ WARN `#[proc_macro_derive]` only has an effect
#![doc = "2400"]
#![cold] //~ WARN attribute should be applied to a function
//~^ WARN
// see issue-43106-gating-of-builtin-attrs-error.rs
#![link()]
#![link_name = "1900"]
//~^ WARN attribute should be applied to a foreign function
//~^^ WARN this was previously accepted by the compiler
#![link_section = "1800"]
//~^ WARN attribute should be applied to a function or static
//~^^ WARN this was previously accepted by the compiler
// see issue-43106-gating-of-rustc_deprecated.rs
#![must_use]
// see issue-43106-gating-of-stable.rs
// see issue-43106-gating-of-unstable.rs
// see issue-43106-gating-of-deprecated.rs
#![windows_subsystem = "windows"]
// UNGATED CRATE-LEVEL BUILT-IN ATTRIBUTES
#![crate_name = "0900"]
#![crate_type = "bin"] // cannot pass "0800" here
#![crate_id = "10"]
//~^ WARN use of deprecated attribute
//~| HELP remove this attribute
//~| NOTE `#[warn(deprecated)]` on by default
// FIXME(#44232) we should warn that this isn't used.
#![feature(rust1)]
//~^ WARN no longer requires an attribute to enable
//~| NOTE `#[warn(stable_features)]` on by default
#![no_start]
//~^ WARN use of deprecated attribute
//~| HELP remove this attribute
// (cannot easily gating state of crate-level #[no_main]; but non crate-level is below at "0400")
#![no_builtins]
#![recursion_limit = "0200"]
#![type_length_limit = "0100"]
// USES OF BUILT-IN ATTRIBUTES IN OTHER ("UNUSUAL") PLACES
#[warn(x5400)]
//~^ WARN unknown lint: `x5400`
mod warn {
mod inner { #![warn(x5400)] }
//~^ WARN unknown lint: `x5400`
#[warn(x5400)] fn f() { }
//~^ WARN unknown lint: `x5400`
#[warn(x5400)] struct S;
//~^ WARN unknown lint: `x5400`
#[warn(x5400)] type T = S;
//~^ WARN unknown lint: `x5400`
#[warn(x5400)] impl S { }
//~^ WARN unknown lint: `x5400`
}
#[allow(x5300)]
//~^ WARN unknown lint: `x5300`
mod allow {
mod inner { #![allow(x5300)] }
//~^ WARN unknown lint: `x5300`
#[allow(x5300)] fn f() { }
//~^ WARN unknown lint: `x5300`
#[allow(x5300)] struct S;
//~^ WARN unknown lint: `x5300`
#[allow(x5300)] type T = S;
//~^ WARN unknown lint: `x5300`
#[allow(x5300)] impl S { }
//~^ WARN unknown lint: `x5300`
}
#[forbid(x5200)]
//~^ WARN unknown lint: `x5200`
mod forbid {
mod inner { #![forbid(x5200)] }
//~^ WARN unknown lint: `x5200`
#[forbid(x5200)] fn f() { }
//~^ WARN unknown lint: `x5200`
#[forbid(x5200)] struct S;
//~^ WARN unknown lint: `x5200`
#[forbid(x5200)] type T = S;
//~^ WARN unknown lint: `x5200`
#[forbid(x5200)] impl S { }
//~^ WARN unknown lint: `x5200`
}
#[deny(x5100)]
//~^ WARN unknown lint: `x5100`
mod deny {
mod inner { #![deny(x5100)] }
//~^ WARN unknown lint: `x5100`
#[deny(x5100)] fn f() { }
//~^ WARN unknown lint: `x5100`
#[deny(x5100)] struct S;
//~^ WARN unknown lint: `x5100`
#[deny(x5100)] type T = S;
//~^ WARN unknown lint: `x5100`
#[deny(x5100)] impl S { }
//~^ WARN unknown lint: `x5100`
}
#[macro_use]
mod macro_use {
mod inner { #![macro_use] }
#[macro_use] fn f() { }
//~^ `#[macro_use]` only has an effect<|fim▁hole|> #[macro_use] type T = S;
//~^ `#[macro_use]` only has an effect
#[macro_use] impl S { }
//~^ `#[macro_use]` only has an effect
}
#[macro_export]
//~^ WARN `#[macro_export]` only has an effect on macro definitions
mod macro_export {
mod inner { #![macro_export] }
//~^ WARN `#[macro_export]` only has an effect on macro definitions
#[macro_export] fn f() { }
//~^ WARN `#[macro_export]` only has an effect on macro definitions
#[macro_export] struct S;
//~^ WARN `#[macro_export]` only has an effect on macro definitions
#[macro_export] type T = S;
//~^ WARN `#[macro_export]` only has an effect on macro definitions
#[macro_export] impl S { }
//~^ WARN `#[macro_export]` only has an effect on macro definitions
}
// At time of unit test authorship, if compiling without `--test` then
// non-crate-level #[test] attributes seem to be ignored.
#[test]
mod test { mod inner { #![test] }
fn f() { }
struct S;
type T = S;
impl S { }
}
// At time of unit test authorship, if compiling without `--test` then
// non-crate-level #[bench] attributes seem to be ignored.
#[bench]
mod bench {
mod inner { #![bench] }
#[bench]
struct S;
#[bench]
type T = S;
#[bench]
impl S { }
}
#[path = "3800"]
mod path {
mod inner { #![path="3800"] }
#[path = "3800"] fn f() { }
//~^ WARN `#[path]` only has an effect
#[path = "3800"] struct S;
//~^ WARN `#[path]` only has an effect
#[path = "3800"] type T = S;
//~^ WARN `#[path]` only has an effect
#[path = "3800"] impl S { }
//~^ WARN `#[path]` only has an effect
}
#[automatically_derived]
//~^ WARN `#[automatically_derived]` only has an effect
mod automatically_derived {
mod inner { #![automatically_derived] }
//~^ WARN `#[automatically_derived]
#[automatically_derived] fn f() { }
//~^ WARN `#[automatically_derived]
#[automatically_derived] struct S;
//~^ WARN `#[automatically_derived]
#[automatically_derived] type T = S;
//~^ WARN `#[automatically_derived]
#[automatically_derived] impl S { }
}
#[no_mangle]
//~^ WARN attribute should be applied to a free function, impl method or static [unused_attributes]
//~| WARN this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
mod no_mangle {
//~^ NOTE not a free function, impl method or static
mod inner { #![no_mangle] }
//~^ WARN attribute should be applied to a free function, impl method or static [unused_attributes]
//~| WARN this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
//~| NOTE not a free function, impl method or static
#[no_mangle] fn f() { }
#[no_mangle] struct S;
//~^ WARN attribute should be applied to a free function, impl method or static [unused_attributes]
//~| WARN this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
//~| NOTE not a free function, impl method or static
#[no_mangle] type T = S;
//~^ WARN attribute should be applied to a free function, impl method or static [unused_attributes]
//~| WARN this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
//~| NOTE not a free function, impl method or static
#[no_mangle] impl S { }
//~^ WARN attribute should be applied to a free function, impl method or static [unused_attributes]
//~| WARN this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
//~| NOTE not a free function, impl method or static
trait Tr {
#[no_mangle] fn foo();
//~^ WARN attribute should be applied to a free function, impl method or static [unused_attributes]
//~| WARN this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
//~| NOTE not a free function, impl method or static
#[no_mangle] fn bar() {}
//~^ WARN attribute should be applied to a free function, impl method or static [unused_attributes]
//~| WARN this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
//~| NOTE not a free function, impl method or static
}
}
#[should_panic]
//~^ WARN `#[should_panic]` only has an effect on
mod should_panic {
mod inner { #![should_panic] }
//~^ WARN `#[should_panic]` only has an effect on
#[should_panic] fn f() { }
#[should_panic] struct S;
//~^ WARN `#[should_panic]` only has an effect on
#[should_panic] type T = S;
//~^ WARN `#[should_panic]` only has an effect on
#[should_panic] impl S { }
//~^ WARN `#[should_panic]` only has an effect on
}
#[ignore]
//~^ WARN `#[ignore]` only has an effect on functions
mod ignore {
mod inner { #![ignore] }
//~^ WARN `#[ignore]` only has an effect on functions
#[ignore] fn f() { }
#[ignore] struct S;
//~^ WARN `#[ignore]` only has an effect on functions
#[ignore] type T = S;
//~^ WARN `#[ignore]` only has an effect on functions
#[ignore] impl S { }
//~^ WARN `#[ignore]` only has an effect on functions
}
#[no_implicit_prelude]
mod no_implicit_prelude {
mod inner { #![no_implicit_prelude] }
#[no_implicit_prelude] fn f() { }
//~^ WARN `#[no_implicit_prelude]` only has an effect
#[no_implicit_prelude] struct S;
//~^ WARN `#[no_implicit_prelude]` only has an effect
#[no_implicit_prelude] type T = S;
//~^ WARN `#[no_implicit_prelude]` only has an effect
#[no_implicit_prelude] impl S { }
//~^ WARN `#[no_implicit_prelude]` only has an effect
}
#[reexport_test_harness_main = "2900"]
//~^ WARN crate-level attribute should be
mod reexport_test_harness_main {
mod inner { #![reexport_test_harness_main="2900"] }
//~^ WARN crate-level attribute should be
#[reexport_test_harness_main = "2900"] fn f() { }
//~^ WARN crate-level attribute should be
#[reexport_test_harness_main = "2900"] struct S;
//~^ WARN crate-level attribute should be
#[reexport_test_harness_main = "2900"] type T = S;
//~^ WARN crate-level attribute should be
#[reexport_test_harness_main = "2900"] impl S { }
//~^ WARN crate-level attribute should be
}
// Cannot feed "2700" to `#[macro_escape]` without signaling an error.
#[macro_escape]
//~^ WARN `#[macro_escape]` is a deprecated synonym for `#[macro_use]`
mod macro_escape {
mod inner { #![macro_escape] }
//~^ WARN `#[macro_escape]` is a deprecated synonym for `#[macro_use]`
//~| HELP try an outer attribute: `#[macro_use]`
#[macro_escape] fn f() { }
//~^ WARN `#[macro_escape]` only has an effect
#[macro_escape] struct S;
//~^ WARN `#[macro_escape]` only has an effect
#[macro_escape] type T = S;
//~^ WARN `#[macro_escape]` only has an effect
#[macro_escape] impl S { }
//~^ WARN `#[macro_escape]` only has an effect
}
#[no_std]
//~^ WARN crate-level attribute should be an inner attribute
mod no_std {
mod inner { #![no_std] }
//~^ WARN crate-level attribute should be in the root module
#[no_std] fn f() { }
//~^ WARN crate-level attribute should be an inner attribute
#[no_std] struct S;
//~^ WARN crate-level attribute should be an inner attribute
#[no_std] type T = S;
//~^ WARN crate-level attribute should be an inner attribute
#[no_std] impl S { }
//~^ WARN crate-level attribute should be an inner attribute
}
// At time of authorship, #[proc_macro_derive = "2500"] signals error
// when it occurs on a mod (apart from crate-level). Therefore it goes
// into its own file; see issue-43106-gating-of-proc_macro_derive.rs
#[doc = "2400"]
mod doc {
mod inner { #![doc="2400"] }
#[doc = "2400"] fn f() { }
#[doc = "2400"] struct S;
#[doc = "2400"] type T = S;
#[doc = "2400"] impl S { }
}
#[cold]
//~^ WARN attribute should be applied to a function
//~| WARN this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
mod cold {
//~^ NOTE not a function
mod inner { #![cold] }
//~^ WARN attribute should be applied to a function
//~| WARN this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
//~| NOTE not a function
#[cold] fn f() { }
#[cold] struct S;
//~^ WARN attribute should be applied to a function
//~| WARN this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
//~| NOTE not a function
#[cold] type T = S;
//~^ WARN attribute should be applied to a function
//~| WARN this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
//~| NOTE not a function
#[cold] impl S { }
//~^ WARN attribute should be applied to a function
//~| WARN this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
//~| NOTE not a function
}
#[link_name = "1900"]
//~^ WARN attribute should be applied to a foreign function or static [unused_attributes]
//~| WARN this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
mod link_name {
//~^ NOTE not a foreign function or static
#[link_name = "1900"]
//~^ WARN attribute should be applied to a foreign function or static [unused_attributes]
//~| WARN this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
//~| HELP try `#[link(name = "1900")]` instead
extern "C" { }
//~^ NOTE not a foreign function or static
mod inner { #![link_name="1900"] }
//~^ WARN attribute should be applied to a foreign function or static [unused_attributes]
//~| WARN this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
//~| NOTE not a foreign function or static
#[link_name = "1900"] fn f() { }
//~^ WARN attribute should be applied to a foreign function or static [unused_attributes]
//~| WARN this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
//~| NOTE not a foreign function or static
#[link_name = "1900"] struct S;
//~^ WARN attribute should be applied to a foreign function or static [unused_attributes]
//~| WARN this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
//~| NOTE not a foreign function or static
#[link_name = "1900"] type T = S;
//~^ WARN attribute should be applied to a foreign function or static [unused_attributes]
//~| WARN this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
//~| NOTE not a foreign function or static
#[link_name = "1900"] impl S { }
//~^ WARN attribute should be applied to a foreign function or static [unused_attributes]
//~| WARN this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
//~| NOTE not a foreign function or static
}
#[link_section = "1800"]
//~^ WARN attribute should be applied to a function or static [unused_attributes]
//~| WARN this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
mod link_section {
//~^ NOTE not a function or static
mod inner { #![link_section="1800"] }
//~^ WARN attribute should be applied to a function or static [unused_attributes]
//~| WARN this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
//~| NOTE not a function or static
#[link_section = "1800"] fn f() { }
#[link_section = "1800"] struct S;
//~^ WARN attribute should be applied to a function or static [unused_attributes]
//~| WARN this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
//~| NOTE not a function or static
#[link_section = "1800"] type T = S;
//~^ WARN attribute should be applied to a function or static [unused_attributes]
//~| WARN this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
//~| NOTE not a function or static
#[link_section = "1800"] impl S { }
//~^ WARN attribute should be applied to a function or static [unused_attributes]
//~| WARN this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
//~| NOTE not a function or static
}
// Note that this is a `check-pass` test, so it
// will never invoke the linker. These are here nonetheless to point
// out that we allow them at non-crate-level (though I do not know
// whether they have the same effect here as at crate-level).
#[link()]
mod link {
mod inner { #![link()] }
#[link()] fn f() { }
#[link()] struct S;
#[link()] type T = S;
#[link()] impl S { }
}
struct StructForDeprecated;
#[deprecated]
mod deprecated {
mod inner { #![deprecated] }
#[deprecated] fn f() { }
#[deprecated] struct S1;
#[deprecated] type T = super::StructForDeprecated;
#[deprecated] impl super::StructForDeprecated { }
}
#[must_use]
mod must_use {
mod inner { #![must_use] }
#[must_use] fn f() { }
#[must_use] struct S;
#[must_use] type T = S;
#[must_use] impl S { }
}
#[windows_subsystem = "windows"]
mod windows_subsystem {
mod inner { #![windows_subsystem="windows"] }
#[windows_subsystem = "windows"] fn f() { }
#[windows_subsystem = "windows"] struct S;
#[windows_subsystem = "windows"] type T = S;
#[windows_subsystem = "windows"] impl S { }
}
// BROKEN USES OF CRATE-LEVEL BUILT-IN ATTRIBUTES
#[crate_name = "0900"]
//~^ WARN crate-level attribute should be an inner attribute
mod crate_name {
mod inner { #![crate_name="0900"] }
//~^ WARN crate-level attribute should be in the root module
#[crate_name = "0900"] fn f() { }
//~^ WARN crate-level attribute should be an inner attribute
#[crate_name = "0900"] struct S;
//~^ WARN crate-level attribute should be an inner attribute
#[crate_name = "0900"] type T = S;
//~^ WARN crate-level attribute should be an inner attribute
#[crate_name = "0900"] impl S { }
//~^ WARN crate-level attribute should be an inner attribute
}
#[crate_type = "0800"]
//~^ WARN crate-level attribute should be an inner attribute
mod crate_type {
mod inner { #![crate_type="0800"] }
//~^ WARN crate-level attribute should be in the root module
#[crate_type = "0800"] fn f() { }
//~^ WARN crate-level attribute should be an inner attribute
#[crate_type = "0800"] struct S;
//~^ WARN crate-level attribute should be an inner attribute
#[crate_type = "0800"] type T = S;
//~^ WARN crate-level attribute should be an inner attribute
#[crate_type = "0800"] impl S { }
//~^ WARN crate-level attribute should be an inner attribute
}
#[feature(x0600)]
//~^ WARN crate-level attribute should be an inner attribute
mod feature {
mod inner { #![feature(x0600)] }
//~^ WARN crate-level attribute should be in the root module
#[feature(x0600)] fn f() { }
//~^ WARN crate-level attribute should be an inner attribute
#[feature(x0600)] struct S;
//~^ WARN crate-level attribute should be an inner attribute
#[feature(x0600)] type T = S;
//~^ WARN crate-level attribute should be an inner attribute
#[feature(x0600)] impl S { }
//~^ WARN crate-level attribute should be an inner attribute
}
#[no_main]
//~^ WARN crate-level attribute should be an inner attribute
mod no_main_1 {
mod inner { #![no_main] }
//~^ WARN crate-level attribute should be in the root module
#[no_main] fn f() { }
//~^ WARN crate-level attribute should be an inner attribute
#[no_main] struct S;
//~^ WARN crate-level attribute should be an inner attribute
#[no_main] type T = S;
//~^ WARN crate-level attribute should be an inner attribute
#[no_main] impl S { }
//~^ WARN crate-level attribute should be an inner attribute
}
#[no_builtins]
mod no_builtins {
mod inner { #![no_builtins] }
#[no_builtins] fn f() { }
#[no_builtins] struct S;
#[no_builtins] type T = S;
#[no_builtins] impl S { }
}
#[recursion_limit="0200"]
//~^ WARN crate-level attribute should be an inner attribute
mod recursion_limit {
mod inner { #![recursion_limit="0200"] }
//~^ WARN crate-level attribute should be in the root module
#[recursion_limit="0200"] fn f() { }
//~^ WARN crate-level attribute should be an inner attribute
#[recursion_limit="0200"] struct S;
//~^ WARN crate-level attribute should be an inner attribute
#[recursion_limit="0200"] type T = S;
//~^ WARN crate-level attribute should be an inner attribute
#[recursion_limit="0200"] impl S { }
//~^ WARN crate-level attribute should be an inner attribute
}
#[type_length_limit="0100"]
//~^ WARN crate-level attribute should be an inner attribute
mod type_length_limit {
mod inner { #![type_length_limit="0100"] }
//~^ WARN crate-level attribute should be in the root module
#[type_length_limit="0100"] fn f() { }
//~^ WARN crate-level attribute should be an inner attribute
#[type_length_limit="0100"] struct S;
//~^ WARN crate-level attribute should be an inner attribute
#[type_length_limit="0100"] type T = S;
//~^ WARN crate-level attribute should be an inner attribute
#[type_length_limit="0100"] impl S { }
//~^ WARN crate-level attribute should be an inner attribute
}
fn main() {}<|fim▁end|> |
#[macro_use] struct S;
//~^ `#[macro_use]` only has an effect
|
<|file_name|>external_authentication_oauth1.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
This module contains functions and methods to authenticate with OAuth1
providers.
"""
__revision__ = \
"$Id$"
from invenio.containerutils import get_substructure
from invenio.dbquery import run_sql
from invenio.external_authentication import ExternalAuth
class ExternalOAuth1(ExternalAuth):
"""
    Contains methods to authenticate with an OAuth1 provider.
"""
@staticmethod
def __init_req(req):
req.g['oauth1_provider_name'] = ''
req.g['oauth1_debug'] = 0
req.g['oauth1_msg'] = ''
req.g['oauth1_debug_msg'] = ''
req.g['oauth1_response'] = None
def auth_user(self, username, password, req=None):
"""
        Tries to find the email and identity of the user from the OAuth1
        provider. If it cannot find either of them, returns (None, None).
@param username: Isn't used in this function
@type username: str
@param password: Isn't used in this function
@type password: str
@param req: request
@type req: invenio.webinterface_handler_wsgi.SimulatedModPythonRequest
@rtype: str|NoneType, str|NoneType
"""
from invenio.access_control_config import CFG_OAUTH1_CONFIGURATIONS
from invenio.access_control_config import CFG_OAUTH1_PROVIDERS
from invenio.webinterface_handler import wash_urlargd
from rauth.service import OAuth1Service
self.__init_req(req)
args = wash_urlargd(req.form, {'provider': (str, ''),
'login_method': (str, ''),
'oauth_token': (str, ''),
'oauth_verifier': (str, ''),
'denied': (str, '')
})
provider_name = req.g['oauth1_provider_name'] = args['provider']
if not provider_name in CFG_OAUTH1_PROVIDERS:
req.g['oauth1_msg'] = 22
return None, None
# Load the configurations to construct OAuth1 service
config = CFG_OAUTH1_CONFIGURATIONS[args['provider']]
req.g['oauth1_debug'] = config.get('debug', 0)
if not args['oauth_token']:
# In case of an error, display corresponding message
if args['denied']:
req.g['oauth1_msg'] = 21
return None, None
else:
req.g['oauth1_msg'] = 22
return None, None
provider = OAuth1Service(
name = req.g['oauth1_provider_name'],
consumer_key = config['consumer_key'],
consumer_secret = config['consumer_secret'],
request_token_url = config['request_token_url'],
access_token_url = config['access_token_url'],
authorize_url = config['authorize_url'],
header_auth = True)
        # Get the request token secret from the database and exchange the
        # request token for the access token.
query = """SELECT secret FROM oauth1_storage WHERE token = %s"""
params = (args['oauth_token'],)
try:
# If the request token is already used, return
request_token_secret = run_sql(query, params)[0][0]
except IndexError:
req.g['oauth1_msg'] = 22
return None, None
response = provider.get_access_token(
'GET',
request_token = args['oauth_token'],
request_token_secret = request_token_secret,
params = {
'oauth_verifier': args['oauth_verifier']
}
)
if req.g['oauth1_debug']:
req.g['oauth1_debug_msg'] = str(response.content) + "<br/>"
# Some providers send the identity and access token together.
email, identity = self._get_user_email_and_id(response.content, req)
if not identity and config.has_key('request_url'):
# For some providers, to reach user profile we need to make request
# to a specific url.
params = config.get('request_parameters', {})
response = provider.get(config['request_url'],
params = params,
access_token = response.content['oauth_token'],
access_token_secret = response.content['oauth_token_secret']
)
            if req.g['oauth1_debug']:
req.g['oauth1_debug_msg'] += str(response.content) + "<br/>"
email, identity = self._get_user_email_and_id(response.content, req)
if identity:
# If identity is found, add the name of the provider at the
# beginning of the identity because different providers may have
# different users with same id.
identity = "%s:%s" % (req.g['oauth1_provider_name'], identity)
else:
req.g['oauth1_msg'] = 23
# Delete the token saved in the database since it is useless now.
query = """
DELETE FROM oauth1_storage
WHERE token=%s
OR date_creation < DATE_SUB(NOW(), INTERVAL 1 HOUR)
"""
params = (args['oauth_token'],)
run_sql(query, params)
if req.g['oauth1_debug']:
req.g['oauth1_msg'] = "<code>%s</code>" % req.g['oauth1_debug_msg'].replace("\n", "<br/>")
return None, None
return email, identity
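    # Illustrative shape of the provider configuration consumed above; the
    # key names mirror the lookups in this module, while the provider name
    # and all values are placeholders (not part of the original file):
    #
    #     CFG_OAUTH1_CONFIGURATIONS = {
    #         'example_provider': {
    #             'consumer_key': '<key>',
    #             'consumer_secret': '<secret>',
    #             'request_token_url': '<url>',
    #             'access_token_url': '<url>',
    #             'authorize_url': '<url>',
    #             'request_url': '<url>',              # optional profile URL
    #             'request_parameters': {},            # optional
    #             'id': ['user', 'id'],                # path for get_substructure
    #             'email': ['user', 'email'],          # optional
    #             'nickname': ['user', 'screen_name'], # optional
    #             'debug': 0,
    #         },
    #     }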
def fetch_user_nickname(self, username, password=None, req=None):
"""
        Fetches the nickname of the user from the OAuth1 provider. If it
        doesn't find any, returns None.
        The username and password parameters exist only because this class
        is derived from ExternalAuth; they are not used here.
@param username: Isn't used in this function
@type username: str
@param password: Isn't used in this function
@type password: str
        @param req: request
@type req: invenio.webinterface_handler_wsgi.SimulatedModPythonRequest
@rtype: str or NoneType
"""
from invenio.access_control_config import CFG_OAUTH1_CONFIGURATIONS
if req.g['oauth1_provider_name']:
path = None
if CFG_OAUTH1_CONFIGURATIONS[req.g['oauth1_provider_name']].has_key(
'nickname'
):
path = CFG_OAUTH1_CONFIGURATIONS[req.g['oauth1_provider_name']]['nickname']
if path:
                return get_substructure(req.g['oauth1_response'], path)
else:<|fim▁hole|>
def _get_user_email_and_id(self, container, req):
"""
        Returns the external identity and email address together. Since the
        identity is essential for OAuth1 authentication, it returns
        (None, None) if it cannot find the external identity.
@param container: container which contains email and id
@type container: list|dict
@rtype str|NoneType, str|NoneType
"""
from invenio.access_control_config import CFG_OAUTH1_CONFIGURATIONS
identity = None
email = None
if CFG_OAUTH1_CONFIGURATIONS[req.g['oauth1_provider_name']].has_key('id'):
path = CFG_OAUTH1_CONFIGURATIONS[req.g['oauth1_provider_name']]['id']
identity = get_substructure(container, path)
if identity:
if CFG_OAUTH1_CONFIGURATIONS[req.g['oauth1_provider_name']].has_key('email'):
path = CFG_OAUTH1_CONFIGURATIONS[req.g['oauth1_provider_name']]['email']
email = get_substructure(container, path)
req.g['oauth1_response'] = container
return email, identity
@staticmethod
def get_msg(req):
return req.g['oauth1_msg']<|fim▁end|> | return None |
<|file_name|>angles.py<|end_file_name|><|fim▁begin|>import numpy as np
from numba import njit as jit<|fim▁hole|>@jit
def _kepler_equation(E, M, ecc):
return E_to_M(E, ecc) - M
@jit
def _kepler_equation_prime(E, M, ecc):
return 1 - ecc * np.cos(E)
@jit
def _kepler_equation_hyper(F, M, ecc):
return F_to_M(F, ecc) - M
@jit
def _kepler_equation_prime_hyper(F, M, ecc):
return ecc * np.cosh(F) - 1
def newton_factory(func, fprime):
@jit
def jit_newton_wrapper(x0, args=(), tol=1.48e-08, maxiter=50):
p0 = float(x0)
for _ in range(maxiter):
fval = func(p0, *args)
fder = fprime(p0, *args)
newton_step = fval / fder
p = p0 - newton_step
if abs(p - p0) < tol:
return p
p0 = p
return np.nan
return jit_newton_wrapper
_newton_elliptic = newton_factory(_kepler_equation, _kepler_equation_prime)
_newton_hyperbolic = newton_factory(
_kepler_equation_hyper, _kepler_equation_prime_hyper
)
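# Usage note (illustrative, mirroring the calls made further below): the
# factory closes over (func, fprime), so numba compiles one specialized
# Newton solver per equation, e.g.
#
#     E = _newton_elliptic(E0, args=(M, ecc))
#     F = _newton_hyperbolic(F0, args=(M, ecc), maxiter=100)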
@jit
def D_to_nu(D):
r"""True anomaly from parabolic anomaly.
Parameters
----------
D : float
        Parabolic anomaly.
Returns
-------
nu : float
True anomaly.
Notes
-----
From [1]_:
.. math::
\nu = 2 \arctan{D}
"""
return 2.0 * np.arctan(D)
@jit
def nu_to_D(nu):
r"""Parabolic anomaly from true anomaly.
Parameters
----------
nu : float
True anomaly in radians.
Returns
-------
D : float
Parabolic anomaly.
Warnings
--------
The parabolic anomaly will be continuous in (-∞, ∞)
only if the true anomaly is in (-π, π].
No validation or wrapping is performed.
Notes
-----
The treatment of the parabolic case is heterogeneous in the literature,
and that includes the use of an equivalent quantity to the eccentric anomaly:
[1]_ calls it "parabolic eccentric anomaly" D,
[2]_ also uses the letter D but calls it just "parabolic anomaly",
[3]_ uses the letter B citing indirectly [4]_
(which however calls it "parabolic time argument"),
and [5]_ does not bother to define it.
We use this definition:
.. math::
B = \tan{\frac{\nu}{2}}
References
----------
.. [1] Farnocchia, Davide, Davide Bracali Cioci, and Andrea Milani.
"Robust resolution of Kepler’s equation in all eccentricity regimes."
    .. [2] Bate, Mueller, White.
.. [3] Vallado, David. "Fundamentals of Astrodynamics and Applications",
2013.
.. [4] IAU VIth General Assembly, 1938.
.. [5] Battin, Richard H. "An introduction to the Mathematics and Methods
of Astrodynamics, Revised Edition", 1999.
"""
# TODO: Rename to B
return np.tan(nu / 2.0)
@jit
def nu_to_E(nu, ecc):
r"""Eccentric anomaly from true anomaly.
.. versionadded:: 0.4.0
Parameters
----------
nu : float
True anomaly in radians.
ecc : float
Eccentricity.
Returns
-------
E : float
Eccentric anomaly, between -π and π radians.
Warnings
--------
The eccentric anomaly will be between -π and π radians,
no matter the value of the true anomaly.
Notes
-----
The implementation uses the half-angle formula from [3]_:
.. math::
E = 2 \arctan \left ( \sqrt{\frac{1 - e}{1 + e}} \tan{\frac{\nu}{2}} \right)
\in (-\pi, \pi]
"""
E = 2 * np.arctan(np.sqrt((1 - ecc) / (1 + ecc)) * np.tan(nu / 2))
return E
@jit
def nu_to_F(nu, ecc):
r"""Hyperbolic anomaly from true anomaly.
Parameters
----------
nu : float
True anomaly in radians.
ecc : float
Eccentricity (>1).
Returns
-------
F : float
Hyperbolic anomaly.
Warnings
--------
The hyperbolic anomaly will be continuous in (-∞, ∞)
only if the true anomaly is in (-π, π],
which should happen anyway
because the true anomaly is limited for hyperbolic orbits.
No validation or wrapping is performed.
Notes
-----
The implementation uses the half-angle formula from [3]_:
.. math::
F = 2 \operatorname{arctanh} \left( \sqrt{\frac{e-1}{e+1}} \tan{\frac{\nu}{2}} \right)
"""
F = 2 * np.arctanh(np.sqrt((ecc - 1) / (ecc + 1)) * np.tan(nu / 2))
return F
@jit
def E_to_nu(E, ecc):
r"""True anomaly from eccentric anomaly.
.. versionadded:: 0.4.0
Parameters
----------
E : float
Eccentric anomaly in radians.
ecc : float
Eccentricity.
Returns
-------
nu : float
True anomaly, between -π and π radians.
Warnings
--------
The true anomaly will be between -π and π radians,
no matter the value of the eccentric anomaly.
Notes
-----
The implementation uses the half-angle formula from [3]_:
.. math::
\nu = 2 \arctan \left( \sqrt{\frac{1 + e}{1 - e}} \tan{\frac{E}{2}} \right)
\in (-\pi, \pi]
"""
nu = 2 * np.arctan(np.sqrt((1 + ecc) / (1 - ecc)) * np.tan(E / 2))
return nu
@jit
def F_to_nu(F, ecc):
r"""True anomaly from hyperbolic anomaly.
Parameters
----------
F : float
Hyperbolic anomaly.
ecc : float
Eccentricity (>1).
Returns
-------
nu : float
True anomaly.
Notes
-----
The implementation uses the half-angle formula from [3]_:
.. math::
\nu = 2 \arctan \left( \sqrt{\frac{e + 1}{e - 1}} \tanh{\frac{F}{2}} \right)
\in (-\pi, \pi]
"""
nu = 2 * np.arctan(np.sqrt((ecc + 1) / (ecc - 1)) * np.tanh(F / 2))
return nu
@jit
def M_to_E(M, ecc):
"""Eccentric anomaly from mean anomaly.
.. versionadded:: 0.4.0
Parameters
----------
M : float
Mean anomaly in radians.
ecc : float
Eccentricity.
Returns
-------
E : float
Eccentric anomaly.
Notes
-----
This uses a Newton iteration on the Kepler equation.
"""
if -np.pi < M < 0 or np.pi < M:
E0 = M - ecc
else:
E0 = M + ecc
E = _newton_elliptic(E0, args=(M, ecc))
return E
@jit
def M_to_F(M, ecc):
"""Hyperbolic anomaly from mean anomaly.
Parameters
----------
M : float
Mean anomaly in radians.
ecc : float
Eccentricity (>1).
Returns
-------
F : float
Hyperbolic anomaly.
Notes
-----
This uses a Newton iteration on the hyperbolic Kepler equation.
"""
F0 = np.arcsinh(M / ecc)
F = _newton_hyperbolic(F0, args=(M, ecc), maxiter=100)
return F
@jit
def M_to_D(M):
"""Parabolic anomaly from mean anomaly.
Parameters
----------
M : float
Mean anomaly in radians.
Returns
-------
D : float
Parabolic anomaly.
Notes
-----
This uses the analytical solution of Barker's equation from [5]_.
"""
B = 3.0 * M / 2.0
A = (B + (1.0 + B**2) ** 0.5) ** (2.0 / 3.0)
D = 2 * A * B / (1 + A + A**2)
return D
@jit
def E_to_M(E, ecc):
r"""Mean anomaly from eccentric anomaly.
.. versionadded:: 0.4.0
Parameters
----------
E : float
Eccentric anomaly in radians.
ecc : float
Eccentricity.
Returns
-------
M : float
Mean anomaly.
Warnings
--------
The mean anomaly will be outside of (-π, π]
if the eccentric anomaly is.
No validation or wrapping is performed.
Notes
-----
The implementation uses the plain original Kepler equation:
.. math::
M = E - e \sin{E}
"""
M = E - ecc * np.sin(E)
return M
@jit
def F_to_M(F, ecc):
r"""Mean anomaly from eccentric anomaly.
Parameters
----------
F : float
Hyperbolic anomaly.
ecc : float
Eccentricity (>1).
Returns
-------
M : float
Mean anomaly.
Notes
-----
As noted in [5]_, by manipulating
the parametric equations of the hyperbola
we can derive a quantity that is equivalent
to the eccentric anomaly in the elliptic case:
.. math::
M = e \sinh{F} - F
"""
M = ecc * np.sinh(F) - F
return M
@jit
def D_to_M(D):
r"""Mean anomaly from parabolic anomaly.
Parameters
----------
D : float
Parabolic anomaly.
Returns
-------
M : float
Mean anomaly.
Notes
-----
We use this definition:
.. math::
        M = D + \frac{D^3}{3}
Notice that M < ν until ν ~ 100 degrees,
then it reaches π when ν ~ 120 degrees,
and grows without bounds after that.
Therefore, it can hardly be called an "anomaly"
since it is by no means an angle.
"""
M = D + D**3 / 3
return M
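# Round-trip sketch (illustrative only, not part of the original module):
# the elliptic conversions should compose to the identity within solver
# tolerance, e.g.
#
#     M0, ecc = 0.5, 0.3
#     nu = E_to_nu(M_to_E(M0, ecc), ecc)   # mean -> eccentric -> true
#     M1 = E_to_M(nu_to_E(nu, ecc), ecc)   # true -> eccentric -> mean
#     assert abs(M1 - M0) < 1e-10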
@jit
def fp_angle(nu, ecc):
r"""Returns the flight path angle.
Parameters
----------
nu : float
True anomaly in radians.
ecc : float
Eccentricity.
Returns
-------
fp_angle: float
Flight path angle
Notes
-----
From [3]_, pp. 113:
.. math::
        \phi = \arctan{\left(\frac{e \sin{\nu}}{1 + e \cos{\nu}}\right)}
"""
return np.arctan2(ecc * np.sin(nu), 1 + ecc * np.cos(nu))<|fim▁end|> | |
<|file_name|>NavigationHeader.js<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2013-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* Facebook, Inc. ("Facebook") owns all right, title and interest, including
* all intellectual property and other proprietary rights, in and to the React
* Native CustomComponents software (the "Software"). Subject to your
* compliance with these terms, you are hereby granted a non-exclusive,
* worldwide, royalty-free copyright license to (1) use and copy the Software;
* and (2) reproduce and distribute the Software as part of your own software
* ("Your Software"). Facebook reserves all rights not expressly granted to
* you in this license agreement.
*
* THE SOFTWARE AND DOCUMENTATION, IF ANY, ARE PROVIDED "AS IS" AND ANY EXPRESS
* OR IMPLIED WARRANTIES (INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE) ARE DISCLAIMED.
* IN NO EVENT SHALL FACEBOOK OR ITS AFFILIATES, OFFICERS, DIRECTORS OR
* EMPLOYEES BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THE SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* @providesModule NavigationHeader
* @flow
*/
'use strict';
const React = require('React');
const ReactNative = require('react-native');
const NavigationHeaderTitle = require('NavigationHeaderTitle');
const NavigationHeaderBackButton = require('NavigationHeaderBackButton');
const NavigationPropTypes = require('NavigationPropTypes');
const NavigationHeaderStyleInterpolator = require('NavigationHeaderStyleInterpolator');
const ReactComponentWithPureRenderMixin = require('react/lib/ReactComponentWithPureRenderMixin');
const {
Animated,
Platform,
StyleSheet,
View,
} = ReactNative;
<|fim▁hole|> NavigationSceneRendererProps,
NavigationStyleInterpolator,
} from 'NavigationTypeDefinition';
type SubViewProps = NavigationSceneRendererProps & {
onNavigateBack: ?Function,
};
type SubViewRenderer = (subViewProps: SubViewProps) => ?React.Element<any>;
type DefaultProps = {
renderLeftComponent: SubViewRenderer,
renderRightComponent: SubViewRenderer,
renderTitleComponent: SubViewRenderer,
statusBarHeight: number | Animated.Value,
};
type Props = NavigationSceneRendererProps & {
onNavigateBack: ?Function,
renderLeftComponent: SubViewRenderer,
renderRightComponent: SubViewRenderer,
renderTitleComponent: SubViewRenderer,
style?: any,
viewProps?: any,
statusBarHeight: number | Animated.Value,
};
type SubViewName = 'left' | 'title' | 'right';
const APPBAR_HEIGHT = Platform.OS === 'ios' ? 44 : 56;
const STATUSBAR_HEIGHT = Platform.OS === 'ios' ? 20 : 0;
const {PropTypes} = React;
class NavigationHeader extends React.Component<DefaultProps, Props, any> {
props: Props;
static defaultProps = {
renderTitleComponent: (props: SubViewProps) => {
const title = String(props.scene.route.title || '');
return <NavigationHeaderTitle>{title}</NavigationHeaderTitle>;
},
renderLeftComponent: (props: SubViewProps) => {
if (props.scene.index === 0 || !props.onNavigateBack) {
return null;
}
return (
<NavigationHeaderBackButton
onPress={props.onNavigateBack}
/>
);
},
renderRightComponent: (props: SubViewProps) => {
return null;
},
statusBarHeight: STATUSBAR_HEIGHT,
};
static propTypes = {
...NavigationPropTypes.SceneRendererProps,
onNavigateBack: PropTypes.func,
renderLeftComponent: PropTypes.func,
renderRightComponent: PropTypes.func,
renderTitleComponent: PropTypes.func,
style: View.propTypes.style,
statusBarHeight: PropTypes.number,
viewProps: PropTypes.shape(View.propTypes),
};
shouldComponentUpdate(nextProps: Props, nextState: any): boolean {
return ReactComponentWithPureRenderMixin.shouldComponentUpdate.call(
this,
nextProps,
nextState
);
}
render(): React.Element<any> {
const { scenes, style, viewProps } = this.props;
const scenesProps = scenes.map(scene => {
const props = NavigationPropTypes.extractSceneRendererProps(this.props);
props.scene = scene;
return props;
});
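    // statusBarHeight may be a plain number or an Animated.Value, so the
    // total bar height has to be computed in the matching domain.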
const barHeight = (this.props.statusBarHeight instanceof Animated.Value)
? Animated.add(this.props.statusBarHeight, new Animated.Value(APPBAR_HEIGHT))
: APPBAR_HEIGHT + this.props.statusBarHeight;
return (
<Animated.View style={[
styles.appbar,
{ height: barHeight },
style
]}
{...viewProps}
>
{scenesProps.map(this._renderLeft, this)}
{scenesProps.map(this._renderTitle, this)}
{scenesProps.map(this._renderRight, this)}
</Animated.View>
);
}
_renderLeft(props: NavigationSceneRendererProps): ?React.Element<any> {
return this._renderSubView(
props,
'left',
this.props.renderLeftComponent,
NavigationHeaderStyleInterpolator.forLeft,
);
}
_renderTitle(props: NavigationSceneRendererProps): ?React.Element<any> {
return this._renderSubView(
props,
'title',
this.props.renderTitleComponent,
NavigationHeaderStyleInterpolator.forCenter,
);
}
_renderRight(props: NavigationSceneRendererProps): ?React.Element<any> {
return this._renderSubView(
props,
'right',
this.props.renderRightComponent,
NavigationHeaderStyleInterpolator.forRight,
);
}
_renderSubView(
props: NavigationSceneRendererProps,
name: SubViewName,
renderer: SubViewRenderer,
styleInterpolator: NavigationStyleInterpolator,
): ?React.Element<any> {
const {
scene,
navigationState,
} = props;
const {
index,
isStale,
key,
} = scene;
const offset = navigationState.index - index;
if (Math.abs(offset) > 2) {
      // The scene is far away from the active scene. Hide it to avoid
      // unnecessary rendering.
return null;
}
const subViewProps = {...props, onNavigateBack: this.props.onNavigateBack};
const subView = renderer(subViewProps);
if (subView === null) {
return null;
}
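    // Only the active, non-stale scene should receive touches; scenes kept
    // around for the transition stay visible but inert ('box-none' lets the
    // active scene's children handle events).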
const pointerEvents = offset !== 0 || isStale ? 'none' : 'box-none';
return (
<Animated.View
pointerEvents={pointerEvents}
key={name + '_' + key}
style={[
styles[name],
{ marginTop: this.props.statusBarHeight },
styleInterpolator(props),
]}>
{subView}
</Animated.View>
);
}
static HEIGHT = APPBAR_HEIGHT + STATUSBAR_HEIGHT;
static Title = NavigationHeaderTitle;
static BackButton = NavigationHeaderBackButton;
}
const styles = StyleSheet.create({
appbar: {
alignItems: 'center',
backgroundColor: Platform.OS === 'ios' ? '#EFEFF2' : '#FFF',
borderBottomColor: 'rgba(0, 0, 0, .15)',
borderBottomWidth: Platform.OS === 'ios' ? StyleSheet.hairlineWidth : 0,
elevation: 4,
flexDirection: 'row',
justifyContent: 'flex-start',
},
title: {
bottom: 0,
left: APPBAR_HEIGHT,
position: 'absolute',
right: APPBAR_HEIGHT,
top: 0,
},
left: {
bottom: 0,
left: 0,
position: 'absolute',
top: 0,
},
right: {
bottom: 0,
position: 'absolute',
right: 0,
top: 0,
},
});
module.exports = NavigationHeader;<|fim▁end|> | import type { |
<|file_name|>eLTE_Tool.cpp<|end_file_name|><|fim▁begin|>/*Copyright 2015 Huawei Technologies Co., Ltd. All rights reserved.
eSDK is licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.*/
#include "stdafx.h"
#include <Shlwapi.h>
#include <algorithm>
#include "eLTE_Tool.h"
#pragma comment(lib, "Shlwapi.lib")
#define STR_LENGTH_SIZE 20
#define INT_FORMAT_D 10
#define DOUBLE_STR_SIZE 64
#define DOUBLE_FORMAT "%g"
#define UINT_FORMAT "%u"
eLTE_Tool::eLTE_Tool()
{
}
eLTE_Tool::~eLTE_Tool()
{
<|fim▁hole|>
std::string eLTE_Tool::Int2String(int iVar)
{
char buf[STR_LENGTH_SIZE] = {0};
(void)_itoa_s(iVar, buf, INT_FORMAT_D);
return std::string(buf);
}
std::string eLTE_Tool::UInt2String(unsigned int iVar)
{
char _Buf[STR_LENGTH_SIZE];
(void)sprintf_s(_Buf, sizeof(_Buf), UINT_FORMAT, iVar);
return std::string(_Buf);
}
std::string eLTE_Tool::Double2String(double dVar)
{
char _Buf[DOUBLE_STR_SIZE];
(void)sprintf_s(_Buf, sizeof(_Buf), DOUBLE_FORMAT, dVar);
return std::string(_Buf);
}
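// Usage sketch (illustrative only, not part of the original file):
//   eLTE_Tool::Int2String(-42);     // "-42"
//   eLTE_Tool::UInt2String(7u);     // "7"
//   eLTE_Tool::Double2String(2.50); // "2.5" ("%g" trims trailing zeros)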
int eLTE_Tool::String2Int(const std::string& str)
{
int iRet = 0;
try
{
iRet = std::stoi(str);
}
catch (...)
{
iRet = 0;
}
return iRet;
}
unsigned int eLTE_Tool::String2UInt(const std::string& str)
{
unsigned int uiRet = 0;
try
{
uiRet = std::stoul(str);
}
catch (...)
{
uiRet = 0;
}
return uiRet;
}
int eLTE_Tool::WString2Int(const std::wstring& wstr)
{
int iRet = 0;
try
{
iRet = std::stoi(wstr);
}
catch (...)
{
iRet = 0;
}
return iRet;
}
unsigned int eLTE_Tool::WString2UInt(const std::wstring& wstr)
{
unsigned int uiRet = 0;
try
{
uiRet = std::stoul(wstr);
}
catch (...)
{
uiRet = 0;
}
return uiRet;
}
std::string eLTE_Tool::UnicodeToANSI(const CString& str)
{
std::string strResult("");
int textlen = WideCharToMultiByte(CP_ACP, 0, str, -1, NULL, 0, NULL, NULL);
if (0 >= textlen)
{
// WideCharToMultiByte failed.
return strResult;
}
size_t bufsize = (size_t)(textlen+1);
char* pBuf = new char[bufsize];
memset(pBuf, 0, sizeof(char)*bufsize);
WideCharToMultiByte(CP_ACP, 0, str, -1, pBuf, textlen, NULL, NULL);//lint !e713
strResult = pBuf;
delete[] pBuf;
return strResult;
}
std::string eLTE_Tool::UnicodeToUTF8(const std::wstring& str)
{
std::string strResult("");
int textlen = WideCharToMultiByte(CP_UTF8, 0, str.c_str(), -1, NULL, 0, NULL, NULL);
if (0 >= textlen)
{
// WideCharToMultiByte failed.
return strResult;
}
size_t bufsize = (size_t)(textlen+1);
char* pBuf = new char[bufsize];
memset(pBuf, 0, sizeof(char)*bufsize);
WideCharToMultiByte(CP_UTF8, 0, str.c_str(), -1, pBuf, textlen, NULL, NULL);//lint !e713
strResult = pBuf;
delete[] pBuf;
return strResult;
}
CString eLTE_Tool::ANSIToUnicode(const std::string& str)
{
CString strResult(L"");
int textlen = MultiByteToWideChar(CP_ACP, 0, str.c_str(), -1, NULL, 0);
if (0 >= textlen)
{
// MultiByteToWideChar failed.
return strResult;
}
size_t bufsize = (size_t)(textlen+1);
wchar_t* pBuf = new wchar_t[bufsize];
memset(pBuf, 0, sizeof(wchar_t)*bufsize);
MultiByteToWideChar(CP_ACP, 0, str.c_str(), -1, (LPWSTR)pBuf, textlen);//lint !e713
strResult = pBuf;
delete[] pBuf;
return strResult;
}
CString eLTE_Tool::UTF8ToUnicode(const std::string& str)
{
CString strResult(L"");
int textlen = MultiByteToWideChar(CP_UTF8, 0, str.c_str(), -1, NULL, 0);
if (0 >= textlen)
{
// MultiByteToWideChar failed.
return strResult;
}
size_t bufsize = (size_t)(textlen+1);
wchar_t* pBuf = new wchar_t[bufsize];
memset(pBuf, 0, sizeof(wchar_t)*bufsize);
MultiByteToWideChar(CP_UTF8, 0, str.c_str(), -1, (LPWSTR)pBuf, textlen);//lint !e713
strResult = pBuf;
delete[] pBuf;
return strResult;
}
//std::string eLTE_Tool::UTF8ToANSI(const std::string& str)
//{
// std::wstring strUnicode = UTF8ToUnicode(str);
// std::string strAnsi = UnicodeToANSI(strUnicode);
// return strAnsi;
//}
//std::string eLTE_Tool::ANSIToUTF8(const std::string& str)
//{
// std::wstring strUnicode = ANSIToUnicode(str);
// std::string strUtf8 = UnicodeToUTF8(strUnicode);
// return strUtf8;
//}
std::string eLTE_Tool::GetIPByUIntValue(unsigned long ulIP)
{
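	// Split the 32-bit value into four octets, most significant byte first.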
int val1 = (ulIP & 0xFF000000) >> 24;
int val2 = (ulIP & 0x00FF0000) >> 16;
int val3 = (ulIP & 0x0000FF00) >> 8;
int val4 = (ulIP & 0x000000FF);
std::string str = ".";
std::string strIP
= Int2String(val1)
+ str
+ Int2String(val2)
+ str
+ Int2String(val3)
+ str
+ Int2String(val4);
return strIP;
}<|fim▁end|> | } |
<|file_name|>angular-tabs.js<|end_file_name|><|fim▁begin|>'use strict';
angular.module('angular-tabs', ['angular-tabs-utils'])
.provider('$uiTabs', function () {
/**
*
*/
var TabDefinition = function () {
this.$dirty = false;
this.$selected = false;
this.$volatile = true;
this.$data = {};
};
TabDefinition.prototype.onClose = ['$q', function ($q) {
return function () {
var deferred = $q.defer();
deferred.resolve();
return deferred.promise;
};
}];
/**
* Map of tab definitions
*/
var tabDefinitions = {
null: {template: ''}
};
/**
*
* @param type {string}
* @param definition {Object}
* @returns {Object} self
*/
this.tab = function (type, definition) {
var tabDefinition = angular.extend(
{}, definition
);
if (tabDefinition.volatile !== undefined) {
tabDefinition.$volatile = !!tabDefinition.volatile;
delete tabDefinition.volatile;
}
tabDefinitions[type] = tabDefinition;
return this;
};
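    /*
     * Configuration sketch (illustrative; the module, tab type and template
     * names are assumptions, not part of this library):
     *
     *   angular.module('app', ['angular-tabs'])
     *       .config(function ($uiTabsProvider) {
     *           $uiTabsProvider
     *               .tab('report', {templateUrl: 'report.html', volatile: false})
     *               .welcome({template: '<h1>Welcome</h1>'});
     *       });
     */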
/**
*
* @param definition {Object}
* @returns {Object} self
*/
this.welcome = function (definition) {
return this.tab(null, definition);
};
this.config = function(options) {
TabDefinition.prototype.tabHeaderItemTemplate = options.tabHeaderItemTemplate;
TabDefinition.prototype.tabHeaderMenuItemTemplate = options.tabHeaderMenuItemTemplate;
TabDefinition.prototype.tabHeaderItemTemplateUrl = options.tabHeaderItemTemplateUrl || 'src/templates/templateItemUrl.html';
TabDefinition.prototype.tabHeaderMenuItemTemplateUrl = options.tabHeaderMenuItemTemplateUrl || 'src/templates/templateMenuItemUrl.html';
};
/**
*
* @param handler {function}
*/
this.onClose = function (handler) {
TabDefinition.prototype.onClose = handler;
};
/*
*
*/
function initTab(type, options) {
var tabDefinition = tabDefinitions[type];
if (!tabDefinition) {
return undefined;
}
var tabDefInstance = angular.extend(new TabDefinition(), tabDefinition, options || {});
if (!!tabDefInstance.template && !!options && !!options.templateUrl) {
delete tabDefInstance.template;
}
return tabDefInstance;
}
this.$get = ["$rootScope", "$injector", "$sce", "$http", "$q", "$templateCache", "utils", function ($rootScope, $injector, $sce, $http, $q, $templateCache, utils) {
/**
* Basically TABS.arr & TABS.map contain the same tabs objects
*/
var TABS = {
arr : [],
map : {},
history : [],
activeTab: undefined
};
/**
* Return a tab object
* @param id tab id
* @returns {tab}
*/
var getTab = function (id) {
return TABS.map[id];
};
var removeTab = function (id) {
var tab = getTab(id);
$q.when(tab).
then(function () {
if (tab.onClose) {
var fn = angular.isString(tab.onClose) ? $injector.get(tab.onClose) : $injector.invoke(tab.onClose);
return fn(tab);
}
}).
// after tab close resolved
then(function () {
removeTabIntern(tab);
});
};
var removeTabIntern = function (tab) {
utils.remove(TABS.history, function (tabId) {
return tabId === tab.$$tabId;
});
if (tab.$selected && TABS.history.length > 0) {
selectTab(TABS.history[TABS.history.length - 1]);
}
cleanTabScope(tab);
TABS.arr.splice(TABS.arr.indexOf(tab), 1);
delete TABS.map[tab.$$tabId];
$rootScope.$broadcast('$tabRemoveSuccess', tab);
};
var getActiveTab = function () {
return TABS.activeTab;
};
/**
* Return all tabs
* @returns {*}
*/
var getTabs = function () {
return TABS.arr; // clone ?
};
/*
Private
*/
var cleanTabScope = function (tab) {
if (tab.scope) {
tab.scope.$destroy();
tab.scope = null;
}
};
/**
* Add a new tab
* @param type type of a tab described with $uiTabsProvider
* @param options init tab options (title, disabled)
* @param id (optional) tab's unique id. If 'id' exists, tab content of this tab will be replaced
* @returns {Promise(tab)}
*/
var addTab = function (type, options, id) {
var newTab = initTab(type, options);
if (!newTab) {
throw new Error('Unknown tab type: ' + type);
}
newTab.$$tabId = id || Math.random().toString(16).substr(2);
newTab.close = function () {
removeTab(this.$$tabId);
};
return loadTabIntern(newTab).then(function(newTab) {
var find = getTab(newTab.$$tabId);
if (!find) {
// Add Tab
if (type !== null) {
TABS.arr.push(newTab);
}
TABS.map[newTab.$$tabId] = newTab;
} else {
// Replace tab
cleanTabScope(find);
angular.copy(newTab, find);
}
return selectTab(newTab.$$tabId);
}, function (error) {
$rootScope.$broadcast('$tabAddError', newTab, error);
});
};
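        /*
         * Usage sketch (illustrative; the 'report' type and its options are
         * assumptions, not part of this library): addTab resolves with the
         * newly selected tab.
         *
         *   $uiTabs.addTab('report', {title: 'Q3'}).then(function (tab) {
         *       console.log('opened', tab.$$tabId);
         *   });
         */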
        /**
         * Load the tab content (template and resolved locals)
         * @param tab tab object
         * @returns {Promise} promise resolved with the loaded tab
         */
function loadTabIntern(tab) {
return $q.when(tab).
then(function () {
// Start loading template
$rootScope.$broadcast('$tabLoadStart', tab);
var locals = angular.extend({}, tab.resolve),
template, templateUrl;
angular.forEach(locals, function (value, key) {
locals[key] = angular.isString(value) ?
$injector.get(value) : $injector.invoke(value);
});
if (angular.isDefined(template = tab.template)) {
if (angular.isFunction(template)) {
template = template(tab);
}
} else if (angular.isDefined(templateUrl = tab.templateUrl)) {
if (angular.isFunction(templateUrl)) {
templateUrl = templateUrl(tab);
}
templateUrl = $sce.getTrustedResourceUrl(templateUrl);
if (angular.isDefined(templateUrl)) {
tab.loadedTemplateUrl = templateUrl;
template = $http.get(templateUrl, {cache: $templateCache}).
then(function (response) {
return response.data;
});
}
}
if (angular.isDefined(template)) {
locals.$template = template;
}
return $q.all(locals);
}).then(function(locals) {
tab.locals = locals;
$rootScope.$broadcast('$tabLoadEnd', tab);
return tab;
});
}
/**
* Select an existing tab
* @param tabId tab id
* @returns {tab}
*/
function selectTab(tabId) {
var next = getTab(tabId),
last = getActiveTab();
if (next && last && next.$$tabId === last.$$tabId) {
$rootScope.$broadcast('$tabUpdate', last);
} else if (next) {
$rootScope.$broadcast('$tabChangeStart', next, last);
if (last) {
last.$selected = false;
}
next.$selected = true;
TABS.activeTab = next;
utils.remove(TABS.history, function (id) {
return tabId === id;
});
TABS.history.push(tabId);
$rootScope.$broadcast('$tabChangeSuccess', next, last);
} else {
                $rootScope.$broadcast('$tabChangeError', next, 'Could not find tab with id #' + tabId);
}
return next;
}
// Add welcome tab
addTab(null);
/**
* Public API
*/
return {
addTab: addTab,
getTabs: getTabs,
getTab: getTab,
removeTab: removeTab,
selectTab: selectTab,
getActiveTab: getActiveTab
};
}];
})
.directive('tabView', ["$uiTabs", "$anchorScroll", "$animate", function ($uiTabs, $anchorScroll, $animate) {
return {
restrict: 'ECA',
terminal: true,
priority: 400,
transclude: 'element',
link: function (scope, $element, attr, ctrl, $transclude) {
var currentScope,
currentElement,
previousElement,
autoScrollExp = attr.autoscroll,
onloadExp = attr.onload || '',
elems = {};
function onTabRemoveSuccess(event, tab) {
if (tab.$selected === false) {
var elem = elems[tab.$$tabId];
if (elem) {
delete elems[tab.$$tabId];
elem.remove();
elem = null;
}
}
}
function cleanupLastView() {
var id = currentElement && currentElement.data('$$tabId');
var tab = $uiTabs.getTab(id);
if (previousElement) {
previousElement.remove();
previousElement = null;
}
if (currentScope && tab === undefined) {
currentScope.$destroy();
currentScope = null;
}
if (currentElement) {
if (tab) {
$animate.addClass(currentElement, 'ng-hide');
previousElement = null;
} else {
$animate.leave(currentElement, function () {
previousElement = null;
});
previousElement = currentElement;
currentElement = null;
}
}
}
function onTabChangeSuccess(event, currentTab) {
var elem = elems[currentTab.$$tabId];
if (elem) {
$animate.removeClass(elem, 'ng-hide');
cleanupLastView();
currentElement = elem;
return;
}
var locals = currentTab && currentTab.locals,
template = locals && locals.$template;
if (angular.isDefined(template)) {
var newScope = scope.$new();
newScope.$$tabId = currentTab.$$tabId;
if (currentTab.$volatile !== false) {
newScope.$data = currentTab.$data;
}
newScope.$setTabDirty = function () {
currentTab.$dirty = true;
};
// Note: This will also link all children of tab-view that were contained in the original
// html. If that content contains controllers, ... they could pollute/change the scope.
// However, using ng-view on an element with additional content does not make sense...
// Note: We can't remove them in the cloneAttchFn of $transclude as that
// function is called before linking the content, which would apply child
// directives to non existing elements.
var clone = $transclude(newScope, function (clone) {
$animate.enter(clone, null, currentElement || $element, function onNgViewEnter() {
if (angular.isDefined(autoScrollExp) && (!autoScrollExp || scope.$eval(autoScrollExp))) {
$anchorScroll();
}
});
cleanupLastView();
});
currentElement = clone;
currentScope = newScope;
if (currentTab.$volatile === false) {
currentElement.data('$$tabId', currentTab.$$tabId);
elems[currentTab.$$tabId] = currentElement;
currentTab.scope = newScope;
}
newScope.$emit('$tabContentLoaded');
newScope.$eval(onloadExp);
} else {
cleanupLastView();
}
}
scope.$on('$tabChangeSuccess', onTabChangeSuccess);
scope.$on('$tabRemoveSuccess', onTabRemoveSuccess);
}
};
}])
.directive('tabView', ["$compile", "$controller", "$uiTabs", function ($compile, $controller, $uiTabs) {
return {
restrict: 'ECA',
priority: -400,
link: function (scope, $element) {
var current = $uiTabs.getActiveTab(),
locals = current.locals;
scope.$$currentTab = current;<|fim▁hole|>
if (current.controller) {
locals.$scope = scope;
var controller = $controller(current.controller, locals);
if (current.controllerAs) {
scope[current.controllerAs] = controller;
}
$element.data('$ngControllerController', controller);
$element.children().data('$ngControllerController', controller);
}
link(scope);
},
controller: ["$scope", function ($scope) {
this.$$getCurrentTab = function () {
return $scope.$$currentTab;
};
}]
};
}])
.directive('closeUiTab', function () {
return {
restrict: 'ECA',
priority: -400,
require: '^tabView',
link: function (scope, $element, attr, tabViewCtrl) {
$element.on('click', function () {
scope.$apply(function() {
tabViewCtrl.$$getCurrentTab().close();
});
});
}
};
})
.directive('closeUiTabHeader', function () {
return {
restrict: 'ECA',
priority: -401,
link: function (scope, $element, attr) {
$element.on('click', function () {
scope.$apply(function() {
scope.tab.close();
});
});
}
};
})
.directive('tabHeaderItem', ["$http", "$templateCache", "$compile", "$sce", "$q", function ($http, $templateCache, $compile, $sce, $q) {
return {
restrict: 'EA',
scope: {
index: '=',
tab: '='
},
priority: -402,
link: function (scope, element, attrs) {
var template, templateUrl;
if (attrs.type === 'menu') {
template = scope.tab.tabHeaderMenuItemTemplate;
templateUrl = scope.tab.tabHeaderMenuItemTemplateUrl;
} else {
template = scope.tab.tabHeaderItemTemplate;
templateUrl = scope.tab.tabHeaderItemTemplateUrl;
}
if (angular.isDefined(template)) {
if (angular.isFunction(template)) {
                    template = template(scope.tab);
}
} else if (angular.isDefined(templateUrl)) {
if (angular.isFunction(templateUrl)) {
                    templateUrl = templateUrl(scope.tab);
}
templateUrl = $sce.getTrustedResourceUrl(templateUrl);
if (angular.isDefined(templateUrl)) {
//tab.loadedTemplateUrl = templateUrl;
template = $http.get(templateUrl, {cache: $templateCache}).
then(function (response) {
return response.data;
});
}
}
$q.when(template).then(function(tplContent) {
element.replaceWith($compile(tplContent.trim())(scope));
});
}
};
}])
.directive('tabHeader', ["$uiTabs", "$window", "$timeout", "utils", function ($uiTabs, $window, $timeout, utils) {
return {
restrict: 'ECA',
priority: -400,
template: '<div class="ui-tab-header" ui-tab-menu-dropdown>\n <div class="ui-tab-header-wrapper">\n <ul class="ui-tab-header-container">\n <li class="ui-tab-header-item" ng-class="{active: tab.$selected}" data-ng-repeat="tab in tabs" data-ng-click="selectTab(tab, $index)">\n <span tab-header-item type="tab" tab="tab" index="$index"></span>\n </li>\n </ul>\n </div>\n\n <span class="ui-tab-header-menu-toggle" ui-tab-menu-dropdown-toggle ng-show="showTabMenuHandler"></span>\n <div class="ui-tab-header-menu">\n <ul>\n <li class="ui-tab-header-menu-item" data-ng-repeat="tab in tabs" data-ng-click="selectTab(tab, $index)">\n <span tab-header-item type="menu" tab="tab" index="$index"></span>\n </li>\n </ul>\n </div>\n</div>\n',
scope: {},
controller: function() {},
link: function (scope, elem, attr) {
var container = elem.find('ul.ui-tab-header-container');
var wrapper = elem.find('div.ui-tab-header-wrapper');
var lastSelectedIndex;
scope.tabs = $uiTabs.getTabs();
scope.selectTab = function (tab, index) {
$uiTabs.selectTab(tab.$$tabId);
scrollToTab(index);
};
scope.closeTab = function (tab) {
$uiTabs.removeTab(tab.$$tabId);
};
scope.$watchCollection('tabs', function (tabs) {
$timeout(function () {
var index = tabs.indexOf($uiTabs.getActiveTab());
if (index !== -1) {
scrollToTab(index);
}
});
});
var scrollToTab = function (index) {
var left;
if (container.outerWidth() + container.position().left < wrapper.innerWidth()) {
                    // Trim empty space on the right (after tab removal or window resize)
left = Math.min((wrapper.innerWidth() - container.outerWidth() ), 0);
}
scope.showTabMenuHandler = wrapper.innerWidth() < container.outerWidth();
if (index !== undefined) {
var li = elem.find('li.ui-tab-header-item:nth-child(' + (index + 1) + ')');
var leftOffset = container.position().left;
if (leftOffset + li.position().left < 0) {
// Scroll to active tab at left
left = -li.position().left;
} else {
// Scroll to active tab at right
var liOffset = li.position().left + li.outerWidth() + leftOffset;
if (liOffset > wrapper.innerWidth()) {
left = wrapper.innerWidth() + leftOffset - liOffset;
}
}
}
if (left !== undefined) {
container.css({left: left});
}
lastSelectedIndex = index;
};
var w = angular.element($window);
w.bind('resize', utils.debounce(function (event) {
scope.$apply(scrollToTab(lastSelectedIndex));
}, 200));
}
};
}])
.constant('dropdownConfig', {
openClass: 'open'
})
.service('dropdownService', ['$document', function ($document) {
var openScope = null;
this.open = function (dropdownScope) {
if (!openScope) {
$document.bind('click', closeDropdown);
$document.bind('keydown', escapeKeyBind);
}
if (openScope && openScope !== dropdownScope) {
openScope.isOpen = false;
}
openScope = dropdownScope;
};
this.close = function (dropdownScope) {
if (openScope === dropdownScope) {
openScope = null;
$document.unbind('click', closeDropdown);
$document.unbind('keydown', escapeKeyBind);
}
};
var closeDropdown = function (evt) {
// This method may still be called during the same mouse event that
// unbound this event handler. So check openScope before proceeding.
if (!openScope) {
return;
}
var toggleElement = openScope.getToggleElement();
if (evt && toggleElement && toggleElement[0].contains(evt.target)) {
return;
}
openScope.$apply(function () {
openScope.isOpen = false;
});
};
var escapeKeyBind = function (evt) {
if (evt.which === 27) {
openScope.focusToggleElement();
closeDropdown();
}
};
}])
.controller('DropdownController', ['$scope', '$attrs', '$parse', 'dropdownConfig', 'dropdownService', '$animate', function ($scope, $attrs, $parse, dropdownConfig, dropdownService, $animate) {
var self = this,
scope = $scope.$new(), // create a child scope so we are not polluting original one
openClass = dropdownConfig.openClass,
getIsOpen,
setIsOpen = angular.noop,
toggleInvoker = $attrs.onToggle ? $parse($attrs.onToggle) : angular.noop;
this.init = function (element) {
self.$element = element;
if ($attrs.isOpen) {
getIsOpen = $parse($attrs.isOpen);
setIsOpen = getIsOpen.assign;
$scope.$watch(getIsOpen, function (value) {
scope.isOpen = !!value;
});
}
};
this.toggle = function (open) {
scope.isOpen = arguments.length ? !!open : !scope.isOpen;
return scope.isOpen;
};
// Allow other directives to watch status
this.isOpen = function () {
return scope.isOpen;
};
scope.getToggleElement = function () {
return self.toggleElement;
};
scope.focusToggleElement = function () {
if (self.toggleElement) {
self.toggleElement[0].focus();
}
};
scope.$watch('isOpen', function (isOpen, wasOpen) {
$animate[isOpen ? 'addClass' : 'removeClass'](self.$element, openClass);
if (isOpen) {
scope.focusToggleElement();
dropdownService.open(scope);
} else {
dropdownService.close(scope);
}
setIsOpen($scope, isOpen);
if (angular.isDefined(isOpen) && isOpen !== wasOpen) {
toggleInvoker($scope, {open: !!isOpen});
}
});
$scope.$on('$locationChangeSuccess', function () {
scope.isOpen = false;
});
$scope.$on('$destroy', function () {
scope.$destroy();
});
}])
.directive('uiTabMenuDropdown', function () {
return {
controller: 'DropdownController',
link: function (scope, element, attrs, dropdownCtrl) {
dropdownCtrl.init(element);
}
};
})
.directive('uiTabMenuDropdownToggle', function () {
return {
require: '?^uiTabMenuDropdown',
priority: -500,
link: function (scope, element, attrs, dropdownCtrl) {
if (!dropdownCtrl) {
return;
}
dropdownCtrl.toggleElement = element;
var toggleDropdown = function (event) {
event.preventDefault();
if (!element.hasClass('disabled') && !attrs.disabled) {
scope.$apply(function () {
dropdownCtrl.toggle();
});
}
};
element.bind('click', toggleDropdown);
// WAI-ARIA
element.attr({'aria-haspopup': true, 'aria-expanded': false});
scope.$watch(dropdownCtrl.isOpen, function (isOpen) {
element.attr('aria-expanded', !!isOpen);
});
scope.$on('$destroy', function () {
element.unbind('click', toggleDropdown);
});
}
};
})
.run(["$templateCache", function($templateCache) {
$templateCache.put('src/templates/templateItemUrl.html',
'<span class="asterisk" ng-show="tab.$dirty">*</span>' +
'<span class="ui-tab-header-title">{{tab.title}}</span>' +
'<span class="ui-tab-header-close" close-ui-tab-header></span>'
);
$templateCache.put('src/templates/templateMenuItemUrl.html',
'<span class="ui-tab-header-menu-item-title">{{tab.title}}</span>'
);
}]);
'use strict';
angular.module('angular-tabs-utils', [])
.service('utils', function () {
return {
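            // remove (in place) every element for which callback returns truthy,
            // returning the removed elements (similar in spirit to lodash's _.remove)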
remove: function (array, callback) {
var index = -1,
length = array ? array.length : 0,
result = [];
while (++index < length) {
var value = array[index];
if (callback && callback(value, index, array)) {
result.push(value);
[].splice.call(array, index--, 1);
length--;
}
}
return result;
},
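            // return a debounced wrapper around func that waits `wait` ms after the last
            // call before invoking; with `immediate`, the first call in a burst fires
            // immediately and trailing calls are suppressed until the wait has elapsed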
debounce: function (func, wait, immediate) {
var args,
result,
thisArg,
timeoutId;
function delayed() {
timeoutId = null;
if (!immediate) {
result = func.apply(thisArg, args);
}
}
return function () {
var isImmediate = immediate && !timeoutId;
args = arguments;
thisArg = this;
clearTimeout(timeoutId);
timeoutId = setTimeout(delayed, wait);
if (isImmediate) {
result = func.apply(thisArg, args);
}
return result;
};
}
};
});<|fim▁end|> | $element.html(locals.$template);
$element.addClass('ui-tab-system-view');
var link = $compile($element.contents()); |
<|file_name|>SubKey.java<|end_file_name|><|fim▁begin|>package org.fnppl.opensdx.security;
/*
* Copyright (C) 2010-2015
* fine people e.V. <[email protected]>
* Henning Thieß <[email protected]>
*
* http://fnppl.org
*/
/*
* Software license
*
* As far as this file or parts of this file is/are software, rather than documentation, this software-license applies / shall be applied.
*
* This file is part of openSDX
* openSDX is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* openSDX is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* and GNU General Public License along with openSDX.
* If not, see <http://www.gnu.org/licenses/>.
*
*/
/*
* Documentation license
*
* As far as this file or parts of this file is/are documentation, rather than software, this documentation-license applies / shall be applied.
*
* This file is part of openSDX.
* Permission is granted to copy, distribute and/or modify this document
* under the terms of the GNU Free Documentation License, Version 1.3
* or any later version published by the Free Software Foundation;
* with no Invariant Sections, no Front-Cover Texts, and no Back-Cover Texts.
* A copy of the license is included in the section entitled "GNU
* Free Documentation License" resp. in the file called "FDL.txt".
*
*/
public class SubKey extends OSDXKey {
protected MasterKey parentKey = null;
	protected String parentkeyid = null; // the parent key may not be loaded - then *only* the id is present
protected SubKey() {
super();
super.setLevel(LEVEL_SUB);
}
//public Result uploadToKeyServer(KeyVerificator keyverificator) {
public Result uploadToKeyServer(KeyClient client) {
if (!hasPrivateKey()) {
System.out.println("uploadToKeyServer::!hasprivatekey");
return Result.error("no private key available");
}
if (!isPrivateKeyUnlocked()) {
System.out.println("uploadToKeyServer::!privatekeyunlocked");
return Result.error("private key is locked");
}
if (authoritativekeyserver.equals("LOCAL")) {
System.out.println("uploadToKeyServer::authoritativekeyserver==local");
return Result.error("authoritative keyserver can not be LOCAL");
}
//if (authoritativekeyserverPort<=0) return Result.error("authoritative keyserver port not set");
if (parentKey==null) {<|fim▁hole|> System.out.println("uploadToKeyServer::parentkey==null");
return Result.error("missing parent key");
}
try {
//KeyClient client = new KeyClient(authoritativekeyserver, KeyClient.OSDX_KEYSERVER_DEFAULT_PORT, "", keyverificator);
// KeyClient client = new KeyClient(
// authoritativekeyserver,
// 80, //TODO HT 2011-06-26 check me!!!
// //KeyClient.OSDX_KEYSERVER_DEFAULT_PORT,
// "",
// keyverificator
// );
//System.out.println("Before SubKey.putSubkey...");
boolean ok = client.putSubKey(this, parentKey);
//System.out.println("AFTER SubKey.putSubkey -> "+ok);
if (ok) {
return Result.succeeded();
} else {
return Result.error(client.getMessage());
}
} catch (Exception ex) {
ex.printStackTrace();
return Result.error(ex);
}
}
public String getParentKeyID() {
if (parentKey!=null) return parentKey.getKeyID();
else return parentkeyid;
}
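	// setting a parent key also adopts its authoritative keyserver and marks this key as changed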
public void setParentKey(MasterKey parent) {
unsavedChanges = true;
parentKey = parent;
parentkeyid = parent.getKeyID();
authoritativekeyserver = parent.authoritativekeyserver;
//authoritativekeyserverPort = parent.authoritativekeyserverPort;
}
public MasterKey getParentKey() {
return parentKey;
}
public void setLevel(int level) {
if (this instanceof RevokeKey && isSub()) {
super.setLevel(LEVEL_REVOKE);
} else {
throw new RuntimeException("ERROR not allowed to set level for SubKey");
}
}
public void setParentKeyID(String id) {
unsavedChanges = true;
parentkeyid = id;
parentKey = null;
}
}<|fim▁end|> | |
<|file_name|>crossfade.js<|end_file_name|><|fim▁begin|>//// Copyright (c) Microsoft Corporation. All rights reserved<|fim▁hole|>
(function () {
"use strict";
var page = WinJS.UI.Pages.define("/html/crossfade.html", {
ready: function (element, options) {
runAnimation.addEventListener("click", runCrossfadeAnimation, false);
element2.style.opacity = "0";
}
});
function runCrossfadeAnimation() {
var incoming;
var outgoing;
// Set incoming and outgoing elements
if (element1.style.opacity === "0") {
incoming = element1;
outgoing = element2;
} else {
incoming = element2;
outgoing = element1;
}
// Run crossfade animation
WinJS.UI.Animation.crossFade(incoming, outgoing);
}
})();<|fim▁end|> | |
<|file_name|>TelicProtocolDecoderTest.java<|end_file_name|><|fim▁begin|>package org.traccar.protocol;
import org.junit.Test;
import org.traccar.ProtocolTest;
public class TelicProtocolDecoderTest extends ProtocolTest {
@Test
public void testDecode() throws Exception {
TelicProtocolDecoder decoder = new TelicProtocolDecoder(new TelicProtocol());
verifyNull(decoder, text(
"0026355565071347499|206|01|001002008"));
verifyPosition(decoder, text(
"052028495198,160917073641,0,160917073642,43879,511958,3,24,223,17,,,-3,142379,,0010,00,64,205,0,0499"));
verifyPosition(decoder, text(
"01302849516,160917073503,0,160917073504,43907,512006,3,11,160,14,,,-7,141811,,0010,00,64,206,0,0499"));
verifyPosition(decoder, text(
"002135556507134749999,010817171138,0,010817171138,004560973,50667173,3,0,0,11,1,1,100,958071,20601,000000,00,4142,0000,0000,0208,10395,0"));
verifyPosition(decoder, text(
"442045993198,290317131935,0,290317131935,269158,465748,3,26,183,,,,184,85316567,226,01,00,68,218"));
verifyPosition(decoder, text(
"673091036017,290317131801,0,290317131801,262214,450536,3,40,199,8,,,154,19969553,,0011,00,59,240,0,0406"));
verifyPosition(decoder, text(
"092020621198,280317084155,0,280317084156,259762,444356,3,42,278,9,,,89,56793311,,0110,00,67,0,,0400"));
verifyPosition(decoder, text(
"502091227598,280317084149,0,280317084149,261756,444358,3,33,286,9,,,77,3143031,,0010,00,171,240,0,0406"));
verifyPosition(decoder, text(
"232027997498,230317083900,0,230317083900,260105,444112,3,22,259,,,,111,61110817,226,01,00,255,218,00000000000000"));
verifyPosition(decoder, text(
"072027997498,230317082635,0,230317082635,260332,444265,3,28,165,,,,124,61107582,226,01,00,255,219,00000000000000"));
verifyNull(decoder, text(
"0026203393|226|10|002004010"));<|fim▁hole|> verifyPosition(decoder, text(
"003020339325,190317083052,0,180317103127,259924,445133,3,0,0,9,,,93,12210141,,0010,00,40,240,0,0406"));
verifyNull(decoder, text(
"0026296218SCCE01_SCCE|226|10|0267"));
verifyNull(decoder, text(
"1242022592TTUV0100,0201,351266000022592,170403114305,0115859,480323,3,30,5,9,3,4,650,250000000,26202,1001,0001,211,233,111,0"));
verifyPosition(decoder, text(
"123002259213,170403114305,1234,170403114305,0115859,480323,3,30,5,9,3,4,650,250000000,26202,1001,0001,211,233,111,0,600"));
verifyNull(decoder, text(
"0044296218TLOC0267,00,011009000296218,190317083036,255178,445072,3,0,82,,,,168,14741296,,00,00,0,217"));
verifyPosition(decoder, text(
"003097061325,220616044200,0,220616044200,247169,593911,3,48,248,8,,,50,1024846,,1111,00,48,0,51,0406"));
verifyPosition(decoder, text(
"003097061325,210216112630,0,210216001405,246985,594078,3,0,283,12,,,23,4418669,,0010,00,117,0,0,0108"));
verifyPosition(decoder, text(
"592078222222,010100030200,0,240516133500,222222,222222,3,0,0,5,,,37,324,,1010,00,48,0,0,0406"));
verifyPosition(decoder, text(
"002017297899,220216111100,0,220216111059,014306446,46626713,3,7,137,7,,,448,266643,,0000,00,0,206,0,0407"));
verifyPosition(decoder, text(
"003097061325,210216112630,0,210216001405,246985,594078,3,0,283,12,,,23,4418669,,0010,00,117,0,0,0108"));
verifyNull(decoder, text(
"0026970613|248|01|004006011"));
verifyPosition(decoder, text(
"032097061399,210216112800,0,210216112759,246912,594076,3,47,291,10,,,46,4419290,,0010,00,100,0,0,0108"));
verifyPosition(decoder, text(
"002017297899,190216202500,0,190216202459,014221890,46492170,3,0,0,6,,,1034,43841,,0000,00,0,209,0,0407"));
verifyPosition(decoder, text(
"182043672999,010100001301,0,270613041652,166653,475341,3,0,355,6,2,1,231,8112432,23201,01,00,217,0,0,0,0,7"),
position("2013-06-27 04:16:52.000", true, 47.53410, 16.66530));
verifyPosition(decoder, text(
"182043672999,010100001301,0,270613041652,166653,475341,3,0,355,6,2,1,231,8112432,23201,01,00,217,0,0,0,0,7"));
}
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
a class to access the REST API of the website www.factuursturen.nl
"""
import collections
import ConfigParser
from datetime import datetime, date
import re
import requests
from os.path import expanduser
import copy
import urllib
__author__ = 'Reinoud van Leeuwen'
__copyright__ = "Copyright 2013, Reinoud van Leeuwen"
__license__ = "BSD"
__maintainer__ = "Reinoud van Leeuwen"
__email__ = "[email protected]"
CONVERTABLEFIELDS = {
'clients' : {'clientnr': 'int',
'showcontact': 'bool',
'tax_shifted': 'bool',
'lastinvoice': 'date',
'top': 'int',
'stddiscount': 'float',
'notes_on_invoice': 'bool',
'active': 'bool',
'default_email': 'int',
'timestamp': 'date'},
'products': {'id': 'int',
'price': 'float',
'taxes': 'int'},
'invoices': {'profile': 'int',
'discount': 'float',
'paymentperiod': 'int',
'collection': 'bool',
'tax': 'float',
'totalintax': 'float',
'sent': 'date',
'uncollectible': 'date',
'lastreminder': 'date',
'open': 'float',
'paiddate': 'date',
'duedate': 'date',
'overwrite_if_exist': 'bool',
'initialdate': 'date',
'finalsenddate': 'date'},
'invoices_payment': {'date': 'date'},
'invoices_saved': {'id': 'int',
'profile': 'int',
'discount': 'float',
'paymentperiod': 'int',
'totaldiscount': 'float',
'totalintax': 'float',
'clientnr': 'int'},
'invoices_repeated': {'id': 'int',
'profile': 'int',
'discount': 'float',
'paymentperiod': 'int',
'datesaved': 'date',
'totalintax': 'float',
'initialdate': 'date',
'nextsenddate': 'date',
'finalsenddate': 'date',
'clientnr': 'int'},
'profiles': {'id': 'int'},
'countrylist' : {'id': 'int'},
'taxes': {'percentage': 'int',
'default': 'bool'}
}
API = {'getters' : ['clients',
'products',
'invoices',
'invoices_saved',
'invoices_repeated',
'profiles',
'balance',
'countrylist',
'taxes'],
'single_getters' : ['invoices_pdf'],
'posters' : ['clients',
'products',
'invoices'],
'putters' : ['clients',
'products',
'invoices_payment'],
'deleters' : ['clients',
'products',
'invoices',
'invoices_saved',
'invoices_repeated']}
class FactuursturenError(Exception):
"""Base class for exceptions in this module."""
def __init__(self, value = ''):
self.value = value
def __str__(self):
return repr(self.value)
class FactuursturenGetError(FactuursturenError):
pass
class FactuursturenPostError(FactuursturenError):
pass
class FactuursturenWrongPostvalue(FactuursturenError):
pass
class FactuursturenWrongPutvalue(FactuursturenError):
pass
class FactuursturenEmptyResult(FactuursturenError):
pass
class FactuursturenNoAuth(FactuursturenError):
pass
class FactuursturenConversionError(FactuursturenError):
pass
class FactuursturenWrongCall(FactuursturenError):
pass
class FactuursturenNotFound(FactuursturenError):
pass
class FactuursturenNoMoreApiCalls(FactuursturenError):
pass
class Client:
"""
    client class to access www.factuursturen.nl through the REST API
"""
def __init__(self,
apikey='',
username='',
configsection='default',
host='www.factuursturen.nl',
protocol='https',
apipath='/api',
version='v0'):
"""
initialize object
<|fim▁hole|> When apikey and username are not present, look for INI-style file .factuursturen_rc
        in the current directory and in the home directory to find those values there.
        When only the username is given, look up the apikey in the config file section where that username is defined.
:param apikey: APIkey (string) as generated online on the website http://www.factuursturen.nl
:param username: accountname for the website
:param configsection: section in file ~/.factuursturen_rc where apikey and username should be present
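
        Example (hypothetical credentials)::

            client = Client(apikey='yourapikey', username='yourusername')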
"""
self._url = protocol + '://' + host + apipath + '/' + version + '/'
# try to read auth details from file when not passed
config = ConfigParser.RawConfigParser()
config.read(['.factuursturen_rc', expanduser('~/.factuursturen_rc')])
if (not apikey) and (not username):
try:
self._apikey = config.get(configsection, 'apikey')
self._username = config.get(configsection, 'username')
except ConfigParser.NoSectionError:
raise FactuursturenNoAuth ('key and username not given, nor found in .factuursturen_rc or ~/.factuursturen_rc')
except ConfigParser.NoOptionError:
raise FactuursturenNoAuth ('no complete auth found')
elif username and (not apikey):
            self._username = username
            self._apikey = None
for section in config.sections():
if config.get(section, 'username') == username:
self._apikey = config.get(section, 'apikey')
if not self._apikey:
raise FactuursturenNoAuth ('no apikey found for username {}'.format(username))
else:
if not (apikey and username):
raise FactuursturenNoAuth ('no complete auth passed to factuursturen.Client')
self._apikey = apikey
self._username = username
# remaining allowed calls to API
self._remaining = None
self._lastresponse = None
self._headers = {'content-type': 'application/json',
'accept': 'application/json'}
# keep a list of which functions can be used to convert the fields
# from and to a string
self._convertfunctions = {'fromstring': {'int': self._string2int,
'bool': self._string2bool,
'float': self._string2float,
'date': self._string2date},
'tostring': {'int': self._int2string,
'bool': self._bool2string,
'float': self._float2string,
'date': self._date2string}}
    # single-value conversion functions
def _string2int(self, string):
try:
return int(string)
except ValueError:
raise FactuursturenConversionError('cannot convert {} to int'.format(string))
def _string2bool(self, string):
return string.lower() in ("yes", "true", "t", "1")
def _string2float(self, string):
try:
return float(string)
except ValueError:
raise FactuursturenConversionError('cannot convert {} to float'.format(string))
def _string2date(self, string):
if string == '':
return None
try:
return datetime.strptime(string, '%Y-%m-%d')
except ValueError:
raise FactuursturenConversionError('cannot convert {} to date'.format(string))
def _int2string(self, number):
if not isinstance(number, int):
raise FactuursturenConversionError('number {} should be of type int'.format(number))
return str(number)
def _bool2string(self, booleanvalue):
        if not isinstance(booleanvalue, bool):
raise FactuursturenConversionError('booleanvalue should be of type bool')
return str(booleanvalue).lower()
def _float2string(self, number):
if not (isinstance(number, float) or (isinstance(number, int))):
raise FactuursturenConversionError('number {} should be of type float'.format(number))
return str(number)
def _date2string(self, date):
if not isinstance(date, datetime):
raise FactuursturenConversionError('date should be of type datetime')
return date.strftime("%Y-%m-%d")
def _convertstringfields_in_dict(self, adict, function, direction):
"""convert fields of a single dict either from or to strings
        fieldnames to convert are read from the CONVERTABLEFIELDS dict, which
        is in essence a data dictionary for this API
:param adict: dictionary to convert
:param function: callable function in the API ('clients', 'products' etc)
:param direction: either 'tostring' or 'fromstring'
"""
if direction not in self._convertfunctions:
raise FactuursturenWrongCall ('_convertstringfields_in_dict called with {}'.format(direction))
if function in CONVERTABLEFIELDS:
for key, value in adict.iteritems():
if key in CONVERTABLEFIELDS[function]:
# note: target is something like 'int'. Depending
# on conversion direction, this is the source or the target
target = CONVERTABLEFIELDS[function][key]
conversion_function = self._convertfunctions[direction][target]
try:
adict[key] = conversion_function(value)
                    except FactuursturenConversionError:
                        raise FactuursturenConversionError(
                            'cannot convert key {} with value {} ({}, target type {})'.format(
                                key, value, direction, target))
return adict
def _convertstringfields_in_list_of_dicts(self, alist, function, direction):
"""convert each dict in the list
Basically, a loop over the function _convertstringfields_in_dict
:param alist: a list of dicts
:param function: callable function in the API ('clients', 'products' etc)
:param direction: either 'tostring' or 'fromstring'
"""
if direction not in self._convertfunctions:
raise FactuursturenWrongCall ('_convertstringfields_in_list_of_dicts called with {}'.format(direction))
for index, entry in enumerate(alist):
alist[index] = self._convertstringfields_in_dict(alist[index], function, direction)
return alist
def _flatten(self, adict, parent_key=''):
"""flatten a nested dict
The API expects nested dicts to be flattened when posting
{'lines': {'line1': {'amount': 1,
'tax': 21},
'line2': {'amount': 2,
'tax': 21}
}
}
to
{'lines[line1][amount]': 1,
'lines[line1][tax]': 21,
'lines[line2][amount]': 2,
'lines[line2][tax]': 21
}
:param adict: a nested dict
:param parent_key: should be empty, used for recursion
"""
items = []
for k, v in adict.items():
new_key = parent_key + '[' + k + ']' if parent_key else k
if isinstance(v, collections.MutableMapping):
items.extend(self._flatten(v, new_key).items())
else:
items.append((new_key, v))
return dict(items)
def _fixkeynames(self, adict):
"""replace keynames in dict
replace keys like 'lines[line0][amount_desc]'
with 'lines[0][amount_desc]'
(keeping the same value)
:param adict: dictionary to be changed
"""
for key, val in adict.items():
            fields = re.split(r'\]\[', key)
            if len(fields) > 1:
                leftfields = re.split(r'\[', fields[0])
middlefield = re.sub("[^0-9]", "", leftfields[1])
newfield = leftfields[0] + '[' + middlefield + '][' + fields[1]
adict[newfield] = val
del adict[key]
return adict
def _prepare_for_send(self, adict, function):
"""fix dict so it can be posted
:param adict: dictionary to be posted
:param function: callable function from the API ('clients', 'products', etc)
"""
adict = self._convertstringfields_in_dict(adict, function, 'tostring')
adict = self._flatten(adict)
adict = self._fixkeynames(adict)
return adict
def _escape_characters(self, string):
"""escape unsafe webcharacters to use in API call
by default urllib considers '/' as safe, override the default for the second argument by
considering nothing safe
"""
return urllib.quote(str(string), safe='')
@property
def remaining(self):
"""return remaining allowed API calls (for this hour)"""
return self._remaining
@property
def ok(self):
"""return status of last call"""
return self._lastresponse
def post(self, function, objData):
"""Generic wrapper for all POSTable functions
        errors from the server during the POST (like wrong values) are propagated to the exception class
        :param function: callable function from the API ('clients', 'products', etc)
:param objData: data to be posted
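
        Example (hypothetical field values)::

            client.post('clients', {'contact': 'John Doe', 'showcontact': True})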
"""
fullUrl = self._url + function
objData_local = copy.deepcopy(objData)
if function not in API['posters']:
raise FactuursturenPostError("{function} not in available POSTable functions".format(function=function))
if isinstance(objData_local, dict):
objData_local = self._prepare_for_send(objData_local, function)
response = requests.post(fullUrl,
data=objData_local,
auth=(self._username, self._apikey))
self._lastresponse = response.ok
if response.ok:
self._remaining = int(response.headers['x-ratelimit-remaining'])
return response.content
else:
raise FactuursturenWrongPostvalue(response.content)
def put(self, function, objId, objData):
"""Generic wrapper for all PUTable functions
        errors from the server during the PUT (like wrong values) are propagated to the exception class
        :param function: callable function from the API ('clients', 'products', etc)
:param objId: id of object to be put (usually retrieved from the API)
:param objData: data to be posted. All required fields should be present, or the API will not accept the changes
"""
fullUrl = self._url + function + '/{objId}'.format(objId=self._escape_characters(objId))
if function not in API['putters']:
raise FactuursturenPostError("{function} not in available PUTable functions".format(function=function))
if isinstance(objData, dict):
objData = self._prepare_for_send(objData, function)
response = requests.put(fullUrl,
data=objData,
auth=(self._username, self._apikey))
self._lastresponse = response.ok
if response.ok:
self._remaining = int(response.headers['x-ratelimit-remaining'])
return
else:
raise FactuursturenWrongPutvalue(response.content)
def delete(self, function, objId):
"""Generic wrapper for all DELETEable functions
        errors from the server during the DELETE (like wrong values) are propagated to the exception class
        :param function: callable function from the API ('clients', 'products', etc)
        :param objId: id of the object to delete (usually retrieved from the API)
"""
fullUrl = self._url + function + '/{objId}'.format(objId=self._escape_characters(objId))
if function not in API['deleters']:
raise FactuursturenPostError("{function} not in available DELETEable functions".format(function=function))
response = requests.delete(fullUrl,
auth=(self._username, self._apikey))
self._lastresponse = response.ok
if response.ok:
self._remaining = int(response.headers['x-ratelimit-remaining'])
else:
raise FactuursturenError(response.content)
def get(self, function, objId=None):
"""Generic wrapper for all GETtable functions
when no objId is passed, retrieve all objects (in a list of dicts)
when objId is passed, only retrieve a single object (in a single dict)
        :param function: callable function from the API ('clients', 'products', etc)
        :param objId: id of the object to retrieve (usually obtained from the API)
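
        Example (hypothetical id)::

            all_clients = client.get('clients')
            one_client = client.get('clients', 1234)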
"""
# TODO: some errorchecking:
# - on function
# - on return
# - on network error
# - on password
# - on remaining allowed requests
fullUrl = self._url + function
# check function against self.getters and self.singleGetters
if function not in API['getters'] + API['single_getters']:
raise FactuursturenGetError("{function} not in available GETtable functions".format(function=function))
if objId:
fullUrl += '/{objId}'.format(objId=self._escape_characters(objId))
response = requests.get(fullUrl,
auth=(self._username, self._apikey),
headers=self._headers)
self._lastresponse = response.ok
        # when a single record is returned, the API wraps it in a dict keyed by the
        # singular function name (e.g. 'client' for a 'clients/<id>' call),
        # so access it through that key
singlefunction = function[:-1]
self._remaining = int(response.headers['x-ratelimit-remaining'])
if response.ok:
if function == 'invoices_pdf':
return response.content
try:
raw_structure = response.json()
if objId is None:
retval = self._convertstringfields_in_list_of_dicts(raw_structure, function, 'fromstring')
else:
retval = self._convertstringfields_in_dict(raw_structure[singlefunction], function, 'fromstring')
except FactuursturenError as error:
print error
retval = response.content
return retval
else:
# TODO: more checking
if response.status_code == 404:
raise FactuursturenNotFound (response.content)
elif self._remaining == 0:
raise FactuursturenNoMoreApiCalls ('limit of API calls reached.')
else:
raise FactuursturenEmptyResult (response.content)<|fim▁end|> | |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from rest_framework import routers
from . import views
router = routers.DefaultRouter(trailing_slash=False)
router.register(r'complaints', views.ComplaintViewSet)<|fim▁hole|><|fim▁end|> |
urlpatterns = router.urls |
<|file_name|>no-capture-arc.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or<|fim▁hole|>// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern: use of moved value
extern mod extra;
use extra::arc;
use std::task;
fn main() {
let v = ~[1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
let arc_v = arc::Arc::new(v);
do task::spawn() {
let v = arc_v.get();
assert_eq!(v[3], 4);
};
assert_eq!((arc_v.get())[2], 3);
info2!("{:?}", arc_v);
}<|fim▁end|> | // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license |
<|file_name|>trailing_zeros.rs<|end_file_name|><|fim▁begin|>use malachite_base::num::basic::integers::PrimitiveInt;
use malachite_base::num::conversion::traits::WrappingFrom;<|fim▁hole|>use platform::Limb;
/// Interpreting a slice of `Limb`s as the limbs of a `Natural` in ascending order, returns the
/// number of trailing zeros in the binary expansion of a `Natural` (equivalently, the multiplicity
/// of 2 in its prime factorization). The limbs cannot be empty or all zero.
///
/// Time: worst case O(n)
///
/// Additional memory: worst case O(1)
///
/// where n = `xs.len()`
///
/// # Panics
/// Panics if `xs` only contains zeros.
///
/// # Examples
/// ```
/// use malachite_nz::natural::logic::trailing_zeros::limbs_trailing_zeros;
///
/// assert_eq!(limbs_trailing_zeros(&[4]), 2);
/// assert_eq!(limbs_trailing_zeros(&[0, 4]), 34);
/// ```
#[doc(hidden)]
pub fn limbs_trailing_zeros(xs: &[Limb]) -> u64 {
let zeros = slice_leading_zeros(xs);
let remaining_zeros = TrailingZeros::trailing_zeros(xs[zeros]);
(u64::wrapping_from(zeros) << Limb::LOG_WIDTH) + remaining_zeros
}
impl Natural {
/// Returns the number of trailing zeros in the binary expansion of a `Natural` (equivalently,
/// the multiplicity of 2 in its prime factorization) or `None` is the `Natural` is 0.
///
/// Time: worst case O(n)
///
/// Additional memory: worst case O(1)
///
/// where n = `self.significant_bits()`
///
/// # Examples
/// ```
/// extern crate malachite_base;
/// extern crate malachite_nz;
///
/// use malachite_base::num::basic::traits::Zero;
/// use malachite_nz::natural::Natural;
///
/// assert_eq!(Natural::ZERO.trailing_zeros(), None);
/// assert_eq!(Natural::from(3u32).trailing_zeros(), Some(0));
/// assert_eq!(Natural::from(72u32).trailing_zeros(), Some(3));
/// assert_eq!(Natural::from(100u32).trailing_zeros(), Some(2));
/// assert_eq!(Natural::trillion().trailing_zeros(), Some(12));
/// ```
pub fn trailing_zeros(&self) -> Option<u64> {
match *self {
natural_zero!() => None,
Natural(Small(small)) => Some(TrailingZeros::trailing_zeros(small)),
Natural(Large(ref limbs)) => Some(limbs_trailing_zeros(limbs)),
}
}
}<|fim▁end|> | use malachite_base::num::logic::traits::TrailingZeros;
use malachite_base::slices::slice_leading_zeros;
use natural::InnerNatural::{Large, Small};
use natural::Natural; |
<|file_name|>fuzz_runner.rs<|end_file_name|><|fim▁begin|>// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
#![no_main]
use diem_fuzzer::FuzzTarget;
use libfuzzer_sys::fuzz_target;
use once_cell::sync::Lazy;
use std::process;
static FUZZ_TARGET: Lazy<FuzzTarget> = Lazy::new(|| {
match FuzzTarget::from_env() {
Ok(target) => target,
Err(err) => {
// Lazy behaves poorly with panics, so abort here.<|fim▁hole|> "*** [fuzz_runner] Error while determining fuzz target: {}",
err
);
process::abort();
}
}
});
fuzz_target!(|data: &[u8]| {
FUZZ_TARGET.fuzz(data);
});<|fim▁end|> | eprintln!( |
<|file_name|>cli.py<|end_file_name|><|fim▁begin|># Copyright 2017 Kevin Howell
#
# This file is part of sixoclock.
#
# sixoclock is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# sixoclock is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with sixoclock. If not, see <http://www.gnu.org/licenses/>.
import argparse
import humanize
import logging
import os.path
import time
from sixoclock.config import Configuration
from sixoclock.backends.file import FileBackend
from sixoclock.file import File
class Cli:
def __init__(self):
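        # user configuration is read from ~/.sixoclock.yml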
config = os.path.join(os.path.expanduser('~'), '.sixoclock.yml')
self.configuration = Configuration(config)
parser = argparse.ArgumentParser(description='Simple personal backups.')
parser.add_argument('--no-log', action='store_true', help='do not log')
parser.add_argument('--log-file', help='log file')
parser.set_defaults(function=lambda args: parser.print_usage(), log_file=None)
subparsers = parser.add_subparsers(title='commands')
backup_parser = subparsers.add_parser('backup', help='perform a backup')
backup_parser.add_argument('-c', '--collection', help='backup a specific collection')
backup_parser.add_argument('--dry-run', action='store_true', help='do not backup, show what would happen')
backup_parser.set_defaults(function=self.backup)
query_parser = subparsers.add_parser('query', help='find a file in configured sources or mirrors')
query_parser.add_argument('-c', '--collection', help='look only in a specific collection')
query_parser.add_argument('-m', '--mirror', help='look only in a specific mirror')
query_parser.add_argument('--path', help='relative path of the file')
query_parser.add_argument('--filename', help='base filename (ex. foo.txt)')
query_parser.add_argument('--file', help='file to use as a basis')
query_parser.add_argument('--md5', help='md5 hash')
query_parser.add_argument('--sha1', help='sha1 hash')
query_parser.add_argument('--sha256', help='sha256 hash')
query_parser.add_argument('--size', help='file size in bytes')
query_parser.set_defaults(function=self.query)
status_parser = subparsers.add_parser('status', help='show backup status')
status_parser.add_argument('-c', '--collection', help='show status of a specific collection')
status_parser.set_defaults(function=self.status)
refresh_parser = subparsers.add_parser('refresh-cache', help='refresh cache')
refresh_parser.add_argument('-c', '--collection', help='refresh mirror caches for a specific collection')
refresh_parser.add_argument('-m', '--mirror', help='refresh mirror caches for a specific mirror')
refresh_parser.add_argument('--rebuild', action='store_true', help='remove entries and rebuild the cache')
refresh_parser.set_defaults(function=self.refresh_cache)
for name, backend in self.configuration.backends.items():
if backend.has_subparser():
backend_parser = subparsers.add_parser(name, help='{} backend subcommands'.format(name))
backend.contribute_to_subparser(backend_parser)
self.parser = parser
def main(self):
args = self.parser.parse_args()
log_filename = args.log_file or 'sixoclock.{}.log'.format(int(time.time()))
if not args.no_log:
logging.basicConfig(filename=log_filename, level=logging.INFO)
args.function(args)
def backup(self, args):
for name, collection in self.configuration.collections.items():
            if args.collection and name != args.collection:
continue
print('Backing up collection: {}'.format(name))
actions = collection.backup(args.dry_run)
if args.dry_run:
for action in actions:
print('Would back up {} to {}'.format(action.file, action.destination))
def query(self, args):
filters = []
if args.path:
filters.append(File.path == args.path)
if args.file:
filebackend = FileBackend()
file = filebackend.get(args.file)
filters.append(File.sha1 == file.sha1)
filters.append(File.path.like('%/{}'.format(os.path.basename(args.file))))
if args.filename:
filters.append(File.path.like('%/{}'.format(args.filename)))<|fim▁hole|> filters.append(File.sha1 == args.sha1)
if args.sha256:
filters.append(File.sha256 == args.sha256)
if args.size:
filters.append(File.size == args.size)
collections = self.configuration.collections.values()
if args.collection:
collections = [self.configuration.collections[args.collection]]
if args.mirror:
filters.append(File.mirror_uri == args.mirror)
for collection in collections:
collection.refresh_cache()
for match in collection.query(*filters):
print('Match: {}'.format(match.uri))
def status(self, args):
for name, collection in self.configuration.collections.items():
if args.collection and name != args.collection:
continue
print('Collection: {}'.format(name))
stats = collection.stats()
print(' # Source files: {}'.format(stats.source_file_count))
size = humanize.naturalsize(stats.size)
percentage = 100.0
if stats.size > 0:
                percentage = 100.0 * stats.backed_up_size / stats.size
print(' Total size: {}, {}% backed up'.format(size, percentage))
def refresh_cache(self, args):
for name, collection in self.configuration.collections.items():
if args.collection and name != args.collection:
continue
            collection.refresh_cache(mirror=args.mirror, reset=args.rebuild)<|fim▁end|> | if args.md5:
filters.append(File.md5 == args.md5)
if args.sha1: |
<|file_name|>issue-21361.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn main() {
let v = vec![1, 2, 3];
let boxed: Box<Iterator<Item=i32>> = Box::new(v.into_iter());
assert_eq!(boxed.max(), Some(3));
let v = vec![1, 2, 3];
let boxed: &mut Iterator<Item=i32> = &mut v.into_iter();<|fim▁hole|><|fim▁end|> | assert_eq!(boxed.max(), Some(3));
} |
<|file_name|>RegionWorld.java<|end_file_name|><|fim▁begin|>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package net.projectzombie.regionrotation.modules;
import com.sk89q.worldguard.bukkit.WGBukkit;
import com.sk89q.worldedit.LocalWorld;
import com.sk89q.worldguard.protection.managers.RegionManager;
import org.bukkit.Bukkit;
import org.bukkit.World;
import java.util.UUID;
/**
* Parent class for modules to store their world and respective WorldGuard
* region manager.
*
* @author Jesse Bannon ([email protected])
*/
public abstract class RegionWorld
{<|fim▁hole|> {
this.worldUID = worldUID;
this.isValid = this.getWorld() != null && getRegionManager() != null;
}
/** @return Whether the object is valid for use or not. */
protected boolean isValid() { return this.isValid; }
protected UUID getWorldUID() { return this.worldUID; }
protected World getWorld() { return Bukkit.getWorld(worldUID); }
protected LocalWorld getLocalWorld() { return com.sk89q.worldedit.bukkit.BukkitUtil.getLocalWorld(this.getWorld()); }
protected RegionManager getRegionManager() { return WGBukkit.getRegionManager(getWorld()); }
}<|fim▁end|> | private final UUID worldUID;
private final boolean isValid;
protected RegionWorld(final UUID worldUID) |
<|file_name|>sysoptions.py<|end_file_name|><|fim▁begin|>"""Options list for system config."""
import os
from collections import OrderedDict
from lutris import runners
from lutris.util import display, system
def get_optirun_choices():
"""Return menu choices (label, value) for Optimus"""
choices = [("Off", "off")]
if system.find_executable("primusrun"):
choices.append(("primusrun", "primusrun"))
if system.find_executable("optirun"):
choices.append(("optirun/virtualgl", "optirun"))
return choices
system_options = [ # pylint: disable=invalid-name
{
"option": "game_path",
"type": "directory_chooser",
"label": "Default installation folder",
"default": os.path.expanduser("~/Games"),
"scope": ["runner", "system"],
"help": "The default folder where you install your games."
},
{
"option": "disable_runtime",
"type": "bool",
"label": "Disable Lutris Runtime",
"default": False,
"help": (
"The Lutris Runtime loads some libraries before running the "
"game. Which can cause some incompatibilities in some cases. "
"Check this option to disable it."
),
},
{
"option": "prefer_system_libs",
"type": "bool",
"label": "Prefer system libraries",
"default": True,
"help": (
"When the runtime is enabled, prioritize the system libraries"
" over the provided ones."
),
},
{
"option": "reset_desktop",
"type": "bool",
"label": "Restore resolution on game exit",
"default": False,
"help": (
"Some games don't restore your screen resolution when \n"
"closed or when they crash. This is when this option comes \n"
"into play to save your bacon."
),
},
{
"option": "single_cpu",
"type": "bool",
"label": "Restrict to single core",
"advanced": True,
"default": False,
"help": "Restrict the game to a single CPU core.",
},
{
"option": "restore_gamma",
"type": "bool",
"default": False,
"label": "Restore gamma on game exit",
"advanced": True,
"help": (
"Some games don't correctly restores gamma on exit, making "
"your display too bright. Select this option to correct it."
),
},
{
"option": "disable_compositor",
"label": "Disable desktop effects",
"type": "bool",
"default": False,
"advanced": True,
"help": (
"Disable desktop effects while game is running, "
"reducing stuttering and increasing performance"
),
},
{
"option": "reset_pulse",
"type": "bool",
"label": "Reset PulseAudio",
"default": False,
"advanced": True,
"condition": system.find_executable("pulseaudio"),
"help": "Restart PulseAudio before launching the game.",
},
{
"option": "pulse_latency",
"type": "bool",
"label": "Reduce PulseAudio latency",
"default": False,
"advanced": True,
"condition": system.find_executable("pulseaudio"),
"help": (
"Set the environment variable PULSE_LATENCY_MSEC=60 "
"to improve audio quality on some games"
),
},
{
"option": "use_us_layout",
"type": "bool",
"label": "Switch to US keyboard layout",
"default": False,
"advanced": True,
"help": "Switch to US keyboard qwerty layout while game is running",
},
{
"option": "optimus",
"type": "choice",
"default": "off",
"choices": get_optirun_choices,
"label": "Optimus launcher (NVIDIA Optimus laptops)",
"advanced": True,
"help": (
"If you have installed the primus or bumblebee packages, "
"select what launcher will run the game with the command, "
"activating your NVIDIA graphic chip for high 3D "
"performance. primusrun normally has better performance, but"
"optirun/virtualgl works better for more games."
),
},
{
"option": "fps_limit",
"type": "string",
"size": "small",
"label": "Fps limit",
"advanced": True,
"condition": bool(system.find_executable("strangle")),
"help": "Limit the game's fps to desired number",
},
{
"option": "gamemode",
"type": "bool",
"default": system.LINUX_SYSTEM.is_feature_supported("GAMEMODE"),
"condition": system.LINUX_SYSTEM.is_feature_supported("GAMEMODE"),
"label": "Enable Feral gamemode",
"help": "Request a set of optimisations be temporarily applied to the host OS",
},
{
"option": "dri_prime",
"type": "bool",
"default": False,
"condition": display.USE_DRI_PRIME,
"label": "Use PRIME (hybrid graphics on laptops)",
"advanced": True,
"help": (
"If you have open source graphic drivers (Mesa), selecting this "
"option will run the game with the 'DRI_PRIME=1' environment variable, "
"activating your discrete graphic chip for high 3D "
"performance."
),
},
{
"option": "sdl_video_fullscreen",
"type": "choice",
"label": "SDL 1.2 Fullscreen Monitor",
"choices": display.get_output_list,
"default": "off",
"advanced": True,
"help": (
"Hint SDL 1.2 games to use a specific monitor when going "
"fullscreen by setting the SDL_VIDEO_FULLSCREEN "
"environment variable"
),
},
{
"option": "display",
"type": "choice",
"label": "Turn off monitors except",
"choices": display.get_output_choices,
"default": "off",
"advanced": True,
"help": (
"Only keep the selected screen active while the game is "
"running. \n"
"This is useful if you have a dual-screen setup, and are \n"
"having display issues when running a game in fullscreen."
),
},
{
"option": "resolution",
"type": "choice",
"label": "Switch resolution to",
"choices": display.get_resolution_choices,
"default": "off",
"help": "Switch to this screen resolution while the game is running.",
},
{
"option": "terminal",
"label": "Run in a terminal",
"type": "bool",
"default": False,
"advanced": True,
"help": "Run the game in a new terminal window.",
},
{
"option": "terminal_app",
"label": "Terminal application",
"type": "choice_with_entry",
"choices": system.get_terminal_apps,
"default": system.get_default_terminal(),
"advanced": True,
"help": (
"The terminal emulator to be run with the previous option."
"Choose from the list of detected terminal apps or enter "
"the terminal's command or path."
"Note: Not all terminal emulators are guaranteed to work."
),
},
{
"option": "env",
"type": "mapping",
"label": "Environment variables",
"help": "Environment variables loaded at run time",
},
{
"option": "prefix_command",
"type": "string",
"label": "Command prefix",
"advanced": True,
"help": (
"Command line instructions to add in front of the game's "
"execution command."
),
},
{
"option": "manual_command",
"type": "file",
"label": "Manual command",
"advanced": True,
"help": ("Script to execute from the game's contextual menu"),
},
{
"option": "prelaunch_command",
"type": "file",
"label": "Pre-launch command",
"advanced": True,
"help": "Script to execute before the game starts",
},
{
"option": "prelaunch_wait",
"type": "bool",
"label": "Wait for pre-launch command completion",
"advanced": True,
"default": False,
"help": "Run the game only once the pre-launch command has exited",
},
{
"option": "postexit_command",
"type": "file",
"label": "Post-exit command",
"advanced": True,
"help": "Script to execute when the game exits",
},
{
"option": "include_processes",
"type": "string",
"label": "Include processes",
"advanced": True,
"help": (
"What processes to include in process monitoring. "
"This is to override the built-in exclude list.\n"
"Space-separated list, processes including spaces "
"can be wrapped in quotation marks."
),
},
{
"option": "exclude_processes",
"type": "string",
"label": "Exclude processes",
"advanced": True,
"help": (
"What processes to exclude in process monitoring. "
"For example background processes that stick around "
"after the game has been closed.\n"
"Space-separated list, processes including spaces "
"can be wrapped in quotation marks."
),
},
{
"option": "killswitch",
"type": "string",
"label": "Killswitch file",
"advanced": True,
"help": (
"Path to a file which will stop the game when deleted \n"
"(usually /dev/input/js0 to stop the game on joystick "
"unplugging)"
),
},
{
"option": "xboxdrv",
"type": "string",
"label": "xboxdrv config",
"advanced": True,
"condition": system.find_executable("xboxdrv"),
"help": (
"Command line options for xboxdrv, a driver for XBOX 360 "
"controllers. Requires the xboxdrv package installed."
),
},
{
"option": "sdl_gamecontrollerconfig",
"type": "string",
"label": "SDL2 gamepad mapping",
"advanced": True,
"help": (
"SDL_GAMECONTROLLERCONFIG mapping string or path to a custom "
"gamecontrollerdb.txt file containing mappings."
),
},
{<|fim▁hole|> "option": "xephyr",
"label": "Use Xephyr",
"type": "choice",
"choices": (
("Off", "off"),
("8BPP (256 colors)", "8bpp"),
("16BPP (65536 colors)", "16bpp"),
("24BPP (16M colors)", "24bpp"),
),
"default": "off",
"advanced": True,
"help": "Run program in Xephyr to support 8BPP and 16BPP color modes",
},
{
"option": "xephyr_resolution",
"type": "string",
"label": "Xephyr resolution",
"advanced": True,
"help": "Screen resolution of the Xephyr server",
},
{
"option": "xephyr_fullscreen",
"type": "bool",
"label": "Xephyr Fullscreen",
"default": True,
"advanced": True,
"help": "Open Xephyr in fullscreen (at the desktop resolution)",
},
]
def with_runner_overrides(runner_slug):
"""Return system options updated with overrides from given runner."""
options = system_options
try:
runner = runners.import_runner(runner_slug)
except runners.InvalidRunner:
return options
if not getattr(runner, "system_options_override"):
runner = runner()
if runner.system_options_override:
opts_dict = OrderedDict((opt["option"], opt) for opt in options)
for option in runner.system_options_override:
key = option["option"]
if opts_dict.get(key):
opts_dict[key] = opts_dict[key].copy()
opts_dict[key].update(option)
else:
opts_dict[key] = option
        options = list(opts_dict.values())
return options<|fim▁end|> | |
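
# Example (hypothetical runner slug): fetch the option list with wine-specific
# overrides merged in:
#   wine_options = with_runner_overrides('wine')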
<|file_name|>javascript.py<|end_file_name|><|fim▁begin|>import os
import subprocess
import shutil
karma = os.path.join(os.path.dirname(__file__), '../node_modules/.bin/karma')
def javascript_tests():
if not shutil.which('nodejs'):
print("W: nodejs not available, skipping javascript tests")
return 0
elif os.path.exists(karma):
chrome_exec = shutil.which('chromium') or shutil.which('chromium-browser')
if chrome_exec:
os.environ["CHROME_BIN"] = chrome_exec
else:
print("Please install a chromium browser package in order"
"to run javascript unit tests.")
return 2
return subprocess.call(
[karma, "start", "test/karma.conf.js", "--single-run"]
)
else:
print("I: skipping javascript test (karma not available)")
return 0<|fim▁hole|>if __name__ == "__main__":
import sys
sys.exit(javascript_tests())<|fim▁end|> | |
<|file_name|>hub.js<|end_file_name|><|fim▁begin|>'use strict';
var https = require('https');
var q = require('q');
function logError(error) {
console.log("[Lightify] Error!");
if (error.statusMessage) {
console.log("[Lightify] HTTP Error: " + error.statusCode + " - " + error.statusMessage);
console.log("[Lightify] HTTP Headers: ");
console.log(error.headers);
}
else {
console.log(error);
}
}
var internal = {
// global Osram Lightify constants
lightifyBaseUrl : "us.lightify-api.org",
lightifySessionUrl : "/lightify/services/session",
lightifyDevicesUrl : "/lightify/services/devices/",
deviceSerialNumber: undefined,
username: undefined,
password: undefined,
securityToken: undefined,
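    // POST the stored credentials to the session service and cache the returned security token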
getSecurityToken : function () {
var postData = JSON.stringify({
username: this.username,
password: this.password,
serialNumber: this.deviceSerialNumber
});
return this.makeLightifyRequest(this.lightifyBaseUrl, this.lightifySessionUrl, false, false, 'POST', postData)
.then((data) => {
this.securityToken = data.securityToken;
return data;
});
},
getDevices : function () {
return this.makeLightifyRequest(this.lightifyBaseUrl, this.lightifyDevicesUrl, this.securityToken, false, 'GET', undefined);
},
makeLightifyRequest : function (url, path, securityToken, returnRawBody, method, content) {
console.log("[Lightify] -------------------------")
console.log("[Lightify] method : " + method);
console.log("[Lightify] makeLightifyRequest: " + url)
console.log("[Lightify] path : " + path);
console.log("[Lightify] securityToken : " + securityToken);
var deferred = q.defer();
var requestOptions = {
protocol: 'https:',
host: url,
path: path,
method: method,
headers: {}
};
if (securityToken) {
requestOptions.headers['Authorization'] = securityToken;
}
else if (content) {
requestOptions.headers['Content-Type'] = 'application/json';
requestOptions.headers['Content-Length'] = content.length;
}
var request = https.request(requestOptions);
request.on('response', function(response) {
var body = '';
response.setEncoding('utf8');
response.on('data', function(data) {
body += data;
});
response.on('end', function() {
if (response.statusCode != 200) {
deferred.reject(new Error("Invalid HTTP response: " + response.statusCode + " - " + response.statusMessage));
} else {
if (returnRawBody) {
deferred.resolve(body);
}
else {
var parsedBody = JSON.parse(body);
deferred.resolve(parsedBody);
}
}
});
response.on('error', function(e) {
deferred.reject(e);
});
});
request.on('error', (e) => {
deferred.reject(e);
});
if (content) {
request.write(content);
}
request.end();
return deferred.promise;
},
};
module.exports = {
inputNeeded : [
{
type: 'input',
name: 'deviceSerialNumber',
message: 'Osram Lightify Device Serial Number (alpha-numeric without trailing -XX): ',
validate: function(value) {
var pass = !!value;
if (pass) {
return true;
} else {
return 'Please enter a valid Serial Number.';
}
}
},
{
type: 'input',
name: 'username',
message: 'Osram Lightify User Name (create this in the Osram Lightify app): ',
validate: function(value) {
var pass = !!value;
if (pass) {
return true;
} else {
return 'Please enter a valid User Name.';
}
}
},
{
type: 'password',
name: 'password',
message: 'Osram Lightify Password (create this in the Osram Lightify app): ',
validate: function(value) {
var pass = !!value;
if (pass) {
return true;
} else {
return 'Please enter a valid Password.';
}
}
}
],
// devices list as returned by the server
devices: undefined,
// the is the serial number of the Lightify hub device
deviceSerialNumber: undefined,
// this is the oauth security token for the current session
securityToken: undefined,
// authorizes to the server based on the properties saved in the object
authorize: function() {
// get initial set of authorization info, including account server
return internal.getSecurityToken().then((data) => {
// get the list of devices
return internal.getDevices().then((devices) => {
this.securityToken = data.securityToken;
this.devices = devices;
this.deviceSerialNumber = internal.deviceSerialNumber;
return devices;
})<|fim▁hole|> });
},
// connects to the hub based on the answers provided
connect : function(answers) {
var deferred = q.defer();
console.log("[Lightify] connect");
        if (!answers.deviceSerialNumber || !answers.username || !answers.password) {
logError("Invalid input");
setImmediate(function () { deferred.reject(new Error("Invalid input")); });
}
else {
internal.username = answers.username;
internal.password = answers.password;
internal.deviceSerialNumber = answers.deviceSerialNumber;
this.authorize().then(function (data) {
deferred.resolve(data);
},
function (error) {
logError(error);
deferred.reject(error);
});
}
return deferred.promise;
},
printDevices: function (devices) {
devices.forEach(function(device) {
console.log(device.name + " (" + device.deviceId + ") - " + device.deviceType);
});
}
};<|fim▁end|> | |
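
// Example usage (hypothetical values; connect() resolves with the device list):
//   var hub = require('./hub');
//   hub.connect({ deviceSerialNumber: 'OSR0123456', username: 'user@example.com', password: 'secret' })
//      .then(hub.printDevices, console.error);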
<|file_name|>spec.rs<|end_file_name|><|fim▁begin|>// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Spec deserialization.
use std::io::Read;
use serde_json;
use serde_json::Error;
use spec::{Params, Genesis, Engine, State};
/// Spec deserialization.
#[derive(Debug, PartialEq, Deserialize)]
pub struct Spec {
/// Spec name.
pub name: String,
/// Engine.
pub engine: Engine,
/// Spec params.
pub params: Params,
/// Genesis header.
pub genesis: Genesis,
/// Genesis state.
pub accounts: State,
/// Boot nodes.
pub nodes: Option<Vec<String>>,
}
impl Spec {
/// Loads test from json.
pub fn load<R>(reader: R) -> Result<Self, Error> where R: Read {
serde_json::from_reader(reader)
}
}
#[cfg(test)]
mod tests {
use serde_json;
use spec::spec::Spec;
#[test]
fn spec_deserialization() {
let s = r#"{
"name": "Morden",
"engine": {
"Ethash": {
"params": {
"tieBreakingGas": false,
"gasLimitBoundDivisor": "0x0400",
"minimumDifficulty": "0x020000",
"difficultyBoundDivisor": "0x0800",
"durationLimit": "0x0d",
"blockReward": "0x4563918244F40000",
"registrar" : "0xc6d9d2cd449a754c494264e1809c50e34d64562b"
}
}
},
"params": {
"accountStartNonce": "0x0100000",<|fim▁hole|> "frontierCompatibilityModeLimit": "0x789b0",
"maximumExtraDataSize": "0x20",
"minGasLimit": "0x1388",
"networkID" : "0x2"
},
"genesis": {
"seal": {
"ethereum": {
"mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000",
"nonce": "0x00006d6f7264656e"
}
},
"difficulty": "0x20000",
"author": "0x0000000000000000000000000000000000000000",
"timestamp": "0x00",
"parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000",
"extraData": "0x",
"gasLimit": "0x2fefd8"
},
"nodes": [
"enode://b1217cbaa440e35ed471157123fe468e19e8b5ad5bedb4b1fdbcbdab6fb2f5ed3e95dd9c24a22a79fdb2352204cea207df27d92bfd21bfd41545e8b16f637499@104.44.138.37:30303"
],
"accounts": {
"0000000000000000000000000000000000000001": { "balance": "1", "nonce": "1048576", "builtin": { "name": "ecrecover", "pricing": { "linear": { "base": 3000, "word": 0 } } } },
"0000000000000000000000000000000000000002": { "balance": "1", "nonce": "1048576", "builtin": { "name": "sha256", "pricing": { "linear": { "base": 60, "word": 12 } } } },
"0000000000000000000000000000000000000003": { "balance": "1", "nonce": "1048576", "builtin": { "name": "ripemd160", "pricing": { "linear": { "base": 600, "word": 120 } } } },
"0000000000000000000000000000000000000004": { "balance": "1", "nonce": "1048576", "builtin": { "name": "identity", "pricing": { "linear": { "base": 15, "word": 3 } } } },
"102e61f5d8f9bc71d0ad4a084df4e65e05ce0e1c": { "balance": "1606938044258990275541962092341162602522202993782792835301376", "nonce": "1048576" }
}
}"#;
let _deserialized: Spec = serde_json::from_str(s).unwrap();
// TODO: validate all fields
}
}<|fim▁end|> | |
<|file_name|>SerializableForwardingInjectionPoint.java<|end_file_name|><|fim▁begin|>/*
* JBoss, Home of Professional Open Source
* Copyright 2012, Red Hat, Inc., and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software<|fim▁hole|> */
package org.jboss.weld.util.bean;
import java.io.Serializable;
import javax.enterprise.inject.spi.InjectionPoint;
import org.jboss.weld.injection.ForwardingInjectionPoint;
import org.jboss.weld.serialization.InjectionPointHolder;
public class SerializableForwardingInjectionPoint extends ForwardingInjectionPoint implements Serializable {
private static final long serialVersionUID = 7803445899943317029L;
private final InjectionPointHolder ip;
public SerializableForwardingInjectionPoint(String contextId, InjectionPoint ip) {
this.ip = new InjectionPointHolder(contextId, ip);
}
@Override
protected InjectionPoint delegate() {
return ip.get();
}
}<|fim▁end|> | * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. |
<|file_name|>static-method-on-struct-and-enum.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-android: FIXME(#10381)
// compile-flags:-g
// debugger:rbreak zzz
// debugger:run
// STRUCT
// debugger:finish
// debugger:print arg1
// check:$1 = 1
// debugger:print arg2
// check:$2 = 2
// debugger:continue
// ENUM
// debugger:finish
// debugger:print arg1
// check:$3 = -3
// debugger:print arg2
// check:$4 = 4.5
// debugger:print arg3
// check:$5 = 5
// debugger:continue
#[feature(struct_variant)];
struct Struct {
x: int
}
impl Struct {
fn static_method(arg1: int, arg2: int) -> int {
zzz();
arg1 + arg2
}
}
enum Enum {
Variant1 { x: int },
Variant2,
Variant3(f64, int, char),
}
impl Enum {
fn static_method(arg1: int, arg2: f64, arg3: uint) -> int {
zzz();
arg1<|fim▁hole|> Struct::static_method(1, 2);
Enum::static_method(-3, 4.5, 5);
}
fn zzz() {()}<|fim▁end|> | }
}
fn main() { |
<|file_name|>riotwatcher.py<|end_file_name|><|fim▁begin|>from collections import deque
import time
import requests
# Constants
BRAZIL = 'br'
EUROPE_NORDIC_EAST = 'eune'
EUROPE_WEST = 'euw'
KOREA = 'kr'
LATIN_AMERICA_NORTH = 'lan'
LATIN_AMERICA_SOUTH = 'las'
NORTH_AMERICA = 'na'
OCEANIA = 'oce'
RUSSIA = 'ru'
TURKEY = 'tr'
# Platforms
platforms = {
BRAZIL: 'BR1',
EUROPE_NORDIC_EAST: 'EUN1',
EUROPE_WEST: 'EUW1',
KOREA: 'KR',
LATIN_AMERICA_NORTH: 'LA1',
LATIN_AMERICA_SOUTH: 'LA2',
NORTH_AMERICA: 'NA1',
OCEANIA: 'OC1',
RUSSIA: 'RU',
TURKEY: 'TR1'
}
queue_types = [
'CUSTOM', # Custom games
'NORMAL_5x5_BLIND', # Normal 5v5 blind pick
'BOT_5x5', # Historical Summoners Rift coop vs AI games
'BOT_5x5_INTRO', # Summoners Rift Intro bots
'BOT_5x5_BEGINNER', # Summoner's Rift Coop vs AI Beginner Bot games
'BOT_5x5_INTERMEDIATE', # Historical Summoner's Rift Coop vs AI Intermediate Bot games
'NORMAL_3x3', # Normal 3v3 games
'NORMAL_5x5_DRAFT', # Normal 5v5 Draft Pick games
'ODIN_5x5_BLIND', # Dominion 5v5 Blind Pick games
'ODIN_5x5_DRAFT', # Dominion 5v5 Draft Pick games
'BOT_ODIN_5x5', # Dominion Coop vs AI games
'RANKED_SOLO_5x5', # Ranked Solo 5v5 games
'RANKED_PREMADE_3x3', # Ranked Premade 3v3 games
'RANKED_PREMADE_5x5', # Ranked Premade 5v5 games
'RANKED_TEAM_3x3', # Ranked Team 3v3 games
'RANKED_TEAM_5x5', # Ranked Team 5v5 games
'BOT_TT_3x3', # Twisted Treeline Coop vs AI games
'GROUP_FINDER_5x5', # Team Builder games
'ARAM_5x5', # ARAM games
'ONEFORALL_5x5', # One for All games
'FIRSTBLOOD_1x1', # Snowdown Showdown 1v1 games
'FIRSTBLOOD_2x2', # Snowdown Showdown 2v2 games
'SR_6x6', # Hexakill games
'URF_5x5', # Ultra Rapid Fire games
'BOT_URF_5x5', # Ultra Rapid Fire games played against AI games
'NIGHTMARE_BOT_5x5_RANK1', # Doom Bots Rank 1 games
'NIGHTMARE_BOT_5x5_RANK2', # Doom Bots Rank 2 games
'NIGHTMARE_BOT_5x5_RANK5', # Doom Bots Rank 5 games
'ASCENSION_5x5', # Ascension games
'HEXAKILL', # 6v6 games on twisted treeline
'KING_PORO_5x5', # King Poro game games
'COUNTER_PICK', # Nemesis games,
'BILGEWATER_5x5', # Black Market Brawlers games
]
game_maps = [
{'map_id': 1, 'name': "Summoner's Rift", 'notes': "Summer Variant"},
{'map_id': 2, 'name': "Summoner's Rift", 'notes': "Autumn Variant"},
{'map_id': 3, 'name': "The Proving Grounds", 'notes': "Tutorial Map"},
{'map_id': 4, 'name': "Twisted Treeline", 'notes': "Original Version"},
{'map_id': 8, 'name': "The Crystal Scar", 'notes': "Dominion Map"},
{'map_id': 10, 'name': "Twisted Treeline", 'notes': "Current Version"},
{'map_id': 11, 'name': "Summoner's Rift", 'notes': "Current Version"},
{'map_id': 12, 'name': "Howling Abyss", 'notes': "ARAM Map"},
{'map_id': 14, 'name': "Butcher's Bridge", 'notes': "ARAM Map"},
]
game_modes = [
'CLASSIC', # Classic Summoner's Rift and Twisted Treeline games
'ODIN', # Dominion/Crystal Scar games
'ARAM', # ARAM games
'TUTORIAL', # Tutorial games
'ONEFORALL', # One for All games
'ASCENSION', # Ascension games
'FIRSTBLOOD', # Snowdown Showdown games
'KINGPORO', # King Poro games
]
game_types = [
'CUSTOM_GAME', # Custom games
'TUTORIAL_GAME', # Tutorial games
'MATCHED_GAME', # All other games
]
sub_types = [
'NONE', # Custom games
'NORMAL', # Summoner's Rift unranked games
'NORMAL_3x3', # Twisted Treeline unranked games
'ODIN_UNRANKED', # Dominion/Crystal Scar games
'ARAM_UNRANKED_5v5', # ARAM / Howling Abyss games
'BOT', # Summoner's Rift and Crystal Scar games played against AI
'BOT_3x3', # Twisted Treeline games played against AI
'RANKED_SOLO_5x5', # Summoner's Rift ranked solo queue games
'RANKED_TEAM_3x3', # Twisted Treeline ranked team games
'RANKED_TEAM_5x5', # Summoner's Rift ranked team games
'ONEFORALL_5x5', # One for All games
'FIRSTBLOOD_1x1', # Snowdown Showdown 1x1 games
'FIRSTBLOOD_2x2', # Snowdown Showdown 2x2 games
'SR_6x6', # Hexakill games
'CAP_5x5', # Team Builder games
'URF', # Ultra Rapid Fire games
'URF_BOT', # Ultra Rapid Fire games against AI
'NIGHTMARE_BOT', # Nightmare bots
'ASCENSION', # Ascension games
'HEXAKILL', # Twisted Treeline 6x6 Hexakill
'KING_PORO', # King Poro games
'COUNTER_PICK', # Nemesis games
'BILGEWATER', # Black Market Brawlers games
]
player_stat_summary_types = [
'Unranked', # Summoner's Rift unranked games
'Unranked3x3', # Twisted Treeline unranked games
'OdinUnranked', # Dominion/Crystal Scar games
'AramUnranked5x5', # ARAM / Howling Abyss games
'CoopVsAI', # Summoner's Rift and Crystal Scar games played against AI
'CoopVsAI3x3', # Twisted Treeline games played against AI
'RankedSolo5x5', # Summoner's Rift ranked solo queue games
'RankedTeams3x3', # Twisted Treeline ranked team games
'RankedTeams5x5', # Summoner's Rift ranked team games
'OneForAll5x5', # One for All games
'FirstBlood1x1', # Snowdown Showdown 1x1 games
'FirstBlood2x2', # Snowdown Showdown 2x2 games
'SummonersRift6x6', # Hexakill games
'CAP5x5', # Team Builder games
'URF', # Ultra Rapid Fire games
'URFBots', # Ultra Rapid Fire games played against AI
'NightmareBot', # Summoner's Rift games played against Nightmare AI
'Hexakill', # Twisted Treeline 6x6 Hexakill games
'KingPoro', # King Poro games
'CounterPick', # Nemesis games
'Bilgewater', # Black Market Brawlers games
]
solo_queue, ranked_5s, ranked_3s = 'RANKED_SOLO_5x5', 'RANKED_TEAM_5x5', 'RANKED_TEAM_3x3'
api_versions = {
'champion': 1.2,
'current-game': 1.0,
'featured-games': 1.0,
'game': 1.3,
'league': 2.5,
'lol-static-data': 1.2,
'lol-status': 1.0,
'match': 2.2,
'matchhistory': 2.2,
'matchlist': 2.2,
'stats': 1.3,
'summoner': 1.4,
'team': 2.4
}
class LoLException(Exception):
def __init__(self, error, response):
self.error = error
self.response = response
def __str__(self):
return self.error
error_400 = "Bad request"
error_401 = "Unauthorized"
error_404 = "Game data not found"
error_429 = "Too many requests"
error_500 = "Internal server error"
error_503 = "Service unavailable"
def raise_status(response):
if response.status_code == 400:
raise LoLException(error_400, response)
elif response.status_code == 401:
raise LoLException(error_401, response)
elif response.status_code == 404:
raise LoLException(error_404, response)
elif response.status_code == 429:
raise LoLException(error_429, response)
elif response.status_code == 500:
raise LoLException(error_500, response)
elif response.status_code == 503:
raise LoLException(error_503, response)
else:
response.raise_for_status()
class RateLimit:
def __init__(self, allowed_requests, seconds):
self.allowed_requests = allowed_requests
self.seconds = seconds
self.made_requests = deque()
def __reload(self):
t = time.time()
while len(self.made_requests) > 0 and self.made_requests[0] < t:
self.made_requests.popleft()
def add_request(self):
self.made_requests.append(time.time() + self.seconds)
def request_available(self):
self.__reload()
return len(self.made_requests) < self.allowed_requests
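# Illustrative sketch (not part of the library): with the deque-based window
# above, a development-key limit of 10 requests per 10 seconds can be
# respected like so:
#
#   lim = RateLimit(10, 10)
#   if lim.request_available():
#       lim.add_request()  # record the request just before issuing it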
class RiotWatcher:
def __init__(self, key, default_region=NORTH_AMERICA, limits=(RateLimit(10, 10), RateLimit(500, 600), )):
self.key = key
self.default_region = default_region
self.limits = limits
def can_make_request(self):
for lim in self.limits:
if not lim.request_available():
return False
return True
def base_request(self, url, region, static=False, **kwargs):
if region is None:
region = self.default_region
args = {'api_key': self.key}
for k in kwargs:
if kwargs[k] is not None:
args[k] = kwargs[k]
r = requests.get(
'https://{proxy}.api.pvp.net/api/lol/{static}{region}/{url}'.format(
proxy='global' if static else region,
static='static-data/' if static else '',
region=region,
url=url
),
params=args
)
if not static:
for lim in self.limits:
lim.add_request()
raise_status(r)
return r.json()
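    # For reference, a call such as get_summoners(names=['foo'], region='euw')
    # resolves through base_request to a GET of this shape (api_key elided):
    #
    #   https://euw.api.pvp.net/api/lol/euw/v1.4/summoner/by-name/foo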
def _observer_mode_request(self, url, proxy=None, **kwargs):
if proxy is None:
proxy = self.default_region
args = {'api_key': self.key}
for k in kwargs:
if kwargs[k] is not None:
args[k] = kwargs[k]
r = requests.get(
'https://{proxy}.api.pvp.net/observer-mode/rest/{url}'.format(
proxy=proxy,
url=url
),
params=args
)
for lim in self.limits:
lim.add_request()
raise_status(r)
return r.json()
@staticmethod
def sanitized_name(name):
return name.replace(' ', '').lower()
# champion-v1.2
def _champion_request(self, end_url, region, **kwargs):
return self.base_request(
'v{version}/champion/{end_url}'.format(
version=api_versions['champion'],
end_url=end_url
),
region,
**kwargs
)
def get_all_champions(self, region=None, free_to_play=False):
return self._champion_request('', region, freeToPlay=free_to_play)
def get_champion(self, champion_id, region=None):
return self._champion_request('{id}'.format(id=champion_id), region)
# current-game-v1.0
def get_current_game(self, summoner_id, platform_id=None, region=None):
if platform_id is None:
platform_id = platforms[self.default_region]
return self._observer_mode_request(
'consumer/getSpectatorGameInfo/{platform}/{summoner_id}'.format(
platform=platform_id,
summoner_id=summoner_id
),
region
)
# featured-game-v1.0
def get_featured_games(self, proxy=None):
return self._observer_mode_request('featured', proxy)
# game-v1.3
def _game_request(self, end_url, region, **kwargs):
return self.base_request(
'v{version}/game/{end_url}'.format(
version=api_versions['game'],
end_url=end_url
),
region,
**kwargs
)
def get_recent_games(self, summoner_id, region=None):
return self._game_request('by-summoner/{summoner_id}/recent'.format(summoner_id=summoner_id), region)
# league-v2.5
def _league_request(self, end_url, region, **kwargs):
return self.base_request(
'v{version}/league/{end_url}'.format(
version=api_versions['league'],
end_url=end_url
),
region,
**kwargs
)
def get_league(self, summoner_ids=None, team_ids=None, region=None):
"""summoner_ids and team_ids arguments must be iterable, only one should be specified, not both"""
if (summoner_ids is None) != (team_ids is None):
if summoner_ids is not None:
return self._league_request(
'by-summoner/{summoner_ids}'.format(summoner_ids=','.join([str(s) for s in summoner_ids])),
region
)
else:
return self._league_request(
'by-team/{team_ids}'.format(team_ids=','.join([str(t) for t in team_ids])),
region
)
def get_league_entry(self, summoner_ids=None, team_ids=None, region=None):
"""summoner_ids and team_ids arguments must be iterable, only one should be specified, not both"""
if (summoner_ids is None) != (team_ids is None):
if summoner_ids is not None:
return self._league_request(
'by-summoner/{summoner_ids}/entry'.format(
summoner_ids=','.join([str(s) for s in summoner_ids])
),
region
)
else:
return self._league_request(
'by-team/{team_ids}/entry'.format(team_ids=','.join([str(t) for t in team_ids])),
region
)
def get_challenger(self, region=None, queue=solo_queue):
return self._league_request('challenger', region, type=queue)
def get_master(self, region=None, queue=solo_queue):
return self._league_request('master', region, type=queue)
# lol-static-data-v1.2
def _static_request(self, end_url, region, **kwargs):
return self.base_request(
'v{version}/{end_url}'.format(
version=api_versions['lol-static-data'],
end_url=end_url
),
region,
static=True,
**kwargs
)
def static_get_champion_list(self, region=None, locale=None, version=None, data_by_id=None, champ_data=None):
return self._static_request(
'champion',
region,
locale=locale,
version=version,
dataById=data_by_id,
champData=champ_data
)
def static_get_champion(self, champ_id, region=None, locale=None, version=None, champ_data=None):
return self._static_request(
'champion/{id}'.format(id=champ_id),
region,
locale=locale,
version=version,
champData=champ_data
)
def static_get_item_list(self, region=None, locale=None, version=None, item_list_data=None):
return self._static_request('item', region, locale=locale, version=version, itemListData=item_list_data)
def static_get_item(self, item_id, region=None, locale=None, version=None, item_data=None):
return self._static_request(
'item/{id}'.format(id=item_id),
region,
locale=locale,
version=version,
itemData=item_data
)
def static_get_mastery_list(self, region=None, locale=None, version=None, mastery_list_data=None):
return self._static_request(
'mastery',
region,
locale=locale,
version=version,
masteryListData=mastery_list_data
)
def static_get_mastery(self, mastery_id, region=None, locale=None, version=None, mastery_data=None):
return self._static_request(
'mastery/{id}'.format(id=mastery_id),
region,
locale=locale,
version=version,
masteryData=mastery_data
)
def static_get_realm(self, region=None):
return self._static_request('realm', region)
def static_get_rune_list(self, region=None, locale=None, version=None, rune_list_data=None):
return self._static_request('rune', region, locale=locale, version=version, runeListData=rune_list_data)
def static_get_rune(self, rune_id, region=None, locale=None, version=None, rune_data=None):
return self._static_request(
'rune/{id}'.format(id=rune_id),
region,
locale=locale,
version=version,
runeData=rune_data<|fim▁hole|>
def static_get_summoner_spell_list(self, region=None, locale=None, version=None, data_by_id=None, spell_data=None):
return self._static_request(
'summoner-spell',
region,
locale=locale,
version=version,
dataById=data_by_id,
spellData=spell_data
)
def static_get_summoner_spell(self, spell_id, region=None, locale=None, version=None, spell_data=None):
return self._static_request(
'summoner-spell/{id}'.format(id=spell_id),
region,
locale=locale,
version=version,
spellData=spell_data
)
def static_get_versions(self, region=None):
return self._static_request('versions', region)
# match-v2.2
def _match_request(self, end_url, region, **kwargs):
return self.base_request(
'v{version}/match/{end_url}'.format(
version=api_versions['match'],
end_url=end_url
),
region,
**kwargs
)
def get_match(self, match_id, region=None, include_timeline=False):
return self._match_request(
'{match_id}'.format(match_id=match_id),
region,
includeTimeline=include_timeline
)
# lol-status-v1.0
@staticmethod
def get_server_status(region=None):
if region is None:
url = 'shards'
else:
url = 'shards/{region}'.format(region=region)
r = requests.get('http://status.leagueoflegends.com/{url}'.format(url=url))
raise_status(r)
return r.json()
# match history-v2.2
def _match_history_request(self, end_url, region, **kwargs):
return self.base_request(
'v{version}/matchhistory/{end_url}'.format(
version=api_versions['matchhistory'],
end_url=end_url
),
region,
**kwargs
)
def get_match_history(self, summoner_id, region=None, champion_ids=None, ranked_queues=None, begin_index=None,
end_index=None):
return self._match_history_request(
'{summoner_id}'.format(summoner_id=summoner_id),
region,
championIds=champion_ids,
rankedQueues=ranked_queues,
beginIndex=begin_index,
endIndex=end_index
)
# match list-v2.2
def _match_list_request(self, end_url, region, **kwargs):
return self.base_request(
'v{version}/matchlist/by-summoner/{end_url}'.format(
version=api_versions['matchlist'],
end_url=end_url,
),
region,
**kwargs
)
def get_match_list(self, summoner_id, region=None, champion_ids=None, ranked_queues=None, seasons=None,
begin_time=None, end_time=None, begin_index=None, end_index=None):
return self._match_list_request(
'{summoner_id}'.format(summoner_id=summoner_id),
region,
            championIds=champion_ids,
rankedQueues=ranked_queues,
seasons=seasons,
beginTime=begin_time,
endTime=end_time,
beginIndex=begin_index,
endIndex=end_index
)
# stats-v1.3
def _stats_request(self, end_url, region, **kwargs):
return self.base_request(
'v{version}/stats/{end_url}'.format(
version=api_versions['stats'],
end_url=end_url
),
region,
**kwargs
)
def get_stat_summary(self, summoner_id, region=None, season=None):
return self._stats_request(
'by-summoner/{summoner_id}/summary'.format(summoner_id=summoner_id),
region,
season='SEASON{}'.format(season) if season is not None else None)
def get_ranked_stats(self, summoner_id, region=None, season=None):
return self._stats_request(
'by-summoner/{summoner_id}/ranked'.format(summoner_id=summoner_id),
region,
season='SEASON{}'.format(season) if season is not None else None
)
# summoner-v1.4
def _summoner_request(self, end_url, region, **kwargs):
return self.base_request(
'v{version}/summoner/{end_url}'.format(
version=api_versions['summoner'],
end_url=end_url
),
region,
**kwargs
)
def get_mastery_pages(self, summoner_ids, region=None):
return self._summoner_request(
'{summoner_ids}/masteries'.format(summoner_ids=','.join([str(s) for s in summoner_ids])),
region
)
def get_rune_pages(self, summoner_ids, region=None):
return self._summoner_request(
'{summoner_ids}/runes'.format(summoner_ids=','.join([str(s) for s in summoner_ids])),
region
)
def get_summoners(self, names=None, ids=None, region=None):
if (names is None) != (ids is None):
return self._summoner_request(
'by-name/{summoner_names}'.format(
summoner_names=','.join([self.sanitized_name(n) for n in names])) if names is not None
else '{summoner_ids}'.format(summoner_ids=','.join([str(i) for i in ids])),
region
)
else:
return None
def get_summoner(self, name=None, _id=None, region=None):
if (name is None) != (_id is None):
if name is not None:
name = self.sanitized_name(name)
return self.get_summoners(names=[name, ], region=region)[name]
else:
return self.get_summoners(ids=[_id, ], region=region)[str(_id)]
return None
def get_summoner_name(self, summoner_ids, region=None):
return self._summoner_request(
'{summoner_ids}/name'.format(summoner_ids=','.join([str(s) for s in summoner_ids])),
region
)
# team-v2.4
def _team_request(self, end_url, region, **kwargs):
return self.base_request(
'v{version}/team/{end_url}'.format(
version=api_versions['team'],
end_url=end_url
),
region,
**kwargs
)
def get_teams_for_summoner(self, summoner_id, region=None):
return self.get_teams_for_summoners([summoner_id, ], region=region)[str(summoner_id)]
def get_teams_for_summoners(self, summoner_ids, region=None):
return self._team_request(
'by-summoner/{summoner_id}'.format(summoner_id=','.join([str(s) for s in summoner_ids])),
region
)
def get_team(self, team_id, region=None):
return self.get_teams([team_id, ], region=region)[str(team_id)]
def get_teams(self, team_ids, region=None):
return self._team_request('{team_ids}'.format(team_ids=','.join(str(t) for t in team_ids)), region)<|fim▁end|> | ) |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>//
// partial2js
// Copyright (c) 2014 Dennis Sänger
// Licensed under the MIT
// http://opensource.org/licenses/MIT
//
"use strict";
var glob = require('glob-all');
var fs = require('fs');
var path = require('path');
var stream = require('stream');
var htmlmin = require('html-minifier').minify;
var escape = require('js-string-escape');
var eol = require('os').EOL;
function Partial2Js( opts ) {
opts = opts || {};
var self = this;
this.debug = !!opts.debug;
this.patterns = [];
this.files = [];
this.contents = {};
this.uniqueFn = function( file ) {
return file;
};
var log = (function log() {
if ( this.debug ) {
console.log.apply( console, arguments );
}
}).bind( this );
var find = (function() {
this.files = glob.sync( this.patterns.slice( 0 )) || [];
}).bind( this );
function cleanPatterns( patterns ) {
return patterns.map(function( entry ) {
return entry.replace(/\/\*+/g, '');
});
}
function compare( patterns, a, b ) {
return matchInPattern( patterns, a ) - matchInPattern( patterns, b );
}
var sort = (function() {
var clean = cleanPatterns( this.patterns );
this.files.sort(function( a, b ) {
return compare( clean, a, b );
});
}).bind( this );
//
    // this function is not very functional ;)
// Should use findIndex() [ES6] as soon as possible
//
function matchInPattern( patterns, entry ) {
var res = patterns.length + 100;
patterns.every(function( pattern, index ) {
if ( entry.indexOf( pattern ) > -1 ) {
res = index;
return false;
}
return true;
});
return res;
}
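    // A findIndex()-based sketch of the same lookup (assumes an ES6 runtime,
    // hence the TODO comment above rather than a change here):
    //
    //   const idx = patterns.findIndex(p => entry.indexOf(p) > -1);
    //   return idx === -1 ? patterns.length + 100 : idx;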
var unique = (function() {
if ( typeof this.uniqueFn === 'function' && this.files && this.files.length ) {
var obj = {};
this.files.forEach(function( file ) {<|fim▁hole|> if ( !obj[key] ) {
obj[key] = file;
}
});
this.files = obj;
}
}).bind( this );
var asString = (function( moduleName ) {
var buffer = '';
buffer += '(function(window,document){' + eol;
buffer += '"use strict";' + eol;
buffer += 'angular.module("'+moduleName+'",[]).run(["$templateCache",function($templateCache){' + eol;
for ( var k in this.contents ) {
buffer += ' $templateCache.put("'+k+'","'+this.contents[k]+'");' + eol;
}
buffer += '}]);' + eol;
buffer += '})(window,document);';
return buffer;
}).bind( this );
var read = (function() {
var id, path, stat;
this.contents = {};
for( var k in this.files ) {
id = k;
path = this.files[k];
stat = fs.statSync( path );
if ( stat.isFile()) {
log('read file:', path, '=>', id );
this.contents[id] = fs.readFileSync( path );
}
}
return this.contents;
}).bind( this );
var asStream = function( string ) {
var s = new stream.Readable();
s._read = function noop() {};
s.push( string );
s.push(null);
return s;
};
var minify = (function() {
var opts = {
collapseWhitespace: true,
preserveLineBreaks: false,
removeComments: true,
removeRedundantAttributes: true,
removeEmptyAttributes: false,
keepClosingSlash: true,
maxLineLength: 0,
customAttrCollapse: /.+/,
html5: true
};
for ( var k in this.contents ) {
this.contents[k] = escape(htmlmin( String(this.contents[k]), opts ));
}
}).bind( this );
this.add = function( pattern ) {
this.patterns.push( pattern );
return this;
};
this.not = function( pattern ) {
this.patterns.push( '!'+pattern );
return this;
};
this.folder = function( folder ) {
if ( folder && String( folder ) === folder ) {
folder = path.resolve( folder ) + '/**/*';
this.patterns.push( folder );
}
return this;
};
this.unique = function( fn ) {
this.uniqueFn = fn;
return this;
};
this.stringify = function( moduleName ) {
find();
sort();
unique();
read();
minify();
return asString( moduleName );
};
this.stream = function( moduleName ) {
return asStream( this.stringify( moduleName ) );
};
}
module.exports = function( opts ) {
return new Partial2Js( opts );
};<|fim▁end|> | var key = self.uniqueFn( file ); |
<|file_name|>guild.py<|end_file_name|><|fim▁begin|>import os
from holster.enum import Enum
from rowboat.types import Model, SlottedModel, Field, DictField, text, raw, rule_matcher
CooldownMode = Enum(
'GUILD',
'CHANNEL',
'USER',
)
class PluginConfigObj(object):
client = None
class PluginsConfig(Model):
def __init__(self, inst, obj):
self.client = None
self.load_into(inst, obj)<|fim▁hole|> inst = PluginConfigObj()
cls(inst, obj)
return inst
@classmethod
def force_load_plugin_configs(cls):
"""
This function can be called to ensure that this class will have all its
attributes properly loaded, as they are dynamically set when plugin configs
are defined.
"""
plugins = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'plugins')
for name in os.listdir(plugins):
__import__('rowboat.plugins.{}'.format(
name.rsplit('.', 1)[0]
))
class CommandOverrideConfig(SlottedModel):
disabled = Field(bool, default=False)
level = Field(int)
class CommandsConfig(SlottedModel):
prefix = Field(str, default='')
mention = Field(bool, default=False)
overrides = Field(raw)
def get_command_override(self, command):
return rule_matcher(command, self.overrides or [])
class GuildConfig(SlottedModel):
nickname = Field(text)
commands = Field(CommandsConfig, default=None, create=False)
levels = DictField(int, int)
plugins = Field(PluginsConfig.parse)<|fim▁end|> |
@classmethod
def parse(cls, obj, *args, **kwargs): |
<|file_name|>__openerp__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License<|fim▁hole|># along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Attendances',
'version': '1.1',
'category': 'Human Resources',
'description': """
This module aims to manage employee's attendances.
==================================================
Keeps account of the attendances of the employees on the basis of the
actions(Sign in/Sign out) performed by them.
""",
'author': 'OpenERP SA',
'images': ['images/hr_attendances.jpeg'],
'depends': ['hr'],
'data': [
'security/ir_rule.xml',
'security/ir.model.access.csv',
'hr_attendance_view.xml',
'hr_attendance_report.xml',
'wizard/hr_attendance_bymonth_view.xml',
'wizard/hr_attendance_byweek_view.xml',
'wizard/hr_attendance_error_view.xml',
'res_config_view.xml',
],
'demo': ['hr_attendance_demo.xml'],
'test': [
'test/attendance_process.yml',
'test/hr_attendance_report.yml',
],
'installable': True,
'auto_install': False,
#web
"js": ["static/src/js/attendance.js"],
'qweb' : ["static/src/xml/attendance.xml"],
'css' : ["static/src/css/slider.css"],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|> | |
<|file_name|>GVDatabaseHelper.java<|end_file_name|><|fim▁begin|>package com.avygeil.GrandVide;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;<|fim▁hole|>{
private final GrandVide gv;
private final String url;
private final String driverClass;
private final String user;
private final String password;
public static String DB_REGIONS_SCHEME;
public static String DB_STATS_SCHEME;
public static String AUTOINCREMENT;
public static String NOCASE;
private Connection connection = null;
private Statement statement = null;
private PreparedStatement prepared = null;
GVDatabaseHelper(GrandVide grandvide)
{
this.gv = grandvide;
if (gv.configurationHandler.sqlDriver.equalsIgnoreCase("sqlite"))
{
url = "jdbc:sqlite:plugins/GrandVide/grandvide.db";
driverClass = "org.sqlite.JDBC";
user = "";
password = "";
AUTOINCREMENT = "AUTOINCREMENT";
NOCASE = "COLLATE NOCASE";
}
else if (gv.configurationHandler.sqlDriver.equalsIgnoreCase("mysql"))
{
url = "jdbc:mysql://" + gv.configurationHandler.mysqlHost + ":" + gv.configurationHandler.mysqlPort + "/" + gv.configurationHandler.mysqlDatabase;
driverClass = "com.mysql.jdbc.Driver";
user = gv.configurationHandler.mysqlUser;
password = gv.configurationHandler.mysqlPassword;
AUTOINCREMENT = "AUTO_INCREMENT";
NOCASE = "";
}
else
{
url = null;
driverClass = null;
user = null;
password = null;
AUTOINCREMENT = null;
NOCASE = null;
}
DB_REGIONS_SCHEME = "CREATE TABLE IF NOT EXISTS " + gv.configurationHandler.mysqlPrefix + "regions(id INTEGER PRIMARY KEY " + AUTOINCREMENT + ", name TEXT, world TEXT, container BLOB, teams BLOB, power BLOB)";
DB_STATS_SCHEME = "CREATE TABLE IF NOT EXISTS " + gv.configurationHandler.mysqlPrefix + "stats(id INTEGER PRIMARY KEY " + AUTOINCREMENT + ", player TEXT, kills INTEGER, deaths INTEGER, damage_dealt INTEGER, damage_taken INTEGER, block_break INTEGER, block_place INTEGER, games_joined INTEGER, games_finished INTEGER)";
}
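    // Illustrative lifecycle sketch (assumed call order; the plugin performs
    // this wiring elsewhere, and the table name below is hypothetical since
    // it depends on the configured prefix):
    //
    //   GVDatabaseHelper db = new GVDatabaseHelper(plugin);
    //   db.setConnection();
    //   db.execute(GVDatabaseHelper.DB_REGIONS_SCHEME);
    //   ResultSet rs = db.query("SELECT * FROM gv_regions");
    //   db.closeResultSet(rs);
    //   db.closeConnection();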
public void setConnection() throws Exception
{
Class.forName(driverClass);
try
{
gv.getLogger().info("Connexion a " + url + "...");
connection = DriverManager.getConnection(url, user, password);
}
catch (SQLException e)
{
gv.getLogger().severe("Impossible d'etablir la connexion a la base de donnees");
throw e;
}
try
{
statement = connection.createStatement();
}
catch (Exception e)
{
try { connection.close(); } catch (Exception ignore) {}
connection = null;
gv.getLogger().severe("Une erreur s'est produite avec la base de donnees");
throw e;
}
}
public void closeConnection()
{
if (statement != null)
try { statement.close(); } catch (Exception ignore) {}
if (connection != null)
try { connection.close(); } catch (Exception ignore) {}
}
public void closeResultSet(ResultSet rs)
{
if (rs != null)
try { rs.close(); } catch (Exception ignore) {}
}
public void execute(String instruction) throws Exception
{
try
{
statement.executeUpdate(instruction);
}
catch (SQLException e)
{
gv.getLogger().warning("La demande SQL n'a pas pu etre executee");
throw new Exception(e.getMessage());
}
}
public ResultSet query(String query) throws Exception
{
try
{
return statement.executeQuery(query);
}
catch (SQLException e)
{
gv.getLogger().warning("La requete SQL n'a pas pu etre executee");
throw new Exception(e.getMessage());
}
}
public PreparedStatement getPrepared()
{
return prepared;
}
public void prepare(String query) throws Exception
{
try
{
prepared = connection.prepareStatement(query);
}
catch (SQLException e)
{
gv.getLogger().warning("La requete preparee SQL n'a pas pu etre executee");
throw new Exception(e.getMessage());
}
}
public void finalize() throws Exception
{
try
{
prepared.execute();
}
catch (SQLException e)
{
gv.getLogger().warning("La requete preparee SQL n'a pas pu etre finalisee");
throw new Exception(e.getMessage());
}
finally
{
if (prepared != null)
try { prepared.close(); } catch (Exception ignore) {}
}
}
}<|fim▁end|> | import java.sql.SQLException;
import java.sql.Statement;
public class GVDatabaseHelper |
<|file_name|>mixin.js<|end_file_name|><|fim▁begin|>import {smartTable} from 'smart-table-core';
//change the structure of returned items
const smartTableExtension = function ({table, tableState, data}) {
const oldChangeRegister = table.onDisplayChange;
//will overwrite the default onDisplayChange
return {
onDisplayChange(listener) {
oldChangeRegister(function (items) {<|fim▁hole|> }
};
};
// our composed factory
const superSmartTable = function ({data}) {
const core = smartTable({data});
return Object.assign(core, smartTableExtension({table: core})); //overwrite core method by mixin the extension within the core
};
//use our super smart table
const data = [
{surname: 'Deubaze', name: 'Raymond'},
{surname: 'Foo', name: 'Bar'},
{surname: 'Doe', name: 'John'}
];
const table = superSmartTable({data});
// core methods available
table.onDisplayChange(items => console.log(items)); // no need to extract "value" property as the method has been overwritten
table.exec();<|fim▁end|> | const itemValues = items.map(i => i.value);
listener(itemValues);
}); |
<|file_name|>similarity_alga.rs<|end_file_name|><|fim▁begin|>use alga::general::{
AbstractGroup, AbstractLoop, AbstractMagma, AbstractMonoid, AbstractQuasigroup,
AbstractSemigroup, Identity, Multiplicative, RealField, TwoSidedInverse,
};
use alga::linear::Similarity as AlgaSimilarity;
use alga::linear::{AffineTransformation, ProjectiveTransformation, Rotation, Transformation};
use crate::base::allocator::Allocator;
use crate::base::dimension::DimName;
use crate::base::{DefaultAllocator, VectorN};
use crate::geometry::{AbstractRotation, Point, Similarity, Translation};
/*
*
* Algebraic structures.
*
*/
impl<N: RealField + simba::scalar::RealField, D: DimName, R> Identity<Multiplicative>
for Similarity<N, D, R>
where
R: Rotation<Point<N, D>> + AbstractRotation<N, D>,
DefaultAllocator: Allocator<N, D>,
{
#[inline]
fn identity() -> Self {
Self::identity()
}
}
impl<N: RealField + simba::scalar::RealField, D: DimName, R> TwoSidedInverse<Multiplicative>
for Similarity<N, D, R>
where
R: Rotation<Point<N, D>> + AbstractRotation<N, D>,
DefaultAllocator: Allocator<N, D>,
{
#[inline]
#[must_use = "Did you mean to use two_sided_inverse_mut()?"]
fn two_sided_inverse(&self) -> Self {
self.inverse()
}
#[inline]
fn two_sided_inverse_mut(&mut self) {
self.inverse_mut()
}
}
impl<N: RealField + simba::scalar::RealField, D: DimName, R> AbstractMagma<Multiplicative>
for Similarity<N, D, R>
where
R: Rotation<Point<N, D>> + AbstractRotation<N, D>,
DefaultAllocator: Allocator<N, D>,
{
#[inline]
fn operate(&self, rhs: &Self) -> Self {
self * rhs
}
}
macro_rules! impl_multiplicative_structures(
($($marker: ident<$operator: ident>),* $(,)*) => {$(
impl<N: RealField + simba::scalar::RealField, D: DimName, R> $marker<$operator> for Similarity<N, D, R>
where R: Rotation<Point<N, D>> + AbstractRotation<N, D>,
DefaultAllocator: Allocator<N, D> { }
)*}
);
impl_multiplicative_structures!(
AbstractSemigroup<Multiplicative>,
AbstractMonoid<Multiplicative>,
AbstractQuasigroup<Multiplicative>,
AbstractLoop<Multiplicative>,
AbstractGroup<Multiplicative>
);
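// For reference, the invocation above expands to one empty marker impl per
// entry; the first line, for example, becomes:
//
//   impl<N: RealField + simba::scalar::RealField, D: DimName, R>
//       AbstractSemigroup<Multiplicative> for Similarity<N, D, R>
//   where R: Rotation<Point<N, D>> + AbstractRotation<N, D>,
//         DefaultAllocator: Allocator<N, D> { }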
/*
*
* Transformation groups.
*<|fim▁hole|>where
R: Rotation<Point<N, D>> + AbstractRotation<N, D>,
DefaultAllocator: Allocator<N, D>,
{
#[inline]
fn transform_point(&self, pt: &Point<N, D>) -> Point<N, D> {
self.transform_point(pt)
}
#[inline]
fn transform_vector(&self, v: &VectorN<N, D>) -> VectorN<N, D> {
self.transform_vector(v)
}
}
impl<N: RealField + simba::scalar::RealField, D: DimName, R> ProjectiveTransformation<Point<N, D>>
for Similarity<N, D, R>
where
R: Rotation<Point<N, D>> + AbstractRotation<N, D>,
DefaultAllocator: Allocator<N, D>,
{
#[inline]
fn inverse_transform_point(&self, pt: &Point<N, D>) -> Point<N, D> {
self.inverse_transform_point(pt)
}
#[inline]
fn inverse_transform_vector(&self, v: &VectorN<N, D>) -> VectorN<N, D> {
self.inverse_transform_vector(v)
}
}
impl<N: RealField + simba::scalar::RealField, D: DimName, R> AffineTransformation<Point<N, D>>
for Similarity<N, D, R>
where
R: Rotation<Point<N, D>> + AbstractRotation<N, D>,
DefaultAllocator: Allocator<N, D>,
{
type NonUniformScaling = N;
type Rotation = R;
type Translation = Translation<N, D>;
#[inline]
fn decompose(&self) -> (Translation<N, D>, R, N, R) {
(
self.isometry.translation.clone(),
self.isometry.rotation.clone(),
self.scaling(),
<R as AbstractRotation<N, D>>::identity(),
)
}
#[inline]
fn append_translation(&self, t: &Self::Translation) -> Self {
t * self
}
#[inline]
fn prepend_translation(&self, t: &Self::Translation) -> Self {
self * t
}
#[inline]
fn append_rotation(&self, r: &Self::Rotation) -> Self {
Similarity::from_isometry(self.isometry.append_rotation(r), self.scaling())
}
#[inline]
fn prepend_rotation(&self, r: &Self::Rotation) -> Self {
Similarity::from_isometry(self.isometry.prepend_rotation(r), self.scaling())
}
#[inline]
fn append_scaling(&self, s: &Self::NonUniformScaling) -> Self {
self.append_scaling(*s)
}
#[inline]
fn prepend_scaling(&self, s: &Self::NonUniformScaling) -> Self {
self.prepend_scaling(*s)
}
#[inline]
fn append_rotation_wrt_point(&self, r: &Self::Rotation, p: &Point<N, D>) -> Option<Self> {
let mut res = self.clone();
res.append_rotation_wrt_point_mut(r, p);
Some(res)
}
}
impl<N: RealField + simba::scalar::RealField, D: DimName, R> AlgaSimilarity<Point<N, D>>
for Similarity<N, D, R>
where
R: Rotation<Point<N, D>> + AbstractRotation<N, D>,
DefaultAllocator: Allocator<N, D>,
{
type Scaling = N;
#[inline]
fn translation(&self) -> Translation<N, D> {
self.isometry.translation()
}
#[inline]
fn rotation(&self) -> R {
self.isometry.rotation()
}
#[inline]
fn scaling(&self) -> N {
self.scaling()
}
}<|fim▁end|> | */
impl<N: RealField + simba::scalar::RealField, D: DimName, R> Transformation<Point<N, D>>
for Similarity<N, D, R> |
<|file_name|>queries.py<|end_file_name|><|fim▁begin|>import psycopg2
from db.enums import *
base = psycopg2.connect("dbname='cardkeepersample' user='andrew' host='localhost' password='1234'")
cursor = base.cursor()
# Wrapped queries in alphabetic order
def active_packs(user_id, start=0, count=10):
query = """SELECT packs.pack_id, packs.name FROM user_packs, packs WHERE packs.pack_id = user_packs.pack_id
AND user_packs.status = %s AND user_id = %s ORDER BY pack_id
OFFSET %s LIMIT %s;"""
cursor.execute(query, (CardStatusType.ACTIVE.value, user_id, start, count))
return cursor.fetchall()
def add_pack(user_id, pack_id):
query = """INSERT INTO user_packs (pack_id, user_id, status) VALUES (%s, %s, 'Active');"""
cursor.execute(query, (pack_id, user_id))
query = """SELECT card_id FROM cards WHERE cards.pack_id = %s"""
cursor.execute(query, (pack_id,))
cards = cursor.fetchall()
for i in cards:
query = """INSERT INTO user_cards (user_id, card_id, times_reviewed, correct_answers, status) VALUES (%s, %s, 0, 0, 'Active');"""
cursor.execute(query, (user_id, i[0]))
base.commit()
def add_user(user):
query = """INSERT INTO users (user_id, name, general_goal, weekly_goal, notifications_learn, notifications_stats, joined)
VALUES (%s, %s, %s, %s, %s, %s, current_date);"""
cursor.execute(query, tuple(user))
base.commit()
def available_packs(user_id):
query = """SELECT packs.pack_id, packs.name FROM packs
WHERE packs.privacy = 'public' LIMIT 105;"""
cursor.execute(query)
return cursor.fetchall()
def available_groups(user_id, rights=RightsType.USER, include_higher=False):
query = """SELECT groups.group_id, groups.name FROM groups, user_groups
WHERE groups.group_id = user_groups.group_id
AND user_groups.user_id = %s
AND user_groups.rights """ + ("<" if include_higher else "") + "= %s;"""
cursor.execute(query, (user_id, rights))
return cursor.fetchall()
def delete_pack(pack_id):
owner_id = get_pack(pack_id)['owner_id']
cursor.execute('''
DELETE FROM user_cards
USING cards
WHERE
user_cards.card_id = cards.card_id AND
cards.pack_id = %s;
''', (pack_id,))
cursor.execute(
'DELETE FROM cards WHERE pack_id = %s;',
(pack_id,)
)
cursor.execute(
'DELETE FROM user_packs WHERE pack_id = %s;',
(pack_id,)
)
cursor.execute(
'DELETE FROM packs WHERE pack_id = %s;',
(pack_id,)
)
base.commit()
def get_all_cards_in_pack(pack_id):
cursor.execute('''
SELECT card_id, front, back, comment, type
FROM cards
WHERE pack_id = %s;
''', (pack_id,))
return [{'card_id': card_id, 'front': front, 'back': back,
'comment': comment, 'type': tp}
for card_id, front, back, comment, tp
in cursor.fetchall()]
def get_pack(pack_id, user_id=None):
cursor.execute(
'SELECT name, owner_id, privacy FROM packs WHERE pack_id = %s;',
(pack_id,)
)
name, owner_id, privacy = cursor.fetchone()
status = None
if user_id is not None:
cursor.execute('''
SELECT status FROM user_packs
WHERE user_id = %s AND pack_id = %s;
''', (user_id, pack_id))
status = cursor.fetchone()[0]
return {
'pack_id': pack_id,
'name': name,
'owner_id': owner_id,
'privacy': privacy,
'status': status
}
def if_added(user_id, pack_id):
query = "SELECT * FROM user_packs WHERE user_id = %s AND pack_id = %s;"
cursor.execute(query, (user_id, pack_id))
return list(cursor.fetchall())
# TODO: Take permissions lists into account
def has_pack_read_access(pack_id, user_id):
pack_info = get_pack(pack_id)
return user_id == pack_info['owner_id'] or pack_info['privacy'] == 'public'
def if_registered(user_id):
query = "SELECT * FROM users WHERE users.user_id = %s;"
cursor.execute(query, (user_id,))
return True if len(cursor.fetchall()) else False
def cards_for_learning(user_id):
query = """SELECT cards.front, cards.back, cards.comment FROM user_cards, cards
WHERE user_cards.card_id = cards.card_id AND
user_id = %s AND cards.type = %s"""
    cursor.execute(query, (user_id, CardType.SHORT.value))
return cursor.fetchall()
def new_card(front, back):
query = "INSERT INTO cards (front, back) VALUES (%s, %s);"
cursor.execute(query, (front, back))
base.commit()
def new_group(name, owner, privacy="public"):
query = "INSERT INTO groups (name, privacy, owner_id) VALUES (%s, %s, %s);"
cursor.execute(query, (name, privacy, owner))
base.commit()
def new_pack(name, owner, privacy=PrivacyType.PUBLIC, status=CardStatusType.ACTIVE, cards=[]):
if isinstance(privacy, PrivacyType):
privacy = privacy.value
if isinstance(status, CardStatusType):
status = status.value
query = "INSERT INTO packs (name, owner_id, privacy) VALUES (%s, %s, %s);"
cursor.execute(query, (name, owner, privacy))
query = "SELECT pack_id FROM packs WHERE name = %s AND owner_id = %s;"
cursor.execute(query, (name, owner))
pack_id = cursor.fetchone()[0]
query = "INSERT INTO user_packs (user_id, pack_id, status) VALUES (%s, %s, %s);"
cursor.execute(query, (owner, pack_id, status))
insert_query = "INSERT INTO cards (pack_id, front, back, comment, type) VALUES (%s, %s, %s, %s, %s) RETURNING card_id;"
insert2_query = "INSERT INTO user_cards (user_id, card_id, times_reviewed, correct_answers, status)" \
"VALUES (%s, %s, 0, 0, 'Active');"
for card in cards:
front = card['front']
back = card['back']
comment = card['comment']
cursor.execute(insert_query, (pack_id, front, back, comment, CardType.SHORT.value))
card_id = cursor.fetchone()[0]
cursor.execute(insert2_query, (owner, card_id))
base.commit()
return pack_id
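# Illustrative call (hypothetical values): creates the pack row, links it to
# the owner in user_packs, and copies each card into cards and user_cards.
#
#   pack_id = new_pack('Spanish basics', owner=42, cards=[
#       {'front': 'hola', 'back': 'hello', 'comment': 'greeting'},
#   ])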
def select_cards(user_id, pack_id):
query = """SELECT cards.card_id, cards.front, cards.back, cards.comment
FROM cards, user_cards
WHERE cards.card_id = user_cards.card_id
AND user_cards.status = %s
AND cards.pack_id = %s
AND user_cards.user_id = %s"""
cursor.execute(query, (CardStatusType.ACTIVE.value, pack_id, user_id))
return cursor.fetchall()
def update_card_data(user_id, card_id, answer):
query = """UPDATE user_cards SET times_reviewed = times_reviewed+1, correct_answers = correct_answers+%s
WHERE user_id = %s AND card_id = %s"""
cursor.execute(query, (answer, user_id, card_id))
base.commit()
def update_card_status(user_id, card_id, status):<|fim▁hole|> WHERE user_id = %s AND card_id = %s"""
cursor.execute(query, (status, user_id, card_id))
base.commit()
def update_pack_name(pack_id, new_name):
query = 'UPDATE packs SET name = %s WHERE pack_id = %s;'
cursor.execute(query, (new_name, pack_id))
base.commit()
def update_pack_privacy(pack_id, new_privacy):
if isinstance(new_privacy, PrivacyType):
new_privacy = new_privacy.value
query = 'UPDATE packs SET privacy = %s WHERE pack_id = %s;'
cursor.execute(query, (new_privacy, pack_id))
base.commit()
def update_pack_status(user_id, pack_id, status):
query = """UPDATE user_cards SET status = %s
WHERE user_id = %s AND card_id = %s"""
cursor.execute(query, (status, user_id, pack_id))
base.commit()<|fim▁end|> | query = """UPDATE user_cards SET status = %s |
<|file_name|>CMakeIcons.java<|end_file_name|><|fim▁begin|>package cmake.icons;
import com.intellij.openapi.util.IconLoader;
import javax.swing.*;
/**<|fim▁hole|>public class CMakeIcons {
public static final Icon FILE = IconLoader.getIcon("/icons/cmake.png");
public static final Icon MACRO = IconLoader.getIcon("/icons/hashtag.png");
public static final Icon FUN = IconLoader.getIcon("/icons/fun.jpg");
public static final Icon LOOP = IconLoader.getIcon("/icons/loop.png");
}<|fim▁end|> | * Created by alex on 12/21/14.
*/ |
<|file_name|>test_parameters.py<|end_file_name|><|fim▁begin|># Licensed under a 3-clause BSD style license - see LICENSE.rst
# STDLIB
from types import MappingProxyType
# THIRD PARTY
import numpy as np
import pytest
# LOCAL
from astropy.cosmology import parameters, realizations
def test_realizations_in_dir():
"""Test the realizations are in ``dir`` of :mod:`astropy.cosmology.parameters`."""
d = dir(parameters)
assert set(d) == set(parameters.__all__)
for n in parameters.available:
assert n in d
@pytest.mark.parametrize("name", parameters.available)
def test_getting_parameters(name):
"""
Test getting 'parameters' and that it is derived from the corresponding<|fim▁hole|> """
params = getattr(parameters, name)
assert isinstance(params, MappingProxyType)
assert params["name"] == name
# Check parameters have the right keys and values
cosmo = getattr(realizations, name)
assert params["name"] == cosmo.name
assert params["cosmology"] == cosmo.__class__.__qualname__
# All the cosmology parameters are equal
for n in cosmo.__parameters__:
assert np.array_equal(params[n], getattr(cosmo, n))
# All the metadata is included. Parameter values take precedence, so only
# checking the keys.
assert set(cosmo.meta.keys()).issubset(params.keys())
# Lastly, check the generation process.
m = cosmo.to_format("mapping", cosmology_as_str=True, move_from_meta=True)
assert params == m<|fim▁end|> | realization. |
<|file_name|>hc.js<|end_file_name|><|fim▁begin|><|fim▁hole|> $(".showcase-wrapper").hide();
$(".showcase-wrapper").fadeIn("slow");
});
/*
var toggle = false;
$('.nav-toggle').on('click', function () {
if (toggle == false) {
$('#sidebar-wrapper').stop().animate({
'left': '4px'
});
toggle = true;
} else {
$('#sidebar-wrapper').stop().animate({
'left': '250px'
});
toggle = false;
}
});
*/
$(function() {
$('.project-box>.row>.project-post').append('<span class="more-info">Click for more information</span>');
$('.project-box').click(function(e) {
if (e.target.tagName == "A" || e.target.tagName == "IMG") {
return true;
}
$(this).find('.more-info').toggle();
$(this).find('.post').slideToggle();
});
});<|fim▁end|> | $(document).ready(function() {
$(".container").hide();
$(".container").fadeIn('5000'); |
<|file_name|>feature-gate-link_cfg.rs<|end_file_name|><|fim▁begin|>#[link(name = "foo", cfg(foo))]<|fim▁hole|>fn main() {}<|fim▁end|> | //~^ ERROR: is unstable
extern "C" {}
|
<|file_name|>pageserver.py<|end_file_name|><|fim▁begin|>import tornado.web
import forms
import config
import io
class Index(tornado.web.RequestHandler):
"""
Returns the index page.
"""
def get(self):
form = forms.RecomputeForm()
recomputations_count = io.get_recomputations_count()
latest_recomputations = io.load_all_recomputations(config.latest_recomputations_count)
self.render("index.html", recompute_form=form, recomputations_count=recomputations_count,
latest_recomputations=latest_recomputations)
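# A minimal routing sketch (hypothetical; the actual tornado.web.Application
# setup lives outside this module):
#
#   application = tornado.web.Application([
#       (r"/", Index),
#       (r"/recomputations", Recomputations),
#       (r"/recomputation/([^/]+)", Recomputation),
#   ])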
class Recomputations(tornado.web.RequestHandler):
"""
Returns the recomputations/search page.
"""<|fim▁hole|>
def get(self):
self.render("recomputations.html", filter_recomputations_form=self.form, recomputations=self.recomputations)
def post(self):
if self.form.validate():
print "here"
name = self.form.name.data
if name != "":
self.recomputations = [r for r in self.recomputations if r["name"] == name]
self.render("recomputations.html", filter_recomputations_form=self.form, recomputations=self.recomputations)
class Recomputation(tornado.web.RequestHandler):
"""
Returns the individual recomputation page.
"""
def get(self, name):
if name.isdigit():
recomputation = io.load_recomputation_by_id(int(name))
else:
recomputation = io.load_recomputation(name)
if recomputation is not None:
self.render("recomputation.html", recomputation=recomputation)
else:
self.render("recomputation404.html", name=name)<|fim▁end|> |
def initialize(self):
self.form = forms.FilterRecomputationsForm(self.request.arguments)
self.recomputations = io.load_all_recomputations() |
<|file_name|>common_test.go<|end_file_name|><|fim▁begin|>// Copyright The OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pdata<|fim▁hole|> "encoding/base64"
"fmt"
"strconv"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
otlpcommon "go.opentelemetry.io/collector/model/internal/data/protogen/common/v1"
)
func TestAttributeValue(t *testing.T) {
v := NewAttributeValueString("abc")
assert.EqualValues(t, AttributeValueTypeString, v.Type())
assert.EqualValues(t, "abc", v.StringVal())
v = NewAttributeValueInt(123)
assert.EqualValues(t, AttributeValueTypeInt, v.Type())
assert.EqualValues(t, 123, v.IntVal())
v = NewAttributeValueDouble(3.4)
assert.EqualValues(t, AttributeValueTypeDouble, v.Type())
assert.EqualValues(t, 3.4, v.DoubleVal())
v = NewAttributeValueBool(true)
assert.EqualValues(t, AttributeValueTypeBool, v.Type())
assert.True(t, v.BoolVal())
v = NewAttributeValueEmpty()
assert.EqualValues(t, AttributeValueTypeEmpty, v.Type())
v.SetStringVal("abc")
assert.EqualValues(t, AttributeValueTypeString, v.Type())
assert.EqualValues(t, "abc", v.StringVal())
v.SetIntVal(123)
assert.EqualValues(t, AttributeValueTypeInt, v.Type())
assert.EqualValues(t, 123, v.IntVal())
v.SetDoubleVal(3.4)
assert.EqualValues(t, AttributeValueTypeDouble, v.Type())
assert.EqualValues(t, 3.4, v.DoubleVal())
v.SetBoolVal(true)
assert.EqualValues(t, AttributeValueTypeBool, v.Type())
assert.True(t, v.BoolVal())
bytesValue := []byte{1, 2, 3, 4}
v = NewAttributeValueBytes(bytesValue)
assert.EqualValues(t, AttributeValueTypeBytes, v.Type())
assert.EqualValues(t, bytesValue, v.BytesVal())
}
func TestAttributeValueType(t *testing.T) {
assert.EqualValues(t, "EMPTY", AttributeValueTypeEmpty.String())
assert.EqualValues(t, "STRING", AttributeValueTypeString.String())
assert.EqualValues(t, "BOOL", AttributeValueTypeBool.String())
assert.EqualValues(t, "INT", AttributeValueTypeInt.String())
assert.EqualValues(t, "DOUBLE", AttributeValueTypeDouble.String())
assert.EqualValues(t, "MAP", AttributeValueTypeMap.String())
assert.EqualValues(t, "ARRAY", AttributeValueTypeArray.String())
assert.EqualValues(t, "BYTES", AttributeValueTypeBytes.String())
}
func TestAttributeValueMap(t *testing.T) {
m1 := NewAttributeValueMap()
assert.Equal(t, AttributeValueTypeMap, m1.Type())
assert.Equal(t, NewAttributeMap(), m1.MapVal())
assert.Equal(t, 0, m1.MapVal().Len())
m1.MapVal().InsertDouble("double_key", 123)
assert.Equal(t, 1, m1.MapVal().Len())
got, exists := m1.MapVal().Get("double_key")
assert.True(t, exists)
assert.Equal(t, NewAttributeValueDouble(123), got)
// Create a second map.
m2 := NewAttributeValueMap()
assert.Equal(t, 0, m2.MapVal().Len())
// Modify the source map that was inserted.
m2.MapVal().UpsertString("key_in_child", "somestr")
assert.Equal(t, 1, m2.MapVal().Len())
got, exists = m2.MapVal().Get("key_in_child")
assert.True(t, exists)
assert.Equal(t, NewAttributeValueString("somestr"), got)
// Insert the second map as a child. This should perform a deep copy.
m1.MapVal().Insert("child_map", m2)
assert.EqualValues(t, 2, m1.MapVal().Len())
got, exists = m1.MapVal().Get("double_key")
assert.True(t, exists)
assert.Equal(t, NewAttributeValueDouble(123), got)
got, exists = m1.MapVal().Get("child_map")
assert.True(t, exists)
assert.Equal(t, m2, got)
// Modify the source map m2 that was inserted into m1.
m2.MapVal().UpdateString("key_in_child", "somestr2")
assert.EqualValues(t, 1, m2.MapVal().Len())
got, exists = m2.MapVal().Get("key_in_child")
assert.True(t, exists)
assert.Equal(t, NewAttributeValueString("somestr2"), got)
// The child map inside m1 should not be modified.
childMap, childMapExists := m1.MapVal().Get("child_map")
require.True(t, childMapExists)
got, exists = childMap.MapVal().Get("key_in_child")
require.True(t, exists)
assert.Equal(t, NewAttributeValueString("somestr"), got)
// Now modify the inserted map (not the source)
childMap.MapVal().UpdateString("key_in_child", "somestr3")
assert.EqualValues(t, 1, childMap.MapVal().Len())
got, exists = childMap.MapVal().Get("key_in_child")
require.True(t, exists)
assert.Equal(t, NewAttributeValueString("somestr3"), got)
// The source child map should not be modified.
got, exists = m2.MapVal().Get("key_in_child")
require.True(t, exists)
assert.Equal(t, NewAttributeValueString("somestr2"), got)
removed := m1.MapVal().Remove("double_key")
assert.True(t, removed)
assert.EqualValues(t, 1, m1.MapVal().Len())
_, exists = m1.MapVal().Get("double_key")
assert.False(t, exists)
removed = m1.MapVal().Remove("child_map")
assert.True(t, removed)
assert.EqualValues(t, 0, m1.MapVal().Len())
_, exists = m1.MapVal().Get("child_map")
assert.False(t, exists)
// Test nil KvlistValue case for MapVal() func.
orig := &otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_KvlistValue{KvlistValue: nil}}
m1 = AttributeValue{orig: orig}
assert.EqualValues(t, NewAttributeMap(), m1.MapVal())
}
func TestNilOrigSetAttributeValue(t *testing.T) {
av := NewAttributeValueEmpty()
av.SetStringVal("abc")
assert.EqualValues(t, "abc", av.StringVal())
av = NewAttributeValueEmpty()
av.SetIntVal(123)
assert.EqualValues(t, 123, av.IntVal())
av = NewAttributeValueEmpty()
av.SetBoolVal(true)
assert.True(t, av.BoolVal())
av = NewAttributeValueEmpty()
av.SetDoubleVal(1.23)
assert.EqualValues(t, 1.23, av.DoubleVal())
av = NewAttributeValueEmpty()
av.SetBytesVal([]byte{1, 2, 3})
assert.Equal(t, []byte{1, 2, 3}, av.BytesVal())
}
func TestAttributeValueEqual(t *testing.T) {
av1 := NewAttributeValueEmpty()
av2 := NewAttributeValueEmpty()
assert.True(t, av1.Equal(av2))
av2 = NewAttributeValueString("abc")
assert.False(t, av1.Equal(av2))
assert.False(t, av2.Equal(av1))
av1 = NewAttributeValueString("abc")
assert.True(t, av1.Equal(av2))
av2 = NewAttributeValueString("edf")
assert.False(t, av1.Equal(av2))
av2 = NewAttributeValueInt(123)
assert.False(t, av1.Equal(av2))
assert.False(t, av2.Equal(av1))
av1 = NewAttributeValueInt(234)
assert.False(t, av1.Equal(av2))
av1 = NewAttributeValueInt(123)
assert.True(t, av1.Equal(av2))
av2 = NewAttributeValueDouble(123)
assert.False(t, av1.Equal(av2))
assert.False(t, av2.Equal(av1))
av1 = NewAttributeValueDouble(234)
assert.False(t, av1.Equal(av2))
av1 = NewAttributeValueDouble(123)
assert.True(t, av1.Equal(av2))
av2 = NewAttributeValueBool(false)
assert.False(t, av1.Equal(av2))
assert.False(t, av2.Equal(av1))
av1 = NewAttributeValueBool(true)
assert.False(t, av1.Equal(av2))
av1 = NewAttributeValueBool(false)
assert.True(t, av1.Equal(av2))
av2 = NewAttributeValueBytes([]byte{1, 2, 3})
assert.False(t, av1.Equal(av2))
assert.False(t, av2.Equal(av1))
av1 = NewAttributeValueBytes([]byte{1, 2, 4})
assert.False(t, av1.Equal(av2))
av1 = NewAttributeValueBytes([]byte{1, 2, 3})
assert.True(t, av1.Equal(av2))
av1 = NewAttributeValueArray()
av1.SliceVal().AppendEmpty().SetIntVal(123)
assert.False(t, av1.Equal(av2))
assert.False(t, av2.Equal(av1))
av2 = NewAttributeValueArray()
av2.SliceVal().AppendEmpty().SetDoubleVal(123)
assert.False(t, av1.Equal(av2))
NewAttributeValueInt(123).CopyTo(av2.SliceVal().At(0))
assert.True(t, av1.Equal(av2))
av1.CopyTo(av2.SliceVal().AppendEmpty())
assert.False(t, av1.Equal(av2))
av1 = NewAttributeValueMap()
av1.MapVal().UpsertString("foo", "bar")
assert.False(t, av1.Equal(av2))
assert.False(t, av2.Equal(av1))
av2 = NewAttributeValueMap()
av2.MapVal().UpsertString("foo", "bar")
assert.True(t, av1.Equal(av2))
fooVal, ok := av2.MapVal().Get("foo")
if !ok {
assert.Fail(t, "expected to find value with key foo")
}
fooVal.SetStringVal("not-bar")
assert.False(t, av1.Equal(av2))
}
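// Test Insert/Update/Upsert/Remove variants starting from an empty map.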
func TestNilAttributeMap(t *testing.T) {
assert.EqualValues(t, 0, NewAttributeMap().Len())
val, exist := NewAttributeMap().Get("test_key")
assert.False(t, exist)
assert.EqualValues(t, AttributeValue{nil}, val)
insertMap := NewAttributeMap()
insertMap.Insert("k", NewAttributeValueString("v"))
assert.EqualValues(t, generateTestAttributeMap(), insertMap)
insertMapString := NewAttributeMap()
insertMapString.InsertString("k", "v")
assert.EqualValues(t, generateTestAttributeMap(), insertMapString)
insertMapNull := NewAttributeMap()
insertMapNull.InsertNull("k")
assert.EqualValues(t, generateTestEmptyAttributeMap(), insertMapNull)
insertMapInt := NewAttributeMap()
insertMapInt.InsertInt("k", 123)
assert.EqualValues(t, generateTestIntAttributeMap(), insertMapInt)
insertMapDouble := NewAttributeMap()
insertMapDouble.InsertDouble("k", 12.3)
assert.EqualValues(t, generateTestDoubleAttributeMap(), insertMapDouble)
insertMapBool := NewAttributeMap()
insertMapBool.InsertBool("k", true)
assert.EqualValues(t, generateTestBoolAttributeMap(), insertMapBool)
insertMapBytes := NewAttributeMap()
insertMapBytes.InsertBytes("k", []byte{1, 2, 3, 4, 5})
assert.EqualValues(t, generateTestBytesAttributeMap(), insertMapBytes)
updateMap := NewAttributeMap()
updateMap.Update("k", NewAttributeValueString("v"))
assert.EqualValues(t, NewAttributeMap(), updateMap)
updateMapString := NewAttributeMap()
updateMapString.UpdateString("k", "v")
assert.EqualValues(t, NewAttributeMap(), updateMapString)
updateMapInt := NewAttributeMap()
updateMapInt.UpdateInt("k", 123)
assert.EqualValues(t, NewAttributeMap(), updateMapInt)
updateMapDouble := NewAttributeMap()
updateMapDouble.UpdateDouble("k", 12.3)
assert.EqualValues(t, NewAttributeMap(), updateMapDouble)
updateMapBool := NewAttributeMap()
updateMapBool.UpdateBool("k", true)
assert.EqualValues(t, NewAttributeMap(), updateMapBool)
updateMapBytes := NewAttributeMap()
updateMapBytes.UpdateBytes("k", []byte{1, 2, 3})
assert.EqualValues(t, NewAttributeMap(), updateMapBytes)
upsertMap := NewAttributeMap()
upsertMap.Upsert("k", NewAttributeValueString("v"))
assert.EqualValues(t, generateTestAttributeMap(), upsertMap)
upsertMapString := NewAttributeMap()
upsertMapString.UpsertString("k", "v")
assert.EqualValues(t, generateTestAttributeMap(), upsertMapString)
upsertMapInt := NewAttributeMap()
upsertMapInt.UpsertInt("k", 123)
assert.EqualValues(t, generateTestIntAttributeMap(), upsertMapInt)
upsertMapDouble := NewAttributeMap()
upsertMapDouble.UpsertDouble("k", 12.3)
assert.EqualValues(t, generateTestDoubleAttributeMap(), upsertMapDouble)
upsertMapBool := NewAttributeMap()
upsertMapBool.UpsertBool("k", true)
assert.EqualValues(t, generateTestBoolAttributeMap(), upsertMapBool)
upsertMapBytes := NewAttributeMap()
upsertMapBytes.UpsertBytes("k", []byte{1, 2, 3, 4, 5})
assert.EqualValues(t, generateTestBytesAttributeMap(), upsertMapBytes)
removeMap := NewAttributeMap()
assert.False(t, removeMap.Remove("k"))
assert.EqualValues(t, NewAttributeMap(), removeMap)
// Test Sort
assert.EqualValues(t, NewAttributeMap(), NewAttributeMap().Sort())
}
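// Test map operations on a map seeded with empty and nil-valued entries.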
func TestAttributeMapWithEmpty(t *testing.T) {
origWithNil := []otlpcommon.KeyValue{
{},
{
Key: "test_key",
Value: otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_StringValue{StringValue: "test_value"}},
},
{
Key: "test_key2",
Value: otlpcommon.AnyValue{Value: nil},
},
}
sm := AttributeMap{
orig: &origWithNil,
}
val, exist := sm.Get("test_key")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeString, val.Type())
assert.EqualValues(t, "test_value", val.StringVal())
val, exist = sm.Get("test_key2")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeEmpty, val.Type())
assert.EqualValues(t, "", val.StringVal())
sm.Insert("other_key", NewAttributeValueString("other_value"))
val, exist = sm.Get("other_key")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeString, val.Type())
assert.EqualValues(t, "other_value", val.StringVal())
sm.InsertString("other_key_string", "other_value")
val, exist = sm.Get("other_key")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeString, val.Type())
assert.EqualValues(t, "other_value", val.StringVal())
sm.InsertInt("other_key_int", 123)
val, exist = sm.Get("other_key_int")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeInt, val.Type())
assert.EqualValues(t, 123, val.IntVal())
sm.InsertDouble("other_key_double", 1.23)
val, exist = sm.Get("other_key_double")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeDouble, val.Type())
assert.EqualValues(t, 1.23, val.DoubleVal())
sm.InsertBool("other_key_bool", true)
val, exist = sm.Get("other_key_bool")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeBool, val.Type())
assert.True(t, val.BoolVal())
sm.InsertBytes("other_key_bytes", []byte{1, 2, 3})
val, exist = sm.Get("other_key_bytes")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeBytes, val.Type())
assert.EqualValues(t, []byte{1, 2, 3}, val.BytesVal())
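// Update the inserted entries in place.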
sm.Update("other_key", NewAttributeValueString("yet_another_value"))
val, exist = sm.Get("other_key")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeString, val.Type())
assert.EqualValues(t, "yet_another_value", val.StringVal())
sm.UpdateString("other_key_string", "yet_another_value")
val, exist = sm.Get("other_key_string")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeString, val.Type())
assert.EqualValues(t, "yet_another_value", val.StringVal())
sm.UpdateInt("other_key_int", 456)
val, exist = sm.Get("other_key_int")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeInt, val.Type())
assert.EqualValues(t, 456, val.IntVal())
sm.UpdateDouble("other_key_double", 4.56)
val, exist = sm.Get("other_key_double")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeDouble, val.Type())
assert.EqualValues(t, 4.56, val.DoubleVal())
sm.UpdateBool("other_key_bool", false)
val, exist = sm.Get("other_key_bool")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeBool, val.Type())
assert.False(t, val.BoolVal())
sm.UpdateBytes("other_key_bytes", []byte{4, 5, 6})
val, exist = sm.Get("other_key_bytes")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeBytes, val.Type())
assert.EqualValues(t, []byte{4, 5, 6}, val.BytesVal())
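// Upsert existing keys, then upsert brand new keys.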
sm.Upsert("other_key", NewAttributeValueString("other_value"))
val, exist = sm.Get("other_key")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeString, val.Type())
assert.EqualValues(t, "other_value", val.StringVal())
sm.UpsertString("other_key_string", "other_value")
val, exist = sm.Get("other_key")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeString, val.Type())
assert.EqualValues(t, "other_value", val.StringVal())
sm.UpsertInt("other_key_int", 123)
val, exist = sm.Get("other_key_int")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeInt, val.Type())
assert.EqualValues(t, 123, val.IntVal())
sm.UpsertDouble("other_key_double", 1.23)
val, exist = sm.Get("other_key_double")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeDouble, val.Type())
assert.EqualValues(t, 1.23, val.DoubleVal())
sm.UpsertBool("other_key_bool", true)
val, exist = sm.Get("other_key_bool")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeBool, val.Type())
assert.True(t, val.BoolVal())
sm.UpsertBytes("other_key_bytes", []byte{7, 8, 9})
val, exist = sm.Get("other_key_bytes")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeBytes, val.Type())
assert.EqualValues(t, []byte{7, 8, 9}, val.BytesVal())
sm.Upsert("yet_another_key", NewAttributeValueString("yet_another_value"))
val, exist = sm.Get("yet_another_key")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeString, val.Type())
assert.EqualValues(t, "yet_another_value", val.StringVal())
sm.UpsertString("yet_another_key_string", "yet_another_value")
val, exist = sm.Get("yet_another_key_string")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeString, val.Type())
assert.EqualValues(t, "yet_another_value", val.StringVal())
sm.UpsertInt("yet_another_key_int", 456)
val, exist = sm.Get("yet_another_key_int")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeInt, val.Type())
assert.EqualValues(t, 456, val.IntVal())
sm.UpsertDouble("yet_another_key_double", 4.56)
val, exist = sm.Get("yet_another_key_double")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeDouble, val.Type())
assert.EqualValues(t, 4.56, val.DoubleVal())
sm.UpsertBool("yet_another_key_bool", false)
val, exist = sm.Get("yet_another_key_bool")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeBool, val.Type())
assert.False(t, val.BoolVal())
sm.UpsertBytes("yet_another_key_bytes", []byte{1})
val, exist = sm.Get("yet_another_key_bytes")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeBytes, val.Type())
assert.EqualValues(t, []byte{1}, val.BytesVal())
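// Remove every added entry; removing again must return false.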
assert.True(t, sm.Remove("other_key"))
assert.True(t, sm.Remove("other_key_string"))
assert.True(t, sm.Remove("other_key_int"))
assert.True(t, sm.Remove("other_key_double"))
assert.True(t, sm.Remove("other_key_bool"))
assert.True(t, sm.Remove("other_key_bytes"))
assert.True(t, sm.Remove("yet_another_key"))
assert.True(t, sm.Remove("yet_another_key_string"))
assert.True(t, sm.Remove("yet_another_key_int"))
assert.True(t, sm.Remove("yet_another_key_double"))
assert.True(t, sm.Remove("yet_another_key_bool"))
assert.True(t, sm.Remove("yet_another_key_bytes"))
assert.False(t, sm.Remove("other_key"))
assert.False(t, sm.Remove("yet_another_key"))
// Test that the initial key is still there.
val, exist = sm.Get("test_key")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeString, val.Type())
assert.EqualValues(t, "test_value", val.StringVal())
val, exist = sm.Get("test_key2")
assert.True(t, exist)
assert.EqualValues(t, AttributeValueTypeEmpty, val.Type())
assert.EqualValues(t, "", val.StringVal())
_, exist = sm.Get("test_key3")
assert.False(t, exist)
// Test Sort
assert.EqualValues(t, AttributeMap{orig: &origWithNil}, sm.Sort())
}
func TestAttributeMapIterationNil(t *testing.T) {
NewAttributeMap().Range(func(k string, v AttributeValue) bool {
// Fail if any element is returned
t.Fail()
return true
})
}
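// Range must stop iterating once the callback returns false.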
func TestAttributeMap_Range(t *testing.T) {
rawMap := map[string]AttributeValue{
"k_string": NewAttributeValueString("123"),
"k_int": NewAttributeValueInt(123),
"k_double": NewAttributeValueDouble(1.23),
"k_bool": NewAttributeValueBool(true),
"k_empty": NewAttributeValueEmpty(),
"k_bytes": NewAttributeValueBytes([]byte{}),
}
am := NewAttributeMapFromMap(rawMap)
assert.Equal(t, 6, am.Len())
calls := 0
am.Range(func(k string, v AttributeValue) bool {
calls++
return false
})
assert.Equal(t, 1, calls)
am.Range(func(k string, v AttributeValue) bool {
assert.True(t, v.Equal(rawMap[k]))
delete(rawMap, k)
return true
})
assert.EqualValues(t, 0, len(rawMap))
}
func TestAttributeMap_InitFromMap(t *testing.T) {
am := NewAttributeMapFromMap(map[string]AttributeValue(nil))
assert.EqualValues(t, NewAttributeMap(), am)
rawMap := map[string]AttributeValue{
"k_string": NewAttributeValueString("123"),
"k_int": NewAttributeValueInt(123),
"k_double": NewAttributeValueDouble(1.23),
"k_bool": NewAttributeValueBool(true),
"k_null": NewAttributeValueEmpty(),
"k_bytes": NewAttributeValueBytes([]byte{1, 2, 3}),
}
rawOrig := []otlpcommon.KeyValue{
newAttributeKeyValueString("k_string", "123"),
newAttributeKeyValueInt("k_int", 123),
newAttributeKeyValueDouble("k_double", 1.23),
newAttributeKeyValueBool("k_bool", true),
newAttributeKeyValueNull("k_null"),
newAttributeKeyValueBytes("k_bytes", []byte{1, 2, 3}),
}
am = NewAttributeMapFromMap(rawMap)
assert.EqualValues(t, AttributeMap{orig: &rawOrig}.Sort(), am.Sort())
}
func TestAttributeValue_CopyTo(t *testing.T) {
// Test nil KvlistValue case for MapVal() func.
dest := NewAttributeValueEmpty()
orig := &otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_KvlistValue{KvlistValue: nil}}
AttributeValue{orig: orig}.CopyTo(dest)
assert.Nil(t, dest.orig.Value.(*otlpcommon.AnyValue_KvlistValue).KvlistValue)
// Test nil ArrayValue case for SliceVal() func.
dest = NewAttributeValueEmpty()
orig = &otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_ArrayValue{ArrayValue: nil}}
AttributeValue{orig: orig}.CopyTo(dest)
assert.Nil(t, dest.orig.Value.(*otlpcommon.AnyValue_ArrayValue).ArrayValue)
// Test copy empty value.
AttributeValue{orig: &otlpcommon.AnyValue{}}.CopyTo(dest)
assert.Nil(t, dest.orig.Value)
}
func TestAttributeMap_CopyTo(t *testing.T) {
dest := NewAttributeMap()
// Test CopyTo to empty
NewAttributeMap().CopyTo(dest)
assert.EqualValues(t, 0, dest.Len())
// Test CopyTo larger slice
generateTestAttributeMap().CopyTo(dest)
assert.EqualValues(t, generateTestAttributeMap(), dest)
// Test CopyTo same size slice
generateTestAttributeMap().CopyTo(dest)
assert.EqualValues(t, generateTestAttributeMap(), dest)
// Test CopyTo with an empty Value in the destination
(*dest.orig)[0].Value = otlpcommon.AnyValue{}
generateTestAttributeMap().CopyTo(dest)
assert.EqualValues(t, generateTestAttributeMap(), dest)
}
func TestAttributeValue_copyTo(t *testing.T) {
av := NewAttributeValueEmpty()
destVal := otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_IntValue{}}
av.copyTo(&destVal)
assert.EqualValues(t, nil, destVal.Value)
}
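// Values returned by Get are references; mutating them updates the map.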
func TestAttributeMap_Update(t *testing.T) {
origWithNil := []otlpcommon.KeyValue{
{
Key: "test_key",
Value: otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_StringValue{StringValue: "test_value"}},
},
{
Key: "test_key2",
Value: otlpcommon.AnyValue{Value: nil},
},
}
sm := AttributeMap{
orig: &origWithNil,
}
av, exists := sm.Get("test_key")
assert.True(t, exists)
assert.EqualValues(t, AttributeValueTypeString, av.Type())
assert.EqualValues(t, "test_value", av.StringVal())
av.SetIntVal(123)
av2, exists := sm.Get("test_key")
assert.True(t, exists)
assert.EqualValues(t, AttributeValueTypeInt, av2.Type())
assert.EqualValues(t, 123, av2.IntVal())
av, exists = sm.Get("test_key2")
assert.True(t, exists)
assert.EqualValues(t, AttributeValueTypeEmpty, av.Type())
assert.EqualValues(t, "", av.StringVal())
av.SetIntVal(123)
av2, exists = sm.Get("test_key2")
assert.True(t, exists)
assert.EqualValues(t, AttributeValueTypeInt, av2.Type())
assert.EqualValues(t, 123, av2.IntVal())
}
func TestAttributeMap_EnsureCapacity_Zero(t *testing.T) {
am := NewAttributeMap()
am.EnsureCapacity(0)
assert.Equal(t, 0, am.Len())
assert.Equal(t, 0, cap(*am.orig))
}
func TestAttributeMap_EnsureCapacity(t *testing.T) {
am := NewAttributeMap()
am.EnsureCapacity(5)
assert.Equal(t, 0, am.Len())
assert.Equal(t, 5, cap(*am.orig))
am.EnsureCapacity(3)
assert.Equal(t, 0, am.Len())
assert.Equal(t, 5, cap(*am.orig))
am.EnsureCapacity(8)
assert.Equal(t, 0, am.Len())
assert.Equal(t, 8, cap(*am.orig))
}
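// Clear must reset the underlying slice to nil.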
func TestAttributeMap_Clear(t *testing.T) {
am := NewAttributeMap()
assert.Nil(t, *am.orig)
am.Clear()
assert.Nil(t, *am.orig)
am.EnsureCapacity(5)
assert.NotNil(t, *am.orig)
am.Clear()
assert.Nil(t, *am.orig)
}
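// RemoveIf deletes every entry matching the predicate.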
func TestAttributeMap_RemoveIf(t *testing.T) {
rawMap := map[string]AttributeValue{
"k_string": NewAttributeValueString("123"),
"k_int": NewAttributeValueInt(123),
"k_double": NewAttributeValueDouble(1.23),
"k_bool": NewAttributeValueBool(true),
"k_empty": NewAttributeValueEmpty(),
"k_bytes": NewAttributeValueBytes([]byte{}),
}
am := NewAttributeMapFromMap(rawMap)
assert.Equal(t, 6, am.Len())
am.RemoveIf(func(key string, val AttributeValue) bool {
return key == "k_int" || val.Type() == AttributeValueTypeBool
})
assert.Equal(t, 4, am.Len())
_, exists := am.Get("k_string")
assert.True(t, exists)
_, exists = am.Get("k_bool")
assert.False(t, exists)
_, exists = am.Get("k_int")
assert.False(t, exists)
}
func BenchmarkAttributeValue_CopyTo(b *testing.B) {
av := NewAttributeValueString("k")
c := NewAttributeValueInt(123)
b.ResetTimer()
for n := 0; n < b.N; n++ {
c.copyTo(av.orig)
}
if av.IntVal() != 123 {
b.Fail()
}
}
func BenchmarkAttributeValue_SetIntVal(b *testing.B) {
av := NewAttributeValueString("k")
b.ResetTimer()
for n := 0; n < b.N; n++ {
av.SetIntVal(int64(n))
}
if av.IntVal() != int64(b.N-1) {
b.Fail()
}
}
func BenchmarkAttributeMap_Range(b *testing.B) {
const numElements = 20
rawOrig := make([]otlpcommon.KeyValue, numElements)
for i := 0; i < numElements; i++ {
rawOrig[i] = otlpcommon.KeyValue{
Key: "k" + strconv.Itoa(i),
Value: otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_StringValue{StringValue: "v" + strconv.Itoa(i)}},
}
}
am := AttributeMap{
orig: &rawOrig,
}
b.ResetTimer()
for n := 0; n < b.N; n++ {
numEls := 0
am.Range(func(k string, v AttributeValue) bool {
numEls++
return true
})
if numEls != numElements {
b.Fail()
}
}
}
func BenchmarkAttributeMap_RangeOverMap(b *testing.B) {
const numElements = 20
rawOrig := make(map[string]AttributeValue, numElements)
for i := 0; i < numElements; i++ {
key := "k" + strconv.Itoa(i)
rawOrig[key] = NewAttributeValueString("v" + strconv.Itoa(i))
}
b.ResetTimer()
for n := 0; n < b.N; n++ {
numEls := 0
for _, v := range rawOrig {
if v.orig == nil {
continue
}
numEls++
}
if numEls != numElements {
b.Fail()
}
}
}
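// Compare removing keys one at a time with a single RemoveIf pass.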
func BenchmarkAttributeMap_Remove(b *testing.B) {
b.StopTimer()
// Remove all of the even keys
keysToRemove := map[string]struct{}{}
for j := 0; j < 50; j++ {
keysToRemove[fmt.Sprintf("%d", j*2)] = struct{}{}
}
for i := 0; i < b.N; i++ {
m := NewAttributeMap()
for j := 0; j < 100; j++ {
m.InsertString(fmt.Sprintf("%d", j), "string value")
}
b.StartTimer()
for k := range keysToRemove {
m.Remove(k)
}
b.StopTimer()
}
}
func BenchmarkAttributeMap_RemoveIf(b *testing.B) {
b.StopTimer()
// Remove all of the even keys
keysToRemove := map[string]struct{}{}
for j := 0; j < 50; j++ {
keysToRemove[fmt.Sprintf("%d", j*2)] = struct{}{}
}
for i := 0; i < b.N; i++ {
m := NewAttributeMap()
for j := 0; j < 100; j++ {
m.InsertString(fmt.Sprintf("%d", j), "string value")
}
b.StartTimer()
m.RemoveIf(func(key string, _ AttributeValue) bool {
_, remove := keysToRemove[key]
return remove
})
b.StopTimer()
}
}
func BenchmarkStringMap_RangeOverMap(b *testing.B) {
const numElements = 20
rawOrig := make(map[string]string, numElements)
for i := 0; i < numElements; i++ {
key := "k" + strconv.Itoa(i)
rawOrig[key] = "v" + strconv.Itoa(i)
}
b.ResetTimer()
for n := 0; n < b.N; n++ {
numEls := 0
for _, v := range rawOrig {
if v == "" {
continue
}
numEls++
}
if numEls != numElements {
b.Fail()
}
}
}
func fillTestAttributeValue(dest AttributeValue) {
dest.SetStringVal("v")
}
func generateTestAttributeValue() AttributeValue {
av := NewAttributeValueEmpty()
fillTestAttributeValue(av)
return av
}
func generateTestAttributeMap() AttributeMap {
am := NewAttributeMap()
fillTestAttributeMap(am)
return am
}
func fillTestAttributeMap(dest AttributeMap) {
NewAttributeMapFromMap(map[string]AttributeValue{
"k": NewAttributeValueString("v"),
}).CopyTo(dest)
}
func generateTestEmptyAttributeMap() AttributeMap {
return NewAttributeMapFromMap(map[string]AttributeValue{
"k": NewAttributeValueEmpty(),
})
}
func generateTestIntAttributeMap() AttributeMap {
return NewAttributeMapFromMap(map[string]AttributeValue{
"k": NewAttributeValueInt(123),
})
}
func generateTestDoubleAttributeMap() AttributeMap {
return NewAttributeMapFromMap(map[string]AttributeValue{
"k": NewAttributeValueDouble(12.3),
})
}
func generateTestBoolAttributeMap() AttributeMap {
return NewAttributeMapFromMap(map[string]AttributeValue{
"k": NewAttributeValueBool(true),
})
}
func generateTestBytesAttributeMap() AttributeMap {
return NewAttributeMapFromMap(map[string]AttributeValue{
"k": NewAttributeValueBytes([]byte{1, 2, 3, 4, 5}),
})
}
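// Test array values, including copying one array into another as a child.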
func TestAttributeValueArray(t *testing.T) {
a1 := NewAttributeValueArray()
assert.EqualValues(t, AttributeValueTypeArray, a1.Type())
assert.EqualValues(t, NewAttributeValueSlice(), a1.SliceVal())
assert.EqualValues(t, 0, a1.SliceVal().Len())
a1.SliceVal().AppendEmpty().SetDoubleVal(123)
assert.EqualValues(t, 1, a1.SliceVal().Len())
assert.EqualValues(t, NewAttributeValueDouble(123), a1.SliceVal().At(0))
// Create a second array.
a2 := NewAttributeValueArray()
assert.EqualValues(t, 0, a2.SliceVal().Len())
a2.SliceVal().AppendEmpty().SetStringVal("somestr")
assert.EqualValues(t, 1, a2.SliceVal().Len())
assert.EqualValues(t, NewAttributeValueString("somestr"), a2.SliceVal().At(0))
// Insert the second array as a child.
a2.CopyTo(a1.SliceVal().AppendEmpty())
assert.EqualValues(t, 2, a1.SliceVal().Len())
assert.EqualValues(t, NewAttributeValueDouble(123), a1.SliceVal().At(0))
assert.EqualValues(t, a2, a1.SliceVal().At(1))
// Check that the array was correctly inserted.
childArray := a1.SliceVal().At(1)
assert.EqualValues(t, AttributeValueTypeArray, childArray.Type())
assert.EqualValues(t, 1, childArray.SliceVal().Len())
v := childArray.SliceVal().At(0)
assert.EqualValues(t, AttributeValueTypeString, v.Type())
assert.EqualValues(t, "somestr", v.StringVal())
// Test nil values case for SliceVal() func.
a1 = AttributeValue{orig: &otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_ArrayValue{ArrayValue: nil}}}
assert.EqualValues(t, NewAttributeValueSlice(), a1.SliceVal())
}
func TestAttributeSliceWithNilValues(t *testing.T) {
origWithNil := []otlpcommon.AnyValue{
{},
{Value: &otlpcommon.AnyValue_StringValue{StringValue: "test_value"}},
}
sm := AttributeValueSlice{
orig: &origWithNil,
}
val := sm.At(0)
assert.EqualValues(t, AttributeValueTypeEmpty, val.Type())
assert.EqualValues(t, "", val.StringVal())
val = sm.At(1)
assert.EqualValues(t, AttributeValueTypeString, val.Type())
assert.EqualValues(t, "test_value", val.StringVal())
sm.AppendEmpty().SetStringVal("other_value")
val = sm.At(2)
assert.EqualValues(t, AttributeValueTypeString, val.Type())
assert.EqualValues(t, "other_value", val.StringVal())
}
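// AsString renders scalars directly, maps and slices as JSON, and bytes as base64.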
func TestAsString(t *testing.T) {
tests := []struct {
name string
input AttributeValue
expected string
}{
{
name: "string",
input: NewAttributeValueString("string value"),
expected: "string value",
},
{
name: "int64",
input: NewAttributeValueInt(42),
expected: "42",
},
{
name: "float64",
input: NewAttributeValueDouble(1.61803399),
expected: "1.61803399",
},
{
name: "boolean",
input: NewAttributeValueBool(true),
expected: "true",
},
{
name: "empty_map",
input: NewAttributeValueMap(),
expected: "{}",
},
{
name: "simple_map",
input: simpleAttributeValueMap(),
expected: "{\"arrKey\":[\"strOne\",\"strTwo\"],\"boolKey\":false,\"floatKey\":18.6,\"intKey\":7,\"mapKey\":{\"keyOne\":\"valOne\",\"keyTwo\":\"valTwo\"},\"nullKey\":null,\"strKey\":\"strVal\"}",
},
{
name: "empty_array",
input: NewAttributeValueArray(),
expected: "[]",
},
{
name: "simple_array",
input: simpleAttributeValueArray(),
expected: "[\"strVal\",7,18.6,false,null]",
},
{
name: "empty",
input: NewAttributeValueEmpty(),
expected: "",
},
{
name: "bytes",
input: NewAttributeValueBytes([]byte("String bytes")),
expected: base64.StdEncoding.EncodeToString([]byte("String bytes")),
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
actual := test.input.AsString()
assert.Equal(t, test.expected, actual)
})
}
}
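// AsRaw converts the map into plain Go types.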
func TestAsRaw(t *testing.T) {
arr := NewAttributeValueArray()
arr.SliceVal().AppendEmpty().SetBoolVal(false)
arr.SliceVal().AppendEmpty().SetBytesVal([]byte("test"))
arr.SliceVal().AppendEmpty().SetDoubleVal(12.9)
arr.SliceVal().AppendEmpty().SetIntVal(91)
arr.SliceVal().AppendEmpty().SetStringVal("another string")
tests := []struct {
name string
input AttributeMap
expected map[string]interface{}
}{
{
name: "asraw",
input: NewAttributeMapFromMap(
map[string]AttributeValue{
"array": arr,
"bool": NewAttributeValueBool(true),
"bytes": NewAttributeValueBytes([]byte("bytes value")),
"double": NewAttributeValueDouble(1.2),
"empty": NewAttributeValueEmpty(),
"int": NewAttributeValueInt(900),
"map": NewAttributeValueMap(),
"string": NewAttributeValueString("string value"),
},
),
expected: map[string]interface{}{
"array": []interface{}{false, []byte("test"), 12.9, int64(91), "another string"},
"bool": true,
"bytes": []byte("bytes value"),
"double": 1.2,
"empty": interface{}(nil),
"int": int64(900),
"map": map[string]interface{}{},
"string": "string value",
},
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
actual := test.input.AsRaw()
assert.Equal(t, test.expected, actual)
})
}
}
func simpleAttributeValueMap() AttributeValue {
ret := NewAttributeValueMap()
attrMap := ret.MapVal()
attrMap.UpsertString("strKey", "strVal")
attrMap.UpsertInt("intKey", 7)
attrMap.UpsertDouble("floatKey", 18.6)
attrMap.UpsertBool("boolKey", false)
attrMap.Upsert("nullKey", NewAttributeValueEmpty())
attrMap.Upsert("mapKey", constructTestAttributeSubmap())
attrMap.Upsert("arrKey", constructTestAttributeSubarray())
return ret
}
func simpleAttributeValueArray() AttributeValue {
ret := NewAttributeValueArray()
attrArr := ret.SliceVal()
attrArr.AppendEmpty().SetStringVal("strVal")
attrArr.AppendEmpty().SetIntVal(7)
attrArr.AppendEmpty().SetDoubleVal(18.6)
attrArr.AppendEmpty().SetBoolVal(false)
attrArr.AppendEmpty()
return ret
}
func constructTestAttributeSubmap() AttributeValue {
value := NewAttributeValueMap()
value.MapVal().UpsertString("keyOne", "valOne")
value.MapVal().UpsertString("keyTwo", "valTwo")
return value
}
func constructTestAttributeSubarray() AttributeValue {
value := NewAttributeValueArray()
value.SliceVal().AppendEmpty().SetStringVal("strOne")
value.SliceVal().AppendEmpty().SetStringVal("strTwo")
return value
}<|fim▁end|> |
import ( |
<|file_name|>deactivate_realm.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from __future__ import print_function
from django.core.management.base import BaseCommand
import sys
from zerver.lib.actions import do_deactivate_realm
from zerver.models import get_realm
class Command(BaseCommand):
help = """Script to deactivate a realm."""
def add_arguments(self, parser):
parser.add_argument('domain', metavar='<domain>', type=str,
help='domain of realm to deactivate')
def handle(self, *args, **options):
realm = get_realm(options["domain"])
if realm is None:
print("Could not find realm %s" % (options["domain"],))
sys.exit(1)
print("Deactivating", options["domain"])<|fim▁hole|> do_deactivate_realm(realm)
print("Done!")<|fim▁end|> | |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import networkx as nx
import re
<|fim▁hole|> with open(file_path) as ifs:
lines = map(lambda ele: ele.strip(), ifs.readlines())
lines = filter(lambda ele: not ele.startswith('#') and re.match('.*[0-9]+.*[0-9]+', ele), lines)
pair_list = map(lambda ele: extract_first_two(map(lambda ele2: ele2.strip(), ele.split())), lines)
return nx.Graph(pair_list)<|fim▁end|> | def get_graph_info(file_path):
def extract_first_two(collection):
return [int(collection[0]), int(collection[1])]
|
<|file_name|>project-card-list.component.ts<|end_file_name|><|fim▁begin|>import { ChangeDetectionStrategy, Component, EventEmitter, Input, Output } from '@angular/core';
import { PagedList, Project } from '@dev/translatr-model';
import { firstChar } from '@dev/translatr-sdk';
import { trackByFn } from '@translatr/utils';
@Component({
changeDetection: ChangeDetectionStrategy.OnPush,
selector: 'app-project-card-list',
templateUrl: './project-card-list.component.html',
styleUrls: ['./project-card-list.component.scss']
})
export class ProjectCardListComponent {
@Input() projects: PagedList<Project>;
@Input() canCreate = false;<|fim▁hole|> @Output() readonly create = new EventEmitter<void>();
firstChar = firstChar;
trackByFn = trackByFn;
onCreateProject(): void {
this.create.emit();
}
}<|fim▁end|> | @Input() showMore = true;
@Input() showMoreLink: any[] | string | null | undefined;
|
<|file_name|>webpack.config.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | module.exports = (env = "dev") =>
// you may extend to other env to produce a production build for example
require(`./webpack.${env}.config.js`); |
<|file_name|>sage_methods.py<|end_file_name|><|fim▁begin|>"""
Project: flask-rest<|fim▁hole|>Description: All of the rest methods...
"""
class SageMethod:
GET = 'get'
POST = 'post'
DELETE = 'delete'
PUT = 'put'
ALL = [GET, POST, DELETE, PUT]<|fim▁end|> | Author: Saj Arora |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# GuessIt - A library for guessing information from filenames
# Copyright (c) 2011 Nicolas Wack <[email protected]>
#
# GuessIt is free software; you can redistribute it and/or modify it under
# the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# GuessIt is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
__version__ = '0.5.2'
__all__ = ['Guess', 'Language',
'guess_file_info', 'guess_video_info',
'guess_movie_info', 'guess_episode_info']
# Do python3 detection before importing any other module, to be sure that
# it will then always be available
# with code from http://lucumr.pocoo.org/2011/1/22/forwards-compatible-python/
import sys
if sys.version_info[0] >= 3:
PY3 = True
unicode_text_type = str
native_text_type = str
base_text_type = str
def u(x):
return str(x)
def s(x):
return x
class UnicodeMixin(object):
__str__ = lambda x: x.__unicode__()
import binascii
def to_hex(x):
return binascii.hexlify(x).decode('utf-8')
else:
PY3 = False
__all__ = [ str(s) for s in __all__ ] # fix imports for python2
unicode_text_type = unicode
native_text_type = str
base_text_type = basestring
def u(x):
if isinstance(x, str):
return x.decode('utf-8')
return unicode(x)
def s(x):
if isinstance(x, unicode):
return x.encode('utf-8')
if isinstance(x, list):
return [ s(y) for y in x ]
if isinstance(x, tuple):
return tuple(s(y) for y in x)
if isinstance(x, dict):
return dict((s(key), s(value)) for key, value in x.items())
return x
class UnicodeMixin(object):
__str__ = lambda x: unicode(x).encode('utf-8')
def to_hex(x):
return x.encode('hex')
from guessit.guess import Guess, merge_all
from guessit.language import Language
from guessit.matcher import IterativeMatcher
import logging
log = logging.getLogger(__name__)
class NullHandler(logging.Handler):
def emit(self, record):
pass
# let's be a nicely behaving library
h = NullHandler()
log.addHandler(h)
def guess_file_info(filename, filetype, info=None):
"""info can contain the names of the various plugins, such as 'filename' to
detect filename info, or 'hash_md5' to get the md5 hash of the file.
>>> guess_file_info('tests/dummy.srt', 'autodetect', info = ['hash_md5', 'hash_sha1'])
{'hash_md5': 'e781de9b94ba2753a8e2945b2c0a123d', 'hash_sha1': 'bfd18e2f4e5d59775c2bc14d80f56971891ed620'}
"""
result = []
hashers = []
if info is None:
info = ['filename']
if isinstance(info, base_text_type):
info = [info]
for infotype in info:
if infotype == 'filename':
m = IterativeMatcher(filename, filetype=filetype)
result.append(m.matched())
elif infotype == 'hash_mpc':
from guessit.hash_mpc import hash_file
try:
result.append(Guess({'hash_mpc': hash_file(filename)},
confidence=1.0))
except Exception as e:
log.warning('Could not compute MPC-style hash because: %s' % e)
elif infotype == 'hash_ed2k':
from guessit.hash_ed2k import hash_file
try:
result.append(Guess({'hash_ed2k': hash_file(filename)},
confidence=1.0))
except Exception as e:
log.warning('Could not compute ed2k hash because: %s' % e)
elif infotype.startswith('hash_'):
import hashlib
hashname = infotype[5:]
try:
hasher = getattr(hashlib, hashname)()
hashers.append((infotype, hasher))
except AttributeError:
log.warning('Could not compute %s hash because it is not available from python\'s hashlib module' % hashname)
else:
log.warning('Invalid infotype: %s' % infotype)
# do all the hashes now, but on a single pass
if hashers:
try:
blocksize = 8192
hasherobjs = dict(hashers).values()
with open(filename, 'rb') as f:
chunk = f.read(blocksize)
while chunk:
for hasher in hasherobjs:
hasher.update(chunk)
chunk = f.read(blocksize)
for infotype, hasher in hashers:
result.append(Guess({infotype: hasher.hexdigest()},
confidence=1.0))
except Exception as e:
log.warning('Could not compute hash because: %s' % e)
result = merge_all(result)
<|fim▁hole|> result['series'] += ' (%s)' % result['country'].alpha2.upper()
return result
def guess_video_info(filename, info=None):
return guess_file_info(filename, 'autodetect', info)
def guess_movie_info(filename, info=None):
return guess_file_info(filename, 'movie', info)
def guess_episode_info(filename, info=None):
return guess_file_info(filename, 'episode', info)<|fim▁end|> | # last minute adjustments
# if country is in the guessed properties, make it part of the filename
if 'country' in result: |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Gateway documentation build configuration file, created by
# sphinx-quickstart on Tue Sep 25 06:46:30 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
sys.path.append(os.path.abspath('_themes'))
sys.path.append(os.path.abspath('.'))
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.1'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo',
'sphinx.ext.coverage', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Gateway'
copyright = u'2012, Stephane Wirtel'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
import pkg_resources
try:
release = pkg_resources.get_distribution('gateway').version
except pkg_resources.DistributionNotFound:
print 'To build the documentation, the distribution information of Gateway'
print 'has to be available. Either install the package into your'
print 'development environment or run "setup.py develop" to setup the'
print 'metadata. A virtualenv is recommended!'
sys.exit(1)
del pkg_resources
if 'dev' in release:
release = release.split('dev')[0] + 'dev'
version = '.'.join(release.split('.')[:2])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
#pygments_style = 'sphinx'
pygments_style = 'flask_theme_support.FlaskyStyle'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'flask'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_themes']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.<|fim▁hole|>
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Gatewaydoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
'fontpkg' : r'\usepackage{mathpazo}',
'papersize' : 'a4paper',
'pointsize' : '12pt',
'preamble' : r' \usepackage{flaskstyle}',
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Gateway.tex', u'Gateway Documentation',
u'Stephane Wirtel', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
latex_use_parts = True
latex_use_modindex = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
latex_additional_files = [
'flaskstyle.sty',
]
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'gateway', u'Gateway Documentation',
[u'Stephane Wirtel'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Gateway', u'Gateway Documentation',
u'Stephane Wirtel', 'Gateway', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
#locale_dirs = ['translated/']
#language = 'fr'<|fim▁end|> | #html_use_smartypants = True |
<|file_name|>backend_termion.rs<|end_file_name|><|fim▁begin|>#[cfg(feature = "termion")]
#[test]
fn backend_termion_should_only_write_diffs() -> Result<(), Box<dyn std::error::Error>> {
use std::{fmt::Write, io::Cursor};
let mut bytes = Vec::new();
let mut stdout = Cursor::new(&mut bytes);
{
use tui::{
backend::TermionBackend, layout::Rect, widgets::Paragraph, Terminal, TerminalOptions,
Viewport,
};
let backend = TermionBackend::new(&mut stdout);
let area = Rect::new(0, 0, 3, 1);
let mut terminal = Terminal::with_options(
backend,
TerminalOptions {
viewport: Viewport::fixed(area),
},
)?;
terminal.draw(|f| {
f.render_widget(Paragraph::new("a"), area);
})?;
terminal.draw(|f| {
f.render_widget(Paragraph::new("ab"), area);
})?;
terminal.draw(|f| {
f.render_widget(Paragraph::new("abc"), area);
})?;
}
let expected = {
use termion::{color, cursor, style};
let mut s = String::new();
// First draw
write!(s, "{}", cursor::Goto(1, 1))?;
s.push('a');
write!(s, "{}", color::Fg(color::Reset))?;
write!(s, "{}", color::Bg(color::Reset))?;
write!(s, "{}", style::Reset)?;
write!(s, "{}", cursor::Hide)?;
// Second draw
write!(s, "{}", cursor::Goto(2, 1))?;
s.push('b');
write!(s, "{}", color::Fg(color::Reset))?;
write!(s, "{}", color::Bg(color::Reset))?;
write!(s, "{}", style::Reset)?;
write!(s, "{}", cursor::Hide)?;
// Third draw
write!(s, "{}", cursor::Goto(3, 1))?;
s.push('c');
write!(s, "{}", color::Fg(color::Reset))?;
write!(s, "{}", color::Bg(color::Reset))?;
write!(s, "{}", style::Reset)?;
write!(s, "{}", cursor::Hide)?;
// Terminal drop
write!(s, "{}", cursor::Show)?;
s
};
assert_eq!(std::str::from_utf8(&bytes)?, expected);
Ok(())<|fim▁hole|><|fim▁end|> | } |
<|file_name|>32.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>
export = RadarWeather32;<|fim▁end|> | import { RadarWeather32 } from "../../"; |
<|file_name|>formatters.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import itertools
import logging
import logging.config
import logging.handlers
import sys
import traceback
import six
from six import moves
from oslo_context import context as context_utils
from oslo_serialization import jsonutils
def _dictify_context(context):
if context is None:
return {}
if not isinstance(context, dict) and getattr(context, 'to_dict', None):
context = context.to_dict()
return context
# A configuration object is given to us when the application registers
# the logging options.
_CONF = None
def _store_global_conf(conf):
global _CONF
_CONF = conf
def _update_record_with_context(record):
"""Given a log record, update it with context information.
The request context, if there is one, will either be in the
extra values for the incoming record or in the global
thread-local store.
"""
context = record.__dict__.get(
'context',
context_utils.get_current()
)
d = _dictify_context(context)
# Copy the context values directly onto the record so they can be
# used by the formatting strings.
for k, v in d.items():
setattr(record, k, v)
return context
class JSONFormatter(logging.Formatter):
def __init__(self, fmt=None, datefmt=None):
# NOTE(jkoelker) we ignore the fmt argument, but its still there
# since logging.config.fileConfig passes it.
self.datefmt = datefmt
def formatException(self, ei, strip_newlines=True):
lines = traceback.format_exception(*ei)
if strip_newlines:
lines = [moves.filter(
lambda x: x,
line.rstrip().splitlines()) for line in lines]
lines = list(itertools.chain(*lines))
return lines
def format(self, record):
message = {'message': record.getMessage(),
'asctime': self.formatTime(record, self.datefmt),
'name': record.name,
'msg': record.msg,
'args': record.args,
'levelname': record.levelname,
'levelno': record.levelno,
'pathname': record.pathname,
'filename': record.filename,
'module': record.module,
'lineno': record.lineno,
'funcname': record.funcName,
'created': record.created,
'msecs': record.msecs,
'relative_created': record.relativeCreated,
'thread': record.thread,
'thread_name': record.threadName,
'process_name': record.processName,
'process': record.process,
'traceback': None}
# Build the extra values that were given to us, including
# the context.
context = _update_record_with_context(record)
if hasattr(record, 'extra'):
extra = record.extra.copy()
else:
extra = {}
for key in getattr(record, 'extra_keys', []):
if key not in extra:
extra[key] = getattr(record, key)
# If we saved a context object, explode it into the extra
# dictionary because the values are more useful than the
# object reference.
if 'context' in extra:
extra.update(_dictify_context(context))
del extra['context']
message['extra'] = extra
if record.exc_info:
message['traceback'] = self.formatException(record.exc_info)
return jsonutils.dumps(message)
class ContextFormatter(logging.Formatter):
"""A context.RequestContext aware formatter configured through flags.
The flags used to set format strings are: logging_context_format_string
and logging_default_format_string. You can also specify
logging_debug_format_suffix to append extra formatting if the log level is
debug.
For information about what variables are available for the formatter see:
http://docs.python.org/library/logging.html#formatter
If available, uses the context value stored in TLS - local.store.context
"""
def __init__(self, *args, **kwargs):
"""Initialize ContextFormatter instance
Takes additional keyword arguments which can be used in the message
format string.
:keyword project: project name
:type project: string
:keyword version: project version
:type version: string
"""
self.project = kwargs.pop('project', 'unknown')
self.version = kwargs.pop('version', 'unknown')
self.conf = kwargs.pop('config', _CONF)
logging.Formatter.__init__(self, *args, **kwargs)
def format(self, record):
"""Uses contextstring if request_id is set, otherwise default."""
# NOTE(jecarey): If msg is not unicode, coerce it into unicode
# before it can get to the python logging and
# possibly cause string encoding trouble
if not isinstance(record.msg, six.text_type):
record.msg = six.text_type(record.msg)
# store project info
record.project = self.project
record.version = self.version
context = _update_record_with_context(record)
if context:
# FIXME(dhellmann): We should replace these nova-isms with
# more generic handling in the Context class. See the
# app-agnostic-logging-parameters blueprint.
instance = getattr(context, 'instance', None)
instance_uuid = getattr(context, 'instance_uuid', None)
# resource_uuid was introduced in oslo_context's
# RequestContext
resource_uuid = getattr(context, 'resource_uuid', None)
instance_extra = ''
if instance:
instance_extra = (self.conf.instance_format
% {'uuid': instance})
elif instance_uuid:
instance_extra = (self.conf.instance_uuid_format
% {'uuid': instance_uuid})
elif resource_uuid:
instance_extra = (self.conf.instance_uuid_format
% {'uuid': resource_uuid})
record.instance = instance_extra
# NOTE(sdague): default the fancier formatting params
# to an empty string so we don't throw an exception if
# they get used
for key in ('instance', 'color', 'user_identity', 'resource'):
if key not in record.__dict__:
record.__dict__[key] = ''
if record.__dict__.get('request_id'):
fmt = self.conf.logging_context_format_string
else:
fmt = self.conf.logging_default_format_string
<|fim▁hole|>
if sys.version_info < (3, 2):
self._fmt = fmt
else:
self._style = logging.PercentStyle(fmt)
self._fmt = self._style._fmt
# Cache this on the record, Logger will respect our formatted copy
if record.exc_info:
record.exc_text = self.formatException(record.exc_info, record)
return logging.Formatter.format(self, record)
def formatException(self, exc_info, record=None):
"""Format exception output with CONF.logging_exception_prefix."""
if not record:
return logging.Formatter.formatException(self, exc_info)
stringbuffer = moves.StringIO()
traceback.print_exception(exc_info[0], exc_info[1], exc_info[2],
None, stringbuffer)
lines = stringbuffer.getvalue().split('\n')
stringbuffer.close()
if self.conf.logging_exception_prefix.find('%(asctime)') != -1:
record.asctime = self.formatTime(record, self.datefmt)
formatted_lines = []
for line in lines:
pl = self.conf.logging_exception_prefix % record.__dict__
fl = '%s%s' % (pl, line)
formatted_lines.append(fl)
return '\n'.join(formatted_lines)<|fim▁end|> | if (record.levelno == logging.DEBUG and
self.conf.logging_debug_format_suffix):
fmt += " " + self.conf.logging_debug_format_suffix |
<|file_name|>recipe.rs<|end_file_name|><|fim▁begin|>use nom::IResult;
// chars
named!(identifier<&str,&str>, re_find_static!(
concat!(
r"^",
r"[\p{Lu}\p{Ll}\p{Lt}\p{Lm}\p{Lo}\p{Nl}]",
r"[\p{Lu}\p{Ll}\p{Lt}\p{Lm}\p{Lo}\p{Nl}\p{Mn}\p{Mc}\p{Nd}\p{Pc}\p{Cf}]*",
)
));
named!(sp <&str,&str>, re_find_static!(r"^[\t\v\f \xA0\x{FEFF}\p{Zs}]+"));
named!(eol <&str,&str>, re_find_static!(r"^((\r?\n)|[\r\x{2028}\x{2029}]|\z)"));
#[test]
fn chars_test() {
{
let text = "あのよろし1番";
assert_eq!(identifier(text), IResult::Done("",text));
}
{
let text = " \t";
assert_eq!(sp(text), IResult::Done("",text));
}
{
let text = "\r\n";<|fim▁hole|> {
let text = "\r";
assert_eq!(eol(text), IResult::Done("",text));
}
{
let text = "\n";
assert_eq!(eol(text), IResult::Done("",text));
}
{
let text = "\u{2028}";
assert_eq!(eol(text), IResult::Done("",text));
}
{
let text = "\u{2029}";
assert_eq!(eol(text), IResult::Done("",text));
}
{
let text = "";
assert_eq!(eol(text), IResult::Done("",text));
}
}
// mark
named!(at <&str,&str>, re_find_static!(r"^[@@]"));
named!(sharp<&str,&str>, re_find_static!(r"^[##]"));
named!(dash <&str,&str>, re_find_static!(r"^[-‐–—―−-ー]"));
named!(coron<&str,&str>, re_find_static!(r"^[::]"));
#[test]
fn mark_test() {
{
let text = "@";
assert_eq!(at(text), IResult::Done("",text));
}
{
let text = "@";
assert_eq!(at(text), IResult::Done("",text));
}
{
let text = "#";
assert_eq!(sharp(text), IResult::Done("",text));
}
{
let text = "#";
assert_eq!(sharp(text), IResult::Done("",text));
}
{
let text = "-";
assert_eq!(dash(text), IResult::Done("",text));
}
{
let text = ":";
assert_eq!(coron(text), IResult::Done("",text));
}
}<|fim▁end|> | assert_eq!(eol(text), IResult::Done("",text));
} |
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "houseofdota.production_settings")
from django.core.management import execute_from_command_line<|fim▁hole|><|fim▁end|> |
execute_from_command_line(sys.argv) |