code (string, 3 to 1.05M chars) | repo_name (string, 5 to 104 chars) | path (string, 4 to 251 chars) | language (1 class) | license (15 classes) | size (int64, 3 to 1.05M)
---|---|---|---|---|---
from setuptools import setup, find_packages
setup(
name='vent',
version='v0.4.0',
packages=['vent', 'vent.core', 'vent.core.file-drop',
'vent.core.rq-worker', 'vent.core.rq-dashboard', 'vent.menus',
'vent.menus.tutorials', 'vent.core.rmq-es-connector',
'vent.helpers', 'vent.api'],
install_requires=['docker', 'npyscreen'],
scripts=['bin/vent'],
license='Apache License 2.0',
author='arpit',
author_email='',
maintainer='Charlie Lewis',
maintainer_email='[email protected]',
description='A library that includes a CLI designed to serve as a platform to collect and analyze data across a flexible set of tools and technologies.',
keywords='docker containers platform collection analysis tools devops',
url='https://github.com/CyberReboot/vent',
)
| cprafullchandra/vent | setup.py | Python | apache-2.0 | 838 |
# This file is part of the Trezor project.
#
# Copyright (C) 2012-2018 SatoshiLabs and contributors
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License version 3
# as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the License along with this library.
# If not, see <https://www.gnu.org/licenses/lgpl-3.0.html>.
import pytest
from trezorlib import lisk, messages as proto
from trezorlib.tools import parse_path
from .common import TrezorTest
@pytest.mark.lisk
class TestMsgLiskSignTx(TrezorTest):
def test_lisk_sign_tx_send(self):
self.setup_mnemonic_allallall()
with self.client:
self.client.set_expected_responses(
[
proto.ButtonRequest(code=proto.ButtonRequestType.SignTx),
proto.ButtonRequest(code=proto.ButtonRequestType.ConfirmOutput),
proto.LiskSignedTx(
signature=bytes.fromhex(
"f48532d43e8c5abadf50bb7b82098b31eec3e67747e5328c0675203e86441899c246fa3aea6fc91043209431ce710c5aa34aa234546b85b88299d5a379bff202"
)
),
]
)
lisk.sign_tx(
self.client,
parse_path("m/44'/134'/0'"),
{
"amount": "10000000",
"recipientId": "9971262264659915921L",
"timestamp": 57525937,
"type": 0,
"fee": "10000000",
"asset": {},
},
)
@pytest.mark.skip_t1
def test_lisk_sign_tx_send_wrong_path(self):
self.setup_mnemonic_allallall()
with self.client:
self.client.set_expected_responses(
[
proto.ButtonRequest(
code=proto.ButtonRequestType.UnknownDerivationPath
),
proto.ButtonRequest(code=proto.ButtonRequestType.SignTx),
proto.ButtonRequest(code=proto.ButtonRequestType.ConfirmOutput),
proto.LiskSignedTx(
signature=bytes.fromhex(
"cdce9eba2ea8fa75f90fbc725f0d9de6152c7189a3044ab2fe307d9ff54754856e09125d7a15376eaf4bb5451b63881821948222ccd9ffb5da4d9b1aa8bd4904"
)
),
]
)
lisk.sign_tx(
self.client,
parse_path("m/44'/134'/123456'/123456'/123456'/123456'/123456'"),
{
"amount": "10000000",
"recipientId": "9971262264659915921L",
"timestamp": 57525937,
"type": 0,
"fee": "10000000",
"asset": {},
},
)
def test_lisk_sign_tx_send_with_data(self):
self.setup_mnemonic_allallall()
with self.client:
self.client.set_expected_responses(
[
proto.ButtonRequest(code=proto.ButtonRequestType.SignTx),
proto.ButtonRequest(code=proto.ButtonRequestType.ConfirmOutput),
proto.LiskSignedTx(
signature=bytes.fromhex(
"4e83a651e82f2f787a71a5f44a2911dd0429ee4001b80c79fb7d174ea63ceeefdfba55aa3a9f31fa14b8325a39ad973dcd7eadbaa77b0447a9893f84b60f210e"
)
),
]
)
lisk.sign_tx(
self.client,
parse_path("m/44'/134'/0'"),
{
"amount": "10000000",
"recipientId": "9971262264659915921L",
"timestamp": 57525937,
"type": 0,
"fee": "20000000",
"asset": {"data": "Test data"},
},
)
def test_lisk_sign_tx_second_signature(self):
self.setup_mnemonic_allallall()
with self.client:
self.client.set_expected_responses(
[
proto.ButtonRequest(code=proto.ButtonRequestType.PublicKey),
proto.ButtonRequest(code=proto.ButtonRequestType.ConfirmOutput),
proto.LiskSignedTx(
signature=bytes.fromhex(
"e27d8997d0bdbc9ab4ad928fcf140edb25a217007987447270085c0872e4178c018847d1378a949ad2aa913692f10aeec340810fd9de02da9d4461c63b6b6c06"
)
),
]
)
lisk.sign_tx(
self.client,
parse_path("m/44'/134'/0'"),
{
"amount": "0",
"timestamp": 57525937,
"type": 1,
"fee": "500000000",
"asset": {
"signature": {
"publicKey": "5d036a858ce89f844491762eb89e2bfbd50a4a0a0da658e4b2628b25b117ae09"
}
},
},
)
def test_lisk_sign_tx_delegate_registration(self):
self.setup_mnemonic_allallall()
with self.client:
self.client.set_expected_responses(
[
proto.ButtonRequest(code=proto.ButtonRequestType.SignTx),
proto.ButtonRequest(code=proto.ButtonRequestType.ConfirmOutput),
proto.LiskSignedTx(
signature=bytes.fromhex(
"e9f68b9961198f4e0d33d6ae95cbd90ab243c2c1f9fcc51db54eb54cc1491db53d237131e12da9485bfbfbd02255c431d08095076f926060c434edb01cf25807"
)
),
]
)
lisk.sign_tx(
self.client,
parse_path("m/44'/134'/0'"),
{
"amount": "0",
"timestamp": 57525937,
"type": 2,
"fee": "2500000000",
"asset": {"delegate": {"username": "trezor_t"}},
},
)
def test_lisk_sign_tx_cast_votes(self):
self.setup_mnemonic_allallall()
with self.client:
self.client.set_expected_responses(
[
proto.ButtonRequest(code=proto.ButtonRequestType.SignTx),
proto.ButtonRequest(code=proto.ButtonRequestType.ConfirmOutput),
proto.LiskSignedTx(
signature=bytes.fromhex(
"18d7cb27276a83178427aab2abcb5ee1c8ae9e8e2d1231585dcae7a83dd7d5167eea5baca890169bc80dcaf187320cab47c2f65a20c6483fede0f059919e4106"
)
),
]
)
lisk.sign_tx(
self.client,
parse_path("m/44'/134'/0'"),
{
"amount": "0",
"timestamp": 57525937,
"type": 3,
"fee": "100000000",
"asset": {
"votes": [
"+b002f58531c074c7190714523eec08c48db8c7cfc0c943097db1a2e82ed87f84",
"-ec111c8ad482445cfe83d811a7edd1f1d2765079c99d7d958cca1354740b7614",
]
},
},
)
def test_lisk_sign_tx_multisignature(self):
self.setup_mnemonic_allallall()
with self.client:
self.client.set_expected_responses(
[
proto.ButtonRequest(code=proto.ButtonRequestType.SignTx),
proto.ButtonRequest(code=proto.ButtonRequestType.ConfirmOutput),
proto.LiskSignedTx(
signature=bytes.fromhex(
"b84438ae3d419d270eacd0414fc8818d8f2c721602be54c3d705cf4cb3305de44e674f6dac9aac87379cce006cc97f2f635f296a48ab6a6adf62e2c11e08e409"
)
),
]
)
lisk.sign_tx(
self.client,
parse_path("m/44'/134'/0'"),
{
"amount": "0",
"timestamp": 57525937,
"type": 4,
"fee": "1500000000",
"asset": {
"multisignature": {
"min": 2,
"lifetime": 5,
"keysgroup": [
"+5d036a858ce89f844491762eb89e2bfbd50a4a0a0da658e4b2628b25b117ae09",
"+922fbfdd596fa78269bbcadc67ec2a1cc15fc929a19c462169568d7a3df1a1aa",
],
}
},
},
)
| romanz/python-trezor | trezorlib/tests/device_tests/test_msg_lisk_signtx.py | Python | lgpl-3.0 | 9,294 |
#ecef.py
#https://code.google.com/p/pysatel/source/browse/trunk/coord.py?r=22
from math import pow, degrees, radians
from scipy import mat, cos, sin, arctan, sqrt, pi, arctan2, deg2rad, rad2deg
#TO-DO: UPDATE THESE NUMBERS USING THE earth_radius.py
#
# Constants defined by the World Geodetic System 1984 (WGS84)
# NOTE: a and b are expressed in kilometres here (the WGS84 semi-major axis is
# 6378137 m), so altitudes and ECEF coordinates handled by the functions below
# are in kilometres as well, despite the [m] annotations in the docstrings.
a = 6378.137
b = 6356.7523142
esq = 6.69437999014 * 0.001
e1sq = 6.73949674228 * 0.001
f = 1 / 298.257223563
def cbrt(x):
# cube-root helper needed by ecef2geodetic below (present in the upstream
# pysatel coord.py linked above); without it the function raises NameError
return pow(x, 1.0 / 3.0) if x >= 0 else -pow(abs(x), 1.0 / 3.0)
def geodetic2ecef(lat, lon, alt, degrees=True):
"""geodetic2ecef(lat, lon, alt)
[deg][deg][m]
Convert geodetic coordinates to ECEF."""
if degrees:
lat=deg2rad(lat)
lon=deg2rad(lon)
#lat, lon = radians(lat), radians(lon)
xi = sqrt(1 - esq * sin(lat))
x = (a / xi + alt) * cos(lat) * cos(lon)
y = (a / xi + alt) * cos(lat) * sin(lon)
z = (a / xi * (1 - esq) + alt) * sin(lat)
return x, y, z
def ecef2geodetic(x, y, z, degrees=True):
"""ecef2geodetic(x, y, z)
[m][m][m]
Convert ECEF coordinates to geodetic.
J. Zhu, "Conversion of Earth-centered Earth-fixed coordinates \
to geodetic coordinates," IEEE Transactions on Aerospace and \
Electronic Systems, vol. 30, pp. 957-961, 1994."""
r = sqrt(x * x + y * y)
Esq = a * a - b * b
F = 54 * b * b * z * z
G = r * r + (1 - esq) * z * z - esq * Esq
C = (esq * esq * F * r * r) / (pow(G, 3))
S = cbrt(1 + C + sqrt(C * C + 2 * C))
P = F / (3 * pow((S + 1 / S + 1), 2) * G * G)
Q = sqrt(1 + 2 * esq * esq * P)
r_0 = -(P * esq * r) / (1 + Q) + sqrt(0.5 * a * a*(1 + 1.0 / Q) - \
P * (1 - esq) * z * z / (Q * (1 + Q)) - 0.5 * P * r * r)
U = sqrt(pow((r - esq * r_0), 2) + z * z)
V = sqrt(pow((r - esq * r_0), 2) + (1 - esq) * z * z)
Z_0 = b * b * z / (a * V)
h = U * (1 - b * b / (a * V))
lat = arctan((z + e1sq * Z_0) / r)
lon = arctan2(y, x)
return rad2deg(lat), rad2deg(lon), h  # geodetic altitude h, not the raw ECEF z input
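# Illustrative usage sketch (hypothetical values, not part of the original module):
# a rough round-trip check of the two converters above. Because a and b are given
# in kilometres, alt and the ECEF x/y/z are in kilometres as well.
#
#   x, y, z = geodetic2ecef(48.8566, 2.3522, 0.035)   # lat [deg], lon [deg], alt [km]
#   lat, lon, alt = ecef2geodetic(x, y, z)            # should recover approximately
#                                                     # the same lat/lon/alt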
| bosmanoglu/adore-doris | lib/python/basic/projections/ecef.py | Python | gpl-2.0 | 1,932 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from exam import fixture
from sentry.interfaces.base import InterfaceValidationError
from sentry.interfaces.http import Http
from sentry.testutils import TestCase
class HttpTest(TestCase):
@fixture
def interface(self):
return Http.to_python(dict(
url='http://example.com',
))
def test_path(self):
assert self.interface.get_path() == 'sentry.interfaces.Http'
def test_serialize_unserialize_behavior(self):
result = type(self.interface).to_python(self.interface.to_json())
assert result.to_json() == self.interface.to_json()
def test_basic(self):
result = self.interface
assert result.url == 'http://example.com'
assert result.method is None
assert result.fragment == ''
assert result.query_string == ''
assert result.data is None
assert result.cookies == []
assert result.headers == []
assert result.env == {}
assert result.full_url == result.url
def test_full(self):
result = Http.to_python(dict(
method='GET',
url='http://example.com',
query_string='foo=bar',
fragment='foobar',
headers={'x-foo-bar': 'baz'},
cookies={'foo': 'bar'},
env={'bing': 'bong'},
data='hello world',
))
assert result.method == 'GET'
assert result.query_string == 'foo=bar'
assert result.fragment == 'foobar'
assert result.cookies == [('foo', 'bar')]
assert result.headers == [('X-Foo-Bar', 'baz')]
assert result.env == {'bing': 'bong'}
assert result.data == 'hello world'
def test_query_string_as_dict(self):
result = Http.to_python(dict(
url='http://example.com',
query_string={'foo': 'bar'},
))
assert result.query_string == 'foo=bar'
def test_query_string_as_dict_unicode(self):
result = Http.to_python(dict(
url='http://example.com',
query_string={'foo': u'\N{SNOWMAN}'},
))
assert result.query_string == 'foo=%E2%98%83'
def test_data_as_dict(self):
result = Http.to_python(dict(
url='http://example.com',
data={'foo': 'bar'},
))
assert result.data == '{"foo":"bar"}'
def test_form_encoded_data(self):
result = Http.to_python(dict(
url='http://example.com',
headers={'Content-Type': 'application/x-www-form-urlencoded'},
data='foo=bar',
))
assert result.data == 'foo=bar'
def test_cookies_as_string(self):
result = Http.to_python(dict(
url='http://example.com',
cookies='a=b;c=d',
))
assert result.cookies == [('a', 'b'), ('c', 'd')]
result = Http.to_python(dict(
url='http://example.com',
cookies='a=b&c=d',
))
assert result.cookies == [('a', 'b'), ('c', 'd')]
def test_cookies_in_header(self):
result = Http.to_python(dict(
url='http://example.com',
headers={'Cookie': 'a=b;c=d'},
))
assert result.cookies == [('a', 'b'), ('c', 'd')]
result = Http.to_python(dict(
url='http://example.com',
headers={'Cookie': 'a=b;c=d'},
cookies={'foo': 'bar'},
))
assert result.cookies == [('foo', 'bar')]
def test_query_string_and_fragment_as_params(self):
result = Http.to_python(dict(
url='http://example.com',
query_string='foo=bar',
fragment='fragment',
))
assert result.url == 'http://example.com'
assert result.full_url == 'http://example.com?foo=bar#fragment'
def test_query_string_and_fragment_in_url(self):
result = Http.to_python(dict(
url='http://example.com?foo=bar#fragment',
))
assert result.url == 'http://example.com'
assert result.full_url == 'http://example.com?foo=bar#fragment'
def test_header_value_list(self):
result = Http.to_python(dict(
url='http://example.com',
headers={'Foo': ['1', '2']},
))
assert result.headers == [('Foo', '1, 2')]
def test_header_value_str(self):
result = Http.to_python(dict(
url='http://example.com',
headers={'Foo': 1}
))
assert result.headers == [('Foo', '1')]
def test_method(self):
with self.assertRaises(InterfaceValidationError):
Http.to_python(dict(
url='http://example.com',
method='1234',
))
with self.assertRaises(InterfaceValidationError):
Http.to_python(dict(
url='http://example.com',
method='A' * 33,
))
with self.assertRaises(InterfaceValidationError):
Http.to_python(dict(
url='http://example.com',
method='A',
))
result = Http.to_python(dict(
url='http://example.com',
method='TEST',
))
assert result.method == 'TEST'
result = Http.to_python(dict(
url='http://example.com',
method='FOO-BAR',
))
assert result.method == 'FOO-BAR'
result = Http.to_python(dict(
url='http://example.com',
method='FOO_BAR',
))
assert result.method == 'FOO_BAR'
| JackDanger/sentry | tests/sentry/interfaces/test_http.py | Python | bsd-3-clause | 5,583 |
import detectlanguage
def detect(data):
result = detectlanguage.client.post('detect', { 'q': data })
return result['data']['detections']
def simple_detect(data):
result = detect(data)
return result[0]['language']
def user_status():
return detectlanguage.client.get('user/status')
def languages():
return detectlanguage.client.get('languages')
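# Illustrative usage sketch (hypothetical, not part of this module). It assumes the
# shared detectlanguage.client has already been configured with an API key; the exact
# configuration attribute shown here is an assumption, not defined in this file.
#
#   import detectlanguage
#   detectlanguage.configuration.api_key = "YOUR_API_KEY"     # assumed setup step
#   print(detectlanguage.simple_detect("Buenos dias senor"))  # -> e.g. 'es'
#   print(detectlanguage.user_status())                       # account / quota info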
| detectlanguage/detectlanguage-python | detectlanguage/api.py | Python | mit | 353 |
#!/usr/bin/python
import unittest
import logging
import datetime
import os
# own modules
from datalogger import Timeseries as Timeseries
from datalogger import TimeseriesArray as TimeseriesArray
from datalogger import TimeseriesArrayStats as TimeseriesArrayStats
from datalogger import TimeseriesStats as TimeseriesStats
from datalogger import QuantileArray as QuantileArray
from datalogger import DataLogger as DataLogger
class Test(unittest.TestCase):
def setUp(self):
self.basedir = "/var/rrd"
self.datestring = "2015-11-30"
self.project = DataLogger.get_projects(self.basedir)[0]
self.tablename = DataLogger.get_tablenames(self.basedir, self.project)[0]
self.datalogger = DataLogger(self.basedir, self.project, self.tablename)
def test_simple(self):
self.assertTrue(self.datalogger.project == self.project)
self.assertTrue(self.datalogger.tablename == self.tablename)
self.assertTrue(isinstance(self.datalogger.delimiter, basestring))
self.assertTrue(isinstance(self.datalogger.ts_keyname, basestring))
self.assertTrue(isinstance(self.datalogger.headers, tuple))
self.assertTrue(isinstance(self.datalogger.value_keynames, tuple))
self.assertTrue(all((keyname in self.datalogger.headers for keyname in self.datalogger.value_keynames)))
self.assertTrue(isinstance(self.datalogger.index_keynames, tuple))
self.assertTrue(all((keyname in self.datalogger.headers for keyname in self.datalogger.index_keynames)))
self.assertTrue(isinstance(self.datalogger.blacklist, tuple))
self.assertTrue(all((keyname in self.datalogger.headers for keyname in self.datalogger.blacklist)))
self.assertTrue(isinstance(self.datalogger.raw_basedir, basestring))
self.assertTrue(os.path.exists(self.datalogger.raw_basedir))
self.assertTrue(os.path.isdir(self.datalogger.raw_basedir))
self.assertTrue(isinstance(self.datalogger.global_cachedir, basestring))
self.assertTrue(os.path.exists(self.datalogger.global_cachedir))
self.assertTrue(os.path.isdir(self.datalogger.global_cachedir))
# meta is something like this
# {u'ts_keyname': u'ts',
# 'stat_func_names': [u'count', ... ],
# u'interval': 300,
# u'blacklist': [],
# u'headers': [u'ts', u'http_host', ... ],
# u'delimiter': u'\t',
# u'value_keynames': {
# u'actconn': u'asis',
# u'hits': u'asis',
# ...
# },
# u'index_keynames': [u'http_host']}
self.assertTrue(self.datalogger.meta["headers"] == list(self.datalogger.headers))
self.assertTrue(self.datalogger.meta["value_keynames"].keys() == list(self.datalogger.value_keynames))
self.assertTrue(self.datalogger.meta["index_keynames"] == list(self.datalogger.index_keynames))
self.assertTrue(self.datalogger.meta["blacklist"] == list(self.datalogger.blacklist))
self.assertTrue(self.datalogger.meta["delimiter"] == self.datalogger.delimiter)
self.assertTrue(self.datalogger.meta["ts_keyname"] == self.datalogger.ts_keyname)
self.assertTrue(isinstance(self.datalogger.meta["stat_func_names"], list))
def test_statics(self):
self.assertTrue(isinstance(DataLogger.get_user(self.basedir), basestring))
self.assertTrue(isinstance(DataLogger.get_group(self.basedir), basestring))
self.assertTrue(isinstance(DataLogger.get_yesterday_datestring(), basestring))
lbd = DataLogger.get_last_business_day_datestring()
self.assertTrue(isinstance(DataLogger.get_last_business_day_datestring(), basestring))
self.assertTrue(isinstance(DataLogger.datestring_to_date(lbd), datetime.date))
for datestring in DataLogger.datewalker("2016-01-01", "2016-02-29"):
self.assertTrue(isinstance(datestring, basestring))
for datestring in DataLogger.monthwalker("2016-02"):
self.assertTrue(isinstance(datestring, basestring))
self.assertEqual(list(DataLogger.monthwalker("2016-02"))[-1], "2016-02-29")
self.assertTrue(isinstance(DataLogger.get_ts_for_datestring("2016-01-01"), tuple))
self.assertTrue(isinstance(DataLogger.get_ts_for_datestring("2016-01-01")[0], float))
self.assertTrue(isinstance(DataLogger.get_ts_for_datestring("2016-01-01")[1], float))
def test_data(self):
self.datalogger.load_tsa(self.datestring)
self.datalogger.load_tsastats(self.datestring)
self.datalogger.load_correlationmatrix(self.datestring)
self.datalogger.load_quantile(self.datestring)
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
unittest.main()
| gunny26/datalogger | datalogger/test_DataLogger.py | Python | apache-2.0 | 4,739 |
"""
porkchop.server
~~~~~~~~~~~~~~~
:copyright: (c) 2011-2012 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
from SocketServer import ThreadingMixIn
import json
import traceback
import urlparse
from porkchop.plugin import PorkchopPluginHandler
class GetHandler(BaseHTTPRequestHandler):
def format_output(self, fmt, data):
if fmt == 'json':
return json.dumps(data)
else:
return '\n'.join(self.json_path(data))
def json_path(self, data):
results = []
def path_helper(data, path, results):
for key, val in data.iteritems():
if isinstance(val, dict):
path_helper(val, '/'.join((path, key)), results)
else:
results.append(('%s %s' % (('/'.join((path, key)))\
.replace('.', '_'), val)))
path_helper(data, '', results)
return results
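# Illustrative behaviour of json_path() above (comment only): it flattens a nested
# plugin-data dict into "path value" lines, replacing dots in key names with
# underscores, e.g.
#   {'cpu': {'load.avg': {'1min': 0.5}}}  ->  ['/cpu/load_avg/1min 0.5']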
def do_GET(self):
data = {}
formats = {'json': 'application/json', 'text': 'text/plain'}
request = urlparse.urlparse(self.path)
try:
(path, fmt) = request.path.split('.')
if fmt not in formats.keys():
fmt = 'text'
except ValueError:
path = request.path
if self.headers.get('accept', False) == 'application/json':
fmt = 'json'
else:
fmt = 'text'
if self.headers.get('x-porkchop-refresh', False):
force_refresh = True
else:
force_refresh = False
module = path.split('/')[1]
try:
if module:
plugin = PorkchopPluginHandler.plugins[module](self)
plugin.force_refresh = force_refresh
self.log_message('Calling plugin: %s with force=%s' % (module, force_refresh))
data.update({module: plugin.data})
else:
for plugin_name, plugin in PorkchopPluginHandler.plugins.iteritems():
try:
plugin.force_refresh = force_refresh
self.log_message('Calling plugin: %s with force=%s' % (plugin_name, force_refresh))
# if the plugin has no data, it'll only have one key:
# refreshtime
result = plugin(self).data
if len(result) > 1:
data.update({plugin_name: result})
except Exception, e:
self.log_error('Error loading plugin: name=%s exception=%s, traceback=%s', plugin_name, e,
traceback.format_exc())
if len(data):
self.send_response(200)
self.send_header('Content-Type', formats[fmt])
self.end_headers()
self.wfile.write(self.format_output(fmt, data) + '\n')
else:
raise Exception('Unable to load any plugins')
except Exception, e:
self.log_error('Error: %s\n%s', e, traceback.format_exc())
self.send_response(404)
self.send_header('Content-Type', formats[fmt])
self.end_headers()
self.wfile.write(self.format_output(fmt, {}))
class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
pass
| disqus/porkchop | porkchop/server.py | Python | apache-2.0 | 3,450 |
import sys
def setup(core, object):
object.setAttachment('objType', 'ring')
object.setStfFilename('static_item_n')
object.setStfName('item_entertainer_ring_01_02')
object.setDetailFilename('static_item_d')
object.setDetailName('item_entertainer_ring_01_02')
object.setIntAttribute('cat_stat_mod_bonus.@stat_n:constitution_modified', 6)
object.setIntAttribute('cat_stat_mod_bonus.@stat_n:precision_modified', 6)
object.setStringAttribute('class_required', 'Entertainer')
object.setAttachment('radial_filename', 'ring/unity')
return
| agry/NGECore2 | scripts/object/tangible/wearables/ring/item_entertainer_ring_01_02.py | Python | lgpl-3.0 | 542 |
import numpy as np
import scipy.sparse as ssp
from sklearn.metrics import roc_auc_score
def precisionAtK(Y_pred_orig, Y_true_orig, k, verbose=False):
Y_pred = Y_pred_orig.copy()
Y_true = Y_true_orig.copy()
row_sum = np.asarray(Y_true.sum(axis=1)).reshape(-1)
indices = row_sum.argsort()
row_sum.sort()
start = 0
while start < len(indices) and row_sum[start] == 0:
start +=1
indices = indices[start:]
Y_pred = Y_pred[indices,:]
Y_true = Y_true[indices,:]
p = np.zeros(k)
assert Y_pred.shape == Y_true.shape
n_items,n_labels = Y_pred.shape
prevMatch = 0
for i in xrange(1,k+1):
Jidx = np.argmax(Y_pred,1)
prevMatch += np.sum(Y_true[np.arange(n_items),Jidx])
Y_pred[np.arange(n_items),Jidx] = -np.inf
p[i-1] = prevMatch/(i*n_items)
return tuple(p[[0,2,4]])
def precisionAtKChunks(Y_pred_orig, Y_true_orig, k,verbose=True):
p = np.zeros((len(Y_pred_orig),k))
n_total_items = 0
n_labels = 0
for c,(Y_pred_chunk,Y_true_chunk) in enumerate(zip(Y_pred_orig,Y_true_orig)):
prevMatch = 0
# print "Computing %dth precision"%c
Y_pred = Y_pred_chunk.copy()
Y_true = Y_true_chunk.copy()
n_items, n_labels = Y_pred.shape
n_total_items += n_items
for i in xrange(1,k+1):
Jidx = np.argmax(Y_pred,1)
prevMatch += Y_true[np.arange(n_items),Jidx].sum()
Y_pred[np.arange(n_items),Jidx] = -np.inf
p[c,i-1] = prevMatch #/(i*n_items)
q = np.zeros(k)
# print "q:",
for i in range(1,k+1):
q[i-1] = p[:,i-1].sum()/(i*n_total_items)
# if verbose:
# for i in q[[0,2,4]]:
# print " %0.4f "%i,
# print ""
return tuple(q[[0,2,4]])
def DCG_k(Y_pred_orig, Y_true_orig, k, verbose=False):
Y_pred = np.asarray(Y_pred_orig.copy())
Y_true = Y_true_orig.copy()
# print ssp.csr_matrix(Y_true).todense().shape()
p = np.zeros(k)
assert Y_pred.shape == Y_true.shape,\
"Shape mismatch:"+str(Y_pred.shape)+str(Y_true.shape)
n_items,n_labels = Y_pred.shape
prevMatch = 0
for i in xrange(1,k+1):
Jidx = np.argmax(Y_pred,1)
prevMatch += np.sum(Y_true[np.arange(n_items),Jidx])
Y_pred[np.arange(n_items),Jidx] = -np.inf
p[i-1] = prevMatch/(np.log2(i+1)*n_items)
return p
def nDCG_k(Y_pred, Y_true, k, verbose=False):
DCG_k_score = DCG_k(Y_pred, Y_true, k)
if ssp.issparse(Y_true):
IDCG_k_score = DCG_k(Y_true.todense(), Y_true, k)
else:
IDCG_k_score = DCG_k(Y_true, Y_true, k)
p = DCG_k_score/IDCG_k_score
# p = DCG_k_score
# if verbose:
# for i in p[[0,2,4]]:
# print " %0.4f"%(i),
# print ""
return tuple(p[[0,2,4]])
def AUC(Y_pred, Y_true):
row_sum = np.asarray(Y_true.sum(axis=1)).reshape(-1)
indices = row_sum.argsort()
row_sum.sort()
start = 0
while start < len(indices) and row_sum[start] == 0:
start +=1
indices = indices[start:]
Y_pred = Y_pred[indices,:]
Y_true = Y_true[indices,:]
if ssp.issparse(Y_true):
Y_true = Y_true.todense()
if ssp.issparse(Y_pred):
Y_pred = Y_pred.todense()
return roc_auc_score(Y_true, Y_pred)
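# Illustrative usage sketch (hypothetical data, not part of the original module).
# The metrics above expect a score matrix Y_pred and a binary relevance matrix
# Y_true of the same shape (items x labels); DCG_k accumulates rel_i / log2(i+1)
# over the top-k predictions per row, and nDCG_k normalises by the ideal DCG.
# The returned tuples take the values at ranks 1, 3 and 5, so k should be >= 5:
#
#   Y_true = np.array([[0, 1, 0, 1, 0, 0], [1, 0, 0, 0, 1, 0]])
#   Y_pred = np.array([[0.1, 0.9, 0.2, 0.8, 0.0, 0.3], [0.7, 0.1, 0.1, 0.1, 0.6, 0.2]])
#   print precisionAtK(Y_pred, Y_true, 5)   # (P@1, P@3, P@5); Python 2 print syntax
#   print nDCG_k(Y_pred, Y_true, 5)         # (nDCG@1, nDCG@3, nDCG@5)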
| nirbhayjm/Large-Scale-Multi-label-Learning | src/evaluation.py | Python | mit | 3,293 |
#!/usr/bin/env python
###############################################################################
# Copyright 2017 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
"""
Control Planning Analyzer
"""
import argparse
import math
import sys
import threading
import time
import matplotlib
import matplotlib.pyplot as plt
import numpy
import tkFileDialog
from matplotlib import patches
from matplotlib import lines
from cyber_py import cyber
from modules.localization.proto import localization_pb2
from modules.canbus.proto import chassis_pb2
from modules.planning.proto import planning_pb2
from modules.control.proto import control_cmd_pb2
class ControlInfo(object):
"""
ControlInfo Class
"""
def __init__(self, axarr):
self.throttlecmd = []
self.throttlefbk = []
self.brakecmd = []
self.brakefbk = []
self.steercmd = []
self.steerfbk = []
self.speed = []
self.curvature = []
self.imuright = []
self.imuforward = []
self.imuup = []
self.controltime = []
self.planningtime = []
self.localizationtime = []
self.canbustime = []
self.acceleration_lookup = []
self.speed_lookup = []
self.acc_open = []
self.acc_close = []
self.station_error = []
self.speed_error = []
self.heading_error = []
self.lateral_error = []
self.heading_error_rate = []
self.lateral_error_rate = []
self.target_speed = []
self.target_curvature = []
self.target_acceleration = []
self.target_heading = []
self.target_time = []
self.driving_mode = 0
self.mode_time = []
self.ax = axarr
self.planningavailable = False
self.lock = threading.Lock()
def callback_planning(self, entity):
"""
New Planning Trajectory
"""
basetime = entity.header.timestamp_sec
numpoints = len(entity.trajectory_point)
with self.lock:
self.pointx = numpy.zeros(numpoints)
self.pointy = numpy.zeros(numpoints)
self.pointspeed = numpy.zeros(numpoints)
self.pointtime = numpy.zeros(numpoints)
self.pointtheta = numpy.zeros(numpoints)
self.pointcurvature = numpy.zeros(numpoints)
self.pointacceleration = numpy.zeros(numpoints)
for idx in range(numpoints):
self.pointx[idx] = entity.trajectory_point[idx].path_point.x
self.pointy[idx] = entity.trajectory_point[idx].path_point.y
self.pointspeed[idx] = entity.trajectory_point[idx].v
self.pointtheta[idx] = entity.trajectory_point[
idx].path_point.theta
self.pointcurvature[idx] = entity.trajectory_point[
idx].path_point.kappa
self.pointacceleration[idx] = entity.trajectory_point[
idx].a
self.pointtime[
idx] = entity.trajectory_point[idx].relative_time + basetime
if numpoints == 0:
self.planningavailable = False
else:
self.planningavailable = True
def callback_canbus(self, entity):
"""
New Canbus
"""
self.throttlefbk.append(entity.throttle_percentage)
self.brakefbk.append(entity.brake_percentage)
self.steerfbk.append(entity.steering_percentage)
self.speed.append(entity.speed_mps)
self.canbustime.append(entity.header.timestamp_sec)
if entity.driving_mode == chassis_pb2.Chassis.COMPLETE_AUTO_DRIVE:
if self.driving_mode == 0:
self.mode_time.append(entity.header.timestamp_sec)
self.driving_mode = 1
elif self.driving_mode == 1:
self.mode_time.append(entity.header.timestamp_sec)
self.driving_mode = 0
def callback_localization(self, entity):
"""
New Localization
"""
self.imuright.append(entity.pose.linear_acceleration_vrf.x)
self.imuforward.append(entity.pose.linear_acceleration_vrf.y)
self.imuup.append(entity.pose.linear_acceleration_vrf.z)
self.localizationtime.append(entity.header.timestamp_sec)
def callback_control(self, entity):
"""
New Control Command
"""
self.throttlecmd.append(entity.throttle)
self.brakecmd.append(entity.brake)
self.steercmd.append(entity.steering_target)
self.controltime.append(entity.header.timestamp_sec)
self.acceleration_lookup.append(
entity.debug.simple_lon_debug.acceleration_lookup)
self.speed_lookup.append(entity.debug.simple_lon_debug.speed_lookup)
self.acc_open.append(
entity.debug.simple_lon_debug.preview_acceleration_reference)
self.acc_close.append(
entity.debug.simple_lon_debug.acceleration_cmd_closeloop)
self.station_error.append(entity.debug.simple_lon_debug.station_error)
self.speed_error.append(entity.debug.simple_lon_debug.speed_error)
self.curvature.append(entity.debug.simple_lat_debug.curvature)
self.heading_error.append(entity.debug.simple_lat_debug.heading_error)
self.lateral_error.append(entity.debug.simple_lat_debug.lateral_error)
self.heading_error_rate.append(
entity.debug.simple_lat_debug.heading_error_rate)
self.lateral_error_rate.append(
entity.debug.simple_lat_debug.lateral_error_rate)
with self.lock:
if self.planningavailable:
self.target_speed.append(
numpy.interp(entity.header.timestamp_sec, self.pointtime,
self.pointspeed))
self.target_curvature.append(
numpy.interp(entity.header.timestamp_sec, self.pointtime,
self.pointcurvature))
self.target_acceleration.append(
numpy.interp(entity.header.timestamp_sec, self.pointtime,
self.pointacceleration))
self.target_heading.append(
numpy.interp(entity.header.timestamp_sec, self.pointtime,
self.pointtheta))
self.target_time.append(entity.header.timestamp_sec)
def longitudinal(self):
"""
Showing Longitudinal
"""
for loc, ax in numpy.ndenumerate(self.ax):
ax.clear()
self.ax[0, 0].plot(
self.canbustime, self.throttlefbk, label='Throttle Feedback')
self.ax[0, 0].plot(
self.controltime, self.throttlecmd, label='Throttle Command')
self.ax[0, 0].plot(
self.canbustime, self.brakefbk, label='Brake Feedback')
self.ax[0, 0].plot(
self.controltime, self.brakecmd, label='Brake Command')
self.ax[0, 0].legend(fontsize='medium')
self.ax[0, 0].grid(True)
self.ax[0, 0].set_title('Throttle Brake Info')
self.ax[0, 0].set_xlabel('Time')
self.ax[0, 1].plot(
self.speed_lookup, self.acceleration_lookup, label='Table Lookup')
self.ax[0, 1].plot(
self.target_speed, self.target_acceleration, label='Target')
self.ax[0, 1].legend(fontsize='medium')
self.ax[0, 1].grid(True)
self.ax[0, 1].set_title('Calibration Lookup')
self.ax[0, 1].set_xlabel('Speed')
self.ax[0, 1].set_ylabel('Acceleration')
self.ax[1, 0].plot(self.canbustime, self.speed, label='Vehicle Speed')
self.ax[1, 0].plot(
self.target_time, self.target_speed, label='Target Speed')
self.ax[1, 0].plot(
self.target_time, self.target_acceleration, label='Target Acc')
self.ax[1, 0].plot(
self.localizationtime, self.imuforward, label='IMU Forward')
self.ax[1, 0].legend(fontsize='medium')
self.ax[1, 0].grid(True)
self.ax[1, 0].set_title('Speed Info')
self.ax[1, 0].set_xlabel('Time')
self.ax[1, 1].plot(
self.controltime, self.acceleration_lookup, label='Lookup Acc')
self.ax[1, 1].plot(self.controltime, self.acc_open, label='Acc Open')
self.ax[1, 1].plot(self.controltime, self.acc_close, label='Acc Close')
self.ax[1, 1].plot(
self.controltime, self.station_error, label='station_error')
self.ax[1, 1].plot(
self.controltime, self.speed_error, label='speed_error')
self.ax[1, 1].legend(fontsize='medium')
self.ax[1, 1].grid(True)
self.ax[1, 1].set_title('IMU Info')
self.ax[1, 1].set_xlabel('Time')
if len(self.mode_time) % 2 == 1:
self.mode_time.append(self.controltime[-1])
for i in range(0, len(self.mode_time), 2):
self.ax[0, 0].axvspan(
self.mode_time[i], self.mode_time[i + 1], fc='0.1', alpha=0.1)
self.ax[1, 0].axvspan(
self.mode_time[i], self.mode_time[i + 1], fc='0.1', alpha=0.1)
self.ax[1, 1].axvspan(
self.mode_time[i], self.mode_time[i + 1], fc='0.1', alpha=0.1)
plt.draw()
def lateral(self):
"""
Plot everything in time domain
"""
print "Showing Lateral"
for loc, ax in numpy.ndenumerate(self.ax):
ax.clear()
self.ax[0, 0].plot(
self.canbustime, self.steerfbk, label='Steering Feedback')
self.ax[0, 0].plot(
self.controltime, self.steercmd, label='Steering Command')
self.ax[0, 0].plot(self.controltime, self.curvature, label='Curvature')
self.ax[0, 0].legend(fontsize='medium')
self.ax[0, 0].grid(True)
self.ax[0, 0].set_title('Steering Info')
self.ax[0, 0].set_xlabel('Time')
"""
self.ax[0, 1].legend(fontsize = 'medium')
self.ax[0, 1].grid(True)
self.ax[0, 1].set_title('Calibration Lookup')
self.ax[0, 1].set_xlabel('Speed')
self.ax[0, 1].set_ylabel('Acceleration')
"""
self.ax[1, 0].plot(
self.controltime, self.heading_error, label='heading_error')
self.ax[1, 0].plot(
self.controltime, self.lateral_error, label='lateral_error')
self.ax[1, 0].legend(fontsize='medium')
self.ax[1, 0].grid(True)
self.ax[1, 0].set_title('Error Info')
self.ax[1, 0].set_xlabel('Time')
self.ax[1, 1].plot(
self.controltime,
self.heading_error_rate,
label='heading_error_rate')
self.ax[1, 1].plot(
self.controltime,
self.lateral_error_rate,
label='lateral_error_rate')
self.ax[1, 1].legend(fontsize='medium')
self.ax[1, 1].grid(True)
self.ax[1, 1].set_title('IMU Info')
self.ax[1, 1].set_xlabel('Time')
if len(self.mode_time) % 2 == 1:
self.mode_time.append(self.controltime[-1])
for i in range(0, len(self.mode_time), 2):
self.ax[0, 0].axvspan(
self.mode_time[i], self.mode_time[i + 1], fc='0.1', alpha=0.1)
self.ax[1, 0].axvspan(
self.mode_time[i], self.mode_time[i + 1], fc='0.1', alpha=0.1)
self.ax[1, 1].axvspan(
self.mode_time[i], self.mode_time[i + 1], fc='0.1', alpha=0.1)
plt.draw()
def press(self, event):
"""
Keyboard events during plotting
"""
if event.key == 'q' or event.key == 'Q':
plt.close('all')
if event.key == 'a' or event.key == 'A':
self.longitudinal()
if event.key == 'z' or event.key == 'Z':
self.lateral()
if __name__ == "__main__":
from cyber_py.record import RecordReader
parser = argparse.ArgumentParser(
description='Process and analyze control and planning data')
parser.add_argument('--bag', type=str, help='use Rosbag')
args = parser.parse_args()
fig, axarr = plt.subplots(2, 2)
plt.tight_layout()
axarr[0, 0].get_shared_x_axes().join(axarr[0, 0], axarr[1, 0])
axarr[1, 1].get_shared_x_axes().join(axarr[0, 0], axarr[1, 1])
controlinfo = ControlInfo(axarr)
if args.bag:
file_path = args.bag
# bag = rosbag.Bag(file_path)
reader = RecordReader(file_path)
for msg in reader.read_messages():
print msg.timestamp, msg.topic
if msg.topic == "/apollo/localization/pose":
localization = localization_pb2.LocalizationEstimate()
localization.ParseFromString(msg.message)
controlinfo.callback_localization(localization)
elif msg.topic == "/apollo/planning":
adc_trajectory = planning_pb2.ADCTrajectory()
adc_trajectory.ParseFromString(msg.message)
controlinfo.callback_planning(adc_trajectory)
elif msg.topic == "/apollo/control":
control_cmd = control_cmd_pb2.ControlCommand()
control_cmd.ParseFromString(msg.message)
controlinfo.callback_control(control_cmd)
elif msg.topic == "/apollo/canbus/chassis":
chassis = chassis_pb2.Chassis()
chassis.ParseFromString(msg.message)
controlinfo.callback_canbus(chassis)
print "Done reading the file"
else:
cyber.init()
# rospy.init_node('control_info', anonymous=True)
node = cyber.Node("rtk_recorder")
planningsub = node.create_reader('/apollo/planning',
planning_pb2.ADCTrajectory,
controlinfo.callback_planning)
localizationsub = node.create_reader(
'/apollo/localization/pose', localization_pb2.LocalizationEstimate,
controlinfo.callback_localization)
controlsub = node.create_reader('/apollo/control',
control_cmd_pb2.ControlCommand,
controlinfo.callback_control)
canbussub = node.create_reader('/apollo/canbus/chassis',
chassis_pb2.Chassis,
controlinfo.callback_canbus)
raw_input("Press Enter To Stop")
mng = plt.get_current_fig_manager()
controlinfo.longitudinal()
fig.canvas.mpl_connect('key_press_event', controlinfo.press)
plt.show()
| msbeta/apollo | modules/tools/control_info/control_info.py | Python | apache-2.0 | 15,205 |
# Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import distribute_setup
distribute_setup.use_setuptools()
import os
import sys
from distutils.util import convert_path
from fnmatch import fnmatchcase
from setuptools import setup, find_packages
HERE = os.path.abspath(os.path.normpath(os.path.dirname(__file__)))
from synnefo.versions.app import __version__
# Package info
VERSION = __version__
SHORT_DESCRIPTION = 'Synnefo Compute, Network and Image component'
PACKAGES_ROOT = '.'
PACKAGES = find_packages(PACKAGES_ROOT)
# Package meta
CLASSIFIERS = []
# Package requirements
INSTALL_REQUIRES = [
'Django>=1.4, <1.5',
'simplejson>=2.1.1',
'python-dateutil>=1.4.1',
'IPy>=0.70',
'South>=0.7.3',
'pycrypto>=2.1.0',
'puka',
'python-daemon>=1.5.5, <1.6',
'snf-common',
'snf-pithos-backend',
'lockfile>=0.8, <0.9',
'ipaddr',
'setproctitle>=1.0.1',
'bitarray>=0.8',
'objpool>=0.3',
'astakosclient',
'snf-django-lib',
'snf-branding',
'snf-webproject',
'requests>=0.12.1',
'paramiko'
]
EXTRAS_REQUIRES = {
'DISPATCHER': ['puka', 'python-daemon==1.5.5', 'lockfile==0.8',
'setproctitle>=1.0.1'],
'SSH_KEYS': ['pycrypto==2.1.0'],
}
TESTS_REQUIRES = [
'factory_boy==2.1.0'
]
# Provided as an attribute, so you can append to these instead
# of replicating them:
standard_exclude = ["*.py", "*.pyc", "*$py.class", "*~", ".*", "*.bak"]
standard_exclude_directories = [
".*", "CVS", "_darcs", "./build", "./dist", "EGG-INFO", "*.egg-info", "snf-0.7"
]
# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
# Note: you may want to copy this into your setup.py file verbatim, as
# you can't import this from another package, when you don't know if
# that package is installed yet.
def find_package_data(
where=".",
package="",
exclude=standard_exclude,
exclude_directories=standard_exclude_directories,
only_in_packages=True,
show_ignored=False):
"""
Return a dictionary suitable for use in ``package_data``
in a distutils ``setup.py`` file.
The dictionary looks like::
{"package": [files]}
Where ``files`` is a list of all the files in that package that
don"t match anything in ``exclude``.
If ``only_in_packages`` is true, then top-level directories that
are not packages won"t be included (but directories under packages
will).
Directories matching any pattern in ``exclude_directories`` will
be ignored; by default directories with leading ``.``, ``CVS``,
and ``_darcs`` will be ignored.
If ``show_ignored`` is true, then all the files that aren"t
included in package data are shown on stderr (for debugging
purposes).
Note patterns use wildcards, or can be exact paths (including
leading ``./``), and all searching is case-insensitive.
"""
out = {}
stack = [(convert_path(where), "", package, only_in_packages)]
while stack:
where, prefix, package, only_in_packages = stack.pop(0)
for name in os.listdir(where):
fn = os.path.join(where, name)
if os.path.isdir(fn):
bad_name = False
for pattern in exclude_directories:
if (fnmatchcase(name, pattern)
or fn.lower() == pattern.lower()):
bad_name = True
if show_ignored:
print >> sys.stderr, (
"Directory %s ignored by pattern %s"
% (fn, pattern))
break
if bad_name:
continue
if (os.path.isfile(os.path.join(fn, "__init__.py"))
and not prefix):
if not package:
new_package = name
else:
new_package = package + "." + name
stack.append((fn, "", new_package, False))
else:
stack.append((fn, prefix + name + "/", package, only_in_packages))
elif package or not only_in_packages:
# is a file
bad_name = False
for pattern in exclude:
if (fnmatchcase(name, pattern)
or fn.lower() == pattern.lower()):
bad_name = True
if show_ignored:
print >> sys.stderr, (
"File %s ignored by pattern %s"
% (fn, pattern))
break
if bad_name:
continue
out.setdefault(package, []).append(prefix+name)
return out
setup(
name = 'snf-cyclades-app',
version = VERSION,
license = 'GNU GPLv3',
url = 'http://www.synnefo.org/',
description = SHORT_DESCRIPTION,
classifiers = CLASSIFIERS,
author='Synnefo development team',
author_email='[email protected]',
maintainer='Synnefo development team',
maintainer_email='[email protected]',
namespace_packages = ['synnefo', 'synnefo.versions'],
packages = PACKAGES,
package_dir= {'': PACKAGES_ROOT},
include_package_data = True,
package_data = find_package_data('.'),
zip_safe = False,
install_requires = INSTALL_REQUIRES,
extras_require = EXTRAS_REQUIRES,
tests_require = TESTS_REQUIRES,
dependency_links = ['http://www.synnefo.org/packages/pypi'],
entry_points = {
'console_scripts': [
'snf-dispatcher = synnefo.logic.dispatcher:main',
],
'synnefo': [
'default_settings = synnefo.app_settings.default',
'web_apps = synnefo.app_settings:synnefo_web_apps',
'web_middleware = synnefo.app_settings:synnefo_web_middleware',
'web_context_processors = synnefo.app_settings:synnefo_web_context_processors',
'urls = synnefo.app_settings.urls:urlpatterns',
'web_static = synnefo.app_settings:synnefo_static_files',
]
},
)
| olgabrani/synnefo | snf-cyclades-app/setup.py | Python | gpl-3.0 | 6,899 |
# -*- coding: utf-8 -*-
from flask import url_for
from sqlalchemy import Column, ForeignKey, func, Integer, select, Text
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import column_property
from pokr.models import Meeting
from pokr.database import Base
class Statement(Base):
__tablename__ = 'statement'
id = Column(Integer, autoincrement=True, primary_key=True)
meeting_id = Column(ForeignKey('meeting.id'), nullable=False, index=True)
person_id = Column(ForeignKey('person.id'), index=True)
sequence = Column(Integer, nullable=False)
speaker = Column(Text)
content = Column(Text)
date = column_property(select([Meeting.date])\
.where(Meeting.id==meeting_id), deferred=True)
@property
def url(self):
return url_for('meeting', id=self.meeting_id)
@hybrid_property
def anchor(self):
# e.g. '12-3' for person 12, statement sequence 3
return '{0}-{1}'.format(self.person_id, self.sequence)
@anchor.expression
def anchor(cls):
return func.concat(cls.person_id, '-', cls.sequence)
| teampopong/pokr.kr | pokr/models/statement.py | Python | apache-2.0 | 1,051 |
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Runs install and update tests.
Install tests are performed using a single Chrome build, whereas two or more
builds are needed for Update tests. There are separate command arguments for
the builds that will be used for each of the tests. If a test file contains
both types of tests(install and update), both arguments should be specified.
Otherwise, specify only the command argument that is required for the test.
To run a test with this script, append the module name to the _TEST_MODULES
list. Modules added to the list must be in the same directory or in a sub-
directory that's in the same location as this script.
Example:
$ python run_install_tests.py --url=<chrome_builds_url> --filter=* \
--install-build=24.0.1290.0 --update-builds=24.0.1289.0,24.0.1290.0
"""
import logging
import optparse
import os
import re
import sys
import unittest
import chrome_installer_win
from install_test import InstallTest
_DIRECTORY = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(_DIRECTORY, os.path.pardir, os.path.pardir,
os.path.pardir, 'build', 'util', 'lib'))
from common import unittest_util
from common import util
# To run tests from a module, append the module name to this list.
_TEST_MODULES = ['sample_updater', 'theme_updater']
for module in _TEST_MODULES:
__import__(module)
class Main(object):
"""Main program for running 'Fresh Install' and 'Updater' tests."""
def __init__(self):
self._SetLoggingConfiguration()
self._ParseArgs()
self._Run()
def _ParseArgs(self):
"""Parses command line arguments."""
parser = optparse.OptionParser()
parser.add_option(
'-u', '--url', type='string', default='', dest='url',
help='Specifies the build url, without the build number.')
parser.add_option(
'-o', '--options', type='string', default='',
help='Specifies any additional Chrome options (i.e. --system-level).')
parser.add_option(
'--install-build', type='string', default='', dest='install_build',
help='Specifies the build to be used for fresh install testing.')
parser.add_option(
'--update-builds', type='string', default='', dest='update_builds',
help='Specifies the builds to be used for updater testing.')
parser.add_option(
'--install-type', type='string', default='user', dest='install_type',
help='Type of installation (i.e., user, system, or both)')
parser.add_option(
'-f', '--filter', type='string', default='*', dest='filter',
help='Filter that specifies the test or testsuite to run.')
self._opts, self._args = parser.parse_args()
self._ValidateArgs()
if self._opts.install_type == 'system':
InstallTest.SetInstallType(chrome_installer_win.InstallationType.SYSTEM)
update_builds = (self._opts.update_builds.split(',') if
self._opts.update_builds else [])
options = self._opts.options.split(',') if self._opts.options else []
InstallTest.InitTestFixture(self._opts.install_build, update_builds,
self._opts.url, options)
def _ValidateArgs(self):
"""Verifies the sanity of the command arguments.
Confirms that all specified builds have a valid version number, and the
build urls are valid.
"""
builds = []
if self._opts.install_build:
builds.append(self._opts.install_build)
if self._opts.update_builds:
builds.extend(self._opts.update_builds.split(','))
builds = list(frozenset(builds))
for build in builds:
if not re.match('\d+\.\d+\.\d+\.\d+', build):
raise RuntimeError('Invalid build number: %s' % build)
if not util.DoesUrlExist('%s/%s/' % (self._opts.url, build)):
raise RuntimeError('Could not locate build no. %s' % build)
def _SetLoggingConfiguration(self):
"""Sets the basic logging configuration."""
log_format = '%(asctime)s %(levelname)-8s %(message)s'
logging.basicConfig(level=logging.INFO, format=log_format)
def _Run(self):
"""Runs the unit tests."""
all_tests = unittest.defaultTestLoader.loadTestsFromNames(_TEST_MODULES)
tests = unittest_util.FilterTestSuite(all_tests, self._opts.filter)
result = unittest_util.TextTestRunner(verbosity=1).run(tests)
# Run tests again if installation type is 'both'(i.e., user and system).
if self._opts.install_type == 'both':
# Load the tests again so test parameters can be reinitialized.
all_tests = unittest.defaultTestLoader.loadTestsFromNames(_TEST_MODULES)
tests = unittest_util.FilterTestSuite(all_tests, self._opts.filter)
InstallTest.SetInstallType(chrome_installer_win.InstallationType.SYSTEM)
result = unittest_util.TextTestRunner(verbosity=1).run(tests)
del(tests)
if not result.wasSuccessful():
print >>sys.stderr, ('Not all tests were successful.')
sys.exit(1)
sys.exit(0)
if __name__ == '__main__':
Main()
| espadrine/opera | chromium/src/chrome/test/install_test/run_install_tests.py | Python | bsd-3-clause | 5,168 |
"""Module to provide generic utilities for other accelerometer modules."""
from collections import OrderedDict
import datetime
import glob
import json
import math
import numpy as np
import os
import pandas as pd
import re
DAYS = ['mon', 'tue', 'wed', 'thur', 'fri', 'sat', 'sun']
TIME_SERIES_COL = 'time'
def formatNum(num, decimalPlaces):
"""return str of number formatted to number of decimalPlaces
When writing out 10,000's of files, it is useful to format the output to n
decimal places as a space saving measure.
:param float num: Float number to be formatted.
:param int decimalPlaces: Number of decimal places for output format
:return: Number formatted to number of decimalPlaces
:rtype: float
:Example:
>>> import accUtils
>>> accUtils.formatNum(2.567, 2)
2.57
"""
fmt = '%.' + str(decimalPlaces) + 'f'
return float(fmt % num)
def meanSDstr(mean, std, numDecimalPlaces):
"""return str of mean and stdev numbers formatted to number of decimalPlaces
:param float mean: Mean number to be formatted.
:param float std: Standard deviation number to be formatted.
:param int decimalPlaces: Number of decimal places for output format
:return: String formatted to number of decimalPlaces
:rtype: str
:Example:
>>> import accUtils
>>> accUtils.meanSDstr(2.567, 0.089, 2)
2.57 (0.09)
"""
outStr = str(formatNum(mean, numDecimalPlaces))
outStr += ' ('
outStr += str(formatNum(std, numDecimalPlaces))
outStr += ')'
return outStr
def meanCIstr(mean, std, n, numDecimalPlaces):
"""return str of mean and 95% confidence interval numbers formatted
:param float mean: Mean number to be formatted.
:param float std: Standard deviation number to be formatted.
:param int n: Number of observations
:param int decimalPlaces: Number of decimal places for output format
:return: String formatted to number of decimalPlaces
:rtype: str
:Example:
>>> import accUtils
>>> accUtils.meanCIstr(2.567, 0.089, 100, 2)
2.57 (2.55 - 2.58)
"""
stdErr = std / math.sqrt(n)
lowerCI = mean - 1.96*stdErr
upperCI = mean + 1.96*stdErr
outStr = str(formatNum(mean, numDecimalPlaces))
outStr += ' ('
outStr += str(formatNum(lowerCI, numDecimalPlaces))
outStr += ' - '
outStr += str(formatNum(upperCI, numDecimalPlaces))
outStr += ')'
return outStr
def toScreen(msg):
"""Print msg str prepended with current time
:param str mgs: Message to be printed to screen
:return: Print msg str prepended with current time
:rtype: void
:Example:
>>> import accUtils
>>> accUtils.toScreen("hello")
2018-11-28 10:53:18 hello
"""
timeFormat = '%Y-%m-%d %H:%M:%S'
print(f"\n{datetime.datetime.now().strftime(timeFormat)}\t{msg}")
def writeStudyAccProcessCmds(accDir, outDir, cmdsFile='processCmds.txt',
accExt="cwa", cmdOptions=None, filesCSV="files.csv"):
"""Read files to process and write out list of processing commands
This creates the following output directory structure containing all
processing results:
<outDir>/
summary/ #to store outputSummary.json
epoch/ #to store feature output for 30sec windows
timeSeries/ #simple csv time series output (VMag, activity binary predictions)
nonWear/ #bouts of nonwear episodes
stationary/ #temp store for features of stationary data for calibration
clusterLogs/ #to store terminal output for each processed file
If a filesCSV exists in accDir/, process the files listed there. If not,
all files in accDir/ are processed
Then an acc processing command is written for each file and written to cmdsFile
:param str accDirs: Directory(s) with accelerometer files to process
:param str outDir: Output directory to be created containing the processing results
:param str cmdsFile: Output .txt file listing all processing commands
:param str accExt: Acc file type e.g. cwa, CWA, bin, BIN, gt3x...
:param str cmdOptions: String of processing options e.g. "--epochPeriod 10"
Type 'python3 accProccess.py -h' for full list of options
:param str filesCSV: Name of .csv file listing acc files to process
:return: New file written to <cmdsFile>
:rtype: void
:Example:
>>> import accUtils
>>> accUtils.writeStudyAccProcessCmds("myAccDir/", "myResults/", "myProcessCmds.txt")
<cmd options written to "myProcessCmds.txt">
"""
# Create output directory structure
summaryDir = os.path.join(outDir, 'summary')
epochDir = os.path.join(outDir, 'epoch')
timeSeriesDir = os.path.join(outDir, 'timeSeries')
nonWearDir = os.path.join(outDir, 'nonWear')
stationaryDir = os.path.join(outDir, 'stationary')
logsDir = os.path.join(outDir, 'clusterLogs')
rawDir = os.path.join(outDir, 'raw')
npyDir = os.path.join(outDir, 'npy')
createDirIfNotExists(summaryDir)
createDirIfNotExists(epochDir)
createDirIfNotExists(timeSeriesDir)
createDirIfNotExists(nonWearDir)
createDirIfNotExists(stationaryDir)
createDirIfNotExists(logsDir)
createDirIfNotExists(rawDir)
createDirIfNotExists(npyDir)
createDirIfNotExists(outDir)
# Use filesCSV if provided, else process everything in accDir (and create filesCSV)
if filesCSV in os.listdir(accDir):
fileList = pd.read_csv(os.path.join(accDir, filesCSV))
else:
fileList = pd.DataFrame(
{'fileName': [f for f in os.listdir(accDir) if f.endswith(accExt)]}
)
fileList.to_csv(os.path.join(accDir, filesCSV), index=False)
with open(cmdsFile, 'w') as f:
for i, row in fileList.iterrows():
cmd = [
'python3 accProcess.py "{:s}"'.format(os.path.join(accDir, row['fileName'])),
'--summaryFolder "{:s}"'.format(summaryDir),
'--epochFolder "{:s}"'.format(epochDir),
'--timeSeriesFolder "{:s}"'.format(timeSeriesDir),
'--nonWearFolder "{:s}"'.format(nonWearDir),
'--stationaryFolder "{:s}"'.format(stationaryDir),
'--rawFolder "{:s}"'.format(rawDir),
'--npyFolder "{:s}"'.format(npyDir),
'--outputFolder "{:s}"'.format(outDir)
]
# Grab additional arguments provided in filesCSV (e.g. calibration params)
cmdOptionsCSV = ' '.join(['--{} {}'.format(col, row[col]) for col in fileList.columns[1:]])
if cmdOptions:
cmd.append(cmdOptions)
if cmdOptionsCSV:
cmd.append(cmdOptionsCSV)
cmd = ' '.join(cmd)
f.write(cmd)
f.write('\n')
print('Processing list written to ', cmdsFile)
print('Suggested dir for log files: ', logsDir)
def collateJSONfilesToSingleCSV(inputJsonDir, outputCsvFile):
"""read all summary *.json files and convert into one large CSV file
Each json file represents summary data for one participant. Therefore output
CSV file contains summary for all participants.
:param str inputJsonDir: Directory containing JSON files
:param str outputCsvFile: Output CSV filename
:return: New file written to <outputCsvFile>
:rtype: void
:Example:
>>> import accUtils
>>> accUtils.collateJSONfilesToSingleCSV("data/", "data/summary-all-files.csv")
<summary CSV of all participants/files written to "data/summary-all-files.csv">
"""
### First combine into <tmpJsonFile> the processed outputs from <inputJsonDir>
tmpJsonFile = outputCsvFile.replace('.csv','-tmp.json')
count = 0
with open(tmpJsonFile,'w') as fSummary:
for fStr in glob.glob(inputJsonDir + "*.json"):
if fStr == tmpJsonFile: continue
with open(fStr) as f:
if count == 0:
fSummary.write('[')
else:
fSummary.write(',')
fSummary.write(f.read().rstrip())
count += 1
fSummary.write(']')
### Convert temporary json file into csv file
dict = json.load(open(tmpJsonFile,"r"), object_pairs_hook=OrderedDict) #read json
df = pd.DataFrame.from_dict(dict) #create pandas object from json dict
refColumnItem = next((item for item in dict if item['quality-goodWearTime'] == 1), None)
dAcc = df[list(refColumnItem.keys())] #maintain intended column ordering
# infer participant ID
dAcc['eid'] = dAcc['file-name'].str.split('/').str[-1].str.replace('.CWA','.cwa').str.replace('.cwa','')
dAcc.to_csv(outputCsvFile, index=False)
#remove tmpJsonFile
os.remove(tmpJsonFile)
print('Summary of', str(len(dAcc)), 'participants written to:', outputCsvFile)
def identifyUnprocessedFiles(filesCsv, summaryCsv, outputFilesCsv):
"""identify files that have not been processed
Look through all processed accelerometer files, and find participants who do
not have records in the summary csv file. This indicates there was a problem
in processing their data. Therefore, output will be a new .csv file to
support reprocessing of these files
:param str filesCsv: CSV listing acc files in study directory
:param str summaryCsv: Summary CSV of processed dataset
:param str outputFilesCsv: Output csv listing files to be reprocessed
:return: New file written to <outputCsvFile>
:rtype: void
:Example:
>>> import accUtils
>>> accUtils.identifyUnprocessedFiles("study/files.csv", "study/summary-all-files.csv",
"study/files-reprocess.csv")
<Output csv listing files to be reprocessed written to "study/files-reprocess.csv">
"""
fileList = pd.read_csv(filesCsv)
summary = pd.read_csv(summaryCsv)
output = fileList[~fileList['fileName'].isin(list(summary['file-name']))]
output = output.rename(columns={'Unnamed: 1': ''})
output.to_csv(outputFilesCsv, index=False)
print('Reprocessing for ', len(output), 'participants written to:',
outputFilesCsv)
def updateCalibrationCoefs(inputCsvFile, outputCsvFile):
"""read summary .csv file and update coefs for those with poor calibration
Look through all processed accelerometer files, and find participants that
did not have good calibration data. Then assigns the calibration coefs from
previous good use of a given device. Output will be a new .csv file to
support reprocessing of uncalibrated files with new pre-specified calibration coefs.
:param str inputCsvFile: Summary CSV of processed dataset
:param str outputCsvFile: Output CSV of files to be reprocessed with new
calibration info
:return: New file written to <outputCsvFile>
:rtype: void
:Example:
>>> import accUtils
>>> accUtils.updateCalibrationCoefs("data/summary-all-files.csv", "study/files-recalibration.csv")
<CSV of files to be reprocessed written to "study/files-recalibration.csv">
"""
d = pd.read_csv(inputCsvFile)
#select participants with good spread of stationary values for calibration
goodCal = d.loc[(d['quality-calibratedOnOwnData']==1) & (d['quality-goodCalibration']==1)]
#now only select participants whose data was NOT calibrated on a good spread of stationary values
badCal = d.loc[(d['quality-calibratedOnOwnData']==1) & (d['quality-goodCalibration']==0)]
#sort files by start time, which makes selection of most recent value easier
goodCal = goodCal.sort_values(['file-startTime'])
badCal = badCal.sort_values(['file-startTime'])
calCols = ['calibration-xOffset(g)','calibration-yOffset(g)','calibration-zOffset(g)',
'calibration-xSlope(g)','calibration-ySlope(g)','calibration-zSlope(g)',
'calibration-xTemp(C)','calibration-yTemp(C)','calibration-zTemp(C)',
'calibration-meanDeviceTemp(C)']
#print output CSV file with suggested calibration parameters
noOtherUses = 0
nextUses = 0
previousUses = 0
f = open(outputCsvFile,'w')
f.write('fileName,calOffset,calSlope,calTemp,meanTemp\n')
for ix, row in badCal.iterrows():
#first get current 'bad' file
participant, device, startTime = row[['file-name','file-deviceID','file-startTime']]
device = int(device)
#get calibration values from most recent previous use of this device
# (when it had a 'good' calibration)
prevUse = goodCal[calCols][(goodCal['file-deviceID']==device) & (goodCal['file-startTime']<startTime)].tail(1)
try:
ofX, ofY, ofZ, slpX, slpY, slpZ, tmpX, tmpY, tmpZ, calTempAvg = prevUse.iloc[0]
previousUses += 1
except:
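            # no earlier good use of this device: fall back to its next (later) good use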
nextUse = goodCal[calCols][(goodCal['file-deviceID']==device) & (goodCal['file-startTime']>startTime)].head(1)
if len(nextUse)<1:
print('no other uses for this device at all: ', str(device),
str(participant))
noOtherUses += 1
continue
nextUses += 1
ofX, ofY, ofZ, slpX, slpY, slpZ, tmpX, tmpY, tmpZ, calTempAvg = nextUse.iloc[0]
#now construct output
out = participant + ','
out += str(ofX) + ' ' + str(ofY) + ' ' + str(ofZ) + ','
out += str(slpX) + ' ' + str(slpY) + ' ' + str(slpZ) + ','
out += str(tmpX) + ' ' + str(tmpY) + ' ' + str(tmpZ) + ','
out += str(calTempAvg)
f.write(out + '\n')
f.close()
print('previousUses', previousUses)
print('nextUses', nextUses)
print('noOtherUses', noOtherUses)
print('Reprocessing for ', str(previousUses + nextUses),
'participants written to:', outputCsvFile)
def writeFilesWithCalibrationCoefs(inputCsvFile, outputCsvFile):
"""read summary .csv file and write files.csv with calibration coefs
Look through all processed accelerometer files, and write a new .csv file to
support reprocessing of files with pre-specified calibration coefs.
:param str inputCsvFile: Summary CSV of processed dataset
:param str outputCsvFile: Output CSV of files to process with calibration info
:return: New file written to <outputCsvFile>
:rtype: void
:Example:
>>> import accUtils
>>> accUtils.writeFilesWithCalibrationCoefs("data/summary-all-files.csv",
>>> "study/files-calibrated.csv")
<CSV of files to be reprocessed written to "study/files-calibrated.csv">
"""
d = pd.read_csv(inputCsvFile)
calCols = ['calibration-xOffset(g)','calibration-yOffset(g)','calibration-zOffset(g)',
'calibration-xSlope(g)','calibration-ySlope(g)','calibration-zSlope(g)',
'calibration-xTemp(C)','calibration-yTemp(C)','calibration-zTemp(C)',
'calibration-meanDeviceTemp(C)']
#print output CSV file with suggested calibration parameters
f = open(outputCsvFile,'w')
f.write('fileName,calOffset,calSlope,calTemp,meanTemp\n')
for ix, row in d.iterrows():
#first get current file information
participant = str(row['file-name'])
ofX, ofY, ofZ, slpX, slpY, slpZ, tmpX, tmpY, tmpZ, calTempAvg = row[calCols]
#now construct output
out = participant + ','
out += str(ofX) + ' ' + str(ofY) + ' ' + str(ofZ) + ','
out += str(slpX) + ' ' + str(slpY) + ' ' + str(slpZ) + ','
out += str(tmpX) + ' ' + str(tmpY) + ' ' + str(tmpZ) + ','
out += str(calTempAvg)
f.write(out + '\n')
f.close()
print('Files with calibration coefficients for ', str(len(d)),
'participants written to:', outputCsvFile)
def createDirIfNotExists(folder):
""" Create directory if it doesn't currently exist
:param str folder: Directory to be checked/created
:return: Dir now exists (created if didn't exist before, otherwise untouched)
:rtype: void
:Example:
>>> import accUtils
>>> accUtils.createDirIfNotExists("/myStudy/summary/dec18/")
<folder "/myStudy/summary/dec18/" now exists>
"""
if not os.path.exists(folder):
os.makedirs(folder)
def date_parser(t):
'''
    Parse a date string of the form e.g.
2020-06-14 19:01:15.123+0100 [Europe/London]
'''
tz = re.search(r'(?<=\[).+?(?=\])', t)
if tz is not None:
tz = tz.group()
t = re.sub(r'\[(.*?)\]', '', t)
return pd.to_datetime(t, utc=True).tz_convert(tz)
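# e.g. (illustrative): date_parser('2020-06-14 19:01:15.123+0100 [Europe/London]')
# should return a timezone-aware pandas Timestamp localised to Europe/London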
def date_strftime(t):
'''
Convert to time format of the form e.g.
2020-06-14 19:01:15.123+0100 [Europe/London]
'''
tz = t.tz
return t.strftime(f'%Y-%m-%d %H:%M:%S.%f%z [{tz}]')
def writeTimeSeries(e, labels, tsFile):
""" Write activity timeseries file
:param pandas.DataFrame e: Pandas dataframe of epoch data. Must contain
activity classification columns with missing rows imputed.
:param list(str) labels: Activity state labels
:param dict tsFile: output CSV filename
:return: None
:rtype: void
"""
cols = ['accImputed']
cols_new = ['acc']
labelsImputed = [l + 'Imputed' for l in labels]
cols.extend(labelsImputed)
cols_new.extend(labels)
if 'MET' in e.columns:
cols.append('METImputed')
cols_new.append('MET')
e_new = pd.DataFrame(index=e.index)
e_new.index.name = 'time'
e_new['imputed'] = e.isna().any(1).astype('int')
e_new[cols_new] = e[cols]
# make output time format contain timezone
# e.g. 2020-06-14 19:01:15.123000+0100 [Europe/London]
e_new.index = e_new.index.to_series(keep_tz=True).apply(date_strftime)
e_new.to_csv(tsFile, compression='gzip')
| aidendoherty/biobankAccelerometerAnalysis | accelerometer/accUtils.py | Python | bsd-2-clause | 17,668 |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Command line interface to aclcheck library."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
__author__ = '[email protected] (Tony Watson)'
from optparse import OptionParser
from lib import aclcheck
from lib import naming
from lib import policy
def main():
  # TODO(robankeny): Let's move this to gflags
usage = 'usage: %prog [options] arg'
_parser = OptionParser(usage)
_parser.add_option('--definitions-directory', dest='definitions',
help='definitions directory', default='./def')
_parser.add_option('-p', '--policy-file', dest='pol',
help='policy file', default='./policies/sample.pol')
_parser.add_option('-d', '--destination', dest='dst',
help='destination IP', default='200.1.1.1')
_parser.add_option('-s', '--source', dest='src',
help='source IP', default='any')
_parser.add_option('--proto', '--protocol', dest='proto',
help='Protocol (tcp, udp, icmp, etc.)', default='tcp')
_parser.add_option('--dport', '--destination-port', dest='dport',
help='destination port', default='80')
_parser.add_option('--sport', '--source-port', dest='sport',
help='source port', default='1025')
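  # Example invocation (hypothetical values):
  #   aclcheck_cmdline.py --policy-file ./policies/sample.pol -d 200.1.1.1 --dport 80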
(FLAGS, unused_args) = _parser.parse_args()
defs = naming.Naming(FLAGS.definitions)
policy_obj = policy.ParsePolicy(open(FLAGS.pol).read(), defs)
check = aclcheck.AclCheck(policy_obj, src=FLAGS.src, dst=FLAGS.dst,
sport=FLAGS.sport, dport=FLAGS.dport,
proto=FLAGS.proto)
print(str(check))
if __name__ == '__main__':
main()
| ryantierney513/capirca | aclcheck_cmdline.py | Python | apache-2.0 | 2,354 |
from gh import AddLabel, RemoveLabel
from .handler import Handler
from .command import GetCommands
from .issue_event import GetIssue
class LabelIssueCommentEventHandler(Handler):
def __init__(self):
super().__init__()
def handles(self, event):
return (event.type == 'IssueCommentEvent'
and (event.payload.get('action') == 'created'
or event.payload.get('action') == 'edited'))
def handle(self, g, event):
# avoid fetching until needed
issue = None
for command in GetCommands(event.payload.get('comment', {}).get('body')):
if command[0] == 'add-label':
if issue is None:
issue = GetIssue(g, event)
for label in command[1:]:
AddLabel(g, issue, label, create=False)
if command[0] == 'remove-label':
if issue is None:
issue = GetIssue(g, event)
for label in command[1:]:
RemoveLabel(g, issue, label, create=False)
LabelIssueCommentEventHandler()
| skim1420/spinnaker | spinbot/event/label_issue_comment_event_handler.py | Python | apache-2.0 | 1,094 |
__author__ = 'j'
from somecrawler.queue import PriorityQueue
from somecrawler.user import User, UserController
class QueueManager:
pQueue = PriorityQueue.PQueue()
userCon = UserController.UserController()
def __init__(self):
pass
def add_to_queue(self, pQueue, job, priority):
pQueue.put(job, priority)
def create_user_priority_queue(self, pQueue):
userList = self.userCon.getAllUsers()
        self.add_dict_to_queue(pQueue, userList)
def add_dict_to_queue(self, pQueue, dict):
for i in range(len(dict)):
job = dict[str(i)]
pQueue.put(job, job.priority)
return pQueue
def emptyQueueDEBUG(self, pQueue):
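        # debug helper: drain the queue, printing each job with its position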
i = 0
while not pQueue.empty():
print i, pQueue.get()
i += 1 | ProjectCalla/SomeCrawler | somecrawler/queue/QueueManager.py | Python | gpl-3.0 | 802 |
from jinja2 import DictLoader, Environment
MACRO_TEMPLATE = \
"""{%- macro channel(channel_name, channel, options=None) %}
<{{ channel_name }}>
<SourceChannelName>{{ channel }}</SourceChannelName>
{%- if options is mapping %}
<ContrastEnhancement>
<Normalize>
<VendorOption name="algorithm">
{{ options.algorithm|default("StretchToMinimumMaximum") }}
</VendorOption>
<VendorOption name="minValue">
{{ options.minValue|default(0) }}
</VendorOption>
<VendorOption name="maxValue">
{{ options.maxValue|default(1) }}
</VendorOption>
</Normalize>
<GammaValue>{{ options.gamma|default(0.5) }}</GammaValue>
</ContrastEnhancement>
{%- endif %}
</{{ channel_name }}>
{% endmacro -%}
{%- macro colormap(attrs) -%}
<ColorMapEntry
{% if attrs is mapping %}
{% for k,v in attrs.items() %}
{{k}}="{{v}}"
{% endfor %}
{% endif %}
/>
{%- endmacro -%}"""
RASTER_DOCUMENT_TEMPLATE = \
"""{% import "macros.xml" as macros %}
<?xml version="1.0" encoding="utf-8" ?>
<StyledLayerDescriptor version="1.0.0"
xsi:schemaLocation="http://www.opengis.net/sld StyledLayerDescriptor.xsd"
xmlns="http://www.opengis.net/sld"
xmlns:ogc="http://www.opengis.net/ogc"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<NamedLayer>
<Name>{{ name }}</Name>
<UserStyle>
<Title>{{ title }}</Title>
<IsDefault>1</IsDefault>
<FeatureTypeStyle>
<Rule>
<RasterSymbolizer>
<Opacity>{{ opacity|default(1.0) }}</Opacity>
<ChannelSelection>
{%- for c in channels -%}
{{ macros.channel(c.name, c.band, c.options|default(None)) }}
{%- endfor %}
</ChannelSelection>
{%- if colormap is defined %}
<ColorMap type="{{ colormap_type|default("ramp") }}">
{% for cm in colormap %}
{{ macros.colormap(cm) }}
{% endfor %}
</ColorMap>
{%- endif %}
</RasterSymbolizer>
</Rule>
</FeatureTypeStyle>
</UserStyle>
</NamedLayer>
</StyledLayerDescriptor>"""
SLDTemplates = Environment(loader=DictLoader({
"macros.xml": MACRO_TEMPLATE,
"raster_sld.xml": RASTER_DOCUMENT_TEMPLATE
}))
def get_multiband_raster_sld(
name, title=None, bands=(1, 2, 3),
interval=(0, 1), gamma=1.0, opacity=1.0,
channel_names=("RedChannel", "GreenChannel", "BlueChannel"), **kwargs):
# Make sure interval is a list of intervals - this test
# is buggy and should be fixed.
if not all(isinstance(e, list) or isinstance(e, tuple)
for e in interval):
interval = [interval] * len(bands)
# Set title default if it wasn't passed in
if title is None:
title = "Style for bands {} of layer {}".format(
",".join(str(b) for b in bands), name)
# Bands parameter must be a tuple or list
assert isinstance(bands, (list, tuple))
# Bands must be length of 3
assert (len(bands) == 3)
# Gamma must be a number or list
assert isinstance(gamma, (int, float, list))
# Opacity must be a number
assert isinstance(opacity, (int, float))
# Opacity must be between 0 and 1
assert (opacity >= 0 and opacity <= 1.0)
# All bands must be integers greater than 1
assert all(isinstance(e, int) and e > 0 for e in bands), \
"Bands must be specified as integer indexes starting from 1!"
# All intervals must be of length 2
assert all(len(e) == 2 for e in interval), \
"Interval must be a list of length {}".format(len(bands))
# Intervals and bands must be the same length
    assert len(bands) == len(interval), \
        "Number of bands ({}) must be equal to number of intervals ({})!".format(
            len(bands), len(interval))
# Make sure gamma is a list the same length as the list of bands
try:
        assert len(gamma) == len(bands), \
            "Number of gamma ({}) must be equal to number of bands ({})".format(
                len(gamma), len(bands))
except TypeError:
gamma = [gamma] * len(bands)
template = SLDTemplates.get_template("raster_sld.xml")
template_params = {
"title": title,
"name": name,
"opacity": opacity,
"channels": []}
for n, b, r, g in zip(channel_names, bands, interval, gamma):
template_params['channels'].append({
"name": n,
"band": b,
"options": {
"minValue": r[0],
"maxValue": r[1],
"gamma": g
}})
return template.render(**template_params)
def get_single_band_raster_sld(
name, band, title=None, opacity=1.0,
channel_name="GrayChannel", colormap=None,
colormap_type="ramp", **kwargs):
# Set title default if it wasn't passed in
if title is None:
title = "Style for band {} of layer {}".format(band, name)
# Assert band number is greater than zero
assert(band > 0)
# Get the raster template
template = SLDTemplates.get_template("raster_sld.xml")
template_params = {
"title": title,
"name": name,
"opacity": opacity,
"channels": [
{"name": channel_name,
"band": band}]
}
if colormap is not None:
# Assert colormap is a list or tuple
assert(isinstance(colormap, (list, tuple)))
# Assert colormap is a collection of dictionaries
assert(all(isinstance(c, (dict)) for c in colormap))
# Assert all colormap items has a color key
assert([c for c in colormap if 'color' in c])
# Assert all colormap items has a quantity key
assert([c for c in colormap if 'quantity' in c])
template_params['colormap'] = colormap
template_params['colormap_type'] = colormap_type
return template.render(**template_params)
# if __name__ == "__main__":
# print(get_single_band_raster_sld(
# 'nVDI', band=9, colormap=[
# {"color": "#000000", "quantity": "95", "alpha": 0.1},
# {"color": "#0000FF", "quantity": "110"},
# {"color": "#00FF00", "quantity": "135"},
# {"color": "#FF0000", "quantity": "160"},
# {"color": "#FF00FF", "quantity": "185"}]))
# print get_multiband_raster_sld(
# 'rgb', interval=[(1, 2), (2, 3), (3,4)],
# gamma=(0.1, 0.2, 0.3), opacity=0.5
# )
| danlamanna/scratch | geonotebook/vis/geoserver/sld.py | Python | apache-2.0 | 6,739 |
# import time
#
# from selenium.webdriver.common.keys import Keys
#
# # from .base_selenium_test import BaseSeleniumTest
#
#
# class TestAdminFormsInfant(BaseSeleniumTest):
#
# def login_navigate_to_admin(self):
# self.login()
# time.sleep(1)
# self.browser.get(self.live_server_url + '/admin/microbiome/')
#
# def test_infant_birth_admin(self):
# self.login_navigate_to_admin()
# time.sleep(1)
# self.browser.find_element_by_link_text('Infant Birth Feeding & Vaccinations').click()
# time.sleep(1)
# self.assertIn('infantbirthfeedvaccine/', self.browser.current_url)
# self.browser.get(self.browser.current_url + 'add/')
# self.browser.save_screenshot('microbiome/screenshots/infant_birth_admin.png')
#
# def test_infant_birth_arv_admin(self):
# self.login_navigate_to_admin()
# time.sleep(1)
# self.browser.find_element_by_link_text('Infant Birth Record: ARVs').click()
# time.sleep(1)
# self.assertIn('infantbirtharv/', self.browser.current_url)
# self.browser.get(self.browser.current_url + 'add/')
# self.browser.save_screenshot('microbiome/screenshots/infant_birth_arv_admin.png')
#
# def test_infant_birth_exam_admin(self):
# self.login_navigate_to_admin()
# time.sleep(1)
# self.browser.find_element_by_partial_link_text('Infant Birth Record: Exams').click()
# time.sleep(1)
# self.assertIn('infantbirthexam/', self.browser.current_url)
# self.browser.get(self.browser.current_url + 'add/')
# self.browser.save_screenshot('microbiome/screenshots/infact_birth_exam_admin.png')
#
# def test_infant_birth_records(self):
# self.login_navigate_to_admin()
# self.browser.find_element_by_partial_link_text('Infant Birth Records').click()
# time.sleep(1)
# self.assertIn('infantbirth/', self.browser.current_url)
# self.browser.get(self.browser.current_url + 'add/')
# self.browser.save_screenshot('microbiome/screenshots/infact_birth_records.png')
#
# def test_infant_congenital_anomalies_cardio(self):
# self.login_navigate_to_admin()
# self.browser.find_element_by_partial_link_text('Infant Congenital Anomalies:Cardios').click()
# time.sleep(1)
# self.assertIn('infantcardiovasculardisorderitems/', self.browser.current_url)
# self.browser.get(self.browser.current_url + 'add/')
# self.browser.save_screenshot('microbiome/screenshots/infant_congenital_anom_cardio.png')
#
# def test_infant_congenital_anomalies_cleft(self):
# self.login_navigate_to_admin()
# self.browser.find_element_by_partial_link_text('Infant Congenital Anomalies:Cleft').click()
# time.sleep(1)
# self.assertIn('infantcleftdisorderitems/', self.browser.current_url)
# self.browser.get(self.browser.current_url + 'add/')
# self.browser.save_screenshot('microbiome/screenshots/infant_congenital_anom_cleft.png')
#
# def test_infant_congenital_anomalies_cns(self):
# self.login_navigate_to_admin()
# self.browser.find_element_by_link_text('Infant Congenital Anomalies:Cnss').click()
# time.sleep(1)
# self.assertIn('infantcnsabnormalityitems/', self.browser.current_url)
# self.browser.get(self.browser.current_url + 'add/')
# self.browser.save_screenshot('microbiome/screenshots/infant_congenital_anom_cns.png')
#
# def test_infant_congenital_anomalies_facial(self):
# self.login_navigate_to_admin()
# self.browser.find_element_by_link_text('Infant Congenital Anomalies:Facials').click()
# time.sleep(1)
# self.assertIn('infantfacialdefectitems/', self.browser.current_url)
# self.browser.get(self.browser.current_url + 'add/')
# self.browser.save_screenshot('microbiome/screenshots/infant_congenital_anom_facials.png')
#
# def test_infant_congenital_anomalies_femalegen(self):
# self.login_navigate_to_admin()
# self.browser.find_element_by_link_text('Infant Congenital Anomalies:FemaleGens').click()
# time.sleep(1)
# self.assertIn('infantfemalegenitalanomalyitems/', self.browser.current_url)
# self.browser.get(self.browser.current_url + 'add/')
# self.browser.save_screenshot('microbiome/screenshots/infant_congenital_anom_femalegen.png')
#
# def test_infant_congenital_anomalies_malegen(self):
# self.login_navigate_to_admin()
# self.browser.find_element_by_link_text('Infant Congenital Anomalies:MaleGens').click()
# time.sleep(1)
# self.assertIn('infantmalegenitalanomalyitems/', self.browser.current_url)
# self.browser.get(self.browser.current_url + 'add/')
# self.browser.save_screenshot('microbiome/screenshots/infant_congenital_anom_malegen.png')
#
# def test_infant_congenital_anomalies_lower_gast(self):
# self.login_navigate_to_admin()
# self.browser.find_element_by_link_text('Infant Congenital Anomalies:LowerGasts').click()
# time.sleep(1)
# self.assertIn('infantlowergastrointestinalitems/', self.browser.current_url)
# self.browser.get(self.browser.current_url + 'add/')
# self.browser.save_screenshot('microbiome/screenshots/infant_congenital_anom_lower_gast.png')
#
# def test_infant_congenital_anomalies_mouth(self):
# self.login_navigate_to_admin()
# self.browser.find_element_by_link_text('Infant Congenital Anomalies:MouthUpps').click()
# time.sleep(1)
# self.assertIn('infantmouthupgastrointestinalitems/', self.browser.current_url)
# self.browser.get(self.browser.current_url + 'add/')
# self.browser.save_screenshot('microbiome/screenshots/infant_congenital_anom_mouth.png')
#
# def test_infant_congenital_anomalies_muscle(self):
# self.login_navigate_to_admin()
# self.browser.find_element_by_link_text('Infant Congenital Anomalies:Musculosks').click()
# time.sleep(1)
# self.assertIn('infantmusculoskeletalabnormalitems/', self.browser.current_url)
# self.browser.get(self.browser.current_url + 'add/')
# self.browser.save_screenshot('microbiome/screenshots/infant_congenital_anom_muscle.png')
#
# def test_infant_congenital_anomalies_renal(self):
# self.login_navigate_to_admin()
# self.browser.find_element_by_link_text('Infant Congenital Anomalies:Renals').click()
# time.sleep(1)
# self.assertIn('infantrenalanomalyitems/', self.browser.current_url)
# self.browser.get(self.browser.current_url + 'add/')
# self.browser.save_screenshot('microbiome/screenshots/infant_congenital_anom_renal.png')
#
# def test_infant_congenital_anomalies_respiratory(self):
# self.login_navigate_to_admin()
# self.browser.find_element_by_link_text('Infant Congenital Anomalies:Respitarorys').click()
# time.sleep(1)
# self.assertIn('infantrespiratorydefectitems/', self.browser.current_url)
# self.browser.get(self.browser.current_url + 'add/')
# self.browser.save_screenshot('microbiome/screenshots/infant_congenital_anom_respiratory.png')
#
# def test_infant_congenital_anomalies_skin(self):
# self.login_navigate_to_admin()
# self.browser.find_element_by_link_text('Infant Congenital Anomalies:Skins').click()
# time.sleep(1)
# self.assertIn('infantskinabnormalitems/', self.browser.current_url)
# self.browser.get(self.browser.current_url + 'add/')
# self.browser.save_screenshot('microbiome/screenshots/infant_congenital_anom_skin.png')
#
# def test_infant_congenital_anomalies_triome(self):
# self.login_navigate_to_admin()
# self.browser.find_element_by_link_text('Infant Congenital Anomalies:Trisomess').click()
# time.sleep(1)
# self.assertIn('infanttrisomieschromosomeitems/', self.browser.current_url)
# self.browser.get(self.browser.current_url + 'add/')
# self.browser.save_screenshot('microbiome/screenshots/infant_congenital_anom_triome.png')
#
# def test_infant_congenital_anomalies(self):
# self.login_navigate_to_admin()
# self.browser.find_element_by_link_text('Infant Congenital Anomaliess').click()
# time.sleep(1)
# self.assertIn('infantcongenitalanomalies/', self.browser.current_url)
# self.browser.get(self.browser.current_url + 'add/')
# self.browser.save_screenshot('microbiome/screenshots/infant_congenital_anomalies.png')
#
# def test_infant_death(self):
# self.login_navigate_to_admin()
# self.browser.find_element_by_link_text('Infant Deaths').click()
# time.sleep(1)
# self.assertIn('infantdeath/', self.browser.current_url)
# self.browser.get(self.browser.current_url + 'add/')
# self.browser.save_screenshot('microbiome/screenshots/infant_death.png')
#
# def test_infant_eligibility(self):
# self.login_navigate_to_admin()
# self.browser.find_element_by_link_text('Infant Eligibility').click()
# time.sleep(1)
# self.assertIn('infanteligibility/', self.browser.current_url)
# self.browser.get(self.browser.current_url + 'add/')
# self.browser.save_screenshot('microbiome/screenshots/infant_eligibility.png')
#
# def test_infant_visit(self):
# self.login_navigate_to_admin()
# self.browser.find_element_by_link_text('Infant Visits').click()
# time.sleep(1)
# self.assertIn('infantvisit/', self.browser.current_url)
# self.browser.get(self.browser.current_url + 'add/')
# self.browser.save_screenshot('microbiome/screenshots/infant_visit.png')
| botswana-harvard/microbiome | microbiome/apps/mb_infant/tests/test_admin_forms_infant.py | Python | gpl-2.0 | 9,810 |
'''
Python 3.X ThreadPoolExecutor module demo
'''
import concurrent
from concurrent.futures import ThreadPoolExecutor
from urllib import request
class TestThreadPoolExecutor(object):
def __init__(self):
self.urls = [
'https://www.baidu.com/',
'http://blog.jobbole.com/',
'http://www.csdn.net/',
'https://juejin.im/',
'https://www.zhihu.com/'
]
    def get_web_content(self, url=None):
        print('start get web content from: ' + url)
        try:
            headers = {"User-Agent": "Mozilla/5.0 (X11; Linux x86_64)"}
            req = request.Request(url, headers=headers)
            content = request.urlopen(req).read().decode("utf-8")
        except BaseException as e:
            print(str(e))
            return None
        print('get web content end from: ' + str(url))
        return content
def runner(self):
thread_pool = ThreadPoolExecutor(max_workers=2, thread_name_prefix='DEMO')
futures = dict()
for url in self.urls:
future = thread_pool.submit(self.get_web_content, url)
futures[future] = url
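        # as_completed yields futures in the order they finish, not in submission order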
for future in concurrent.futures.as_completed(futures):
url = futures[future]
try:
data = future.result()
except Exception as e:
print('Run thread url ('+url+') error. '+str(e))
else:
                print(url + ' Request data ok. size=' + str(len(data)))
print('Finished!')
if __name__ == '__main__':
TestThreadPoolExecutor().runner() | yanbober/SmallReptileTraining | ConcurrentSpider/demo_thread_pool_executor.py | Python | mit | 1,562 |
"""
Class for IQ Data
TDMS format
Xaratustrah Aug-2015
"""
import os
import time
import logging as log
import numpy as np
from iqtools.iqbase import IQBase
import pytdms
class TDMSData(IQBase):
def __init__(self, filename):
super().__init__(filename)
# Additional fields in this subclass
self.tdms_first_rec_size = 0
self.tdms_other_rec_size = 0
self.tdms_nSamplesPerRecord = 0
self.tdms_nRecordsPerFile = 0
self.information_read = False
self.rf_att = 0.0
self.date_time = ''
def read(self, nframes=10, lframes=1024, sframes=0):
self.read_samples(nframes * lframes, offset=sframes * lframes)
def read_samples(self, nsamples, offset=0):
"""
Read from TDMS Files: Check the amount needed corresponds to how many records. Then read those records only
        and from them return only the desired amount. This way the memory footprint is as small as possible and it is
        also fast.
"""
if not self.information_read:
self.read_tdms_information()
if nsamples > self.nsamples_total - offset:
raise ValueError(
'Requested number of samples is larger than the available {} samples.'.format(self.nsamples_total))
# let's see this amount corresponds to which start record
# start at the beginning of
start_record = int(offset / self.tdms_nSamplesPerRecord) + 1
starting_sample_within_start_record = offset % self.tdms_nSamplesPerRecord
# See how many records should we read, considering also the half-way started record?
n_records = int((starting_sample_within_start_record +
nsamples) / self.tdms_nSamplesPerRecord) + 1
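        # e.g. (hypothetical numbers): with 65536 samples per record, offset=100000 and
        # nsamples=50000 -> start_record=2, starting sample 34464 within it, n_records=2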
# that would be too much
if start_record + n_records > self.tdms_nRecordsPerFile:
raise ValueError(
'Requested number of samples requires {} records, which is larger than the available {} records in this file.'.format(start_record + n_records, self.tdms_nRecordsPerFile))
# instead of real file size find out where to stop
absolute_size = self.tdms_first_rec_size + \
(start_record + n_records - 2) * self.tdms_other_rec_size
# We start with empty data
objects = {}
raw_data = {}
f = open(self.filename, "rb") # Open in binary mode for portability
# While there's still something left to read
while f.tell() < absolute_size:
# loop until first record is filled up
# we always need to read the first record.
# don't jump if start record is 1, just go on reading
if start_record > 1 and f.tell() == self.tdms_first_rec_size:
# reached the end of first record, now do the jump
f.seek(f.tell() + (start_record - 2)
* self.tdms_other_rec_size)
if f.tell() == self.tdms_first_rec_size:
log.info('Reached end of first record.')
# Now we read record by record
try:
objects, raw_data = pytdms.readSegment(
f, absolute_size, (objects, raw_data))
except:
log.error('File seems to end here!')
return
# ok, now close the file
f.close()
# up to now, we have read only the amount of needed records times number of samples per record
# this is of course more than what we actually need.
# convert array.array to np.array
ii = np.frombuffer(raw_data[b"/'RecordData'/'I'"], dtype=np.int16)
qq = np.frombuffer(raw_data[b"/'RecordData'/'Q'"], dtype=np.int16)
# get rid of duplicates at the beginning if start record is larger than one
if start_record > 1:
ii = ii[self.tdms_nSamplesPerRecord:]
qq = qq[self.tdms_nSamplesPerRecord:]
ii = ii[starting_sample_within_start_record:starting_sample_within_start_record + nsamples]
qq = qq[starting_sample_within_start_record:starting_sample_within_start_record + nsamples]
# Vectorized is slow, so do interleaved copy instead
self.data_array = np.zeros(2 * nsamples, dtype=np.float32)
self.data_array[::2], self.data_array[1::2] = ii, qq
self.data_array = self.data_array.view(np.complex64)
gain = np.frombuffer(
raw_data[b"/'RecordHeader'/'gain'"], dtype=np.float64)
self.scale = gain[0]
self.data_array = self.data_array * self.scale
log.info("TDMS Read finished.")
def read_complete_file(self):
"""
Read a complete TDMS file. Hope you know what you are doing!
:return:
"""
if not self.information_read:
self.read_tdms_information()
objects, raw_data = pytdms.read(self.filename)
# convert array.array to np.array
ii = np.frombuffer(raw_data[b"/'RecordData'/'I'"], dtype=np.int16)
qq = np.frombuffer(raw_data[b"/'RecordData'/'Q'"], dtype=np.int16)
# Vectorized is slow, so do interleaved copy instead
len = np.shape(ii)[0]
self.data_array = np.zeros(2 * len, dtype=np.float32)
self.data_array[::2], self.data_array[1::2] = ii, qq
self.data_array = self.data_array.view(np.complex64)
gain = np.frombuffer(
raw_data[b"/'RecordHeader'/'gain'"], dtype=np.float64)
self.scale = gain[0]
self.data_array = self.data_array * self.scale
log.info("TDMS Read finished.")
def read_tdms_information(self):
"""
Performs one read on the file in order to get the values
"""
        # Usually size matters, but not in this case: we only read 2 records, but it should be large enough anyway.
sz = os.path.getsize(self.filename)
how_many = 0
last_i_ff = 0
last_q_ff = 0
# We start with empty data
objects = {}
raw_data = {}
# Read just 2 records in order to estimate the record sizes
f = open(self.filename, "rb")
while f.tell() < sz:
try:
objects, raw_data = pytdms.readSegment(
f, sz, (objects, raw_data))
except:
log.error('TDMS file seems to end here!')
return
if b"/'RecordData'/'I'" in raw_data and b"/'RecordData'/'Q'" in raw_data:
# This record has both I and Q
last_i = raw_data[b"/'RecordData'/'I'"][-1]
last_q = raw_data[b"/'RecordData'/'Q'"][-1]
offset = f.tell()
if last_i_ff != last_i and last_q_ff != last_q:
how_many += 1
last_i_ff = last_i
last_q_ff = last_q
if how_many == 1:
self.tdms_first_rec_size = offset
if how_many == 2:
self.tdms_other_rec_size = offset - self.tdms_first_rec_size
break
self.fs = float(objects[b'/'][3][b'IQRate'][1])
self.rf_att = float(objects[b'/'][3][b'RFAttentuation'][1])
self.center = float(objects[b'/'][3][b'IQCarrierFrequency'][1])
self.date_time = time.ctime(os.path.getctime(self.filename))
self.tdms_nSamplesPerRecord = int(
objects[b'/'][3][b'NSamplesPerRecord'][1])
self.tdms_nRecordsPerFile = int(
objects[b'/'][3][b'NRecordsPerFile'][1])
self.nsamples_total = self.tdms_nSamplesPerRecord * self.tdms_nRecordsPerFile
self.information_read = True
| xaratustrah/iq_suite | iqtools/tdmsdata.py | Python | gpl-2.0 | 7,679 |
import wx
from wx.lib.pubsub import Publisher as pub
from publisherconstants import *
class AbstractSessionPanel(wx.Panel):
def __init__(self, parent, session):
super(AbstractSessionPanel, self).__init__(parent)
self.session = session
pub.subscribe(self.OnSessionStatusPublish, SUBJECT_STATUS)
def OnSessionStatusPublish(self, status):
pass
| m4sterm1nd/python | betfair/AlgoView/sessions/abstractsessionpanel.py | Python | gpl-2.0 | 384 |
#!/usr/bin/env python
# $Id$
#
# gendata.py -- generates test data
#
# created on 00/05/19 by wesc
#
from random import randint, choice
from time import ctime
from sys import maxint # (value, not function)
from string import lowercase
from os.path import exists
doms = ( 'com', 'edu', 'net', 'org', 'gov' )
def main():
# this long version saves output to files which
# can be directly used with regular expressions
# (it does not write the strings to the screen)
# open new test file
i = 0
fn = '/tmp/data%d.txt' % i
while exists(fn):
i = i + 1
fn = '/tmp/data%d.txt' % i
f = open(fn, 'w')
# write test data and close file
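    # each output line has the form (illustrative):
    #   <ctime date string>:<login>@<domain>.<tld>:<timestamp>-<login length>-<domain length>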
for i in range(randint(5, 10)):
# randomly choose a date integer and
# calculate the corresponding date string
dtint = randint(0, maxint-1)
dtstr = ctime(dtint)
# the login should be between 4 and 7 chars in length;
# the domain should be
loginlen = randint(4, 7)
login = ''
for j in range(loginlen):
login = login + choice(lowercase)
domainlen = randint(loginlen, 12)
dom = ''
for j in range(domainlen):
dom = dom + choice(lowercase)
f.write('%s:%s@%s.%s:%d-%d-%d\n' % (dtstr, login,
dom, choice(doms), dtint, loginlen, domainlen))
# close test file
f.close()
if __name__ == '__main__':
main()
| opensvn/test | src/study/python/cpp/ch15/alt/gendata.py | Python | gpl-2.0 | 1,461 |
# -*- coding: utf-8 -*-
import re
from transformer.registry import register
from transformer.transforms.base import BaseTransform
# updated to include separate regexes - testing is much easier here:
# orig and default (UNI1): https://regex101.com/r/FCS4px/1
# uni2: https://regex101.com/r/sk6MVY/1
# na: https://regex101.com/r/QYWyPc/2
# in: https://regex101.com/r/DVkuoA/1
URL_REGEX_UNI1 = r"(?:(?:\+?([1-9]|[0-9][0-9]|[0-9][0-9][0-9])\s*(?:[.-]\s*)?)?(?:\(\s*([2-9]1[02-9]|[2-9][02-8]1|[2-9][02-8][02-9])\s*\)|([0-9][1-9]|[0-9]1[02-9]|[2-9][02-8]1|[2-9][02-8][02-9]))\s*(?:[.-]\s*)?)?([2-9]1[02-9]|[2-9][02-9]1|[2-9][02-9]{2})\s*(?:[.-]\s*)?([0-9]{4})(?:\s*(?:#|x\.?|ext\.?|extension)\s*(\d+))?"
URL_REGEX_UNI2 = r"(?:\+?\d{8}(?:\d{1,5})?|\(\+?\d{2,3}\)\s?(?:\d{4}[\s*.-]?\d{4}|\d{3}[\s*.-]?\d{3,4}|\d{2}([\s*.-]?)\d{2}\1\d{2}(?:\1\d{2})?))|((\+\d{1,2}\s)?\(?\d{3}\)?[\s.-]\d{3}[\s.-]\d{4})|\(?\+?\d{1,3}?\)?[-.\s*]?\(?\d{1,3}?\)?[-.\s*]([-.\s*]?\d{1,9}){3,6}"
URL_REGEX_NA = r"(?:\+1?\d{10}?(?=\s)|1\d{10}?(?=\s)|(\+\d{1,2}\s)?\(?\d{3}\)?[\s.-]\d{3}[\s.-]\d{4})"
URL_REGEX_IN = r"(?:\+?\d{8}(?:\d{1,5})?|\(\+?\d{2,3}\)\s?(?:\d{4}[\s*.-]?\d{4}|\d{3}[\s*.-]?\d{3,4}|\d{2}([\s*.-]?)\d{2}\1\d{2}(?:\1\d{2})?))"
class StringPhoneExtractTransform(BaseTransform):
category = 'string'
name = 'phone_extract'
label = 'Extract Phone Number'
help_text = 'Find and copy a complete phone number out of a text field. Finds the first phone number only.'
noun = 'Text'
verb = 'find and copy a phone number from'
def transform(self, str_input, regex='uni1', **kwargs):
if isinstance(str_input, basestring):
if regex=='uni2':
match = re.search(URL_REGEX_UNI2, str_input)
return match.group(0) if match else u''
elif regex=='na':
match = re.search(URL_REGEX_NA, str_input)
return match.group(0) if match else u''
elif regex=='in':
match = re.search(URL_REGEX_IN, str_input)
return match.group(0) if match else u''
else:
match = re.search(URL_REGEX_UNI1, str_input)
return match.group(0) if match else u''
else:
return u''
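    # illustrative (hypothetical) usage with the default 'uni1' format:
    #   StringPhoneExtractTransform().transform(u'call me at (415) 555-2671')
    #   would be expected to return the matched number, e.g. u'(415) 555-2671'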
def fields(self, *args, **kwargs):
return [
{
"type": "unicode",
"required": False,
"key": "regex",
"choices": "na|North American Number Plan (NANP) e.g. (123) 456-7890,in|International e.g. (12) 34-56-78-90,uni1|Universal 1 (includes NANP and some International),uni2|Universal 2 (includes NANP and more International)",
"label": "Phone Number Format",
"default": "uni1",
"help_text": (
'By default, the **Universal 1** search is used, which will find many NANP and International numbers. '
'If this does not work consistently, try a specific format (**NAMP** or **International**), '
'or **Universal 2**, which will find most phone numbers, but can also result in false positives.')
},
]
register(StringPhoneExtractTransform())
| zapier/transformer | transformer/transforms/string/phone_number_extract.py | Python | gpl-3.0 | 3,211 |
# -*- coding: utf-8 -*-
"""
werkzeug.debug.tbtools
~~~~~~~~~~~~~~~~~~~~~~
This module provides various traceback related utility functions.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD.
"""
import re
import os
import sys
import json
import inspect
import traceback
import codecs
from tokenize import TokenError
from werkzeug.utils import cached_property, escape
from werkzeug.debug.console import Console
from werkzeug._compat import range_type, PY2, text_type, string_types, \
to_native, to_unicode
_coding_re = re.compile(br'coding[:=]\s*([-\w.]+)')
_line_re = re.compile(br'^(.*?)$(?m)')
_funcdef_re = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)')
UTF8_COOKIE = b'\xef\xbb\xbf'
system_exceptions = (SystemExit, KeyboardInterrupt)
try:
system_exceptions += (GeneratorExit,)
except NameError:
pass
HEADER = u'''\
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<title>%(title)s // Werkzeug Debugger</title>
<link rel="stylesheet" href="?__debugger__=yes&cmd=resource&f=style.css" type="text/css">
<!-- We need to make sure this has a favicon so that the debugger does not by
accident trigger a request to /favicon.ico which might change the application
state. -->
<link rel="shortcut icon" href="?__debugger__=yes&cmd=resource&f=console.png">
<script type="text/javascript" src="?__debugger__=yes&cmd=resource&f=jquery.js"></script>
<script type="text/javascript" src="?__debugger__=yes&cmd=resource&f=debugger.js"></script>
<script type="text/javascript">
var TRACEBACK = %(traceback_id)d,
CONSOLE_MODE = %(console)s,
EVALEX = %(evalex)s,
SECRET = "%(secret)s";
</script>
</head>
<body>
<div class="debugger">
'''
FOOTER = u'''\
<div class="footer">
Brought to you by <strong class="arthur">DON'T PANIC</strong>, your
friendly Werkzeug powered traceback interpreter.
</div>
</div>
</body>
</html>
'''
PAGE_HTML = HEADER + u'''\
<h1>%(exception_type)s</h1>
<div class="detail">
<p class="errormsg">%(exception)s</p>
</div>
<h2 class="traceback">Traceback <em>(most recent call last)</em></h2>
%(summary)s
<div class="plain">
<form action="/?__debugger__=yes&cmd=paste" method="post">
<p>
<input type="hidden" name="language" value="pytb">
This is the Copy/Paste friendly version of the traceback. <span
class="pastemessage">You can also paste this traceback into
a <a href="https://gist.github.com/">gist</a>:
<input type="submit" value="create paste"></span>
</p>
<textarea cols="50" rows="10" name="code" readonly>%(plaintext)s</textarea>
</form>
</div>
<div class="explanation">
The debugger caught an exception in your WSGI application. You can now
look at the traceback which led to the error. <span class="nojavascript">
If you enable JavaScript you can also use additional features such as code
execution (if the evalex feature is enabled), automatic pasting of the
exceptions and much more.</span>
</div>
''' + FOOTER + '''
<!--
%(plaintext_cs)s
-->
'''
CONSOLE_HTML = HEADER + u'''\
<h1>Interactive Console</h1>
<div class="explanation">
In this console you can execute Python expressions in the context of the
application. The initial namespace was created by the debugger automatically.
</div>
<div class="console"><div class="inner">The Console requires JavaScript.</div></div>
''' + FOOTER
SUMMARY_HTML = u'''\
<div class="%(classes)s">
%(title)s
<ul>%(frames)s</ul>
%(description)s
</div>
'''
FRAME_HTML = u'''\
<div class="frame" id="frame-%(id)d">
<h4>File <cite class="filename">"%(filename)s"</cite>,
line <em class="line">%(lineno)s</em>,
in <code class="function">%(function_name)s</code></h4>
<pre>%(current_line)s</pre>
</div>
'''
SOURCE_TABLE_HTML = u'<table class=source>%s</table>'
SOURCE_LINE_HTML = u'''\
<tr class="%(classes)s">
<td class=lineno>%(lineno)s</td>
<td>%(code)s</td>
</tr>
'''
def render_console_html(secret):
return CONSOLE_HTML % {
'evalex': 'true',
'console': 'true',
'title': 'Console',
'secret': secret,
'traceback_id': -1
}
def get_current_traceback(ignore_system_exceptions=False,
show_hidden_frames=False, skip=0):
"""Get the current exception info as `Traceback` object. Per default
calling this method will reraise system exceptions such as generator exit,
system exit or others. This behavior can be disabled by passing `False`
to the function as first parameter.
"""
exc_type, exc_value, tb = sys.exc_info()
if ignore_system_exceptions and exc_type in system_exceptions:
raise
for x in range_type(skip):
if tb.tb_next is None:
break
tb = tb.tb_next
tb = Traceback(exc_type, exc_value, tb)
if not show_hidden_frames:
tb.filter_hidden_frames()
return tb
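# Hypothetical usage sketch (not part of the original module), using only the helpers
# defined in this file: capture the active exception and render it.
#
#     try:
#         1 / 0
#     except Exception:
#         tb = get_current_traceback()
#         tb.log(sys.stderr)        # plain-text traceback to a file object
#         html = tb.render_full()   # full HTML page for the debugger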
class Line(object):
"""Helper for the source renderer."""
__slots__ = ('lineno', 'code', 'in_frame', 'current')
def __init__(self, lineno, code):
self.lineno = lineno
self.code = code
self.in_frame = False
self.current = False
def classes(self):
rv = ['line']
if self.in_frame:
rv.append('in-frame')
if self.current:
rv.append('current')
return rv
classes = property(classes)
def render(self):
return SOURCE_LINE_HTML % {
'classes': u' '.join(self.classes),
'lineno': self.lineno,
'code': escape(self.code)
}
class Traceback(object):
"""Wraps a traceback."""
def __init__(self, exc_type, exc_value, tb):
self.exc_type = exc_type
self.exc_value = exc_value
if not isinstance(exc_type, str):
exception_type = exc_type.__name__
if exc_type.__module__ not in ('__builtin__', 'exceptions'):
exception_type = exc_type.__module__ + '.' + exception_type
else:
exception_type = exc_type
self.exception_type = exception_type
# we only add frames to the list that are not hidden. This follows
        # the magic variables as defined by paste.exceptions.collector
self.frames = []
while tb:
self.frames.append(Frame(exc_type, exc_value, tb))
tb = tb.tb_next
def filter_hidden_frames(self):
"""Remove the frames according to the paste spec."""
if not self.frames:
return
new_frames = []
hidden = False
for frame in self.frames:
hide = frame.hide
if hide in ('before', 'before_and_this'):
new_frames = []
hidden = False
if hide == 'before_and_this':
continue
elif hide in ('reset', 'reset_and_this'):
hidden = False
if hide == 'reset_and_this':
continue
elif hide in ('after', 'after_and_this'):
hidden = True
if hide == 'after_and_this':
continue
elif hide or hidden:
continue
new_frames.append(frame)
# if we only have one frame and that frame is from the codeop
# module, remove it.
if len(new_frames) == 1 and self.frames[0].module == 'codeop':
del self.frames[:]
        # if the last frame is missing something went terribly wrong :(
elif self.frames[-1] in new_frames:
self.frames[:] = new_frames
def is_syntax_error(self):
"""Is it a syntax error?"""
return isinstance(self.exc_value, SyntaxError)
is_syntax_error = property(is_syntax_error)
def exception(self):
"""String representation of the exception."""
buf = traceback.format_exception_only(self.exc_type, self.exc_value)
rv = ''.join(buf).strip()
return rv.decode('utf-8', 'replace') if PY2 else rv
exception = property(exception)
def log(self, logfile=None):
"""Log the ASCII traceback into a file object."""
if logfile is None:
logfile = sys.stderr
tb = self.plaintext.rstrip() + u'\n'
if PY2:
tb = tb.encode('utf-8', 'replace')
logfile.write(tb)
def paste(self):
"""Create a paste and return the paste id."""
data = json.dumps({
'description': 'Werkzeug Internal Server Error',
'public': False,
'files': {
'traceback.txt': {
'content': self.plaintext
}
}
}).encode('utf-8')
try:
from urllib2 import urlopen
except ImportError:
from urllib.request import urlopen
rv = urlopen('https://api.github.com/gists', data=data)
resp = json.loads(rv.read().decode('utf-8'))
rv.close()
return {
'url': resp['html_url'],
'id': resp['id']
}
def render_summary(self, include_title=True):
"""Render the traceback for the interactive console."""
title = ''
frames = []
classes = ['traceback']
if not self.frames:
classes.append('noframe-traceback')
if include_title:
if self.is_syntax_error:
title = u'Syntax Error'
else:
title = u'Traceback <em>(most recent call last)</em>:'
for frame in self.frames:
frames.append(u'<li%s>%s' % (
frame.info and u' title="%s"' % escape(frame.info) or u'',
frame.render()
))
if self.is_syntax_error:
description_wrapper = u'<pre class=syntaxerror>%s</pre>'
else:
description_wrapper = u'<blockquote>%s</blockquote>'
return SUMMARY_HTML % {
'classes': u' '.join(classes),
'title': title and u'<h3>%s</h3>' % title or u'',
'frames': u'\n'.join(frames),
'description': description_wrapper % escape(self.exception)
}
def render_full(self, evalex=False, secret=None):
"""Render the Full HTML page with the traceback info."""
exc = escape(self.exception)
return PAGE_HTML % {
'evalex': evalex and 'true' or 'false',
'console': 'false',
'title': exc,
'exception': exc,
'exception_type': escape(self.exception_type),
'summary': self.render_summary(include_title=False),
'plaintext': self.plaintext,
'plaintext_cs': re.sub('-{2,}', '-', self.plaintext),
'traceback_id': self.id,
'secret': secret
}
def generate_plaintext_traceback(self):
"""Like the plaintext attribute but returns a generator"""
yield u'Traceback (most recent call last):'
for frame in self.frames:
yield u' File "%s", line %s, in %s' % (
frame.filename,
frame.lineno,
frame.function_name
)
yield u' ' + frame.current_line.strip()
yield self.exception
def plaintext(self):
return u'\n'.join(self.generate_plaintext_traceback())
plaintext = cached_property(plaintext)
id = property(lambda x: id(x))
class Frame(object):
"""A single frame in a traceback."""
def __init__(self, exc_type, exc_value, tb):
self.lineno = tb.tb_lineno
self.function_name = tb.tb_frame.f_code.co_name
self.locals = tb.tb_frame.f_locals
self.globals = tb.tb_frame.f_globals
fn = inspect.getsourcefile(tb) or inspect.getfile(tb)
if fn[-4:] in ('.pyo', '.pyc'):
fn = fn[:-1]
# if it's a file on the file system resolve the real filename.
if os.path.isfile(fn):
fn = os.path.realpath(fn)
self.filename = to_unicode(fn, sys.getfilesystemencoding())
self.module = self.globals.get('__name__')
self.loader = self.globals.get('__loader__')
self.code = tb.tb_frame.f_code
# support for paste's traceback extensions
self.hide = self.locals.get('__traceback_hide__', False)
info = self.locals.get('__traceback_info__')
if info is not None:
try:
info = text_type(info)
except UnicodeError:
info = str(info).decode('utf-8', 'replace')
self.info = info
def render(self):
"""Render a single frame in a traceback."""
return FRAME_HTML % {
'id': self.id,
'filename': escape(self.filename),
'lineno': self.lineno,
'function_name': escape(self.function_name),
'current_line': escape(self.current_line.strip())
}
def get_annotated_lines(self):
"""Helper function that returns lines with extra information."""
lines = [Line(idx + 1, x) for idx, x in enumerate(self.sourcelines)]
# find function definition and mark lines
if hasattr(self.code, 'co_firstlineno'):
lineno = self.code.co_firstlineno - 1
while lineno > 0:
if _funcdef_re.match(lines[lineno].code):
break
lineno -= 1
try:
offset = len(inspect.getblock([x.code + '\n' for x
in lines[lineno:]]))
except TokenError:
offset = 0
for line in lines[lineno:lineno + offset]:
line.in_frame = True
# mark current line
try:
lines[self.lineno - 1].current = True
except IndexError:
pass
return lines
def render_source(self):
"""Render the sourcecode."""
return SOURCE_TABLE_HTML % u'\n'.join(line.render() for line in
self.get_annotated_lines())
def eval(self, code, mode='single'):
"""Evaluate code in the context of the frame."""
if isinstance(code, string_types):
if PY2 and isinstance(code, unicode):
code = UTF8_COOKIE + code.encode('utf-8')
code = compile(code, '<interactive>', mode)
return eval(code, self.globals, self.locals)
@cached_property
def sourcelines(self):
"""The sourcecode of the file as list of unicode strings."""
# get sourcecode from loader or file
source = None
if self.loader is not None:
try:
if hasattr(self.loader, 'get_source'):
source = self.loader.get_source(self.module)
elif hasattr(self.loader, 'get_source_by_code'):
source = self.loader.get_source_by_code(self.code)
except Exception:
# we munch the exception so that we don't cause troubles
# if the loader is broken.
pass
if source is None:
try:
f = open(self.filename, mode='rb')
except IOError:
return []
try:
source = f.read()
finally:
f.close()
# already unicode? return right away
if isinstance(source, text_type):
return source.splitlines()
# yes. it should be ascii, but we don't want to reject too many
# characters in the debugger if something breaks
charset = 'utf-8'
if source.startswith(UTF8_COOKIE):
source = source[3:]
else:
for idx, match in enumerate(_line_re.finditer(source)):
match = _coding_re.search(match.group())
if match is not None:
charset = match.group(1)
break
if idx > 1:
break
# on broken cookies we fall back to utf-8 too
charset = to_native(charset)
try:
codecs.lookup(charset)
except LookupError:
charset = 'utf-8'
return source.decode(charset, 'replace').splitlines()
@property
def current_line(self):
try:
return self.sourcelines[self.lineno - 1]
except IndexError:
return u''
@cached_property
def console(self):
return Console(self.globals, self.locals)
id = property(lambda x: id(x))
| thepiper/standoff | vpy/lib/python2.7/site-packages/werkzeug/debug/tbtools.py | Python | gpl-3.0 | 16,913 |
def is_pangram(word):
    # a pangram uses every letter of the alphabet at least once, so collect
    # the distinct alphabetic characters (case-insensitive) and compare to 26
    letters = {c for c in word.lower() if c.isalpha()}
    return len(letters) == 26
| ZachGangwer/Exercism | python/pangram/pangram.py | Python | gpl-3.0 | 273 |
from django.conf.urls import include, url
urlpatterns = [
url(r'^sticky-uploads/', include('stickyuploads.urls')),
]
| caktus/django-sticky-uploads | stickyuploads/tests/urls.py | Python | bsd-3-clause | 123 |
# !/usr/bin/env python
# -*- coding: UTF-8 -*-
#
#
# ==================
# VIZ MARKDOWN - multiple file, markdown format
# ==================
import os, sys
import json
from ..utils import *
from ..builder import * # loads and sets up Django
from ..viz_factory import VizFactory
class MarkdownViz(VizFactory):
"""
A simple markdown rendering in multi pages
"""
def __init__(self, ontospy_graph, title=""):
"""
Init
"""
super(MarkdownViz, self).__init__(ontospy_graph, title)
def _buildTemplates(self):
"""
OVERRIDING THIS METHOD from Factory
"""
# Ontology - MAIN PAGE
contents = self._renderTemplate(
"markdown/markdown_ontoinfo.md", extraContext=None)
FILE_NAME = "index.md"
main_url = self._save2File(contents, FILE_NAME, self.output_path)
browser_output_path = self.output_path
if self.ontospy_graph.all_classes:
# BROWSER PAGES - CLASSES ======
for entity in self.ontospy_graph.all_classes:
extra_context = {
"main_entity": entity,
"main_entity_type": "class",
"ontograph": self.ontospy_graph
}
contents = self._renderTemplate(
"markdown/markdown_classinfo.md",
extraContext=extra_context)
FILE_NAME = entity.slug + ".md"
self._save2File(contents, FILE_NAME, browser_output_path)
if self.ontospy_graph.all_properties:
# BROWSER PAGES - PROPERTIES ======
for entity in self.ontospy_graph.all_properties:
extra_context = {
"main_entity": entity,
"main_entity_type": "property",
"ontograph": self.ontospy_graph
}
contents = self._renderTemplate(
"markdown/markdown_propinfo.md",
extraContext=extra_context)
FILE_NAME = entity.slug + ".md"
self._save2File(contents, FILE_NAME, browser_output_path)
if self.ontospy_graph.all_skos_concepts:
# BROWSER PAGES - CONCEPTS ======
for entity in self.ontospy_graph.all_skos_concepts:
extra_context = {
"main_entity": entity,
"main_entity_type": "concept",
"ontograph": self.ontospy_graph
}
contents = self._renderTemplate(
"markdown/markdown_conceptinfo.md",
extraContext=extra_context)
FILE_NAME = entity.slug + ".ms"
self._save2File(contents, FILE_NAME, browser_output_path)
return main_url
# if called directly, for testing purposes pick a random ontology
if __name__ == '__main__':
TEST_ONLINE = False
try:
g = get_onto_for_testing(TEST_ONLINE)
v = MarkdownViz(g, title="")
v.build()
v.preview()
sys.exit(0)
except KeyboardInterrupt as e: # Ctrl-C
raise e
| lambdamusic/OntoSPy | ontospy/ontodocs/viz/viz_markdown.py | Python | gpl-3.0 | 3,160 |
##
## Run: git log --author="DeepBlue14" --pretty=tformat: --numstat > counter_log.txt
## Then run this file
my_sum = 0
with open("counter_log.txt") as f:
content = f.readlines()
for i in content:
#print(i)
no_ws = i.rstrip()
if no_ws:
last_char = no_ws[-1]
#print(last_char)
if last_char == 'h' or last_char == 'p':
num_str = no_ws.partition('\t')[0]
#print(num_str)
my_sum += int(num_str)
num_str2 = no_ws.partition(' ')[1]
#my_sum += int(num_str2)
print num_str2
print "Result: ", my_sum
| DeepBlue14/rqt_ide | ChangeCounter.py | Python | mit | 621 |
"""
Default configuration file.
These settings should be redefined.
"""
DEBUG = False # bool. can be set by env var PYPEMAN_DEBUG (0|1|true|false) or pypeman cmd args
TESTING = False # bool. can be set by env var PYPEMAN_TESTING (0|1|true|false) pypeman cmd args
DEBUG_PARAMS = dict(
slow_callback_duration=0.1
)
REMOTE_ADMIN_WEBSOCKET_CONFIG = {
'host': 'localhost',
'port': '8091',
'ssl': None,
'url': None, # must be set when behind a reverse proxy
}
REMOTE_ADMIN_WEB_CONFIG = {
'host': 'localhost',
'port': '8090',
'ssl': None,
}
HTTP_ENDPOINT_CONFIG = ['0.0.0.0', '8080']
PERSISTENCE_BACKEND = None
PERSISTENCE_CONFIG = {}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(name)s %(module)s %(message)s'
},
},
'handlers': {
'console': {
'level':'DEBUG',
'class':'logging.StreamHandler',
'formatter': 'verbose'
}
},
'loggers': {
'': {
'level': 'INFO',
'handlers': ['console'],
},
'jsonrpcclient':{
'level': 'WARNING',
'propagate': False,
},
'jsonrpcserver':{
'level': 'WARNING',
'propagate': False,
}
}
}
| jrmi/pypeman | pypeman/default_settings.py | Python | apache-2.0 | 1,361 |
"""
Integration tests for the stem.control.BaseController class.
"""
import re
import threading
import time
import unittest
import stem.control
import stem.socket
import stem.util.system
import test.runner
from test.runner import require_controller
class StateObserver(object):
"""
Simple container for listening to ControlSocket state changes and
  remembering them for the test.
"""
controller = None
state = None
timestamp = None
def reset(self):
self.controller = None
self.state = None
self.timestamp = None
def listener(self, controller, state, timestamp):
self.controller = controller
self.state = state
self.timestamp = timestamp
class TestBaseController(unittest.TestCase):
@require_controller
def test_connect_repeatedly(self):
"""
Connects and closes the socket repeatedly. This is a simple attempt to
trigger concurrency issues.
"""
if stem.util.system.is_mac():
test.runner.skip(self, '(ticket #6235)')
return
with test.runner.get_runner().get_tor_socket() as control_socket:
controller = stem.control.BaseController(control_socket)
for _ in range(250):
controller.connect()
controller.close()
@require_controller
def test_msg(self):
"""
Tests a basic query with the msg() method.
"""
with test.runner.get_runner().get_tor_socket() as control_socket:
controller = stem.control.BaseController(control_socket)
test.runner.exercise_controller(self, controller)
@require_controller
def test_msg_invalid(self):
"""
Tests the msg() method against an invalid controller command.
"""
with test.runner.get_runner().get_tor_socket() as control_socket:
controller = stem.control.BaseController(control_socket)
response = controller.msg('invalid')
self.assertEqual('Unrecognized command "invalid"', str(response))
@require_controller
def test_msg_invalid_getinfo(self):
"""
    Tests the msg() method against a non-existent GETINFO option.
"""
with test.runner.get_runner().get_tor_socket() as control_socket:
controller = stem.control.BaseController(control_socket)
response = controller.msg('GETINFO blarg')
self.assertEqual('Unrecognized key "blarg"', str(response))
@require_controller
def test_msg_repeatedly(self):
"""
Connects, sends a burst of messages, and closes the socket repeatedly. This
is a simple attempt to trigger concurrency issues.
"""
if stem.util.system.is_mac():
test.runner.skip(self, '(ticket #6235)')
return
with test.runner.get_runner().get_tor_socket() as control_socket:
controller = stem.control.BaseController(control_socket)
def run_getinfo():
for _ in range(150):
try:
controller.msg('GETINFO version')
controller.msg('GETINFO blarg')
controller.msg('blarg')
except stem.ControllerError:
pass
message_threads = []
for _ in range(5):
msg_thread = threading.Thread(target = run_getinfo)
message_threads.append(msg_thread)
msg_thread.setDaemon(True)
msg_thread.start()
for index in range(100):
controller.connect()
controller.close()
for msg_thread in message_threads:
msg_thread.join()
@require_controller
def test_asynchronous_event_handling(self):
"""
    Checks that we can receive asynchronous events while hammering our socket
    with queries, and that when a controller is closed the listeners still
    receive all of the enqueued events.
"""
class ControlledListener(stem.control.BaseController):
"""
Controller that blocks event handling until told to do so.
"""
def __init__(self, control_socket):
stem.control.BaseController.__init__(self, control_socket)
self.received_events = []
self.receive_notice = threading.Event()
def _handle_event(self, event_message):
self.receive_notice.wait()
self.received_events.append(event_message)
with test.runner.get_runner().get_tor_socket() as control_socket:
controller = ControlledListener(control_socket)
controller.msg('SETEVENTS BW')
      # Wait for a couple of events to be enqueued. Doing a bunch of GETINFO
      # queries while waiting better exercises the asynchronous event
      # handling.
start_time = time.time()
while (time.time() - start_time) < 3:
test.runner.exercise_controller(self, controller)
# Concurrently shut down the controller. We need to do this in another
# thread because it'll block on the event handling, which in turn is
      # currently blocking on the receive_notice.
close_thread = threading.Thread(target = controller.close, name = 'Closing controller')
close_thread.setDaemon(True)
close_thread.start()
# Finally start handling the BW events that we've received. We should
# have at least a couple of them.
controller.receive_notice.set()
close_thread.join()
self.assertTrue(len(controller.received_events) >= 2)
for bw_event in controller.received_events:
self.assertTrue(re.match('BW [0-9]+ [0-9]+', str(bw_event)))
self.assertTrue(re.match('650 BW [0-9]+ [0-9]+\r\n', bw_event.raw_content()))
self.assertEqual(('650', ' '), bw_event.content()[0][:2])
@require_controller
def test_get_latest_heartbeat(self):
"""
Basic check for get_latest_heartbeat().
"""
# makes a getinfo query, then checks that the heartbeat is close to now
with test.runner.get_runner().get_tor_socket() as control_socket:
controller = stem.control.BaseController(control_socket)
controller.msg('GETINFO version')
self.assertTrue((time.time() - controller.get_latest_heartbeat()) < 5)
@require_controller
def test_status_notifications(self):
"""
Checks basic functionality of the add_status_listener() and
remove_status_listener() methods.
"""
state_observer = StateObserver()
with test.runner.get_runner().get_tor_socket(False) as control_socket:
controller = stem.control.BaseController(control_socket)
controller.add_status_listener(state_observer.listener, False)
controller.close()
self.assertEqual(controller, state_observer.controller)
self.assertEqual(stem.control.State.CLOSED, state_observer.state)
self.assertTrue(state_observer.timestamp <= time.time())
self.assertTrue(state_observer.timestamp > time.time() - 1.0)
state_observer.reset()
controller.connect()
self.assertEqual(controller, state_observer.controller)
self.assertEqual(stem.control.State.INIT, state_observer.state)
self.assertTrue(state_observer.timestamp <= time.time())
self.assertTrue(state_observer.timestamp > time.time() - 1.0)
state_observer.reset()
# cause the socket to shut down without calling close()
controller.msg('Blarg!')
self.assertRaises(stem.SocketClosed, controller.msg, 'blarg')
self.assertEqual(controller, state_observer.controller)
self.assertEqual(stem.control.State.CLOSED, state_observer.state)
self.assertTrue(state_observer.timestamp <= time.time())
self.assertTrue(state_observer.timestamp > time.time() - 1.0)
state_observer.reset()
# remove listener and make sure we don't get further notices
controller.remove_status_listener(state_observer.listener)
controller.connect()
self.assertEqual(None, state_observer.controller)
self.assertEqual(None, state_observer.state)
self.assertEqual(None, state_observer.timestamp)
state_observer.reset()
# add with spawn as true, we need a little delay on this since we then
# get the notice asynchronously
controller.add_status_listener(state_observer.listener, True)
controller.close()
time.sleep(0.1) # not much work going on so this doesn't need to be much
self.assertEqual(controller, state_observer.controller)
self.assertEqual(stem.control.State.CLOSED, state_observer.state)
self.assertTrue(state_observer.timestamp <= time.time())
self.assertTrue(state_observer.timestamp > time.time() - 1.0)
state_observer.reset()
| FedericoCeratto/stem | test/integ/control/base_controller.py | Python | lgpl-3.0 | 8,385 |
# Copyright (c) 2011 Mounier Florian
# Copyright (c) 2011 Paul Colomiets
# Copyright (c) 2012 Craig Barnes
# Copyright (c) 2012, 2014 Tycho Andersen
# Copyright (c) 2013 Tao Sauvage
# Copyright (c) 2014 ramnes
# Copyright (c) 2014 Sean Vig
# Copyright (c) 2014 dmpayton
# Copyright (c) 2014 dequis
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import division
from .base import SingleWindow
class Zoomy(SingleWindow):
"""
    A layout with a single active window, and a few other previews at the
    right
"""
defaults = [
("columnwidth", 150, "Width of the right column"),
("property_name", "ZOOM", "Property to set on zoomed window"),
("property_small", "0.1", "Property value to set on zoomed window"),
("property_big", "1.0", "Property value to set on normal window"),
("margin", 0, "Margin of the layout"),
]
def __init__(self, **config):
SingleWindow.__init__(self, **config)
self.add_defaults(Zoomy.defaults)
self.clients = []
self.focused = None
def _get_window(self):
return self.focused
def focus_first(self):
if self.clients:
return self.clients[0]
def focus_last(self):
if self.clients:
return self.clients[-1]
def focus_next(self, client):
if client not in self.clients:
return
idx = self.clients.index(client)
return self.clients[(idx + 1) % len(self.clients)]
def focus_previous(self, client):
if not self.clients:
return
idx = self.clients.index(client)
return self.clients[idx - 1]
def clone(self, group):
c = SingleWindow.clone(self, group)
c.clients = []
return c
def add(self, client):
self.clients.insert(0, client)
self.focus(client)
def remove(self, client):
if client not in self.clients:
return
if self.focused == client:
self.focused = self.focus_previous(client)
if self.focused == client:
self.focused = None
self.clients.remove(client)
return self.focused
def configure(self, client, screen):
left, right = screen.hsplit(screen.width - self.columnwidth)
if client is self.focused:
client.place(
left.x,
left.y,
left.width,
left.height,
0,
None,
margin=self.margin,
)
else:
h = right.width * left.height // left.width
client_index = self.clients.index(client)
focused_index = self.clients.index(self.focused)
offset = client_index - focused_index - 1
if offset < 0:
offset += len(self.clients)
if h * (len(self.clients) - 1) < right.height:
client.place(
right.x,
right.y + h * offset,
right.width,
h,
0,
None,
margin=self.margin,
)
else:
hh = (right.height - h) // (len(self.clients) - 1)
client.place(
right.x,
right.y + hh * offset,
right.width,
h,
0,
None,
margin=self.margin,
)
client.unhide()
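    # Worked geometry example (hypothetical 1000x600 screen with columnwidth=150): the focused
    # client fills the left 850x600 area and each preview in the right column gets height
    # h = 150 * 600 // 850 = 105; previews stack down the column in wrapped order relative to the
    # focused client, and are squeezed evenly once they no longer fit in the column.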
def info(self):
d = SingleWindow.info(self)
d["clients"] = [x.name for x in self.clients]
return d
def focus(self, win):
if self.focused and self.property_name and self.focused.window.get_property(
self.property_name,
"UTF8_STRING"
) is not None:
self.focused.window.set_property(
self.property_name,
self.property_small,
"UTF8_STRING",
format=8
)
SingleWindow.focus(self, win)
if self.property_name:
self.focused = win
win.window.set_property(
self.property_name,
self.property_big,
"UTF8_STRING",
format=8
)
def cmd_next(self):
client = self.focus_next(self.focused) or self.focus_first()
self.group.focus(client, False)
cmd_down = cmd_next
def cmd_previous(self):
client = self.focus_previous(self.focused) or self.focus_last()
self.group.focus(client, False)
cmd_up = cmd_previous
| xplv/qtile | libqtile/layout/zoomy.py | Python | mit | 5,680 |
import os
import platform
# toolchains options
ARCH='risc-v'
CPU='nuclei'
CROSS_TOOL='gcc'
if os.getenv('RTT_CC'):
CROSS_TOOL = os.getenv('RTT_CC')
if CROSS_TOOL == 'gcc':
PLATFORM = 'gcc'
if platform.system().lower() == "windows":
EXEC_PATH = 'D:/NucleiStudio/toolchain/gcc/bin'
else:
EXEC_PATH = '~/NucleiStudio/toolchain/gcc/bin'
    if not os.path.exists(EXEC_PATH):
        print("Warning: Toolchain path %s doesn't exist, assuming it is already in PATH" % EXEC_PATH)
        EXEC_PATH = ''  # don't set the path if it doesn't exist
else:
print("CROSS_TOOL = %s not yet supported" % CROSS_TOOL)
if os.getenv('RTT_EXEC_PATH'):
EXEC_PATH = os.getenv('RTT_EXEC_PATH')
BUILD = 'debug'
# Fixed configurations below
NUCLEI_SDK_SOC = "demosoc"
NUCLEI_SDK_BOARD = "nuclei_fpga_eval"
# Configurable options below
# DOWNLOAD: https://doc.nucleisys.com/nuclei_sdk/develop/buildsystem.html#download
NUCLEI_SDK_DOWNLOAD = "ilm"
# CORE: See https://doc.nucleisys.com/nuclei_sdk/develop/buildsystem.html#core
NUCLEI_SDK_CORE = "nx600"
if PLATFORM == 'gcc':
# toolchains
PREFIX = 'riscv-nuclei-elf-'
CC = PREFIX + 'gcc'
CXX = PREFIX + 'g++'
AS = PREFIX + 'gcc'
AR = PREFIX + 'ar'
LINK = PREFIX + 'gcc'
GDB = PREFIX + 'gdb'
TARGET_EXT = 'elf'
SIZE = PREFIX + 'size'
OBJDUMP = PREFIX + 'objdump'
OBJCPY = PREFIX + 'objcopy'
CFLAGS = ' -ffunction-sections -fdata-sections -fno-common '
AFLAGS = CFLAGS
LFLAGS = ' --specs=nano.specs --specs=nosys.specs -nostartfiles -Wl,--gc-sections '
LFLAGS += ' -Wl,-cref,-Map=rtthread.map'
LFLAGS += ' -u _isatty -u _write -u _sbrk -u _read -u _close -u _fstat -u _lseek '
CPATH = ''
LPATH = ''
LIBS = ['stdc++']
if BUILD == 'debug':
CFLAGS += ' -O2 -ggdb'
AFLAGS += ' -ggdb'
else:
CFLAGS += ' -O2 -Os'
CXXFLAGS = CFLAGS
DUMP_ACTION = OBJDUMP + ' -D -S $TARGET > rtt.asm\n'
POST_ACTION = OBJCPY + ' -O binary $TARGET rtthread.bin\n' + SIZE + ' $TARGET \n'
def dist_handle(BSP_ROOT, dist_dir):
import sys
cwd_path = os.getcwd()
sys.path.append(os.path.join(os.path.dirname(BSP_ROOT), 'tools'))
from sdk_dist import dist_do_building
dist_do_building(BSP_ROOT, dist_dir)
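# Toolchain selection sketch (paths below are examples only, not shipped defaults): the values above
# can be overridden from the environment before invoking scons, e.g.
#
#     export RTT_CC=gcc
#     export RTT_EXEC_PATH=/opt/riscv-nuclei-elf-gcc/bin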
| nongxiaoming/rt-thread | bsp/nuclei/nuclei_fpga_eval/rtconfig.py | Python | apache-2.0 | 2,308 |
import booby
from booby import fields
from booby.inspection import get_fields, is_model
from booby.validators import Required
from pydoc import locate
from collections import OrderedDict
from tabulate import tabulate
import readline
MODEL_MAP = {}
class tabCompleter(object):
"""
A tab completer that can either complete from
the filesystem or from a list.
Partially taken from:
http://stackoverflow.com/questions/5637124/tab-completion-in-pythons-raw-input
"""
def createListCompleter(self, ll):
"""
This is a closure that creates a method that autocompletes from
the given list.
        Since the autocomplete function can't be given a list to complete from,
a closure is used to create the listCompleter function with a list to complete
from.
"""
def listCompleter(text, state):
line = readline.get_line_buffer()
if not line:
return [c + " " for c in ll][state]
else:
return [c + " " for c in ll if c.startswith(line)][state]
self.listCompleter = listCompleter
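# A minimal usage sketch (assumption: an interactive session where readline works as usual); the
# helper below is illustrative only and is not called anywhere in this module.
def _example_tab_completion_setup(options):
    """Wire a tabCompleter list completer into readline for the given option strings."""
    completer = tabCompleter()
    completer.createListCompleter(options)
    readline.set_completer(completer.listCompleter)
    readline.parse_and_bind("tab: complete")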
def ensure_json_value(value):
if is_model(value):
return dict(value)
else:
return value
def ensure_json(value):
if isinstance(value, (list, tuple)):
return [ensure_json_value(w) for w in value]
else:
return ensure_json_value(value)
class EditModel(object):
def __init__(self, model_type, current_value, help_map):
self.model_type = model_type
self.current_value = current_value
self.new_value = {}
self.help_map = help_map
def get_fields(self):
required_details = OrderedDict()
non_required_details = OrderedDict()
for k, f in sorted(get_fields(self.model_type).iteritems()):
if is_required(f):
required_details[k] = f
else:
non_required_details[k] = f
details = OrderedDict()
for k, f in required_details.iteritems():
details[k] = f
for k, f in non_required_details.iteritems():
details[k] = f
return details
def edit_field(self, field_name):
new_field_value = self.ask_field(field_name)
# field = get_fields(self.current_value).get(field_name)
value = ensure_json(new_field_value)
self.new_value[field_name] = value
def ask_field(self, field_name):
field_type = self.model_type.__dict__.get(field_name, None)
        if not field_type:
            print "No field of that name."
            return None
new_value = ask_detail_for_field(
field_name, field_type, None, self.help_map)
if is_model(new_value):
new_value = new_value.to_json()
return new_value
def print_current(self):
fields = self.get_fields()
table = []
i = 1
for k, v in fields.iteritems():
value = getattr(self.current_value, k, None)
row = [k, convert_for_print(value)]
table.append(row)
i = i + 1
print tabulate(table)
def print_new(self):
print self.new_value
def convert_value_to_print(value):
f = getattr(value, 'to_json', None)
if callable(f):
value = value.to_json()
return value
def convert_for_print(value):
if isinstance(value, (list, tuple)):
if len(value) > 0:
value = (convert_value_to_print(w) for w in value)
value = "[" + ", ".join(value) + "]"
else:
value = ""
else:
value = convert_value_to_print(value)
return value
def get_type(model):
if type(model) == fields.Integer or model == fields.Integer:
return 'Integer'
elif type(model) == fields.String or model == fields.String:
return 'String'
else:
return model.__name__
def is_required(field):
return next((True for x in field.validators if isinstance(x, Required)), False)
def convert_to_proper_base_type(base_type, value):
'''
    Converts the string input into the appropriate value type.
'''
if get_type(base_type) == 'Integer':
return int(value)
elif get_type(base_type) == 'String':
return value
elif get_type(base_type) == 'Boolean':
return bool(value)
else:
return value
def edit_details_for_type(model_type, old_object, help_map={}):
'''
Asks for user input to change an existing model.
'''
m = EditModel(model_type, old_object, help_map)
print
print "Current values:"
print
m.print_current()
print
selection = "xxx"
print
print "Caution: the new value will replace the old value, not be added to it."
print
while selection:
selection = raw_input("field to edit ('enter' to finish): ")
if selection:
print
m.edit_field(selection)
print
return m.new_value
def ask_details_for_type(model_type, ask_only_required=True, help_map={}):
'''
Asks for user input to create an object of a specified type.
If the type is registered in a model/builder map, the function associated
with this type is used to create the object instead of the auto-generated
query.
'''
if MODEL_MAP.get(model_type, None):
func = MODEL_MAP[model_type]
return func()
required_details = OrderedDict()
non_required_details = OrderedDict()
values = {}
for k, f in sorted(get_fields(model_type).iteritems()):
if is_required(f):
required_details[k] = f
else:
non_required_details[k] = f
print
print "Enter values for fields below. Enter '?' or '? arg1 [arg2]' for help for each field."
print
print "Required fields:"
print "----------------"
print
for k, f in required_details.iteritems():
while True:
value = ask_detail_for_field(k, f, ask_only_required, help_map)
if value:
values[k] = value
break
else:
print
print "This is a required field, please enter value for {}.".format(k)
print
if not ask_only_required:
print
print "Optional fields, press 'Enter' to ignore a field."
print "-------------------------------------------------"
print
for k, f in non_required_details.iteritems():
value = ask_detail_for_field(k, f, ask_only_required, help_map)
if value:
values[k] = value
print
obj = model_type(**values)
return obj
def ask_collection_detail(name, detail_type, ask_only_required=True, help_map={}):
result = []
print "Enter details for '{}', multiple entries possible, press enter to continue to next field.".format(name)
print
while True:
cd = ask_detail_for_field(
name, detail_type, ask_only_required, help_map)
if not cd:
break
else:
result.append(cd)
return result
def parse_for_help(answer, help_func):
if answer.startswith('?'):
args = answer.split(' ')[1:]
if not help_func:
print 'Sorry, no help available for this field.'
else:
print
help_func(*args)
print
return True
else:
return False
def ask_simple_field(name, field_type, help_map={}):
type_name = get_type(field_type)
answer = raw_input(" - {} ({}): ".format(name, type_name))
if not answer:
return None
if parse_for_help(answer, help_map.get(name, None)):
return ask_simple_field(name, field_type, help_map)
try:
value = convert_to_proper_base_type(field_type, answer)
except Exception as e:
print "Can't convert input: ", e
return ask_simple_field(name, field_type, help_map)
return value
def ask_detail_for_field(name, detail_type, ask_only_required=True, help_map={}):
value = None
if MODEL_MAP.get(type(detail_type), None):
func = MODEL_MAP[type(detail_type)]
value = func()
return value
# collections are a special case
if type(detail_type) == booby.fields.Collection:
# collection
value = ask_collection_detail(
name, detail_type.model, ask_only_required, help_map)
elif is_model(detail_type):
# collection, and model field
value = ask_details_for_type(detail_type, ask_only_required, help_map)
elif issubclass(type(detail_type), booby.fields.Field):
# non-collection, and non-model field
value = ask_simple_field(name, type(detail_type), help_map)
elif issubclass(detail_type, booby.fields.Field):
# collection, and non-model field
value = ask_simple_field(name, detail_type, help_map)
return value
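# A registration sketch (hypothetical 'Point' model and builder shown for illustration only):
# mapping a model type to a builder function makes ask_details_for_type() and
# ask_detail_for_field() delegate to that function instead of generating the generic
# field-by-field prompts, e.g.
#
#     def _build_point():
#         return Point(x=1, y=2)
#     MODEL_MAP[Point] = _build_point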
| makkus/pyclist | pyclist/model_helpers.py | Python | apache-2.0 | 8,972 |
"""
The unit tests in here interact directly with hydra-base (rather than using the Web API).
"""
from helpers import *
from fixtures import *
from hydra_base_fixtures import *
from hydra_pywr.importer import PywrHydraImporter
from hydra_pywr.template import generate_pywr_attributes, generate_pywr_template, pywr_template_name, PYWR_DEFAULT_DATASETS
import hydra_base
import pytest
import json
def test_add_network(pywr_json_filename, session_with_pywr_template, projectmaker, root_user_id):
project = projectmaker.create()
template = JSONObject(hydra_base.get_template_by_name(pywr_template_name('Full')))
importer = PywrHydraImporter(pywr_json_filename, template)
# First the attributes must be added.
attributes = [JSONObject(a) for a in importer.add_attributes_request_data()]
# The response attributes have ids now.
response_attributes = hydra_base.add_attributes(attributes)
# Convert to a simple dict for local processing.
# TODO change this variable name to map or lookup
attribute_ids = {a.name: a.id for a in response_attributes}
# Now we try to create the network
network = importer.add_network_request_data(attribute_ids, project.id)
# Check transformed data is about right
with open(pywr_json_filename) as fh:
pywr_data = json.load(fh)
assert_hydra_pywr(network, pywr_data)
hydra_network = hydra_base.add_network(JSONObject(network), user_id=root_user_id)
def test_add_template(session, root_user_id):
attributes = [JSONObject(a) for a in generate_pywr_attributes()]
# The response attributes have ids now.
response_attributes = hydra_base.add_attributes(attributes)
# Convert to a simple dict for local processing.
attribute_ids = {a.name: a.id for a in response_attributes}
default_data_set_ids = {}
for attribute_name, dataset in PYWR_DEFAULT_DATASETS.items():
hydra_dataset = hydra_base.add_dataset(flush=True, **dataset)
default_data_set_ids[attribute_name] = hydra_dataset.id
template = generate_pywr_template(attribute_ids, default_data_set_ids, 'full')
hydra_base.add_template(JSONObject(template))
| UMWRG/PywrApp | tests/test_hydra_base_importing.py | Python | gpl-3.0 | 2,167 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import xbmc, xbmcvfs, xbmcgui, xbmcplugin, xbmcaddon
import os
import sys
import shutil
import urllib
import urllib.request
import urllib.parse
import http.cookiejar
import json
import gzip
import threading
import time
import datetime
import hashlib
from traceback import format_exc
import http.server
import socketserver
import re
# produce web pages parseable by https://github.com/xbmc/xbmc/blob/master/xbmc/filesystem/HTTPDirectory.cpp
__addon__ = xbmcaddon.Addon()
ADDONVERSION = __addon__.getAddonInfo('version')
ADDONNAME = __addon__.getAddonInfo('name')
ADDONID = __addon__.getAddonInfo('id')
ADDONICON = xbmcvfs.translatePath(__addon__.getAddonInfo('icon'))
ADDONPROFILE = xbmcvfs.translatePath(__addon__.getAddonInfo('profile'))
if getattr(xbmcgui.Dialog, 'notification', False):
def message_ok(message):
xbmcgui.Dialog().notification("Soap4.me", message, icon=xbmcgui.NOTIFICATION_INFO, sound=False)
def message_error(message):
xbmcgui.Dialog().notification("Soap4.me", message, icon=xbmcgui.NOTIFICATION_ERROR, sound=False)
else:
def show_message(message):
xbmc.executebuiltin('XBMC.Notification("%s", "%s", %s, "%s")'%("Soap4.me", message, 3000, ADDONICON))
message_ok = show_message
message_error = show_message
soappath = os.path.join(ADDONPROFILE, "soap4me-proxy")
class Main:
def __init__(self):
watched_status = WatchedStatus()
api = SoapApi(watched_status)
watched_status.soap_api = api
api.main()
try:
httpd = socketserver.TCPServer(("", KodiConfig.get_web_port()), WebHandler)
httpd.api = api
kodi_waiter = threading.Thread(target=self.kodi_waiter_thread, args=(httpd, watched_status,))
kodi_waiter.start()
httpd.serve_forever()
except:
message_error("Cannot create web-server, port is busy")
xbmc.log('%s: %s' % (ADDONID, format_exc()), xbmc.LOGERROR)
#raise
@staticmethod
def kodi_waiter_thread(httpd, watched_status):
monitor = KodiMonitor(watched_status)
while not monitor.abortRequested():
if monitor.waitForAbort(3):
break
xbmc.log('%s: Exiting' % (ADDONID))
httpd.shutdown()
# noinspection PyPep8Naming
class KodiMonitor(xbmc.Monitor):
def __init__(self, watched_status):
self.watched_status = watched_status
def onScanStarted(self, library):
xbmc.Monitor.onScanStarted(self, library)
xbmc.log('%s: Library scan \'%s\' started' % (ADDONID, library))
def onScanFinished(self, library):
xbmc.Monitor.onScanFinished(self, library)
xbmc.log('%s: Library scan \'%s\' finished' % (ADDONID, library))
self.watched_status.sync_status() # TODO: do in new thread
def onNotification(self, sender, method, data):
xbmc.Monitor.onNotification(self, sender, method, data)
xbmc.log('%s: Notification %s from %s, params: %s' % (ADDONID, method, sender, str(data)))
if method == 'VideoLibrary.OnUpdate':
params = json.loads(data)
if 'item' in params and 'type' in params['item']:
item_type = params['item']['type']
if item_type == 'episode' and 'id' in params['item'] and 'playcount' in params:
item_id = params['item']['id']
playcount = params['playcount']
self.watched_status.update_server_status(item_id, playcount > 0)
elif method == 'Player.OnStop':
params = json.loads(data)
if 'item' in params and 'type' in params['item']:
item_type = params['item']['type']
if item_type == 'episode' and 'id' in params['item']:
item_id = params['item']['id']
end = params['end']
if end:
self.watched_status.update_server_status(item_id, True)
else:
                        # resume time is not updated yet, so re-check it later
threading.Timer(3.0, self.onPlayerStopped, args=(item_id, )).start()
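    # For reference, a VideoLibrary.OnUpdate payload handled above looks roughly like
    # '{"item": {"id": 123, "type": "episode"}, "playcount": 1}' (values are hypothetical), while
    # Player.OnStop additionally carries an "end" flag telling whether playback reached the end.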
def onPlayerStopped(self, item_id):
episode_details = KodiApi.get_episode_details(item_id)
position = episode_details['resume']['position']
total = episode_details['resume']['total']
if total > 0 and position / total > 0.9:
self.watched_status.update_server_status(item_id, True)
else:
self.watched_status.update_server_position(item_id, position)
class WatchedStatus(object):
watched_status = dict()
show_position = dict()
soap_api = None
def set_server_status(self, imdb, season, episode, watched, position):
# xbmc.log('%s: Watched status %s/%s/%s/%s' % (ADDONID, imdb, season, episode, watched))
show_watched_status = self.watched_status.get(imdb)
if show_watched_status is None:
show_watched_status = dict()
self.watched_status[imdb] = show_watched_status
show_position = self.show_position.get(imdb)
if show_position is None:
show_position = dict()
self.show_position[imdb] = show_position
episode_key = season + '/' + episode
show_watched_status[episode_key] = watched
show_position[episode_key] = position
def update_server_status(self, episode_id, watched):
episode_details = KodiApi.get_episode_details(episode_id)
show_id = episode_details['tvshowid']
season = str(episode_details['season'])
episode = str(episode_details['episode'])
show = KodiApi.get_show_details(show_id)
imdb = show['imdbnumber']
show_watched_status = self.watched_status.get(imdb)
if show_watched_status is None:
show_watched_status = dict()
self.watched_status[imdb] = show_watched_status
episode_key = season + '/' + episode
if show_watched_status.get(episode_key) != watched:
eid = self.get_soap_episode_id(episode_details)
if eid is not None:
xbmc.log('%s: Updating remote watched status of show \'%s\' season %s episode %s to %s' % (ADDONID, imdb, season, episode, watched))
sid = self.get_soap_season_id(episode_details)
self.soap_api.mark_watched(sid, eid, watched)
show_watched_status[episode_key] = watched
def update_server_position(self, episode_id, position):
if position >= 0:
episode_details = KodiApi.get_episode_details(episode_id)
eid = self.get_soap_episode_id(episode_details)
if eid is not None:
show_id = episode_details['tvshowid']
season = str(episode_details['season'])
episode = str(episode_details['episode'])
show = KodiApi.get_show_details(show_id)
imdb = show['imdbnumber']
xbmc.log('%s: Updating position of show \'%s\' season %s episode %s to %s' % (ADDONID, imdb, season, episode, str(position)))
sid = self.get_soap_season_id(episode_details)
self.soap_api.set_position(sid, eid, position)
show_position = self.show_position.get(imdb)
if show_position is None:
show_position = dict()
self.show_position[imdb] = show_position
episode_key = season + '/' + episode
show_position[episode_key] = position
def sync_status(self):
for show in KodiApi.get_shows():
imdb = show['imdbnumber']
show_watched_status = self.watched_status.get(imdb)
show_position = self.show_position.get(imdb)
if show_watched_status is not None:
show_id = show['tvshowid']
for e in KodiApi.get_episodes(show_id):
season = str(e['season'])
episode = str(e['episode'])
kodi_watched = e['playcount'] > 0
episode_key = season + '/' + episode
watched = show_watched_status.get(episode_key)
if kodi_watched != watched:
xbmc.log('%s: Updating local watched status of show \'%s\' season %s episode %s to %s' % (ADDONID, imdb, season, episode, watched))
episode_id = e['episodeid']
KodiApi.set_watched(episode_id, watched)
kodi_position = e['resume']['position']
position = show_position.get(episode_key)
if position is not None and kodi_position != int(position):
xbmc.log('%s: Updating local position of show \'%s\' season %s episode %s from %s to %s' % (ADDONID, imdb, season, episode, kodi_position, position))
episode_id = e['episodeid']
KodiApi.set_position(episode_id, position)
@staticmethod
def get_soap_episode_id(episode_details):
url = episode_details['file']
return WebHandler.get_episode_id(url)
@staticmethod
def get_soap_season_id(episode_details):
url = episode_details['file']
return WebHandler.get_season_id(url)
class SoapCache(object):
translation_table = dict.fromkeys(map(ord, ',./'), None)
def __init__(self, path, lifetime=30):
self.path = os.path.join(path, "cache")
if not os.path.exists(self.path):
os.makedirs(self.path)
self.lifetime = lifetime
def get(self, cache_id, use_lifetime=True):
cache_id = cache_id.translate(self.translation_table)
filename = os.path.join(self.path, str(cache_id))
if not os.path.exists(filename) or not os.path.isfile(filename):
return False
max_time = time.time() - self.lifetime * 60
        if use_lifetime and os.path.getmtime(filename) <= max_time:
return False
with open(filename, mode="r", encoding="utf8") as f:
return f.read()
def set(self, cache_id, text):
cache_id = cache_id.translate(self.translation_table)
# if cache was removed
if not os.path.exists(self.path):
os.makedirs(self.path)
filename = os.path.join(self.path, str(cache_id))
with open(filename, mode="w", encoding="utf8") as f:
f.write(text)
def rm(self, cache_id):
cache_id = cache_id.translate(self.translation_table)
filename = os.path.join(self.path, str(cache_id))
try:
os.remove(filename)
return True
except OSError:
return False
def rmall(self):
shutil.rmtree(self.path)
os.makedirs(self.path)
class SoapCookies(object):
def __init__(self):
self.CJ = http.cookiejar.CookieJar()
self._cookies = None
self.path = soappath
def _cookies_init(self):
if self.CJ is None:
return
urllib.request.install_opener(
urllib.request.build_opener(
urllib.request.HTTPCookieProcessor(self.CJ)
)
)
self.cookie_path = os.path.join(self.path, 'cookies')
if not os.path.exists(self.cookie_path):
os.makedirs(self.cookie_path)
# print '[%s]: os.makedirs(cookie_path=%s)' % (addon_id, cookie_path)
def _cookies_load(self, req):
if self.CJ is None:
return
cookie_send = {}
for cookie_fname in os.listdir(self.cookie_path):
cookie_file = os.path.join(self.cookie_path, cookie_fname)
if os.path.isfile(cookie_file):
cf = open(cookie_file, 'r')
cookie_send[os.path.basename(cookie_file)] = cf.read()
cf.close()
# else: print '[%s]: NOT os.path.isfile(cookie_file=%s)' % (addon_id, cookie_file)
cookie_string = urllib.parse.urlencode(cookie_send).replace('&', '; ')
req.add_header('Cookie', cookie_string)
def _cookies_save(self):
if self.CJ is None:
return
for Cook in self.CJ:
cookie_file = os.path.join(self.cookie_path, Cook.name)
cf = open(cookie_file, 'w')
cf.write(Cook.value)
cf.close()
class SoapHttpClient(SoapCookies):
HOST = 'https://api.soap4.me/v2'
def __init__(self):
self.token = None
self.cache = SoapCache(soappath, 5)
SoapCookies.__init__(self)
def set_token(self, token):
self.token = token
def _post_data(self, params=None):
if not isinstance(params, dict):
return None
return urllib.parse.urlencode(params).encode('utf-8')
def _request(self, url, params=None):
xbmc.log('{0}: REQUEST: {1} {2}'.format(ADDONID, url, params))
self._cookies_init()
req = urllib.request.Request(self.HOST + url)
req.add_header('User-Agent', 'Kodi: plugin.soap4me-proxy v{0}'.format(ADDONVERSION))
req.add_header('Accept-encoding', 'gzip')
req.add_header('Kodi-Debug', '{0}'.format(xbmc.getInfoLabel('System.BuildVersion')))
if self.token is not None:
self._cookies_load(req)
req.add_header('X-API-TOKEN', self.token)
post_data = self._post_data(params)
if params is not None:
req.add_header('Content-Type', 'application/x-www-form-urlencoded')
with urllib.request.urlopen(req, post_data) as response:
self._cookies_save()
if response.info().get('Content-Encoding') == 'gzip':
fstream = gzip.GzipFile(fileobj=response)
text = fstream.read().decode('utf-8')
else:
text = response.read().decode('utf-8')
return text
def request(self, url, params=None, use_cache=False):
text = None
if use_cache:
text = self.cache.get(url)
if text is None or not text:
text = self._request(url, params)
if use_cache:
self.cache.set(url, text)
else:
xbmc.log('%s: Url \'%s\' present in cache' % (ADDONID, url))
try:
return json.loads(text)
except:
return text
def clean(self, url):
if self.cache.rm(url):
xbmc.log('%s: Url \'%s\' removed from cache' % (ADDONID, url))
def clean_all(self):
self.cache.rmall()
to_int = lambda s: int(s) if s != '' else 0
class KodiConfig(object):
@classmethod
def soap_get_auth(cls):
return {
'token': __addon__.getSetting('_token'),
'token_sid': __addon__.getSetting('_token_sid'),
'token_till': to_int(__addon__.getSetting('_token_till')),
'token_valid': to_int(__addon__.getSetting('_token_valid')),
'token_check': to_int(__addon__.getSetting('_token_check')),
'message_till_days': to_int(__addon__.getSetting('_message_till_days'))
}
@classmethod
def soap_set_auth(cls, params):
__addon__.setSetting('_token', params.get('token', ''))
__addon__.setSetting('_token_till', str(params.get('till', 0)))
__addon__.setSetting('_token_sid', str(params.get('sid', '')))
__addon__.setSetting('_message_till_days', '')
cls.soap_set_token_valid()
@classmethod
def soap_set_token_valid(cls):
__addon__.setSetting('_token_valid', str(int(time.time()) + 86400 * 7))
@classmethod
def soap_set_token_check(cls):
__addon__.setSetting('_token_check', str(int(time.time()) + 600))
@classmethod
def message_till_days(cls):
mtd = __addon__.getSetting('_message_till_days')
if mtd == '' or int(mtd) < time.time():
__addon__.setSetting('_message_till_days', str(int(time.time()) + 43200))
till = to_int(__addon__.getSetting('_token_till'))
if till != 0:
message_ok("Осталось {0} дней".format(int(round(till - time.time()) / 86400)))
@classmethod
def kodi_get_auth(cls):
username = __addon__.getSetting('username')
password = __addon__.getSetting('password')
while len(username) == 0 or len(password) == 0:
__addon__.openSettings()
username = __addon__.getSetting('username')
password = __addon__.getSetting('password')
return {
'login': username,
'password': password
}
@classmethod
def get_web_port(cls):
return to_int(__addon__.getSetting('port'))
@classmethod
def is_hide_watched_shows(cls):
return __addon__.getSetting('hide_watched_shows') == 'true'
class SoapConfig(object):
def __init__(self):
self.language = to_int(__addon__.getSetting('language')) # 0 rus, 1 orig
self.subtitles_language = to_int(__addon__.getSetting('subtitles_language')) # 0 rus, 1 orig
self.quality = to_int(__addon__.getSetting('quality')) # 0 SD, 1 720p, 2 FullHD, 3 2K, 4 4K
class SoapAuth(object):
AUTH_URL = '/auth/'
CHECK_URL = '/auth/check/'
def __init__(self, client):
self.client = client
self.is_auth = False
def login(self):
self.client.set_token(None)
data = self.client.request(self.AUTH_URL, KodiConfig.kodi_get_auth())
if not isinstance(data, dict) or data.get('ok') != 1:
message_error("Login or password are incorrect")
return False
KodiConfig.soap_set_auth(data)
return True
def check(self):
params = KodiConfig.soap_get_auth()
if params['token'] == '':
return False
if params['token_valid'] < time.time():
return False
if params['token_till'] + 10 < time.time():
return False
self.client.set_token(params['token'])
if params['token_check'] > time.time():
return True
data = self.client.request(self.CHECK_URL)
if isinstance(data, dict) and data.get('loged') == 1:
KodiConfig.soap_set_token_check()
return True
return False
def auth(self):
if not self.check():
if not self.login():
return False
params = KodiConfig.soap_get_auth()
if not params['token']:
message_error("Auth error")
return False
self.client.set_token(params['token'])
self.is_auth = True
class SoapApi(object):
MY_SHOWS_URL = '/soap/my/'
EPISODES_URL = '/episodes/{0}/'
# WATCHING_URL = {
# 'watch': '/soap/watch/{sid}/',
# 'unwatch': '/soap/unwatch/{sid}/'
# }
#
PLAY_EPISODES_URL = '/play/episode/{eid}/'
SAVE_POSITION_URL = '/play/episode/{eid}/savets/'
MARK_WATCHED = '/episodes/watch/{eid}/'
MARK_UNWATCHED = '/episodes/unwatch/{eid}/'
def __init__(self, watched_status):
self.client = SoapHttpClient()
self.auth = SoapAuth(self.client)
self.config = SoapConfig()
self.watched_status = watched_status
self.auth.auth()
@property
def is_auth(self):
return self.auth.is_auth
def main(self):
KodiConfig.message_till_days()
def my_shows(self, hide_watched=False):
data = self.client.request(self.MY_SHOWS_URL, use_cache=True)
if hide_watched:
# here we can match shows from two sources only by name ('imdbnumber' from Kodi is trash)
active_shows = set(map(lambda row: row['label'], KodiApi.get_in_progress_shows()))
xbmc.log('%s: active_shows: %s' % (ADDONID, str(active_shows)))
data = filter(lambda row: row['unwatched'] > 0 or row['title'] in active_shows, data)
# TODO: tvdb_id is used as IMDB because Kodi uses TVDB internally for imdbnumber key
return map(lambda row: {
'name': row['title'],
'id': row['sid'],
'IMDB': row['tvdb_id'].replace('tt', ''),
'updated': row['updated'],
'total_episodes': row['total_episodes'],
'small_cover': row['covers']['small'] if 'covers' in row and 'small' in row['covers'] else None
}, data)
def episodes(self, sid, imdb):
data = self.client.request(self.EPISODES_URL.format(sid), use_cache=True)
data = data['episodes']
if data is None:
return []
for e in data:
self.watched_status.set_server_status(imdb, e['season'], e['episode'], e['watched'] == 1, e['start_from'])
return map(lambda row: self.get_episode(row), data)
def get_episode(self, row):
f = self.get_best_file(row['files'])
return {'season': row['season'], 'episode': row['episode'], 'id': f['eid'], 'hash': f['hash']}
def get_best_file(self, files):
return max(files, key=self.get_file_order)
def get_file_order(self, f):
translate = int(f['translate']) - 1 # from 0
quality = int(f['quality']) - 1 # from 0
translate_matrix = \
[
[-4, -3, -1, 0], # eng
[-1, -1, -3, -2], # rus with eng subs
[-2, -3, -1, -1], # eng with rus subs
[ 0, -1, -3, -4], # rus
]
config_translate_index = 2 * self.config.language + self.config.subtitles_language
translate_order = translate_matrix[translate][config_translate_index]
quality_order = \
(quality - self.config.quality) \
if (quality <= self.config.quality) \
else (self.config.quality - quality - 10)
return 100 * translate_order + quality_order # translation has priority over quality
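    # Worked example (hypothetical file list): with language=0 (rus) and subtitles_language=0 (rus)
    # the config index is 2*0 + 0 = 0, so a fully Russian file (translate=4, matrix row 3) scores
    # translate_order 0 while an English file (translate=1, row 0) scores -4, i.e. -400 after
    # weighting, and the Russian track wins regardless of small quality differences.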
def get_episode_url(self, sid, eid, ehash):
# TODO: warn if quality is bigger than configured
string = str(self.client.token) + str(eid) + str(sid) + str(ehash)
myhash = hashlib.md5(string.encode('utf-8')).hexdigest()
data = {
"eid": eid,
"hash": myhash
}
result = self.client.request(self.PLAY_EPISODES_URL.format(eid=eid), data)
return result['stream']
def mark_watched(self, sid, eid, watched):
url = self.MARK_WATCHED if watched else self.MARK_UNWATCHED
self.client.request(url.format(eid=eid), {'eid': eid})
# clean cache for show
url = self.EPISODES_URL.format(sid)
self.client.clean(url)
def set_position(self, sid, eid, position):
url = self.SAVE_POSITION_URL
self.client.request(url.format(eid=eid), {'eid': eid, 'time': position})
# clean cache for show
url = self.EPISODES_URL.format(sid)
self.client.clean(url)
class KodiApi(object):
@staticmethod
def get_shows():
postdata = json.dumps({"jsonrpc": "2.0",
"id": 1,
'method': 'VideoLibrary.GetTVShows',
"params": {
"properties": ["imdbnumber"]
}})
json_query = xbmc.executeJSONRPC(postdata)
# json_query = unicode(json_query, 'utf-8', errors='ignore')
json_query = json.loads(json_query)['result']['tvshows']
return json_query
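    # The JSON-RPC response parsed above is expected to look roughly like (hypothetical values):
    # {"id": 1, "jsonrpc": "2.0", "result": {"tvshows": [{"imdbnumber": "tt1234567",
    #  "label": "Some Show", "tvshowid": 1}]}}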
@staticmethod
def get_in_progress_shows():
postdata = json.dumps({"jsonrpc": "2.0",
"id": 1,
'method': 'VideoLibrary.GetInProgressTVShows',
"params": {
"properties": ["imdbnumber"]
}})
json_query = xbmc.executeJSONRPC(postdata)
# json_query = unicode(json_query, 'utf-8', errors='ignore')
json_query = json.loads(json_query)['result']['tvshows']
return json_query
@staticmethod
def get_show_details(show_id):
postdata = json.dumps({"jsonrpc": "2.0",
"id": 1,
'method': 'VideoLibrary.GetTVShowDetails',
"params": {
'tvshowid': show_id,
"properties": ["imdbnumber"]
}})
json_query = xbmc.executeJSONRPC(postdata)
# json_query = unicode(json_query, 'utf-8', errors='ignore')
json_query = json.loads(json_query)['result']['tvshowdetails']
return json_query
@staticmethod
def get_episodes(show_id):
postdata = json.dumps({"jsonrpc": "2.0",
"id": 1,
'method': 'VideoLibrary.GetEpisodes',
"params": {
'tvshowid': show_id,
"properties": ["season", "episode", "playcount", "resume"]
}})
json_query = xbmc.executeJSONRPC(postdata)
json_query = json.loads(json_query)
if 'error' in json_query:
xbmc.log('%s: ERROR: %s' % (ADDONID, json_query['error']['stack']['message']))
return None
json_query = json_query['result']['episodes']
return json_query
@staticmethod
def get_episode_details(episode_id):
postdata = json.dumps({"jsonrpc": "2.0",
"id": 1,
'method': 'VideoLibrary.GetEpisodeDetails',
"params": {
'episodeid': episode_id,
"properties": ["season", "episode", "tvshowid", "playcount", "file", "resume"]
}})
json_query = xbmc.executeJSONRPC(postdata)
# json_query = unicode(json_query, 'utf-8', errors='ignore')
json_query = json.loads(json_query)
if 'error' in json_query:
xbmc.log('%s: ERROR: %s' % (ADDONID, json_query['error']['stack']['message']))
return None
json_query = json_query['result']['episodedetails']
return json_query
@staticmethod
def set_watched(episode_id, watched):
postdata = json.dumps({"jsonrpc": "2.0",
"id": 1,
'method': 'VideoLibrary.SetEpisodeDetails',
"params": {
'episodeid': episode_id,
'playcount': 1 if watched else 0,
}})
xbmc.executeJSONRPC(postdata)
@staticmethod
def set_position(episode_id, position):
postdata = json.dumps({"jsonrpc": "2.0",
"id": 1,
'method': 'VideoLibrary.SetEpisodeDetails',
"params": {
'episodeid': episode_id,
'resume': {'position': int(position)},
}})
xbmc.executeJSONRPC(postdata)
# NOTE: the standard ?param=value&param2=value2... notation is not used for URL parameters because
# it triggers endless directory scanning by Kodi.
# Instead, the folder name is just the show name,
# and the file name uses a custom prefix containing all required IDs.
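# For illustration (hypothetical values): a library folder "/Fargo/" would list entries such as
# "/Fargo/1234_56789_0a1b2c3d_S01E02.avi", where 1234 is the soap4.me show id (sid), 56789 the
# episode id (eid), 0a1b2c3d the episode hash and S01E02 the season/episode consumed by the TVDB
# scraper; do_GET() below parses these parts back out with regular expressions.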
class WebHandler(http.server.SimpleHTTPRequestHandler):
match = None
def do_GET(self):
# Parse path to find out what was passed
xbmc.log('%s: Serve \'%s\'' % (ADDONID, self.path))
parsed_params = urllib.parse.urlparse(self.path)
path = urllib.parse.unquote(parsed_params.path)
if path == '/':
xbmc.log('%s: Listing shows' % ADDONID)
shows = self.server.api.my_shows(KodiConfig.is_hide_watched_shows())
self.out_folders(shows)
elif self.matches('^/(.*)/$', path):
show = self.match.group(1)
show_details = self.find_show(show)
if show_details is not None:
sid = show_details['id']
imdb = show_details['IMDB']
xbmc.log('%s: Listing episodes of \'%s\'' % (ADDONID, show))
episodes = self.server.api.episodes(sid, imdb)
# format parsable by TVDB scraper
name_lambda = lambda e: sid + '_' + e['id'] + '_' + e['hash'] + '_S' + e['season'] + 'E' + e['episode'] + '.avi'
self.out_files(map(name_lambda, episodes))
else:
xbmc.log('%s: ERROR: Show \'%s\' not found' % (ADDONID, show))
        elif self.matches(r'^/(.*)/(\d+)_(\d+)_([0-9a-f]+)_S(\d+)E(\d+)\.avi$', path):
show = self.match.group(1)
sid = self.match.group(2)
eid = self.match.group(3)
ehash = self.match.group(4)
season = self.match.group(5)
episode = self.match.group(6)
xbmc.log('%s: Requested episode %s from season %s of \'%s\'' % (ADDONID, episode, season, show))
url = self.server.api.get_episode_url(sid, eid, ehash)
xbmc.log("%s: Redirect to '%s'" % (ADDONID, url))
self.send_response(301)
self.send_header('Location', url)
self.end_headers()
else:
self.send_response(404)
self.end_headers()
def do_HEAD(self):
# Parse path to find out what was passed
xbmc.log('%s: Head \'%s\'' % (ADDONID, self.path))
parsed_params = urllib.parse.urlparse(self.path)
path = urllib.parse.unquote(parsed_params.path)
if path == '/':
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
elif self.matches('^/(.*)/$', path):
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
        elif self.matches(r'^/(.*)/(\d+)_(\d+)_([0-9a-f]+)_S(\d+)E(\d+)\.avi$', path):
self.send_response(200)
self.send_header("Content-type", "video/mp4")
self.end_headers()
else:
self.send_response(404)
self.end_headers()
def matches(self, regexp, s):
self.match = re.match(regexp, s, re.M | re.I)
return self.match is not None
@staticmethod
def get_episode_id(url):
parsed_params = urllib.parse.urlparse(url)
file_name = os.path.basename(parsed_params.path)
return file_name.split('_')[1]
@staticmethod
def get_season_id(url):
parsed_params = urllib.parse.urlparse(url)
file_name = os.path.basename(parsed_params.path)
return file_name.split('_')[0]
def out_folders(self, folders):
self.out_elements(map(lambda f: "<tr>"
" <td valign=\"top\"><img src=\"%s\" alt=\"[DIR]\"></td>"
" <td><a href=\"%s/\">%s</a></td>"
" <td align=\"right\">%s</td>"
" <td align=\"right\">%s</td>"
" <td> </td>"
"</tr>\n" % (
f.get('small_cover', '/icons/folder.gif'),
urllib.parse.quote(f['name']),
f['name'],
datetime.datetime.utcfromtimestamp(float(f['updated'])).strftime('%Y-%m-%d %H:%M') if f.get('updated', None) is not None else '-',
f.get('total_episodes', '-')
), folders), True)
def out_files(self, files):
self.out_elements(map(lambda f: "<tr> "
" <td valign=\"top\"><img src=\"/icons/movie.gif\" alt=\"[VID]\"></td>"
" <td><a href=\"%s\">%s</a></td>"
" <td align=\"right\">2016-11-01 23:08</td>"
" <td align=\"right\"> 0 </td>"
" <td> </td>"
"</tr>\n" % (f, f), files), False)
def out_elements(self, elements, root):
self.send_response(200)
self.send_header('Content-Type', 'text/html;charset=UTF-8')
self.end_headers()
self.wfile.write(b"<html>\n")
self.wfile.write(b" <head>\n")
self.wfile.write(b" <title>Index of /</title>\n")
self.wfile.write(b" </head>\n")
self.wfile.write(b" <body>\n")
self.wfile.write(b"<h1>Index of /</h1>\n")
self.wfile.write(b" <table>\n")
self.wfile.write(b" <tr><th valign=\"top\"><img src=\"/icons/blank.gif\" alt=\"[ICO]\"></th><th><a href=\"?C=N;O=D\">Name</a></th><th><a href=\"?C=M;O=A\">Last modified</a></th><th><a href=\"?C=S;O=A\">Size</a></th><th><a href=\"?C=D;O=A\">Description</a></th></tr>\n")
self.wfile.write(b" <tr><th colspan=\"5\"><hr></th></tr>\n")
if not root:
self.wfile.write(b" <tr><td valign=\"top\"><img src=\"/icons/back.gif\" alt=\"[PARENTDIR]\"></td><td><a href=\"/\">Parent Directory</a></td><td> </td><td align=\"right\"> - </td><td> </td></tr>\n")
for e in elements:
self.wfile.write(e.encode())
self.wfile.write(b" <tr><th colspan=\"5\"><hr></th></tr>\n")
self.wfile.write(b"</table>\n")
self.wfile.write(b"</body></html>\n")
# self.wfile.close()
def find_show(self, show):
shows = self.server.api.my_shows() # should be cached
return next(filter(lambda s: show == s['name'], shows), None)
    # The following methods were added to minimize the number of messages printed to the log,
    # because Kodi closes the socket connection on error codes.
def handle_one_request(self):
try:
http.server.SimpleHTTPRequestHandler.handle_one_request(self)
except IOError:
pass # it's OK
def finish(self):
try:
http.server.SimpleHTTPRequestHandler.finish(self) # super.finish()
except IOError:
pass # it's OK
def log_request(self, code='-', size='-'):
pass # already logged
def clean_cache():
SoapCache(soappath, 5).rmall()
__addon__.setSetting('_token', '0')
__addon__.setSetting('_token_sid', '0')
__addon__.setSetting('_token_valid', '0')
__addon__.setSetting('_token_till', '0')
__addon__.setSetting('_token_check', '0')
__addon__.setSetting('_message_till_days', '0')
if __name__ == "__main__":
if len(sys.argv) > 1 and sys.argv[1] == 'clearcache':
clean_cache()
message_ok('Done')
exit(0)
xbmc.log('%s: Version %s started' % (ADDONID, ADDONVERSION))
Main()
| eschava/soap4me-proxy | service.py | Python | lgpl-3.0 | 35,156 |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
orm.Package.objects.filter(pkgbase=None).update(pkgbase=models.F('pkgname'))
def backwards(self, orm):
if not db.dry_run:
orm.Package.objects.filter(pkgbase=models.F('pkgname')).update(pkgbase=None)
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'main.altforum': {
'Meta': {'object_name': 'AltForum', 'db_table': "'alt_forums'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'main.arch': {
'Meta': {'object_name': 'Arch', 'db_table': "'arches'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
'main.donor': {
'Meta': {'object_name': 'Donor', 'db_table': "'donors'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
'main.externalproject': {
'Meta': {'object_name': 'ExternalProject'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'main.mirror': {
'Meta': {'object_name': 'Mirror'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'admin_email': ('django.db.models.fields.EmailField', [], {'max_length': '255', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'isos': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'rsync_password': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50', 'blank': 'True'}),
'rsync_user': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50', 'blank': 'True'}),
'tier': ('django.db.models.fields.SmallIntegerField', [], {'default': '2'}),
'upstream': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Mirror']", 'null': 'True'})
},
'main.mirrorprotocol': {
'Meta': {'object_name': 'MirrorProtocol'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'protocol': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '10'})
},
'main.mirrorrsync': {
'Meta': {'object_name': 'MirrorRsync'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.CharField', [], {'max_length': '24'}),
'mirror': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'rsync_ips'", 'to': "orm['main.Mirror']"})
},
'main.mirrorurl': {
'Meta': {'object_name': 'MirrorUrl'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mirror': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'urls'", 'to': "orm['main.Mirror']"}),
'protocol': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'urls'", 'to': "orm['main.MirrorProtocol']"}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'main.news': {
'Meta': {'object_name': 'News', 'db_table': "'news'"},
'author': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'news_author'", 'to': "orm['auth.User']"}),
'content': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'postdate': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'main.package': {
'Meta': {'object_name': 'Package', 'db_table': "'packages'"},
'arch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Arch']"}),
'build_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'compressed_size': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'files_last_update': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'installed_size': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'last_update': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'license': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'needupdate': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'pkgdesc': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'pkgname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'repo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Repo']"}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'main.packagedepend': {
'Meta': {'object_name': 'PackageDepend', 'db_table': "'package_depends'"},
'depname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'depvcmp': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"})
},
'main.packagefile': {
'Meta': {'object_name': 'PackageFile', 'db_table': "'package_files'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"})
},
'main.press': {
'Meta': {'object_name': 'Press', 'db_table': "'press'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'main.repo': {
'Meta': {'object_name': 'Repo', 'db_table': "'repos'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'testing': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'main.signoff': {
'Meta': {'object_name': 'Signoff'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}),
'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'main.todolist': {
'Meta': {'object_name': 'Todolist', 'db_table': "'todolists'"},
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'date_added': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'main.todolistpkg': {
'Meta': {'unique_together': "(('list', 'pkg'),)", 'object_name': 'TodolistPkg', 'db_table': "'todolist_pkgs'"},
'complete': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'list': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Todolist']"}),
'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"})
},
'main.userprofile': {
'Meta': {'object_name': 'UserProfile', 'db_table': "'user_profiles'"},
'alias': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'allowed_repos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Repo']", 'blank': 'True'}),
'favorite_distros': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'interests': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'languages': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'notify': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'occupation': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'other_contact': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'picture': ('django.db.models.fields.files.FileField', [], {'default': "'devs/silhouette.png'", 'max_length': '100'}),
'public_email': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'roles': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'userprofile_user'", 'unique': 'True', 'to': "orm['auth.User']"}),
'website': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'yob': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['main']
| pyropeter/archweb | main/migrations/0016_always_fill_pkgbase.py | Python | gpl-2.0 | 15,215 |
#!/usr/bin/env python
#
#title :const.py
#description :
#
# The const class does not allow rebinding a value to a name, so the name behaves as a constant.
#
#==========================================================================================
class _const:
class ConstError(TypeError): pass
def __setattr__(self,name,value):
if name in self.__dict__:
message = "Can't rebind const " + name
raise self.ConstError(message)
self.__dict__[name]=value
import sys
sys.modules[__name__]=_const()
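# A minimal usage sketch (not part of the original module); the constant name
# MAX_RETRIES below is hypothetical:
#
#   import const
#   const.MAX_RETRIES = 3    # first binding succeeds
#   const.MAX_RETRIES = 5    # raises const.ConstError: Can't rebind const MAX_RETRIES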
| libengu/llilc | test/const.py | Python | mit | 540 |
from django.contrib import admin
# Register your models here.
from Food.models import Food
admin.site.register(Food)
| neewy/InStoKiloGram | Food/admin.py | Python | apache-2.0 | 118 |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
# tracing-level: trace
from __future__ import absolute_import
import os
from bindings import tracing
from testutil.autofix import eq
from testutil.dott import feature, sh, testtmp # noqa: F401
idtopath = {}
def getidtopath():
"""Return a dict mapping from id (in hex form) to path"""
output = sh.hg("debugmanifestdirs", "-rall()")
# debugmanifestdirs prints "<id> <path>" per line
result = dict(l.split() for l in output.splitlines())
return result
def collectprefetch(command):
"""Updating to commit, check prefetched paths"""
d = tracing.tracingdata()
with d:
(sh % command).output
ids = []
for spans in d.treespans().values():
for span in spans.flatten():
name = span.get("name")
if name == "tree::store::prefetch":
ids += span["ids"].split()
elif name == "tree::store::get":
ids.append(span["id"])
idtopath.update(getidtopath())
    # Translate ids to paths
    paths = set(idtopath[id] for id in set(ids)) - {"/"}
return sorted(filter(None, paths))
# Use some production settings. They avoid expensive paths.
sh % "setconfig experimental.copytrace=off copytrace.fastcopytrace=true perftweaks.disablecasecheck=true"
sh % "enable sparse treemanifest rebase copytrace"
# flatcompat calls '.text()' which invalidates fast paths. So disable it.
sh % "setconfig treemanifest.flatcompat=0"
sh % "newrepo"
(
sh % "drawdag"
<< r"""
B # B/x/x/y/z=B1
| # B/y/x/y/z=B2
|
|
| D # D/d/d/d/d=D1
| |
| C # C/c/c/c/c=C1
|/
A # A/x/x/y/z=A1
# A/y/x/y/z=A2
# A/z/x/y/z=A3
"""
)
sh % "hg sparse include x"
# Good: Updating to A should avoid downloading y/ or z/
eq(collectprefetch("hg update -q $A"), ["x", "x/x", "x/x/y"])
# Good: Updating to B should avoid downloading y/
eq(collectprefetch("hg update -q $B"), ["x", "x/x", "x/x/y"])
sh % "hg update -q $D"
# Good: Rebasing B to D should avoid downloading d/ or c/, or z/.
# (This is optimized by "rebase: use matcher to optimize manifestmerge",
# https://www.mercurial-scm.org/repo/hg/rev/4d504e541d3d,
# fbsource-hg: 94ad1b49ede1f8e5897c7c9381304785746fa460)
eq(
collectprefetch("hg rebase -r $B -d $D -q"),
["x", "x/x", "x/x/y", "y", "y/x", "y/x/y"],
)
# Good: Changing sparse profile should not download everything.
eq(collectprefetch("hg sparse exclude y"), ["x", "x/x", "x/x/y"])
# Test sparse profile change.
sh % "newrepo"
(
sh % "drawdag"
<< r"""
# B/profile=[include]\nx\ny
B # B/x/x/x=2
| # B/y/y/y=2
| # B/z/z/z=2
|
A # A/profile=[include]\nx
# A/x/x/x=1
# A/y/y/y=1
# A/z/z/z=1
"""
)
idtopath = getidtopath()
eq(collectprefetch("hg sparse enable profile"), [])
# Good: Updating to A should avoid downloading y/ or z/
eq(collectprefetch("hg update -q $A"), ["x", "x/x"])
# Good: Updating to B should avoid downloading z/
eq(collectprefetch("hg update -q $B"), ["x", "x/x", "y", "y/y"])
# Test 'status'.
sh % "newrepo"
(
sh % "drawdag"
<< r"""
A # A/x/x/x=1
# A/y/y/y=1
# A/z/z/z=1
"""
)
eq(collectprefetch("hg sparse include x"), [])
sh % "hg up -q $A"
open("y", "w").write("2")
os.mkdir("z")
open("z/1", "w").write("2")
open("z/z", "w").write("2")
# Good: 'status' should avoid downloading y/ or z/.
eq(sorted(set(collectprefetch("hg status")) - {"x", "x/x"}), [])
| facebookexperimental/eden | eden/hg-server/tests/test-sparse-fetch-t.py | Python | gpl-2.0 | 3,529 |
""" progressbar2 related utils"""
from codekit.codetools import warn
from public import public
from time import sleep
import progressbar
import functools
@public
def setup_logging(verbosity=0):
"""Configure progressbar sys.stderr wrapper which is required to play nice
with logging and not have strange formatting artifacts.
"""
progressbar.streams.wrap_stderr()
@public
def countdown_timer(seconds=10):
"""Show a simple countdown progress bar
Parameters
----------
seconds
Period of time the progress bar takes to reach zero.
"""
tick = 0.1 # seconds
n_ticks = int(seconds / tick)
widgets = ['Pause for panic: ', progressbar.ETA(), ' ', progressbar.Bar()]
pbar = progressbar.ProgressBar(
widgets=widgets, max_value=n_ticks
).start()
for i in range(n_ticks):
pbar.update(i)
sleep(tick)
pbar.finish()
@public
def wait_for_user_panic(**kwargs):
"""Display a scary message and count down progresss bar so an interative
user a chance to panic and kill the program.
Parameters
----------
kwargs
Passed verbatim to countdown_timer()
"""
warn('Now is the time to panic and Ctrl-C')
countdown_timer(**kwargs)
@public
@functools.lru_cache()
def wait_for_user_panic_once(**kwargs):
"""Same functionality as wait_for_user_panic() but will only display a
    countdown once, regardless of how many times it is called.
Parameters
----------
kwargs
Passed verbatim to wait_for_user_panic()
"""
wait_for_user_panic(**kwargs)
@public
def eta_bar(msg, max_value):
"""Display an adaptive ETA / countdown bar with a message.
Parameters
----------
msg: str
Message to prefix countdown bar line with
    max_value: int
The max number of progress bar steps/updates
"""
widgets = [
"{msg}:".format(msg=msg),
progressbar.Bar(), ' ', progressbar.AdaptiveETA(),
]
return progressbar.ProgressBar(widgets=widgets, max_value=max_value)
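# A rough usage sketch (not part of the original module); `repos` and `clone`
# are hypothetical stand-ins for the caller's work items:
#
#   bar = eta_bar('cloning', max_value=len(repos))
#   for i, repo in enumerate(repos):
#       clone(repo)
#       bar.update(i)
#   bar.finish()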
| lsst-sqre/sqre-codekit | codekit/progressbar.py | Python | mit | 2,070 |
"""Shark IQ Integration."""
import asyncio
import async_timeout
from sharkiqpy import (
AylaApi,
SharkIqAuthError,
SharkIqAuthExpiringError,
SharkIqNotAuthedError,
get_ayla_api,
)
from homeassistant import exceptions
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from .const import _LOGGER, API_TIMEOUT, COMPONENTS, DOMAIN
from .update_coordinator import SharkIqUpdateCoordinator
class CannotConnect(exceptions.HomeAssistantError):
"""Error to indicate we cannot connect."""
async def async_setup(hass, config):
"""Set up the sharkiq environment."""
hass.data.setdefault(DOMAIN, {})
return True
async def async_connect_or_timeout(ayla_api: AylaApi) -> bool:
"""Connect to vacuum."""
try:
with async_timeout.timeout(API_TIMEOUT):
_LOGGER.debug("Initialize connection to Ayla networks API")
await ayla_api.async_sign_in()
except SharkIqAuthError:
_LOGGER.error("Authentication error connecting to Shark IQ api")
return False
except asyncio.TimeoutError as exc:
_LOGGER.error("Timeout expired")
raise CannotConnect from exc
return True
async def async_setup_entry(hass, config_entry):
"""Initialize the sharkiq platform via config entry."""
ayla_api = get_ayla_api(
username=config_entry.data[CONF_USERNAME],
password=config_entry.data[CONF_PASSWORD],
websession=hass.helpers.aiohttp_client.async_get_clientsession(),
)
try:
if not await async_connect_or_timeout(ayla_api):
return False
except CannotConnect as exc:
raise exceptions.ConfigEntryNotReady from exc
shark_vacs = await ayla_api.async_get_devices(False)
device_names = ", ".join([d.name for d in shark_vacs])
_LOGGER.debug("Found %d Shark IQ device(s): %s", len(shark_vacs), device_names)
coordinator = SharkIqUpdateCoordinator(hass, config_entry, ayla_api, shark_vacs)
await coordinator.async_refresh()
if not coordinator.last_update_success:
raise exceptions.ConfigEntryNotReady
hass.data[DOMAIN][config_entry.entry_id] = coordinator
for component in COMPONENTS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
return True
async def async_disconnect_or_timeout(coordinator: SharkIqUpdateCoordinator):
"""Disconnect to vacuum."""
_LOGGER.debug("Disconnecting from Ayla Api")
with async_timeout.timeout(5):
try:
await coordinator.ayla_api.async_sign_out()
except (SharkIqAuthError, SharkIqAuthExpiringError, SharkIqNotAuthedError):
pass
async def async_update_options(hass, config_entry):
"""Update options."""
await hass.config_entries.async_reload(config_entry.entry_id)
async def async_unload_entry(hass, config_entry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(config_entry, component)
for component in COMPONENTS
]
)
)
if unload_ok:
domain_data = hass.data[DOMAIN][config_entry.entry_id]
try:
await async_disconnect_or_timeout(coordinator=domain_data)
except SharkIqAuthError:
pass
hass.data[DOMAIN].pop(config_entry.entry_id)
return unload_ok
| turbokongen/home-assistant | homeassistant/components/sharkiq/__init__.py | Python | apache-2.0 | 3,446 |
from django.conf.urls import patterns, include, url
urlpatterns = patterns('',
url(r'^(?P<project_id>\d+)/stories/(?P<tag_filter>\w+)/(?P<hash_key>\w+)/$', 'pivotal.views.stories', name='pivotal_stories'),
url(r'^(?P<project_id>\d+)/stories/(?P<tag_filter>\w+)/(?P<hash_key>\w+)/(?P<story_id>\d+)/$', 'pivotal.views.story_details', name='story_details'),
url(r'^(?P<project_id>\d+)/stories/(?P<tag_filter>\w+)/(?P<hash_key>\w+)/(?P<story_id>\d+)/tasks/$', 'pivotal.views.tasks', name='pivotal_tasks'),
url(r'^(?P<project_id>\d+)/stories/(?P<tag_filter>\w+)/(?P<hash_key>\w+)/(?P<story_id>\d+)/hours.json$', 'pivotal.views.tasks', name='pivotal_tasks'),
)
| cloudanswers/task-status-portal | pivotal/urls.py | Python | gpl-2.0 | 672 |
import unittest
import os
import sys
from base_positive_integer_sort_test import BasePositiveIntegerSortTest
sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, 'sort'))
import counting_sort
class CountingSortTest(unittest.TestCase,
BasePositiveIntegerSortTest):
def setUp(self):
self.sort = counting_sort.sort
if __name__ == '__main__':
unittest.main()
| GrowingWithTheWeb/py-sorting | test/counting_sort_test.py | Python | mit | 404 |
from __future__ import absolute_import
from django.conf.urls import url
from oauth2_provider import views
from .views import CoffeestatsApplicationRegistration, \
CoffeestatsApplicationDetail, \
CoffeestatsApplicationApproval, \
CoffeestatsApplicationRejection, \
CoffeestatsApplicationFullList
urlpatterns = (
url(r'^authorize/$', views.AuthorizationView.as_view(), name="authorize"),
url(r'^token/$', views.TokenView.as_view(), name="token"),
url(r'^revoke_token/$', views.RevokeTokenView.as_view(),
name="revoke-token"),
)
# Application management views
urlpatterns += (
url(r'^applications/$', views.ApplicationList.as_view(), name="list"),
url(r'^applications/register/$',
CoffeestatsApplicationRegistration.as_view(), name="register"),
url(r'^applications/(?P<pk>\d+)/$', CoffeestatsApplicationDetail.as_view(),
name="detail"),
url(r'^applications/(?P<pk>\d+)/delete/$',
views.ApplicationDelete.as_view(), name="delete"),
url(r'^applications/(?P<pk>\d+)/update/$',
views.ApplicationUpdate.as_view(), name="update"),
url(r'^applications/(?P<pk>\d+)/approve/$',
CoffeestatsApplicationApproval.as_view(), name="approve"),
url(r'^applications/(?P<pk>\d+)/reject/$',
CoffeestatsApplicationRejection.as_view(), name="reject"),
url(r'^all-applications/$',
CoffeestatsApplicationFullList.as_view(), name="list_all"),
)
urlpatterns += (
url(r'^authorized_tokens/$', views.AuthorizedTokensListView.as_view(),
name="authorized-token-list"),
url(r'^authorized_tokens/(?P<pk>\d+)/delete/$',
views.AuthorizedTokenDeleteView.as_view(),
name="authorized-token-delete"),
)
| coffeestats/coffeestats-django | coffeestats/caffeine_oauth2/urls.py | Python | mit | 1,723 |
from dataclasses import dataclass, field
from enum import Enum
from typing import Any
from .state import ClientStateHolder
from .session import Session
class TextActionType(Enum):
Nothing = 0
SearchResults = 1
BasketRefresh = 2
Feedback = 3
Shutdown = 4
@dataclass
class TextAction:
action_type: TextActionType
content: Any = None
class Application:
def __init__(self, config):
self.config = config
self.state = ClientStateHolder()
self.session = Session(config)
self.state.transition_initial_shopping(self.session)
self.retain_amount = None
@property
def basket(self):
return self.state.basket
@property
def catalog(self):
return self.state.catalog
def text_action(self, text):
if text == "":
if self.basket.purchases:
self.state.transition_shopping_purchase()
self.state.transition_purchase_shopping()
return TextAction(TextActionType.BasketRefresh), TextAction(TextActionType.Feedback, "Transaction successful")
else:
return TextAction(TextActionType.Nothing),
results = self.catalog.search(text)
if not results:
return TextAction(TextActionType.Nothing),
if len(results) == 1:
self.basket.add(results[0])
return TextAction(TextActionType.BasketRefresh),
return TextAction(TextActionType.SearchResults, results),
def keycode_action(self, text, code):
results = self.catalog.keycode(code)
if not results:
return TextAction(TextActionType.Nothing),
amount = 1
try:
amount = int(text)
except:
pass
if len(results) == 1:
self.basket.add(results[0], amount=amount)
return TextAction(TextActionType.BasketRefresh),
self.retain_amount = amount
return TextAction(TextActionType.SearchResults, results),
def search_submit(self, product):
self.basket.add(product, self.retain_amount)
self.retain_amount = None
return TextAction(TextActionType.BasketRefresh),
def search_cancel(self):
self.retain_amount = None
return TextAction(TextActionType.Nothing),
| thijsmie/tantalus | pos_endpoint/application.py | Python | mit | 2,322 |
# Copyright 2016, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Provides behavior that supports request bundling.
:func:`compute_bundle_id` is used to generate ids linking API requests to the
appropriate bundles.
:class:`Event` is the result of scheduling a bundled api call. It is a
decorated :class:`threading.Event`; its ``wait`` and ``is_set`` methods
are used to wait for the bundle request to complete or to determine whether
it has been completed, respectively.
:class:`Task` manages the sending of all the requests in a specific bundle.
:class:`Executor` has a ``schedule`` method that is used to add bundled api
calls to a new or existing :class:`Task`.
"""
from __future__ import absolute_import
import collections
import copy
import logging
import threading
_LOG = logging.getLogger(__name__)
def _str_dotted_getattr(obj, name):
"""Expands extends getattr to allow dots in x to indicate nested objects.
Args:
obj (object): an object.
name (str): a name for a field in the object.
Returns:
Any: the value of named attribute.
Raises:
AttributeError: if the named attribute does not exist.
"""
for part in name.split('.'):
obj = getattr(obj, part)
return str(obj) if obj else None
def compute_bundle_id(obj, discriminator_fields):
"""Computes a bundle id from the discriminator fields of `obj`.
    discriminator_fields may include '.' as a separator, which is used to
    indicate object traversal. This is meant to allow fields of nested
    objects to be used in the computed bundle_id.
    The id is a tuple computed by going through the discriminator fields in
    order and obtaining the str(value) of each object field (or nested object
    field).
    If any discriminator field cannot be found, AttributeError is raised.
Args:
obj (object): an object.
discriminator_fields (Sequence[str]): a list of discriminator fields in
the order to be to be used in the id.
Returns:
Tuple[str]: computed as described above.
Raises:
AttributeError: if any discriminator fields attribute does not exist.
"""
return tuple(_str_dotted_getattr(obj, x) for x in discriminator_fields)
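# A minimal usage sketch (not part of the original module), using hypothetical
# request objects with a 'name' attribute and a nested 'meta.kind' field:
#
#   >>> class Meta: kind = 'log'
#   >>> class Request: name = 'projects/p1'; meta = Meta()
#   >>> compute_bundle_id(Request(), ['name', 'meta.kind'])
#   ('projects/p1', 'log')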
_WARN_DEMUX_MISMATCH = ('cannot demultiplex the bundled response, got'
' %d subresponses; want %d, each bundled request will'
' receive all responses')
class Task(object):
"""Coordinates the execution of a single bundle."""
# pylint: disable=too-many-instance-attributes
def __init__(self, api_call, bundle_id, bundled_field, bundling_request,
kwargs, subresponse_field=None):
"""
Args:
api_call (Callable[Sequence[object], object]): the func that is this
                task's API call.
bundle_id (Tuple[str]): the id of this bundle.
bundled_field (str): the field used to create the bundled request.
bundling_request (object): the request to pass as the arg to
api_call.
kwargs (dict): keyword arguments passed to api_call.
subresponse_field (str): optional field used to demultiplex
responses.
"""
self._api_call = api_call
self._bundling_request = bundling_request
self._kwargs = kwargs
self.bundle_id = bundle_id
self.bundled_field = bundled_field
self.subresponse_field = subresponse_field
self.timer = None
self._in_deque = collections.deque()
self._event_deque = collections.deque()
@property
def element_count(self):
"""The number of bundled elements in the repeated field."""
return sum(len(elts) for elts in self._in_deque)
@property
def request_bytesize(self):
"""The size of in bytes of the bundled field elements."""
return sum(len(str(e)) for elts in self._in_deque for e in elts)
def run(self):
"""Call the task's func.
        The task's func will be called with the bundled request.
"""
if not self._in_deque:
return
req = self._bundling_request
del getattr(req, self.bundled_field)[:]
getattr(req, self.bundled_field).extend(
[e for elts in self._in_deque for e in elts])
subresponse_field = self.subresponse_field
if subresponse_field:
self._run_with_subresponses(req, subresponse_field, self._kwargs)
else:
self._run_with_no_subresponse(req, self._kwargs)
def _run_with_no_subresponse(self, req, kwargs):
try:
resp = self._api_call(req, **kwargs)
for event in self._event_deque:
event.result = resp
event.set()
except Exception as exc: # pylint: disable=broad-except
for event in self._event_deque:
event.result = exc
event.set()
finally:
self._in_deque.clear()
self._event_deque.clear()
def _run_with_subresponses(self, req, subresponse_field, kwargs):
try:
resp = self._api_call(req, **kwargs)
in_sizes = [len(elts) for elts in self._in_deque]
all_subresponses = getattr(resp, subresponse_field)
if len(all_subresponses) != sum(in_sizes):
_LOG.warning(_WARN_DEMUX_MISMATCH, len(all_subresponses),
sum(in_sizes))
for event in self._event_deque:
event.result = resp
event.set()
else:
start = 0
for i, event in zip(in_sizes, self._event_deque):
next_copy = copy.copy(resp)
subresponses = all_subresponses[start:start + i]
next_copy.ClearField(subresponse_field)
getattr(next_copy, subresponse_field).extend(subresponses)
start += i
event.result = next_copy
event.set()
except Exception as exc: # pylint: disable=broad-except
for event in self._event_deque:
event.result = exc
event.set()
finally:
self._in_deque.clear()
self._event_deque.clear()
def extend(self, elts):
"""Adds elts to the tasks.
Args:
elts (Sequence): a iterable of elements that can be appended to the
task's bundle_field.
Returns:
Event: an event that can be used to wait on the response.
"""
# Use a copy, not a reference, as it is later necessary to mutate
# the proto field from which elts are drawn in order to construct
# the bundled request.
elts = elts[:]
self._in_deque.append(elts)
event = self._event_for(elts)
self._event_deque.append(event)
return event
def _event_for(self, elts):
"""Creates an Event that is set when the bundle with elts is sent."""
event = Event()
event.canceller = self._canceller_for(elts, event)
return event
def _canceller_for(self, elts, event):
"""Obtains a cancellation function that removes elts.
        The returned cancellation function returns ``True`` if all elements
        were removed successfully from the _in_deque, and ``False`` otherwise.
"""
def canceller():
"""Cancels submission of ``elts`` as part of this bundle.
Returns:
bool: ``False`` if any of elements had already been sent,
otherwise ``True``.
"""
try:
self._event_deque.remove(event)
self._in_deque.remove(elts)
return True
except ValueError:
return False
return canceller
TIMER_FACTORY = threading.Timer # pylint: disable=invalid-name
"""A class with an interface similar to threading.Timer.
Defaults to threading.Timer. This makes it easy to plug in alternate
timer implementations."""
class Executor(object):
"""Organizes bundling for an api service that requires it."""
# pylint: disable=too-few-public-methods
def __init__(self, options):
"""Constructor.
Args:
options (gax.BundleOptions): configures strategy this instance
uses when executing bundled functions.
"""
self._options = options
self._tasks = {}
self._task_lock = threading.RLock()
def schedule(self, api_call, bundle_id, bundle_desc, bundling_request,
kwargs=None):
"""Schedules bundle_desc of bundling_request as part of bundle_id.
        The returned value is an :class:`Event` that
        * has a ``result`` attribute that will eventually be set to the result
          of the api call
* will be used to wait for the response
* holds the canceller function for canceling this part of the bundle
Args:
api_call (callable[[object], object]): the scheduled API call.
bundle_id (str): identifies the bundle on which the API call should be
made.
bundle_desc (gax.BundleDescriptor): describes the structure of the
bundled call.
bundling_request (object): the request instance to use in the API
call.
kwargs (dict): optional, the keyword arguments passed to the API call.
Returns:
Event: the scheduled event.
"""
kwargs = kwargs or dict()
bundle = self._bundle_for(api_call, bundle_id, bundle_desc,
bundling_request, kwargs)
elts = getattr(bundling_request, bundle_desc.bundled_field)
event = bundle.extend(elts)
# Run the bundle if the count threshold was reached.
count_threshold = self._options.element_count_threshold
if count_threshold > 0 and bundle.element_count >= count_threshold:
self._run_now(bundle.bundle_id)
# Run the bundle if the size threshold was reached.
size_threshold = self._options.request_byte_threshold
if size_threshold > 0 and bundle.request_bytesize >= size_threshold:
self._run_now(bundle.bundle_id)
return event
def _bundle_for(self, api_call, bundle_id, bundle_desc, bundling_request,
kwargs):
with self._task_lock:
bundle = self._tasks.get(bundle_id)
if bundle is None:
bundle = Task(api_call, bundle_id, bundle_desc.bundled_field,
bundling_request, kwargs,
subresponse_field=bundle_desc.subresponse_field)
delay_threshold = self._options.delay_threshold
if delay_threshold > 0:
self._run_later(bundle, delay_threshold)
self._tasks[bundle_id] = bundle
return bundle
def _run_later(self, bundle, delay_threshold):
with self._task_lock:
if bundle.timer is None:
the_timer = TIMER_FACTORY(
delay_threshold,
self._run_now,
args=[bundle.bundle_id])
the_timer.start()
bundle.timer = the_timer
def _run_now(self, bundle_id):
with self._task_lock:
if bundle_id in self._tasks:
a_task = self._tasks.pop(bundle_id)
a_task.run()
class Event(object):
"""Wraps a threading.Event, adding, canceller and result attributes."""
def __init__(self):
"""Constructor.
"""
self._event = threading.Event()
self.result = None
self.canceller = None
def is_set(self):
"""Calls ``is_set`` on the decorated :class:`threading.Event`."""
return self._event.is_set()
def set(self):
"""Calls ``set`` on the decorated :class:`threading.Event`."""
return self._event.set()
def clear(self):
"""Calls ``clear`` on the decorated :class:`threading.Event`.
Also resets the result if one has been set.
"""
self.result = None
return self._event.clear()
def wait(self, timeout=None):
"""Calls ``wait`` on the decorated :class:`threading.Event`."""
return self._event.wait(timeout=timeout)
def cancel(self):
"""Invokes the cancellation function provided on construction."""
if self.canceller:
return self.canceller()
else:
return False
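# A rough end-to-end sketch (not part of the original module). `options` and
# `bundle_desc` stand in for gax.BundleOptions and gax.BundleDescriptor values
# configured elsewhere; `api_call` and `request` are hypothetical:
#
#   executor = Executor(options)
#   bundle_id = compute_bundle_id(request, ['name'])
#   event = executor.schedule(api_call, bundle_id, bundle_desc, request)
#   event.wait(timeout=30)      # blocks until this part of the bundle is sent
#   response = event.result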
| googleapis/gax-python | google/gax/bundling.py | Python | bsd-3-clause | 14,059 |
# Copyright 2015 ETH Zurich
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:mod:`defines` --- Constants
============================
Contains constant definitions used throughout the codebase.
"""
# Stdlib
import os
#: SCION protocol version
SCION_PROTO_VERSION = 0
#: Max TTL of a PathSegment in realtime seconds.
# TODO(shitz): This value should be externally configurable. The problem is that
# the revocation hash tree TTL needs to be at least as large as MAX_SEGMENT_TTL,
# but having a TTL of 1 day makes the hash tree generation costly enough that it
# times out on CircleCI. Thus, we should have one external config file for the
# Docker/CircleCI environment and one for production.
MAX_SEGMENT_TTL = 30 * 60
#: Time unit for HOF expiration.
EXP_TIME_UNIT = MAX_SEGMENT_TTL // 256
#: Max number of supported HopByHop extensions (does not include SCMP)
MAX_HOPBYHOP_EXT = 3
#: Number of bytes per 'line'. Used for padding in many places.
LINE_LEN = 8
#: Generated files directory
GEN_PATH = 'gen'
#: Topology configuration
TOPO_FILE = "topology.yml"
#: AS configuration
AS_CONF_FILE = "as.yml"
#: Path policy config
PATH_POLICY_FILE = "path_policy.yml"
#: Networks config
NETWORKS_FILE = "networks.conf"
#: IFIDs list
IFIDS_FILE = "ifids.yml"
#: AS list
AS_LIST_FILE = "as_list.yml"
#: Buffer size for receiving packets
SCION_BUFLEN = 65535
#: Default SCION endhost data port
SCION_UDP_EH_DATA_PORT = 30041
#: Default SCION filter command port
SCION_FILTER_CMD_PORT = 30042
#: Default DNS UDP/TCP port
SCION_DNS_PORT = 30053
#: Default SCION router UDP port.
SCION_ROUTER_PORT = 50000
#: Default SCION dispatcher host addr
SCION_DISPATCHER_ADDR = "/run/shm/dispatcher.sock"
#: Default SCION dispatcher port
SCION_DISPATCHER_PORT = 3334
#: Default SCION dispatcher UNIX socket directory
DISPATCHER_DIR = "/run/shm/dispatcher"
#: Default SCION dispatcher ID
DEFAULT_DISPATCHER_ID = "default"
BEACON_SERVICE = "bs"
CERTIFICATE_SERVICE = "cs"
DNS_SERVICE = "ds"
PATH_SERVICE = "ps"
ROUTER_SERVICE = "br"
SIBRA_SERVICE = "sb"
#: All the service types
SERVICE_TYPES = (
BEACON_SERVICE,
CERTIFICATE_SERVICE,
DNS_SERVICE,
PATH_SERVICE,
ROUTER_SERVICE,
SIBRA_SERVICE,
)
#: Dispatcher registration timeout
DISPATCHER_TIMEOUT = 60
#: How often IFID packet is sent to neighboring router.
IFID_PKT_TOUT = 1
#: Default MTU - assumes overlay is ipv4+udp
DEFAULT_MTU = 1500 - 20 - 8
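# i.e. 1500 - 20 (IPv4 header) - 8 (UDP header) = 1472 bytes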
#: IPv6 minimum MTU
SCION_MIN_MTU = 1280
#: Length of opaque fields
OPAQUE_FIELD_LEN = 8
#: How long certain warnings should be suppressed after startup
STARTUP_QUIET_PERIOD = 30
#: Number of seconds per sibra tick
SIBRA_TICK = 4
#: How far in the future a steady path can reserve at a time.
SIBRA_MAX_STEADY_TICKS = 45
#: How far in the future an ephemeral path can reserve at a time.
SIBRA_MAX_EPHEMERAL_TICKS = 4
#: Length of steady path ID in bytes
SIBRA_STEADY_ID_LEN = 8
#: Length of ephemeral path ID in bytes
SIBRA_EPHEMERAL_ID_LEN = 16
#: SIBRA Bandwidth multiplier
SIBRA_BW_FACTOR = 16 * 1024
#: SIBRA max reservation index
SIBRA_MAX_IDX = 16
PATH_FLAG_SIBRA = "SIBRA"
MAX_HOST_ADDR_LEN = 16
# Time per Epoch
HASHTREE_EPOCH_TIME = 10
# The tolerable error in epoch in seconds.
HASHTREE_EPOCH_TOLERANCE = 5
# Max time to live
HASHTREE_TTL = MAX_SEGMENT_TTL
# Number of epochs in one TTL per interface
HASHTREE_N_EPOCHS = HASHTREE_TTL // HASHTREE_EPOCH_TIME
# How much time in advance to compute the next hash tree
HASHTREE_UPDATE_WINDOW = HASHTREE_TTL // 3
# TCP polling timeouts, used by accept() and recv().
TCP_ACCEPT_POLLING_TOUT = 1
# SCION control-plane TCP connection timeout.
TCP_TIMEOUT = 5
| caterinaurban/Typpete | typpete/tests/scion_err/lib/defines.py | Python | mpl-2.0 | 4,117 |
import os
import unittest
from dataset_creator.dataset import Dataset
from .data import test_data
FASTA_DATA_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'Fasta')
class TestGenBankFASTA(unittest.TestCase):
def setUp(self):
self.maxDiff = None
def test_dataset(self):
dataset = Dataset(test_data, format='GenBankFASTA', partitioning='by gene')
result = dataset.dataset_str
expected = open(os.path.join(FASTA_DATA_PATH, 'dataset_genbankfasta.fas')).read()
self.assertEqual(expected, result)
| carlosp420/dataset-creator | tests/test_genbankfasta.py | Python | bsd-2-clause | 562 |
##########################################################################
#
# Copyright (c) 2010, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import math
import unittest
import random
import imath
import IECore
import IECoreScene
class LimitSmoothSkinningInfluencesOpTest( unittest.TestCase ) :
def createSSD( self, weights ) :
names = IECore.StringVectorData( [ 'jointA', 'jointB', 'jointC' ] )
poses = IECore.M44fVectorData( [imath.M44f(1),imath.M44f(2),imath.M44f(3)] )
offsets = IECore.IntVectorData( [0, 2, 5, 6, 8] )
counts = IECore.IntVectorData( [2, 3, 1, 2, 3] )
indices = IECore.IntVectorData( [0, 1, 0, 1, 2, 1, 1, 2, 0, 1, 2] )
ssd = IECoreScene.SmoothSkinningData( names, poses, offsets, counts, indices, weights )
return ssd
def original( self ) :
weights = IECore.FloatVectorData( [0.7, 0.7, 0.2, 0.6, 0.0, 0.1, 1.2, 0.8, 0.4, 0.6, 0.4] )
return self.createSSD( weights )
def weightLimited( self ) :
weights = IECore.FloatVectorData( [0.7, 0.7, 0.0, 0.6, 0.0, 0.0, 1.2, 0.8, 0.0, 0.6, 0.0] )
return self.createSSD( weights )
def weightLimitedWithLocks( self ) :
weights = IECore.FloatVectorData( [0.7, 0.7, 0.2, 0.6, 0.0, 0.0, 1.2, 0.8, 0.4, 0.6, 0.0] )
return self.createSSD( weights )
def maxInfluenced( self ) :
weights = IECore.FloatVectorData( [0.7, 0.7, 0.2, 0.6, 0.0, 0.1, 1.2, 0.8, 0.0, 0.6, 0.4] )
return self.createSSD( weights )
def maxInfluencedWithLocks( self ) :
weights = IECore.FloatVectorData( [0.7, 0.0, 0.2, 0.0, 0.0, 0.1, 1.2, 0.0, 0.4, 0.0, 0.0] )
return self.createSSD( weights )
def indexed( self ) :
weights = IECore.FloatVectorData( [0.0, 0.7, 0.0, 0.6, 0.0, 0.1, 1.2, 0.0, 0.0, 0.6, 0.0] )
return self.createSSD( weights )
def compressedAfterIndexed( self ) :
names = IECore.StringVectorData( [ 'jointA', 'jointB', 'jointC' ] )
poses = IECore.M44fVectorData( [imath.M44f(1),imath.M44f(2),imath.M44f(3)] )
offsets = IECore.IntVectorData( [0, 1, 2, 2, 3] )
counts = IECore.IntVectorData( [1, 1, 0, 1, 2] )
indices = IECore.IntVectorData( [0, 0, 2, 0, 2] )
weights = IECore.FloatVectorData( [0.7, 0.2, 0.8, 0.4, 0.4] )
return IECoreScene.SmoothSkinningData( names, poses, offsets, counts, indices, weights )
def testTypes( self ) :
""" Test LimitSmoothSkinningInfluencesOp types"""
ssd = self.original()
op = IECoreScene.LimitSmoothSkinningInfluencesOp()
self.assertEqual( type(op), IECoreScene.LimitSmoothSkinningInfluencesOp )
self.assertEqual( op.typeId(), IECoreScene.TypeId.LimitSmoothSkinningInfluencesOp )
op.parameters()['input'].setValue( IECore.IntData(1) )
self.assertRaises( RuntimeError, op.operate )
def testWeightLimitMode( self ) :
""" Test LimitSmoothSkinningInfluencesOp in weight limit mode"""
ssd = self.original()
weightLimited = self.weightLimited()
op = IECoreScene.LimitSmoothSkinningInfluencesOp()
op.parameters()['input'].setValue( ssd )
op.parameters()['mode'].setValue( IECoreScene.LimitSmoothSkinningInfluencesOp.Mode.WeightLimit )
op.parameters()['minWeight'].setValue( 0.401 )
op.parameters()['compressResult'].setTypedValue( False )
op.parameters()['applyLocks'].setValue( False )
result = op.operate()
self.assertEqual( result.influenceNames(), ssd.influenceNames() )
self.assertEqual( result.influencePose(), ssd.influencePose() )
self.assertEqual( result.pointInfluenceIndices(), ssd.pointInfluenceIndices() )
self.assertEqual( result.pointIndexOffsets(), ssd.pointIndexOffsets() )
self.assertEqual( result.pointInfluenceCounts(), ssd.pointInfluenceCounts() )
self.assertNotEqual( result.pointInfluenceWeights(), ssd.pointInfluenceWeights() )
self.assertNotEqual( result, ssd )
self.assertEqual( result.influenceNames(), weightLimited.influenceNames() )
self.assertEqual( result.influencePose(), weightLimited.influencePose() )
self.assertEqual( result.pointIndexOffsets(), weightLimited.pointIndexOffsets() )
self.assertEqual( result.pointInfluenceCounts(), weightLimited.pointInfluenceCounts() )
self.assertEqual( result.pointInfluenceIndices(), weightLimited.pointInfluenceIndices() )
self.assertEqual( result.pointInfluenceWeights(), weightLimited.pointInfluenceWeights() )
self.assertEqual( result, weightLimited )
def testWeightLimitModeWithLocks( self ) :
""" Test LimitSmoothSkinningInfluencesOp locking mechanism in weight limit mode"""
ssd = self.original()
weightLimited = self.weightLimitedWithLocks()
op = IECoreScene.LimitSmoothSkinningInfluencesOp()
op.parameters()['input'].setValue( ssd )
op.parameters()['mode'].setValue( IECoreScene.LimitSmoothSkinningInfluencesOp.Mode.WeightLimit )
op.parameters()['minWeight'].setValue( 0.401 )
op.parameters()['compressResult'].setTypedValue( False )
op.parameters()['applyLocks'].setValue( True )
op.parameters()['influenceLocks'].setValue( IECore.BoolVectorData( [ True, False, False ] ) )
result = op.operate()
self.assertEqual( result.influenceNames(), ssd.influenceNames() )
self.assertEqual( result.influencePose(), ssd.influencePose() )
self.assertEqual( result.pointInfluenceIndices(), ssd.pointInfluenceIndices() )
self.assertEqual( result.pointIndexOffsets(), ssd.pointIndexOffsets() )
self.assertEqual( result.pointInfluenceCounts(), ssd.pointInfluenceCounts() )
self.assertNotEqual( result.pointInfluenceWeights(), ssd.pointInfluenceWeights() )
self.assertNotEqual( result, ssd )
self.assertEqual( result.influenceNames(), weightLimited.influenceNames() )
self.assertEqual( result.influencePose(), weightLimited.influencePose() )
self.assertEqual( result.pointIndexOffsets(), weightLimited.pointIndexOffsets() )
self.assertEqual( result.pointInfluenceCounts(), weightLimited.pointInfluenceCounts() )
self.assertEqual( result.pointInfluenceIndices(), weightLimited.pointInfluenceIndices() )
self.assertEqual( result.pointInfluenceWeights(), weightLimited.pointInfluenceWeights() )
self.assertEqual( result, weightLimited )
# make sure locked weights did not change
dop = IECoreScene.DecompressSmoothSkinningDataOp()
dop.parameters()['input'].setValue( result )
decompressedResult = dop.operate()
dop.parameters()['input'].setValue( ssd )
decompressedOrig = dop.operate()
resultIndices = decompressedResult.pointInfluenceIndices()
resultWeights = decompressedResult.pointInfluenceWeights()
origWeights = decompressedOrig.pointInfluenceWeights()
for i in range( 0, resultWeights.size() ) :
if resultIndices[i] == 0 :
self.assertAlmostEqual( resultWeights[i], origWeights[i], 6 )
	def testWeightLimitModeAllLocked( self ) :
""" Test LimitSmoothSkinningInfluencesOp locking mechanism in weight limit mode with all influences locked"""
ssd = self.original()
maxInfluenced = self.maxInfluencedWithLocks()
op = IECoreScene.LimitSmoothSkinningInfluencesOp()
op.parameters()['input'].setValue( ssd )
op.parameters()['mode'].setValue( IECoreScene.LimitSmoothSkinningInfluencesOp.Mode.WeightLimit )
op.parameters()['minWeight'].setValue( 0.401 )
op.parameters()['compressResult'].setTypedValue( False )
op.parameters()['applyLocks'].setValue( True )
op.parameters()['influenceLocks'].setValue( IECore.BoolVectorData( [ True, True, True ] ) )
result = op.operate()
self.assertEqual( result.influenceNames(), ssd.influenceNames() )
self.assertEqual( result.influencePose(), ssd.influencePose() )
self.assertEqual( result.pointInfluenceIndices(), ssd.pointInfluenceIndices() )
self.assertEqual( result.pointIndexOffsets(), ssd.pointIndexOffsets() )
self.assertEqual( result.pointInfluenceCounts(), ssd.pointInfluenceCounts() )
self.assertEqual( result.pointInfluenceWeights(), ssd.pointInfluenceWeights() )
self.assertEqual( result, ssd )
def testMaxInfluencesMode( self ) :
""" Test LimitSmoothSkinningInfluencesOp in max influences mode"""
ssd = self.original()
maxInfluenced = self.maxInfluenced()
op = IECoreScene.LimitSmoothSkinningInfluencesOp()
op.parameters()['input'].setValue( ssd )
op.parameters()['mode'].setValue( IECoreScene.LimitSmoothSkinningInfluencesOp.Mode.MaxInfluences )
op.parameters()['maxInfluences'].setValue( 2 )
op.parameters()['compressResult'].setTypedValue( False )
op.parameters()['applyLocks'].setValue( False )
result = op.operate()
self.assertEqual( result.influenceNames(), ssd.influenceNames() )
self.assertEqual( result.influencePose(), ssd.influencePose() )
self.assertEqual( result.pointInfluenceIndices(), ssd.pointInfluenceIndices() )
self.assertEqual( result.pointIndexOffsets(), ssd.pointIndexOffsets() )
self.assertEqual( result.pointInfluenceCounts(), ssd.pointInfluenceCounts() )
self.assertNotEqual( result.pointInfluenceWeights(), ssd.pointInfluenceWeights() )
self.assertNotEqual( result, ssd )
self.assertEqual( result.influenceNames(), maxInfluenced.influenceNames() )
self.assertEqual( result.influencePose(), maxInfluenced.influencePose() )
self.assertEqual( result.pointIndexOffsets(), maxInfluenced.pointIndexOffsets() )
self.assertEqual( result.pointInfluenceCounts(), maxInfluenced.pointInfluenceCounts() )
self.assertEqual( result.pointInfluenceIndices(), maxInfluenced.pointInfluenceIndices() )
self.assertEqual( result.pointInfluenceWeights(), maxInfluenced.pointInfluenceWeights() )
self.assertEqual( result, maxInfluenced )
def testMaxInfluencesModeWithLocks( self ) :
""" Test LimitSmoothSkinningInfluencesOp locking mechanism in max influences mode"""
ssd = self.original()
maxInfluenced = self.maxInfluencedWithLocks()
op = IECoreScene.LimitSmoothSkinningInfluencesOp()
op.parameters()['input'].setValue( ssd )
op.parameters()['mode'].setValue( IECoreScene.LimitSmoothSkinningInfluencesOp.Mode.MaxInfluences )
op.parameters()['maxInfluences'].setValue( 1 )
op.parameters()['compressResult'].setTypedValue( False )
op.parameters()['applyLocks'].setValue( True )
op.parameters()['influenceLocks'].setValue( IECore.BoolVectorData( [ True, False, False ] ) )
result = op.operate()
self.assertEqual( result.influenceNames(), ssd.influenceNames() )
self.assertEqual( result.influencePose(), ssd.influencePose() )
self.assertEqual( result.pointInfluenceIndices(), ssd.pointInfluenceIndices() )
self.assertEqual( result.pointIndexOffsets(), ssd.pointIndexOffsets() )
self.assertEqual( result.pointInfluenceCounts(), ssd.pointInfluenceCounts() )
self.assertNotEqual( result.pointInfluenceWeights(), ssd.pointInfluenceWeights() )
self.assertNotEqual( result, ssd )
self.assertEqual( result.influenceNames(), maxInfluenced.influenceNames() )
self.assertEqual( result.influencePose(), maxInfluenced.influencePose() )
self.assertEqual( result.pointIndexOffsets(), maxInfluenced.pointIndexOffsets() )
self.assertEqual( result.pointInfluenceCounts(), maxInfluenced.pointInfluenceCounts() )
self.assertEqual( result.pointInfluenceIndices(), maxInfluenced.pointInfluenceIndices() )
self.assertEqual( result.pointInfluenceWeights(), maxInfluenced.pointInfluenceWeights() )
self.assertEqual( result, maxInfluenced )
# make sure locked weights did not change
dop = IECoreScene.DecompressSmoothSkinningDataOp()
dop.parameters()['input'].setValue( result )
decompressedResult = dop.operate()
dop.parameters()['input'].setValue( ssd )
decompressedOrig = dop.operate()
resultIndices = decompressedResult.pointInfluenceIndices()
resultWeights = decompressedResult.pointInfluenceWeights()
origWeights = decompressedOrig.pointInfluenceWeights()
for i in range( 0, resultWeights.size() ) :
if resultIndices[i] == 0 :
self.assertAlmostEqual( resultWeights[i], origWeights[i], 6 )
def testMaxInfluencesModeAllLocked( self ) :
""" Test LimitSmoothSkinningInfluencesOp locking mechanism in max influences mode with all influences locked"""
ssd = self.original()
maxInfluenced = self.maxInfluencedWithLocks()
op = IECoreScene.LimitSmoothSkinningInfluencesOp()
op.parameters()['input'].setValue( ssd )
op.parameters()['mode'].setValue( IECoreScene.LimitSmoothSkinningInfluencesOp.Mode.MaxInfluences )
op.parameters()['maxInfluences'].setValue( 1 )
op.parameters()['compressResult'].setTypedValue( False )
op.parameters()['applyLocks'].setValue( True )
op.parameters()['influenceLocks'].setValue( IECore.BoolVectorData( [ True, True, True ] ) )
result = op.operate()
self.assertEqual( result.influenceNames(), ssd.influenceNames() )
self.assertEqual( result.influencePose(), ssd.influencePose() )
self.assertEqual( result.pointInfluenceIndices(), ssd.pointInfluenceIndices() )
self.assertEqual( result.pointIndexOffsets(), ssd.pointIndexOffsets() )
self.assertEqual( result.pointInfluenceCounts(), ssd.pointInfluenceCounts() )
self.assertEqual( result.pointInfluenceWeights(), ssd.pointInfluenceWeights() )
self.assertEqual( result, ssd )
def testIndexedMode( self ) :
""" Test LimitSmoothSkinningInfluencesOp in indexed mode"""
ssd = self.original()
indexed = self.indexed()
op = IECoreScene.LimitSmoothSkinningInfluencesOp()
op.parameters()['input'].setValue( ssd )
op.parameters()['mode'].setValue( IECoreScene.LimitSmoothSkinningInfluencesOp.Mode.Indexed )
op.parameters()['influenceIndices'].setFrameListValue( IECore.FrameList.parse( "0-2x2" ) )
op.parameters()['compressResult'].setTypedValue( False )
op.parameters()['applyLocks'].setValue( False )
result = op.operate()
self.assertEqual( result.influenceNames(), ssd.influenceNames() )
self.assertEqual( result.influencePose(), ssd.influencePose() )
self.assertEqual( result.pointInfluenceIndices(), ssd.pointInfluenceIndices() )
self.assertEqual( result.pointIndexOffsets(), ssd.pointIndexOffsets() )
self.assertEqual( result.pointInfluenceCounts(), ssd.pointInfluenceCounts() )
self.assertNotEqual( result.pointInfluenceWeights(), ssd.pointInfluenceWeights() )
self.assertNotEqual( result, ssd )
self.assertEqual( result.influenceNames(), indexed.influenceNames() )
self.assertEqual( result.influencePose(), indexed.influencePose() )
self.assertEqual( result.pointIndexOffsets(), indexed.pointIndexOffsets() )
self.assertEqual( result.pointInfluenceCounts(), indexed.pointInfluenceCounts() )
self.assertEqual( result.pointInfluenceIndices(), indexed.pointInfluenceIndices() )
self.assertEqual( result.pointInfluenceWeights(), indexed.pointInfluenceWeights() )
self.assertEqual( result, indexed )
def testCompressionParameter( self ) :
""" Test LimitSmoothSkinningInfluencesOp in indexed mode with compression on"""
ssd = self.original()
compressedAfterIndexed = self.compressedAfterIndexed()
op = IECoreScene.LimitSmoothSkinningInfluencesOp()
op.parameters()['input'].setValue( ssd )
op.parameters()['mode'].setValue( IECoreScene.LimitSmoothSkinningInfluencesOp.Mode.Indexed )
op.parameters()['influenceIndices'].setFrameListValue( IECore.FrameList.parse( "1" ) )
op.parameters()['compressResult'].setTypedValue( True )
op.parameters()['applyLocks'].setValue( False )
result = op.operate()
self.assertEqual( result.influenceNames(), ssd.influenceNames() )
self.assertEqual( result.influencePose(), ssd.influencePose() )
self.assertNotEqual( result.pointInfluenceIndices(), ssd.pointInfluenceIndices() )
self.assertNotEqual( result.pointIndexOffsets(), ssd.pointIndexOffsets() )
self.assertNotEqual( result.pointInfluenceCounts(), ssd.pointInfluenceCounts() )
self.assertNotEqual( result.pointInfluenceWeights(), ssd.pointInfluenceWeights() )
self.assertNotEqual( result, ssd )
self.assertEqual( result.influenceNames(), compressedAfterIndexed.influenceNames() )
self.assertEqual( result.influencePose(), compressedAfterIndexed.influencePose() )
self.assertEqual( result.pointIndexOffsets(), compressedAfterIndexed.pointIndexOffsets() )
self.assertEqual( result.pointInfluenceCounts(), compressedAfterIndexed.pointInfluenceCounts() )
self.assertEqual( result.pointInfluenceIndices(), compressedAfterIndexed.pointInfluenceIndices() )
self.assertEqual( result.pointInfluenceWeights(), compressedAfterIndexed.pointInfluenceWeights() )
self.assertEqual( result, compressedAfterIndexed )
if __name__ == "__main__":
unittest.main()
| appleseedhq/cortex | test/IECoreScene/LimitSmoothSkinningInfluencesOpTest.py | Python | bsd-3-clause | 17,858 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2019 Bitergia
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA.
#
# Authors:
# Santiago Dueñas <[email protected]>
# Alvaro del Castillo San Felix <[email protected]>
#
import csv
import datetime
import logging
import re
import bs4
import dateutil.tz
from grimoirelab_toolkit.datetime import str_to_datetime
from ...backend import (Backend,
BackendCommand,
BackendCommandArgumentParser)
from ...client import HttpClient
from ...errors import BackendError, ParseError
from ...utils import DEFAULT_DATETIME, xml_to_dict
CATEGORY_BUG = "bug"
MAX_BUGS = 200 # Maximum number of bugs per query
MAX_BUGS_CSV = 10000 # Maximum number of bugs per CSV query
logger = logging.getLogger(__name__)
class Bugzilla(Backend):
"""Bugzilla backend.
    This class allows the fetching of the bugs stored in a Bugzilla
    repository. To initialize this class the URL of the server
must be provided. The `url` will be set as the origin of
the data.
:param url: Bugzilla server URL
:param user: Bugzilla user
:param password: Bugzilla user password
:param max_bugs: maximum number of bugs requested on the same query
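    :param max_bugs_csv: maximum number of bugs requested on the same CSV query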
:param tag: label used to mark the data
:param archive: archive to store/retrieve items
"""
version = '0.10.3'
CATEGORIES = [CATEGORY_BUG]
def __init__(self, url, user=None, password=None,
max_bugs=MAX_BUGS, max_bugs_csv=MAX_BUGS_CSV,
tag=None, archive=None):
origin = url
super().__init__(origin, tag=tag, archive=archive)
self.url = url
self.user = user
self.password = password
self.max_bugs_csv = max_bugs_csv
self.client = None
self.max_bugs = max(1, max_bugs)
def fetch(self, category=CATEGORY_BUG, from_date=DEFAULT_DATETIME):
"""Fetch the bugs from the repository.
The method retrieves, from a Bugzilla repository, the bugs
updated since the given date.
:param category: the category of items to fetch
:param from_date: obtain bugs updated since this date
:returns: a generator of bugs
"""
if not from_date:
from_date = DEFAULT_DATETIME
kwargs = {"from_date": from_date}
items = super().fetch(category, **kwargs)
return items
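    # A rough usage sketch (not part of the original module); the server URL
    # is hypothetical:
    #
    #   backend = Bugzilla('https://bugzilla.example.com', max_bugs=100)
    #   for bug in backend.fetch(category=CATEGORY_BUG):
    #       print(bug['data']['bug_id'][0]['__text__'])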
def fetch_items(self, category, **kwargs):
"""Fetch the bugs
:param category: the category of items to fetch
:param kwargs: backend arguments
:returns: a generator of items
"""
from_date = kwargs['from_date']
logger.info("Looking for bugs: '%s' updated from '%s'",
self.url, str(from_date))
buglist = [bug for bug in self.__fetch_buglist(from_date)]
nbugs = 0
tbugs = len(buglist)
for i in range(0, tbugs, self.max_bugs):
chunk = buglist[i:i + self.max_bugs]
bugs_ids = [b['bug_id'] for b in chunk]
logger.info("Fetching bugs: %s/%s", i, tbugs)
bugs = self.__fetch_and_parse_bugs_details(bugs_ids)
for bug in bugs:
bug_id = bug['bug_id'][0]['__text__']
bug['activity'] = self.__fetch_and_parse_bug_activity(bug_id)
nbugs += 1
yield bug
logger.info("Fetch process completed: %s/%s bugs fetched",
nbugs, tbugs)
@classmethod
def has_archiving(cls):
"""Returns whether it supports archiving items on the fetch process.
:returns: this backend supports items archive
"""
return True
@classmethod
def has_resuming(cls):
"""Returns whether it supports to resume the fetch process.
:returns: this backend supports items resuming
"""
return True
@staticmethod
def metadata_id(item):
"""Extracts the identifier from a Bugzilla item."""
return item['bug_id'][0]['__text__']
@staticmethod
def metadata_updated_on(item):
"""Extracts and coverts the update time from a Bugzilla item.
The timestamp is extracted from 'delta_ts' field. This date is
converted to UNIX timestamp format. Due Bugzilla servers ignore
the timezone on HTTP requests, it will be ignored during the
conversion, too.
:param item: item generated by the backend
:returns: a UNIX timestamp
"""
ts = item['delta_ts'][0]['__text__']
ts = str_to_datetime(ts)
ts = ts.replace(tzinfo=dateutil.tz.tzutc())
return ts.timestamp()
@staticmethod
def metadata_category(item):
"""Extracts the category from a Bugzilla item.
This backend only generates one type of item which is
'bug'.
"""
return CATEGORY_BUG
@staticmethod
def parse_buglist(raw_csv):
"""Parse a Bugzilla CSV bug list.
The method parses the CSV file and returns an iterator of
        dictionaries. Each one of them contains the summary of a bug.
:param raw_csv: CSV string to parse
:returns: a generator of parsed bugs
"""
reader = csv.DictReader(raw_csv.split('\n'),
delimiter=',', quotechar='"')
for row in reader:
yield row
@staticmethod
def parse_bugs_details(raw_xml):
"""Parse a Bugilla bugs details XML stream.
This method returns a generator which parses the given XML,
producing an iterator of dictionaries. Each dictionary stores
the information related to a parsed bug.
        If the given XML is invalid or does not contain any bug, the
method will raise a ParseError exception.
:param raw_xml: XML string to parse
:returns: a generator of parsed bugs
:raises ParseError: raised when an error occurs parsing
the given XML stream
"""
bugs = xml_to_dict(raw_xml)
if 'bug' not in bugs:
cause = "No bugs found. XML stream seems to be invalid."
raise ParseError(cause=cause)
for bug in bugs['bug']:
yield bug
@staticmethod
def parse_bug_activity(raw_html):
"""Parse a Bugzilla bug activity HTML stream.
This method extracts the information about activity from the
        given HTML stream. The bug activity is stored in an HTML
        table. Each parsed activity event is returned as a dictionary.
If the given HTML is invalid, the method will raise a ParseError
exception.
:param raw_html: HTML string to parse
:returns: a generator of parsed activity events
:raises ParseError: raised when an error occurs parsing
the given HTML stream
"""
def is_activity_empty(bs):
EMPTY_ACTIVITY = "No changes have been made to this (?:bug|issue) yet."
tag = bs.find(text=re.compile(EMPTY_ACTIVITY))
return tag is not None
def find_activity_table(bs):
# The first table with 5 columns is the table of activity
tables = bs.find_all('table')
for tb in tables:
nheaders = len(tb.tr.find_all('th', recursive=False))
if nheaders == 5:
return tb
raise ParseError(cause="Table of bug activity not found.")
def remove_tags(bs):
HTML_TAGS_TO_REMOVE = ['a', 'i', 'span']
for tag in bs.find_all(HTML_TAGS_TO_REMOVE):
tag.replaceWith(tag.text)
def format_text(bs):
strings = [s.strip(' \n\t') for s in bs.stripped_strings]
s = ' '.join(strings)
return s
# Parsing starts here
bs = bs4.BeautifulSoup(raw_html, 'html.parser')
if is_activity_empty(bs):
fields = []
else:
activity_tb = find_activity_table(bs)
remove_tags(activity_tb)
fields = activity_tb.find_all('td')
while fields:
# First two fields: 'Who' and 'When'.
who = fields.pop(0)
when = fields.pop(0)
# The attribute 'rowspan' of 'who' field tells how many
# changes were made on the same date.
n = int(who.get('rowspan'))
# Next fields are split into chunks of three elements:
# 'What', 'Removed' and 'Added'. These chunks share
# 'Who' and 'When' values.
for _ in range(n):
what = fields.pop(0)
removed = fields.pop(0)
added = fields.pop(0)
event = {'Who': format_text(who),
'When': format_text(when),
'What': format_text(what),
'Removed': format_text(removed),
'Added': format_text(added)}
yield event
def _init_client(self, from_archive=False):
"""Init client"""
return BugzillaClient(self.url, user=self.user, password=self.password,
max_bugs_csv=self.max_bugs_csv,
archive=self.archive, from_archive=from_archive)
def __fetch_buglist(self, from_date):
buglist = self.__fetch_and_parse_buglist_page(from_date)
while buglist:
bug = buglist.pop(0)
last_date = bug['changeddate']
yield bug
# Bugzilla does not support pagination. Due to this,
# the next list of bugs is requested adding one second
# to the last date obtained.
if not buglist:
from_date = str_to_datetime(last_date)
from_date += datetime.timedelta(seconds=1)
buglist = self.__fetch_and_parse_buglist_page(from_date)
def __fetch_and_parse_buglist_page(self, from_date):
logger.debug("Fetching and parsing buglist page from %s", str(from_date))
raw_csv = self.client.buglist(from_date=from_date)
buglist = self.parse_buglist(raw_csv)
return [bug for bug in buglist]
def __fetch_and_parse_bugs_details(self, *bug_ids):
logger.debug("Fetching and parsing bugs details")
raw_bugs = self.client.bugs(*bug_ids)
return self.parse_bugs_details(raw_bugs)
def __fetch_and_parse_bug_activity(self, bug_id):
logger.debug("Fetching and parsing bug #%s activity", bug_id)
raw_activity = self.client.bug_activity(bug_id)
activity = self.parse_bug_activity(raw_activity)
return [event for event in activity]
class BugzillaCommand(BackendCommand):
"""Class to run Bugzilla backend from the command line."""
BACKEND = Bugzilla
@staticmethod
def setup_cmd_parser():
"""Returns the Bugzilla argument parser."""
parser = BackendCommandArgumentParser(from_date=True,
basic_auth=True,
archive=True)
# Bugzilla options
group = parser.parser.add_argument_group('Bugzilla arguments')
group.add_argument('--max-bugs', dest='max_bugs',
type=int, default=MAX_BUGS,
help="Maximum number of bugs requested on the same query")
group.add_argument('--max-bugs-csv', dest='max_bugs_csv',
type=int, default=MAX_BUGS_CSV,
help="Maximum number of bugs requested on CSV queries")
# Required arguments
parser.parser.add_argument('url',
help="URL of the Bugzilla server")
return parser
class BugzillaClient(HttpClient):
"""Bugzilla API client.
This class implements a simple client to retrieve distinct
    kinds of data from a Bugzilla repository. Currently, it only
supports 3.x and 4.x servers.
When it is initialized, it checks if the given Bugzilla is
available and retrieves its version.
:param base_url: URL of the Bugzilla server
:param user: Bugzilla user
:param password: user password
    :param max_bugs_csv: max bugs requested per CSV query
:param archive: an archive to store/read fetched data
:param from_archive: it tells whether to write/read the archive
    :raises BackendError: when an error occurs initializing the
client
"""
URL = "%(base)s/%(cgi)s"
# Regular expression to check the Bugzilla version
VERSION_REGEX = re.compile(r'.+bugzilla version="([^"]+)"',
flags=re.DOTALL)
# Bugzilla versions that follow the old style queries
OLD_STYLE_VERSIONS = ['3.2.3', '3.2.2']
# CGI methods
CGI_LOGIN = 'index.cgi'
CGI_BUGLIST = 'buglist.cgi'
CGI_BUG = 'show_bug.cgi'
CGI_BUG_ACTIVITY = 'show_activity.cgi'
# CGI params
PBUGZILLA_LOGIN = 'Bugzilla_login'
PBUGZILLA_PASSWORD = 'Bugzilla_password'
PLIMIT = 'limit'
PLOGIN = 'GoAheadAndLogIn'
PLOGOUT = 'logout'
PBUG_ID = 'id'
PCHFIELD_FROM = 'chfieldfrom'
PCTYPE = 'ctype'
PORDER = 'order'
PEXCLUDE_FIELD = 'excludefield'
# Content-type values
CTYPE_CSV = 'csv'
CTYPE_XML = 'xml'
def __init__(self, base_url, user=None, password=None,
max_bugs_csv=MAX_BUGS_CSV, archive=None, from_archive=False):
self.version = None
super().__init__(base_url, archive=archive, from_archive=from_archive)
if user is not None and password is not None:
self.login(user, password)
self.max_bugs_csv = max_bugs_csv
def login(self, user, password):
"""Authenticate a user in the server.
:param user: Bugzilla user
:param password: user password
"""
url = self.URL % {'base': self.base_url, 'cgi': self.CGI_LOGIN}
payload = {
self.PBUGZILLA_LOGIN: user,
self.PBUGZILLA_PASSWORD: password,
self.PLOGIN: 'Log in'
}
headers = {'Referer': self.base_url}
req = self.fetch(url, payload=payload, headers=headers, method=HttpClient.POST)
# Check if the authentication went OK. When this string
        # is found, it means that the authentication was successful
if req.text.find("index.cgi?logout=1") < 0:
cause = ("Bugzilla client could not authenticate user %s. "
"Please check user and password parameters. "
"URLs may also need a trailing '/'.") % user
raise BackendError(cause=cause)
logger.debug("Bugzilla user %s authenticated in %s",
user, self.base_url)
def logout(self):
"""Logout from the server."""
params = {
self.PLOGOUT: '1'
}
self.call(self.CGI_LOGIN, params)
self._close_http_session()
logger.debug("Bugzilla user logged out from %s",
self.base_url)
def metadata(self):
"""Get metadata information in XML format."""
params = {
self.PCTYPE: self.CTYPE_XML
}
response = self.call(self.CGI_BUG, params)
return response
def buglist(self, from_date=DEFAULT_DATETIME):
"""Get a summary of bugs in CSV format.
        :param from_date: retrieve bugs that were updated from that date
"""
if not self.version:
self.version = self.__fetch_version()
if self.version in self.OLD_STYLE_VERSIONS:
order = 'Last+Changed'
else:
order = 'changeddate'
date = from_date.strftime("%Y-%m-%d %H:%M:%S")
params = {
self.PCHFIELD_FROM: date,
self.PCTYPE: self.CTYPE_CSV,
self.PLIMIT: self.max_bugs_csv,
self.PORDER: order
}
response = self.call(self.CGI_BUGLIST, params)
return response
def bugs(self, *bug_ids):
"""Get the information of a list of bugs in XML format.
:param bug_ids: list of bug identifiers
"""
params = {
self.PBUG_ID: bug_ids,
self.PCTYPE: self.CTYPE_XML,
self.PEXCLUDE_FIELD: 'attachmentdata'
}
response = self.call(self.CGI_BUG, params)
return response
def bug_activity(self, bug_id):
"""Get the activity of a bug in HTML format.
:param bug_id: bug identifier
"""
params = {
self.PBUG_ID: bug_id
}
response = self.call(self.CGI_BUG_ACTIVITY, params)
return response
def call(self, cgi, params):
"""Run an API command.
:param cgi: cgi command to run on the server
:param params: dict with the HTTP parameters needed to run
the given command
"""
url = self.URL % {'base': self.base_url, 'cgi': cgi}
logger.debug("Bugzilla client calls command: %s params: %s",
cgi, str(params))
req = self.fetch(url, payload=params)
return req.text
@staticmethod
def sanitize_for_archive(url, headers, payload):
"""Sanitize payload of a HTTP request by removing the login and password information
before storing/retrieving archived items
:param: url: HTTP url request
:param: headers: HTTP headers request
:param: payload: HTTP payload request
        :returns: url, headers and the sanitized payload
"""
if BugzillaClient.PBUGZILLA_LOGIN in payload:
payload.pop(BugzillaClient.PBUGZILLA_LOGIN)
if BugzillaClient.PBUGZILLA_PASSWORD in payload:
payload.pop(BugzillaClient.PBUGZILLA_PASSWORD)
if BugzillaClient.PLOGIN in payload:
payload.pop(BugzillaClient.PLOGIN)
return url, headers, payload
def __fetch_version(self):
response = self.metadata()
m = re.match(self.VERSION_REGEX, response)
if m:
version = m.group(1)
logger.debug("Bugzilla server is online: %s (v. %s)",
self.base_url, version)
return version
else:
cause = "Bugzilla client could not determine the server version"
raise BackendError(cause=cause)
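# A minimal usage sketch for the backend above, assuming a reachable Bugzilla
# instance at the placeholder URL; the max_bugs value is illustrative only.
#
#     backend = Bugzilla('https://bugzilla.example.org', max_bugs=25)
#     for item in backend.fetch(from_date=DEFAULT_DATETIME):
#         # each fetched item wraps the parsed bug under the 'data' key
#         print(Bugzilla.metadata_id(item['data']))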
| valeriocos/perceval | perceval/backends/core/bugzilla.py | Python | gpl-3.0 | 19,041 |
#!/usr/bin/env python
"""
General Utilities
(part of web.py)
"""
from __future__ import print_function
__all__ = [
"Storage", "storage", "storify",
"Counter", "counter",
"iters",
"rstrips", "lstrips", "strips",
"safeunicode", "safestr",
"timelimit",
"Memoize", "memoize",
"re_compile", "re_subm",
"group", "uniq", "iterview",
"IterBetter", "iterbetter",
"safeiter", "safewrite",
"dictreverse", "dictfind", "dictfindall", "dictincr", "dictadd",
"requeue", "restack",
"listget", "intget", "datestr",
"numify", "denumify", "commify", "dateify",
"nthstr", "cond",
"CaptureStdout", "capturestdout", "Profile", "profile",
"tryall",
"ThreadedDict", "threadeddict",
"autoassign",
"to36",
"safemarkdown",
"sendmail"
]
import re, sys, time, threading, itertools, traceback, os
import subprocess
import datetime
from threading import local as threadlocal
from .py3helpers import PY2, itervalues, iteritems, text_type, string_types, imap, is_iter
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
class Storage(dict):
"""
A Storage object is like a dictionary except `obj.foo` can be used
in addition to `obj['foo']`.
>>> o = storage(a=1)
>>> o.a
1
>>> o['a']
1
>>> o.a = 2
>>> o['a']
2
>>> del o.a
>>> o.a
Traceback (most recent call last):
...
AttributeError: 'a'
"""
def __getattr__(self, key):
try:
return self[key]
except KeyError as k:
raise AttributeError(k)
def __setattr__(self, key, value):
self[key] = value
def __delattr__(self, key):
try:
del self[key]
except KeyError as k:
raise AttributeError(k)
def __repr__(self):
return '<Storage ' + dict.__repr__(self) + '>'
storage = Storage
def storify(mapping, *requireds, **defaults):
"""
Creates a `storage` object from dictionary `mapping`, raising `KeyError` if
    `mapping` doesn't have all of the keys in `requireds` and using the default
values for keys found in `defaults`.
For example, `storify({'a':1, 'c':3}, b=2, c=0)` will return the equivalent of
`storage({'a':1, 'b':2, 'c':3})`.
If a `storify` value is a list (e.g. multiple values in a form submission),
`storify` returns the last element of the list, unless the key appears in
`defaults` as a list. Thus:
>>> storify({'a':[1, 2]}).a
2
>>> storify({'a':[1, 2]}, a=[]).a
[1, 2]
>>> storify({'a':1}, a=[]).a
[1]
>>> storify({}, a=[]).a
[]
    Similarly, if the value has a `value` attribute, `storify` will return _its_
value, unless the key appears in `defaults` as a dictionary.
>>> storify({'a':storage(value=1)}).a
1
>>> storify({'a':storage(value=1)}, a={}).a
<Storage {'value': 1}>
>>> storify({}, a={}).a
{}
"""
_unicode = defaults.pop('_unicode', False)
# if _unicode is callable object, use it convert a string to unicode.
to_unicode = safeunicode
if _unicode is not False and hasattr(_unicode, "__call__"):
to_unicode = _unicode
def unicodify(s):
if _unicode and isinstance(s, str): return to_unicode(s)
else: return s
def getvalue(x):
if hasattr(x, 'file') and hasattr(x, 'value'):
return x.value
elif hasattr(x, 'value'):
return unicodify(x.value)
else:
return unicodify(x)
stor = Storage()
for key in requireds + tuple(mapping.keys()):
value = mapping[key]
if isinstance(value, list):
if isinstance(defaults.get(key), list):
value = [getvalue(x) for x in value]
else:
value = value[-1]
if not isinstance(defaults.get(key), dict):
value = getvalue(value)
if isinstance(defaults.get(key), list) and not isinstance(value, list):
value = [value]
setattr(stor, key, value)
for (key, value) in iteritems(defaults):
result = value
if hasattr(stor, key):
result = stor[key]
if value == () and not isinstance(result, tuple):
result = (result,)
setattr(stor, key, result)
return stor
class Counter(storage):
"""Keeps count of how many times something is added.
>>> c = counter()
>>> c.add('x')
>>> c.add('x')
>>> c.add('x')
>>> c.add('x')
>>> c.add('x')
>>> c.add('y')
>>> c['y']
1
>>> c['x']
5
>>> c.most()
['x']
"""
def add(self, n):
self.setdefault(n, 0)
self[n] += 1
def most(self):
"""Returns the keys with maximum count."""
m = max(itervalues(self))
return [k for k, v in iteritems(self) if v == m]
def least(self):
"""Returns the keys with mininum count."""
m = min(self.itervalues())
return [k for k, v in iteritems(self) if v == m]
def percent(self, key):
"""Returns what percentage a certain key is of all entries.
>>> c = counter()
>>> c.add('x')
>>> c.add('x')
>>> c.add('x')
>>> c.add('y')
>>> c.percent('x')
0.75
>>> c.percent('y')
0.25
"""
return float(self[key])/sum(self.values())
def sorted_keys(self):
"""Returns keys sorted by value.
>>> c = counter()
>>> c.add('x')
>>> c.add('x')
>>> c.add('y')
>>> c.sorted_keys()
['x', 'y']
"""
return sorted(self.keys(), key=lambda k: self[k], reverse=True)
def sorted_values(self):
"""Returns values sorted by value.
>>> c = counter()
>>> c.add('x')
>>> c.add('x')
>>> c.add('y')
>>> c.sorted_values()
[2, 1]
"""
return [self[k] for k in self.sorted_keys()]
def sorted_items(self):
"""Returns items sorted by value.
>>> c = counter()
>>> c.add('x')
>>> c.add('x')
>>> c.add('y')
>>> c.sorted_items()
[('x', 2), ('y', 1)]
"""
return [(k, self[k]) for k in self.sorted_keys()]
def __repr__(self):
return '<Counter ' + dict.__repr__(self) + '>'
counter = Counter
iters = [list, tuple, set, frozenset]
class _hack(tuple): pass
iters = _hack(iters)
iters.__doc__ = """
A list of iterable items (like lists, but not strings). Includes whichever
of lists, tuples, sets, and Sets are available in this version of Python.
"""
def _strips(direction, text, remove):
if isinstance(remove, iters):
for subr in remove:
text = _strips(direction, text, subr)
return text
if direction == 'l':
if text.startswith(remove):
return text[len(remove):]
elif direction == 'r':
if text.endswith(remove):
return text[:-len(remove)]
else:
raise ValueError("Direction needs to be r or l.")
return text
def rstrips(text, remove):
"""
removes the string `remove` from the right of `text`
>>> rstrips("foobar", "bar")
'foo'
"""
return _strips('r', text, remove)
def lstrips(text, remove):
"""
removes the string `remove` from the left of `text`
>>> lstrips("foobar", "foo")
'bar'
>>> lstrips('http://foo.org/', ['http://', 'https://'])
'foo.org/'
>>> lstrips('FOOBARBAZ', ['FOO', 'BAR'])
'BAZ'
>>> lstrips('FOOBARBAZ', ['BAR', 'FOO'])
'BARBAZ'
"""
return _strips('l', text, remove)
def strips(text, remove):
"""
removes the string `remove` from the both sides of `text`
>>> strips("foobarfoo", "foo")
'bar'
"""
return rstrips(lstrips(text, remove), remove)
def safeunicode(obj, encoding='utf-8'):
r"""
Converts any given object to unicode string.
>>> safeunicode('hello')
u'hello'
>>> safeunicode(2)
u'2'
>>> safeunicode('\xe1\x88\xb4')
u'\u1234'
"""
t = type(obj)
if t is text_type:
return obj
elif t is bytes:
return obj.decode(encoding)
elif t in [int, float, bool]:
return unicode(obj)
#elif hasattr(obj, '__unicode__') or isinstance(obj, unicode):
# return unicode(obj)
#else:
# return str(obj).decode(encoding)
else:
return unicode(obj)
def safestr(obj, encoding='utf-8'):
r"""
Converts any given object to utf-8 encoded string.
>>> safestr('hello')
'hello'
>>> safestr(2)
'2'
"""
if PY2 and isinstance(obj, unicode):
return obj.encode(encoding)
elif is_iter(obj):
return imap(safestr, obj)
else:
return str(obj)
if not PY2:
#Since Python3, utf-8 encoded strings and unicode strings are the same thing
safeunicode = safestr
def timelimit(timeout):
"""
A decorator to limit a function to `timeout` seconds, raising `TimeoutError`
if it takes longer.
>>> import time
>>> def meaningoflife():
... time.sleep(.2)
... return 42
>>>
>>> timelimit(.1)(meaningoflife)()
Traceback (most recent call last):
...
RuntimeError: took too long
>>> timelimit(1)(meaningoflife)()
42
_Caveat:_ The function isn't stopped after `timeout` seconds but continues
executing in a separate thread. (There seems to be no way to kill a thread.)
inspired by <http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/473878>
"""
def _1(function):
def _2(*args, **kw):
class Dispatch(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.result = None
self.error = None
self.setDaemon(True)
self.start()
def run(self):
try:
self.result = function(*args, **kw)
except:
self.error = sys.exc_info()
c = Dispatch()
c.join(timeout)
if c.isAlive():
raise RuntimeError('took too long')
if c.error:
raise c.error[1]
return c.result
return _2
return _1
class Memoize:
"""
'Memoizes' a function, caching its return values for each input.
If `expires` is specified, values are recalculated after `expires` seconds.
If `background` is specified, values are recalculated in a separate thread.
>>> calls = 0
>>> def howmanytimeshaveibeencalled():
... global calls
... calls += 1
... return calls
>>> fastcalls = memoize(howmanytimeshaveibeencalled)
>>> howmanytimeshaveibeencalled()
1
>>> howmanytimeshaveibeencalled()
2
>>> fastcalls()
3
>>> fastcalls()
3
>>> import time
>>> fastcalls = memoize(howmanytimeshaveibeencalled, .1, background=False)
>>> fastcalls()
4
>>> fastcalls()
4
>>> time.sleep(.2)
>>> fastcalls()
5
>>> def slowfunc():
... time.sleep(.1)
... return howmanytimeshaveibeencalled()
>>> fastcalls = memoize(slowfunc, .2, background=True)
>>> fastcalls()
6
>>> timelimit(.05)(fastcalls)()
6
>>> time.sleep(.2)
>>> timelimit(.05)(fastcalls)()
6
>>> timelimit(.05)(fastcalls)()
6
>>> time.sleep(.2)
>>> timelimit(.05)(fastcalls)()
7
>>> fastcalls = memoize(slowfunc, None, background=True)
>>> threading.Thread(target=fastcalls).start()
>>> time.sleep(.01)
>>> fastcalls()
9
"""
def __init__(self, func, expires=None, background=True):
self.func = func
self.cache = {}
self.expires = expires
self.background = background
self.running = {}
def __call__(self, *args, **keywords):
key = (args, tuple(keywords.items()))
if not self.running.get(key):
self.running[key] = threading.Lock()
def update(block=False):
if self.running[key].acquire(block):
try:
self.cache[key] = (self.func(*args, **keywords), time.time())
finally:
self.running[key].release()
if key not in self.cache:
update(block=True)
elif self.expires and (time.time() - self.cache[key][1]) > self.expires:
if self.background:
threading.Thread(target=update).start()
else:
update()
return self.cache[key][0]
memoize = Memoize
re_compile = memoize(re.compile) #@@ threadsafe?
re_compile.__doc__ = """
A memoized version of re.compile.
"""
class _re_subm_proxy:
def __init__(self):
self.match = None
def __call__(self, match):
self.match = match
return ''
def re_subm(pat, repl, string):
"""
Like re.sub, but returns the replacement _and_ the match object.
>>> t, m = re_subm('g(oo+)fball', r'f\\1lish', 'goooooofball')
>>> t
'foooooolish'
>>> m.groups()
('oooooo',)
"""
compiled_pat = re_compile(pat)
proxy = _re_subm_proxy()
compiled_pat.sub(proxy.__call__, string)
return compiled_pat.sub(repl, string), proxy.match
def group(seq, size):
"""
Returns an iterator over a series of lists of length size from iterable.
>>> list(group([1,2,3,4], 2))
[[1, 2], [3, 4]]
>>> list(group([1,2,3,4,5], 2))
[[1, 2], [3, 4], [5]]
"""
def take(seq, n):
for i in range(n):
yield next(seq)
if not hasattr(seq, 'next'):
seq = iter(seq)
while True:
x = list(take(seq, size))
if x:
yield x
else:
break
def uniq(seq, key=None):
"""
Removes duplicate elements from a list while preserving the order of the rest.
>>> uniq([9,0,2,1,0])
[9, 0, 2, 1]
The value of the optional `key` parameter should be a function that
takes a single argument and returns a key to test the uniqueness.
>>> uniq(["Foo", "foo", "bar"], key=lambda s: s.lower())
['Foo', 'bar']
"""
key = key or (lambda x: x)
seen = set()
result = []
for v in seq:
k = key(v)
if k in seen:
continue
seen.add(k)
result.append(v)
return result
def iterview(x):
"""
Takes an iterable `x` and returns an iterator over it
which prints its progress to stderr as it iterates through.
"""
WIDTH = 70
def plainformat(n, lenx):
return '%5.1f%% (%*d/%d)' % ((float(n)/lenx)*100, len(str(lenx)), n, lenx)
def bars(size, n, lenx):
val = int((float(n)*size)/lenx + 0.5)
if size - val:
spacing = ">" + (" "*(size-val))[1:]
else:
spacing = ""
return "[%s%s]" % ("="*val, spacing)
def eta(elapsed, n, lenx):
if n == 0:
return '--:--:--'
if n == lenx:
secs = int(elapsed)
else:
secs = int((elapsed/n) * (lenx-n))
mins, secs = divmod(secs, 60)
hrs, mins = divmod(mins, 60)
return '%02d:%02d:%02d' % (hrs, mins, secs)
def format(starttime, n, lenx):
out = plainformat(n, lenx) + ' '
if n == lenx:
end = ' '
else:
end = ' ETA '
end += eta(time.time() - starttime, n, lenx)
out += bars(WIDTH - len(out) - len(end), n, lenx)
out += end
return out
starttime = time.time()
lenx = len(x)
for n, y in enumerate(x):
sys.stderr.write('\r' + format(starttime, n, lenx))
yield y
sys.stderr.write('\r' + format(starttime, n+1, lenx) + '\n')
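# An illustrative sketch for iterview: wrapping a sequence shows a progress bar
# on stderr while the items pass through unchanged. `process` is a hypothetical
# callback, not part of this module.
#
#     for record in iterview(records):
#         process(record)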
class IterBetter:
"""
Returns an object that can be used as an iterator
but can also be used via __getitem__ (although it
cannot go backwards -- that is, you cannot request
`iterbetter[0]` after requesting `iterbetter[1]`).
>>> import itertools
>>> c = iterbetter(itertools.count())
>>> c[1]
1
>>> c[5]
5
>>> c[3]
Traceback (most recent call last):
...
IndexError: already passed 3
It is also possible to get the first value of the iterator or None.
>>> c = iterbetter(iter([3, 4, 5]))
>>> print(c.first())
3
>>> c = iterbetter(iter([]))
>>> print(c.first())
None
    For a boolean test, IterBetter peeks at the first value in the iterator without affecting the iteration.
>>> c = iterbetter(iter(range(5)))
>>> bool(c)
True
>>> list(c)
[0, 1, 2, 3, 4]
>>> c = iterbetter(iter([]))
>>> bool(c)
False
>>> list(c)
[]
"""
def __init__(self, iterator):
self.i, self.c = iterator, 0
def first(self, default=None):
"""Returns the first element of the iterator or None when there are no
elements.
If the optional argument default is specified, that is returned instead
of None when there are no elements.
"""
try:
return next(iter(self))
except StopIteration:
return default
def __iter__(self):
if hasattr(self, "_head"):
yield self._head
while 1:
yield next(self.i)
self.c += 1
def __getitem__(self, i):
#todo: slices
if i < self.c:
raise IndexError("already passed "+str(i))
try:
while i > self.c:
next(self.i)
self.c += 1
# now self.c == i
self.c += 1
return next(self.i)
except StopIteration:
raise IndexError(str(i))
def __nonzero__(self):
if hasattr(self, "__len__"):
return self.__len__() != 0
elif hasattr(self, "_head"):
return True
else:
try:
self._head = next(self.i)
except StopIteration:
return False
else:
return True
__bool__ = __nonzero__
iterbetter = IterBetter
def safeiter(it, cleanup=None, ignore_errors=True):
    """Makes an iterator safe by ignoring the exceptions that occur during the iteration.
    """
    def _next():
        # named _next so it does not shadow the builtin next() used below
        while True:
            try:
                return next(it)
            except StopIteration:
                raise
            except:
                traceback.print_exc()
    it = iter(it)
    while True:
        try:
            yield _next()
        except StopIteration:
            return
def safewrite(filename, content):
"""Writes the content to a temp file and then moves the temp file to
given filename to avoid overwriting the existing file in case of errors.
"""
    f = open(filename + '.tmp', 'w')
f.write(content)
f.close()
os.rename(f.name, filename)
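# Illustrative sketch: because safewrite writes to "<filename>.tmp" and renames
# it over the target only after the write completes, a crash mid-write cannot
# leave a truncated file behind. The filename and payload are examples only.
#
#     safewrite('settings.json', '{"debug": false}')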
def dictreverse(mapping):
"""
Returns a new dictionary with keys and values swapped.
>>> dictreverse({1: 2, 3: 4})
{2: 1, 4: 3}
"""
return dict([(value, key) for (key, value) in iteritems(mapping)])
def dictfind(dictionary, element):
"""
Returns a key whose value in `dictionary` is `element`
or, if none exists, None.
>>> d = {1:2, 3:4}
>>> dictfind(d, 4)
3
>>> dictfind(d, 5)
"""
for (key, value) in iteritems(dictionary):
if element is value:
return key
def dictfindall(dictionary, element):
"""
Returns the keys whose values in `dictionary` are `element`
or, if none exists, [].
>>> d = {1:4, 3:4}
>>> dictfindall(d, 4)
[1, 3]
>>> dictfindall(d, 5)
[]
"""
res = []
for (key, value) in iteritems(dictionary):
if element is value:
res.append(key)
return res
def dictincr(dictionary, element):
"""
Increments `element` in `dictionary`,
setting it to one if it doesn't exist.
>>> d = {1:2, 3:4}
>>> dictincr(d, 1)
3
>>> d[1]
3
>>> dictincr(d, 5)
1
>>> d[5]
1
"""
dictionary.setdefault(element, 0)
dictionary[element] += 1
return dictionary[element]
def dictadd(*dicts):
"""
Returns a dictionary consisting of the keys in the argument dictionaries.
If they share a key, the value from the last argument is used.
>>> dictadd({1: 0, 2: 0}, {2: 1, 3: 1})
{1: 0, 2: 1, 3: 1}
"""
result = {}
for dct in dicts:
result.update(dct)
return result
def requeue(queue, index=-1):
"""Returns the element at index after moving it to the beginning of the queue.
>>> x = [1, 2, 3, 4]
>>> requeue(x)
4
>>> x
[4, 1, 2, 3]
"""
x = queue.pop(index)
queue.insert(0, x)
return x
def restack(stack, index=0):
"""Returns the element at index after moving it to the top of stack.
>>> x = [1, 2, 3, 4]
>>> restack(x)
1
>>> x
[2, 3, 4, 1]
"""
x = stack.pop(index)
stack.append(x)
return x
def listget(lst, ind, default=None):
"""
Returns `lst[ind]` if it exists, `default` otherwise.
>>> listget(['a'], 0)
'a'
>>> listget(['a'], 1)
>>> listget(['a'], 1, 'b')
'b'
"""
if len(lst)-1 < ind:
return default
return lst[ind]
def intget(integer, default=None):
"""
Returns `integer` as an int or `default` if it can't.
>>> intget('3')
3
>>> intget('3a')
>>> intget('3a', 0)
0
"""
try:
return int(integer)
except (TypeError, ValueError):
return default
def datestr(then, now=None):
"""
Converts a (UTC) datetime object to a nice string representation.
>>> from datetime import datetime, timedelta
>>> d = datetime(1970, 5, 1)
>>> datestr(d, now=d)
'0 microseconds ago'
>>> for t, v in iteritems({
... timedelta(microseconds=1): '1 microsecond ago',
... timedelta(microseconds=2): '2 microseconds ago',
... -timedelta(microseconds=1): '1 microsecond from now',
... -timedelta(microseconds=2): '2 microseconds from now',
... timedelta(microseconds=2000): '2 milliseconds ago',
... timedelta(seconds=2): '2 seconds ago',
... timedelta(seconds=2*60): '2 minutes ago',
... timedelta(seconds=2*60*60): '2 hours ago',
... timedelta(days=2): '2 days ago',
... }):
... assert datestr(d, now=d+t) == v
>>> datestr(datetime(1970, 1, 1), now=d)
'January 1'
>>> datestr(datetime(1969, 1, 1), now=d)
'January 1, 1969'
>>> datestr(datetime(1970, 6, 1), now=d)
'June 1, 1970'
>>> datestr(None)
''
"""
def agohence(n, what, divisor=None):
if divisor: n = n // divisor
out = str(abs(n)) + ' ' + what # '2 day'
if abs(n) != 1: out += 's' # '2 days'
out += ' ' # '2 days '
if n < 0:
out += 'from now'
else:
out += 'ago'
return out # '2 days ago'
oneday = 24 * 60 * 60
if not then: return ""
if not now: now = datetime.datetime.utcnow()
if type(now).__name__ == "DateTime":
now = datetime.datetime.fromtimestamp(now)
if type(then).__name__ == "DateTime":
then = datetime.datetime.fromtimestamp(then)
elif type(then).__name__ == "date":
then = datetime.datetime(then.year, then.month, then.day)
delta = now - then
deltaseconds = int(delta.days * oneday + delta.seconds + delta.microseconds * 1e-06)
deltadays = abs(deltaseconds) // oneday
if deltaseconds < 0: deltadays *= -1 # fix for oddity of floor
if deltadays:
if abs(deltadays) < 4:
return agohence(deltadays, 'day')
try:
out = then.strftime('%B %e') # e.g. 'June 3'
except ValueError:
# %e doesn't work on Windows.
out = then.strftime('%B %d') # e.g. 'June 03'
if then.year != now.year or deltadays < 0:
out += ', %s' % then.year
return out
if int(deltaseconds):
if abs(deltaseconds) > (60 * 60):
return agohence(deltaseconds, 'hour', 60 * 60)
elif abs(deltaseconds) > 60:
return agohence(deltaseconds, 'minute', 60)
else:
return agohence(deltaseconds, 'second')
deltamicroseconds = delta.microseconds
if delta.days: deltamicroseconds = int(delta.microseconds - 1e6) # datetime oddity
if abs(deltamicroseconds) > 1000:
return agohence(deltamicroseconds, 'millisecond', 1000)
return agohence(deltamicroseconds, 'microsecond')
def numify(string):
"""
Removes all non-digit characters from `string`.
>>> numify('800-555-1212')
'8005551212'
>>> numify('800.555.1212')
'8005551212'
"""
return ''.join([c for c in str(string) if c.isdigit()])
def denumify(string, pattern):
"""
Formats `string` according to `pattern`, where the letter X gets replaced
by characters from `string`.
>>> denumify("8005551212", "(XXX) XXX-XXXX")
'(800) 555-1212'
"""
out = []
for c in pattern:
if c == "X":
out.append(string[0])
string = string[1:]
else:
out.append(c)
return ''.join(out)
def commify(n):
"""
Add commas to an integer `n`.
>>> commify(1)
'1'
>>> commify(123)
'123'
>>> commify(1234)
'1,234'
>>> commify(1234567890)
'1,234,567,890'
>>> commify(123.0)
'123.0'
>>> commify(1234.5)
'1,234.5'
>>> commify(1234.56789)
'1,234.56789'
>>> commify('%.2f' % 1234.5)
'1,234.50'
>>> commify(None)
>>>
"""
if n is None: return None
n = str(n)
if '.' in n:
dollars, cents = n.split('.')
else:
dollars, cents = n, None
r = []
for i, c in enumerate(str(dollars)[::-1]):
if i and (not (i % 3)):
r.insert(0, ',')
r.insert(0, c)
out = ''.join(r)
if cents:
out += '.' + cents
return out
def dateify(datestring):
"""
Formats a numified `datestring` properly.
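    For example, a 14-digit numified timestamp becomes a datetime string:
    >>> dateify("20041231203059")
    '2004-12-31 20:30:59'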
"""
return denumify(datestring, "XXXX-XX-XX XX:XX:XX")
def nthstr(n):
"""
Formats an ordinal.
Doesn't handle negative numbers.
>>> nthstr(1)
'1st'
>>> nthstr(0)
'0th'
>>> [nthstr(x) for x in [2, 3, 4, 5, 10, 11, 12, 13, 14, 15]]
['2nd', '3rd', '4th', '5th', '10th', '11th', '12th', '13th', '14th', '15th']
>>> [nthstr(x) for x in [91, 92, 93, 94, 99, 100, 101, 102]]
['91st', '92nd', '93rd', '94th', '99th', '100th', '101st', '102nd']
>>> [nthstr(x) for x in [111, 112, 113, 114, 115]]
['111th', '112th', '113th', '114th', '115th']
"""
assert n >= 0
if n % 100 in [11, 12, 13]: return '%sth' % n
return {1: '%sst', 2: '%snd', 3: '%srd'}.get(n % 10, '%sth') % n
def cond(predicate, consequence, alternative=None):
"""
Function replacement for if-else to use in expressions.
>>> x = 2
>>> cond(x % 2 == 0, "even", "odd")
'even'
>>> cond(x % 2 == 0, "even", "odd") + '_row'
'even_row'
"""
if predicate:
return consequence
else:
return alternative
class CaptureStdout:
"""
Captures everything `func` prints to stdout and returns it instead.
>>> def idiot():
... print("foo")
>>> capturestdout(idiot)()
'foo\\n'
**WARNING:** Not threadsafe!
"""
def __init__(self, func):
self.func = func
def __call__(self, *args, **keywords):
out = StringIO()
oldstdout = sys.stdout
sys.stdout = out
try:
self.func(*args, **keywords)
finally:
sys.stdout = oldstdout
return out.getvalue()
capturestdout = CaptureStdout
class Profile:
"""
Profiles `func` and returns a tuple containing its output
and a string with human-readable profiling information.
>>> import time
>>> out, inf = profile(time.sleep)(.001)
>>> out
>>> inf[:10].strip()
'took 0.0'
"""
def __init__(self, func):
self.func = func
def __call__(self, *args): ##, **kw): kw unused
import cProfile, pstats, os, tempfile ##, time already imported
f, filename = tempfile.mkstemp()
os.close(f)
prof = cProfile.Profile()
stime = time.time()
result = prof.runcall(self.func, *args)
stime = time.time() - stime
out = StringIO()
stats = pstats.Stats(prof, stream=out)
stats.strip_dirs()
stats.sort_stats('time', 'calls')
stats.print_stats(40)
stats.print_callers()
x = '\n\ntook '+ str(stime) + ' seconds\n'
x += out.getvalue()
# remove the tempfile
try:
os.remove(filename)
except IOError:
pass
return result, x
profile = Profile
import traceback
# hack for compatibility with Python 2.3:
if not hasattr(traceback, 'format_exc'):
from cStringIO import StringIO
def format_exc(limit=None):
strbuf = StringIO()
traceback.print_exc(limit, strbuf)
return strbuf.getvalue()
traceback.format_exc = format_exc
def tryall(context, prefix=None):
"""
Tries a series of functions and prints their results.
`context` is a dictionary mapping names to values;
the value will only be tried if it's callable.
>>> tryall(dict(j=lambda: True))
j: True
----------------------------------------
results:
True: 1
For example, you might have a file `test/stuff.py`
with a series of functions testing various things in it.
At the bottom, have a line:
if __name__ == "__main__": tryall(globals())
Then you can run `python test/stuff.py` and get the results of
all the tests.
"""
context = context.copy() # vars() would update
results = {}
for (key, value) in iteritems(context):
if not hasattr(value, '__call__'):
continue
if prefix and not key.startswith(prefix):
continue
print(key + ':', end=" ")
try:
r = value()
dictincr(results, r)
print(r)
except:
print('ERROR')
dictincr(results, 'ERROR')
print(' ' + '\n '.join(traceback.format_exc().split('\n')))
print('-'*40)
print('results:')
for (key, value) in iteritems(results):
print(' '*2, str(key)+':', value)
class ThreadedDict(threadlocal):
"""
Thread local storage.
>>> d = ThreadedDict()
>>> d.x = 1
>>> d.x
1
>>> import threading
>>> def f(): d.x = 2
...
>>> t = threading.Thread(target=f)
>>> t.start()
>>> t.join()
>>> d.x
1
"""
_instances = set()
def __init__(self):
ThreadedDict._instances.add(self)
def __del__(self):
ThreadedDict._instances.remove(self)
def __hash__(self):
return id(self)
def clear_all():
"""Clears all ThreadedDict instances.
"""
for t in list(ThreadedDict._instances):
t.clear()
clear_all = staticmethod(clear_all)
# Define all these methods to more or less fully emulate dict -- attribute access
# is built into threading.local.
def __getitem__(self, key):
return self.__dict__[key]
def __setitem__(self, key, value):
self.__dict__[key] = value
def __delitem__(self, key):
del self.__dict__[key]
def __contains__(self, key):
return key in self.__dict__
has_key = __contains__
def clear(self):
self.__dict__.clear()
def copy(self):
return self.__dict__.copy()
def get(self, key, default=None):
return self.__dict__.get(key, default)
def items(self):
return self.__dict__.items()
def iteritems(self):
return iteritems(self.__dict__)
def keys(self):
return self.__dict__.keys()
def iterkeys(self):
        return iter(self.__dict__.keys())
iter = iterkeys
def values(self):
return self.__dict__.values()
def itervalues(self):
return itervalues(self.__dict__)
def pop(self, key, *args):
return self.__dict__.pop(key, *args)
def popitem(self):
return self.__dict__.popitem()
def setdefault(self, key, default=None):
return self.__dict__.setdefault(key, default)
def update(self, *args, **kwargs):
self.__dict__.update(*args, **kwargs)
def __repr__(self):
return '<ThreadedDict %r>' % self.__dict__
__str__ = __repr__
threadeddict = ThreadedDict
def autoassign(self, locals):
"""
Automatically assigns local variables to `self`.
>>> self = storage()
>>> autoassign(self, dict(a=1, b=2))
>>> self.a
1
>>> self.b
2
Generally used in `__init__` methods, as in:
def __init__(self, foo, bar, baz=1): autoassign(self, locals())
"""
for (key, value) in iteritems(locals):
if key == 'self':
continue
setattr(self, key, value)
def to36(q):
"""
Converts an integer to base 36 (a useful scheme for human-sayable IDs).
>>> to36(35)
'z'
>>> to36(119292)
'2k1o'
>>> int(to36(939387374), 36)
939387374
>>> to36(0)
'0'
>>> to36(-393)
Traceback (most recent call last):
...
ValueError: must supply a positive integer
"""
if q < 0: raise ValueError("must supply a positive integer")
letters = "0123456789abcdefghijklmnopqrstuvwxyz"
converted = []
while q != 0:
q, r = divmod(q, 36)
converted.insert(0, letters[r])
return "".join(converted) or '0'
r_url = re_compile('(?<!\()(http://(\S+))')
def safemarkdown(text):
"""
Converts text to HTML following the rules of Markdown, but blocking any
outside HTML input, so that only the things supported by Markdown
can be used. Also converts raw URLs to links.
(requires [markdown.py](http://webpy.org/markdown.py))
"""
from markdown import markdown
if text:
        text = text.replace('<', '&lt;')
# TODO: automatically get page title?
text = r_url.sub(r'<\1>', text)
text = markdown(text)
return text
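# Illustrative sketch (requires the external markdown module referenced above):
# raw '<' is escaped to '&lt;' and bare http URLs are auto-linked before the
# text is rendered through markdown().
#
#     html = safemarkdown("see http://webpy.org <b>docs</b>")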
def sendmail(from_address, to_address, subject, message, headers=None, **kw):
"""
    Sends the email message `message` with mail and envelope headers
    from `from_address` to `to_address` with `subject`.
    Additional email headers can be specified with the dictionary
    `headers`.
Optionally cc, bcc and attachments can be specified as keyword arguments.
Attachments must be an iterable and each attachment can be either a
filename or a file object or a dictionary with filename, content and
optionally content_type keys.
If `web.config.smtp_server` is set, it will send the message
to that SMTP server. Otherwise it will look for
`/usr/sbin/sendmail`, the typical location for the sendmail-style
binary. To use sendmail from a different path, set `web.config.sendmail_path`.
"""
attachments = kw.pop("attachments", [])
mail = _EmailMessage(from_address, to_address, subject, message, headers, **kw)
for a in attachments:
if isinstance(a, dict):
mail.attach(a['filename'], a['content'], a.get('content_type'))
elif hasattr(a, 'read'): # file
filename = os.path.basename(getattr(a, "name", ""))
content_type = getattr(a, 'content_type', None)
mail.attach(filename, a.read(), content_type)
        elif isinstance(a, string_types):
f = open(a, 'rb')
content = f.read()
f.close()
filename = os.path.basename(a)
mail.attach(filename, content, None)
else:
raise ValueError("Invalid attachment: %s" % repr(a))
mail.send()
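# Illustrative sketch: a minimal sendmail() call. The addresses and filename are
# placeholders; delivery goes through web.config.smtp_server when it is set,
# otherwise through the local sendmail binary as described above.
#
#     sendmail('[email protected]', '[email protected]', 'Weekly report',
#              'See the attached file.', attachments=['report.pdf'])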
class _EmailMessage:
def __init__(self, from_address, to_address, subject, message, headers=None, **kw):
def listify(x):
if not isinstance(x, list):
return [safestr(x)]
else:
return [safestr(a) for a in x]
subject = safestr(subject)
message = safestr(message)
from_address = safestr(from_address)
to_address = listify(to_address)
cc = listify(kw.get('cc', []))
bcc = listify(kw.get('bcc', []))
recipients = to_address + cc + bcc
import email.utils
self.from_address = email.utils.parseaddr(from_address)[1]
self.recipients = [email.utils.parseaddr(r)[1] for r in recipients]
self.headers = dictadd({
'From': from_address,
'To': ", ".join(to_address),
'Subject': subject
}, headers or {})
if cc:
self.headers['Cc'] = ", ".join(cc)
self.message = self.new_message()
self.message.add_header("Content-Transfer-Encoding", "7bit")
self.message.add_header("Content-Disposition", "inline")
self.message.add_header("MIME-Version", "1.0")
self.message.set_payload(message, 'utf-8')
self.multipart = False
def new_message(self):
from email.message import Message
return Message()
def attach(self, filename, content, content_type=None):
if not self.multipart:
msg = self.new_message()
msg.add_header("Content-Type", "multipart/mixed")
msg.attach(self.message)
self.message = msg
self.multipart = True
import mimetypes
try:
from email import encoders
except:
from email import Encoders as encoders
content_type = content_type or mimetypes.guess_type(filename)[0] or "application/octet-stream"
msg = self.new_message()
msg.set_payload(content)
msg.add_header('Content-Type', content_type)
msg.add_header('Content-Disposition', 'attachment', filename=filename)
if not content_type.startswith("text/"):
encoders.encode_base64(msg)
self.message.attach(msg)
def prepare_message(self):
for k, v in iteritems(self.headers):
if k.lower() == "content-type":
self.message.set_type(v)
else:
self.message.add_header(k, v)
self.headers = {}
def send(self):
try:
from . import webapi
except ImportError:
webapi = Storage(config=Storage())
self.prepare_message()
message_text = self.message.as_string()
if webapi.config.get('smtp_server'):
server = webapi.config.get('smtp_server')
port = webapi.config.get('smtp_port', 0)
username = webapi.config.get('smtp_username')
password = webapi.config.get('smtp_password')
debug_level = webapi.config.get('smtp_debuglevel', None)
starttls = webapi.config.get('smtp_starttls', False)
import smtplib
smtpserver = smtplib.SMTP(server, port)
if debug_level:
smtpserver.set_debuglevel(debug_level)
if starttls:
smtpserver.ehlo()
smtpserver.starttls()
smtpserver.ehlo()
if username and password:
smtpserver.login(username, password)
smtpserver.sendmail(self.from_address, self.recipients, message_text)
smtpserver.quit()
elif webapi.config.get('email_engine') == 'aws':
import boto.ses
c = boto.ses.SESConnection(
aws_access_key_id=webapi.config.get('aws_access_key_id'),
aws_secret_access_key=webapi.config.get('aws_secret_access_key'))
c.send_raw_email(self.from_address, message_text, self.recipients)
else:
sendmail = webapi.config.get('sendmail_path', '/usr/sbin/sendmail')
assert not self.from_address.startswith('-'), 'security'
for r in self.recipients:
assert not r.startswith('-'), 'security'
cmd = [sendmail, '-f', self.from_address] + self.recipients
p = subprocess.Popen(cmd, stdin=subprocess.PIPE)
p.stdin.write(message_text.encode('utf-8'))
p.stdin.close()
p.wait()
def __repr__(self):
return "<EmailMessage>"
def __str__(self):
return self.message.as_string()
if __name__ == "__main__":
import doctest
doctest.testmod()
| TheXYZLAB/RaspCloudPrint | web/utils.py | Python | mit | 42,406 |
import json
import six
from .. import base
from .api_test import DockerClientTest, url_prefix, response
from docker.utils import create_ipam_config, create_ipam_pool
try:
from unittest import mock
except ImportError:
import mock
class NetworkTest(DockerClientTest):
@base.requires_api_version('1.21')
def test_list_networks(self):
networks = [
{
"name": "none",
"id": "8e4e55c6863ef424",
"type": "null",
"endpoints": []
},
{
"name": "host",
"id": "062b6d9ea7913fde",
"type": "host",
"endpoints": []
},
]
get = mock.Mock(return_value=response(
status_code=200, content=json.dumps(networks).encode('utf-8')))
with mock.patch('docker.Client.get', get):
self.assertEqual(self.client.networks(), networks)
self.assertEqual(get.call_args[0][0], url_prefix + 'networks')
filters = json.loads(get.call_args[1]['params']['filters'])
self.assertFalse(filters)
self.client.networks(names=['foo'])
filters = json.loads(get.call_args[1]['params']['filters'])
self.assertEqual(filters, {'name': ['foo']})
self.client.networks(ids=['123'])
filters = json.loads(get.call_args[1]['params']['filters'])
self.assertEqual(filters, {'id': ['123']})
@base.requires_api_version('1.21')
def test_create_network(self):
network_data = {
"id": 'abc12345',
"warning": "",
}
network_response = response(status_code=200, content=network_data)
post = mock.Mock(return_value=network_response)
with mock.patch('docker.Client.post', post):
result = self.client.create_network('foo')
self.assertEqual(result, network_data)
self.assertEqual(
post.call_args[0][0],
url_prefix + 'networks/create')
self.assertEqual(
json.loads(post.call_args[1]['data']),
{"name": "foo"})
opts = {
'com.docker.network.bridge.enable_icc': False,
'com.docker.network.bridge.enable_ip_masquerade': False,
}
self.client.create_network('foo', 'bridge', opts)
self.assertEqual(
json.loads(post.call_args[1]['data']),
{"name": "foo", "driver": "bridge", "options": opts})
ipam_pool_config = create_ipam_pool(subnet="192.168.52.0/24",
gateway="192.168.52.254")
ipam_config = create_ipam_config(pool_configs=[ipam_pool_config])
self.client.create_network("bar", driver="bridge",
ipam=ipam_config)
self.assertEqual(
json.loads(post.call_args[1]['data']),
{
"name": "bar",
"driver": "bridge",
"ipam": {
"driver": "default",
"config": [{
"iprange": None,
"gateway": "192.168.52.254",
"subnet": "192.168.52.0/24",
"auxaddresses": None
}]
}
})
@base.requires_api_version('1.21')
def test_remove_network(self):
network_id = 'abc12345'
delete = mock.Mock(return_value=response(status_code=200))
with mock.patch('docker.Client.delete', delete):
self.client.remove_network(network_id)
args = delete.call_args
self.assertEqual(args[0][0],
url_prefix + 'networks/{0}'.format(network_id))
@base.requires_api_version('1.21')
def test_inspect_network(self):
network_id = 'abc12345'
network_name = 'foo'
network_data = {
six.u('name'): network_name,
six.u('id'): network_id,
six.u('driver'): 'bridge',
six.u('containers'): {},
}
network_response = response(status_code=200, content=network_data)
get = mock.Mock(return_value=network_response)
with mock.patch('docker.Client.get', get):
result = self.client.inspect_network(network_id)
self.assertEqual(result, network_data)
args = get.call_args
self.assertEqual(args[0][0],
url_prefix + 'networks/{0}'.format(network_id))
@base.requires_api_version('1.21')
def test_connect_container_to_network(self):
network_id = 'abc12345'
container_id = 'def45678'
post = mock.Mock(return_value=response(status_code=201))
with mock.patch('docker.Client.post', post):
self.client.connect_container_to_network(
{'Id': container_id}, network_id)
self.assertEqual(
post.call_args[0][0],
url_prefix + 'networks/{0}/connect'.format(network_id))
self.assertEqual(
json.loads(post.call_args[1]['data']),
{'container': container_id})
@base.requires_api_version('1.21')
def test_disconnect_container_from_network(self):
network_id = 'abc12345'
container_id = 'def45678'
post = mock.Mock(return_value=response(status_code=201))
with mock.patch('docker.Client.post', post):
self.client.disconnect_container_from_network(
{'Id': container_id}, network_id)
self.assertEqual(
post.call_args[0][0],
url_prefix + 'networks/{0}/disconnect'.format(network_id))
self.assertEqual(
json.loads(post.call_args[1]['data']),
{'container': container_id})
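# These unit tests stub the HTTP layer with mock objects, so they run without a
# Docker daemon; for example, with the standard unittest runner:
#
#     python -m unittest tests.unit.network_test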
| rhatdan/docker-py | tests/unit/network_test.py | Python | apache-2.0 | 5,949 |
# Copyright 2021 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test 'explain'.
"""
# isort: LOCAL
from stratis_cli._constants import PoolMaintenanceErrorCode
from .._misc import TEST_RUNNER, SimTestCase, device_name_list
_DEVICE_STRATEGY = device_name_list(1)
class ExplainTestCase(SimTestCase):
"""
Test 'explain'.
"""
_MENU = ["--propagate", "pool", "explain"]
def test_explain(self):
"""
Test that every valid code works.
"""
for item in list(PoolMaintenanceErrorCode):
command_line = self._MENU + [str(item)]
TEST_RUNNER(command_line)
| stratis-storage/stratis-cli | tests/whitebox/integration/pool/test_explain.py | Python | apache-2.0 | 1,139 |
# coding: utf-8
"""
ORCID Member
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class AmountV30Rc1(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'value': 'str',
'currency_code': 'str'
}
attribute_map = {
'value': 'value',
'currency_code': 'currency-code'
}
def __init__(self, value=None, currency_code=None): # noqa: E501
"""AmountV30Rc1 - a model defined in Swagger""" # noqa: E501
self._value = None
self._currency_code = None
self.discriminator = None
if value is not None:
self.value = value
self.currency_code = currency_code
@property
def value(self):
"""Gets the value of this AmountV30Rc1. # noqa: E501
:return: The value of this AmountV30Rc1. # noqa: E501
:rtype: str
"""
return self._value
@value.setter
def value(self, value):
"""Sets the value of this AmountV30Rc1.
:param value: The value of this AmountV30Rc1. # noqa: E501
:type: str
"""
self._value = value
@property
def currency_code(self):
"""Gets the currency_code of this AmountV30Rc1. # noqa: E501
:return: The currency_code of this AmountV30Rc1. # noqa: E501
:rtype: str
"""
return self._currency_code
@currency_code.setter
def currency_code(self, currency_code):
"""Sets the currency_code of this AmountV30Rc1.
:param currency_code: The currency_code of this AmountV30Rc1. # noqa: E501
:type: str
"""
if currency_code is None:
raise ValueError("Invalid value for `currency_code`, must not be `None`") # noqa: E501
self._currency_code = currency_code
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(AmountV30Rc1, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, AmountV30Rc1):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
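if __name__ == "__main__":
    # A short usage sketch with placeholder values; currency_code is required
    # by its setter above, while value is optional.
    amount = AmountV30Rc1(value="1000", currency_code="NZD")
    print(amount.to_dict())
    print(amount == AmountV30Rc1(value="1000", currency_code="NZD"))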
| Royal-Society-of-New-Zealand/NZ-ORCID-Hub | orcid_api_v3/models/amount_v30_rc1.py | Python | mit | 3,888 |
#!/usr/bin/python
import getopt
import pymqi, CMQC, CMQCFC, CMQXC
import sys
def usage():
print """Usage: list_mq_queues.py -H <hostName> -g <qmgrName> -a <channelName> [-p <portNumber>] [-t <queueType>] [-u <usageType>]"""
def help():
usage()
print """
List MQ queues
-H, --host Host name
-g, --qmgr Queue Manager Name
-p, --port-number port number (default 1414)
-a, --channel-name channel name
-t, --queueType Queue types filter (default: local)
-d, --definitionType Queue Definition type (default: predefined)
-u, --usageType filter for normal or transmission queues (default: normal)
Valid queue types are:
all All queue types
local Local queues
alias Alias queue definition
remote Local definition of remote queues
cluster Cluster queues
model Model queue definition
Valid queue definition types are:
all All queue types
predefined Predefined permanent queue.
permanent_dynamic Dynamically defined permanent queue.
shared_dynamic Dynamically defined shared queue. This option is available on z/OS only.
temporary_dynamic Dynamically defined temporary queue.
Valid usage types are:
all All queue usage types
normal Normal usage.
transmission Transmission queue.
example:
list_mq_queues.py -H host1 -g QM1 -a SYSTEM.ADMIN.SVRCONN
list_mq_queues.py -H host2 -g QM2 -a SYSTEM.ADMIN.SVRCONN -t remote
list_mq_queues.py -H 127.0.0.1 -g QM3 -a SYSTEM.ADMIN.SVRCONN --usageType=transmission
"""
def main():
try:
opts, args = getopt.getopt(sys.argv[1:], "hH:g:p:a:t:d:u:", ["help", "host=","qmgrName=","port=","channel=","queueType=","definitionType=","usageType="])
except getopt.GetoptError, err:
print str(err) # will print something like "option -a not recognized"
usage()
sys.exit(-1)
hostName = None
qmgrName = None
portNumber = 1414
channelName = None
queueTypeName = "local"
definitionTypeName = "predefined"
usageTypeName = "normal"
for o, a in opts:
if o in ("-h", "--help"):
help()
sys.exit()
elif o in ("-H", "--host"):
hostName = a
elif o in ("-g", "--qmgr"):
qmgrName = a
elif o in ("-p", "--port"):
portNumber = int(a)
elif o in ("-a", "--channel"):
channelName = a
elif o in ("-t", "--queueType"):
queueTypeName = a
elif o in ("-d", "--definitionType"):
definitionTypeName = a
elif o in ("-u", "--usageType"):
usageTypeName = a
else:
assert False, "unhandled option"
if ((not (hostName and portNumber and channelName and qmgrName)) or
(queueTypeName not in ["all","local","alias","remote","cluster","model"]) or
(definitionTypeName not in ["all","predefined","permanent_dynamic","shared_dynamic","temporary_dynamic"]) or
(usageTypeName not in ["all","normal","transmission"])):
sys.stderr.write("Wrong Parameters.\n")
usage()
sys.exit(-1)
prefix = "*"
if queueTypeName == "all":
queue_type = CMQC.MQQT_ALL
elif queueTypeName == "local":
queue_type = CMQC.MQQT_LOCAL
elif queueTypeName == "alias":
queue_type = CMQC.MQQT_ALIAS
elif queueTypeName == "remote":
queue_type = CMQC.MQQT_REMOTE
elif queueTypeName == "cluster":
queue_type = CMQC.MQQT_CLUSTER
elif queueTypeName == "model":
queue_type = CMQC.MQQT_MODEL
args = {CMQC.MQCA_Q_NAME: prefix,
CMQC.MQIA_Q_TYPE: queue_type,
CMQCFC.MQIACF_Q_ATTRS: CMQCFC.MQIACF_ALL,}
qmgr = None
try:
qmgr = pymqi.connect(qmgrName,channelName,"%s(%s)" % (hostName,portNumber))
pcf = pymqi.PCFExecute(qmgr)
response = pcf.MQCMD_INQUIRE_Q(args)
for queue in response:
queue_name = queue[CMQC.MQCA_Q_NAME]
definition_type = queue.get(CMQC.MQIA_DEFINITION_TYPE,"all")
usageType = queue.get(CMQC.MQIA_USAGE,"all")
if (((definitionTypeName == "all") or
(definitionTypeName == "predefined" and definition_type == CMQC.MQQDT_PREDEFINED) or
(definitionTypeName == "permanent_dynamic" and definition_type == CMQC.MQQDT_PERMANENT_DYNAMIC) or
(definitionTypeName == "shared_dynamic" and definition_type == CMQC.MQQDT_SHARED_DYNAMIC) or
(definitionTypeName == "temporary_dynamic" and definition_type == CMQC.MQQDT_TEMPORARY_DYNAMIC)) and
((usageTypeName == "all") or
(usageTypeName == "normal" and usageType == CMQC.MQUS_NORMAL) or
(usageTypeName == "transmission" and usageType == CMQC.MQUS_TRANSMISSION))):
print(queue_name)
except pymqi.MQMIError, e:
sys.stderr.write("Error on executing PCF command: INQUIRE_Q, reason: %s" % (e))
sys.exit(e.reason)
try:
if qmgr:
qmgr.disconnect()
except pymqi.MQMIError, e:
pass
if __name__ == "__main__":
main()
| klapper/nagios-plugins-mq | list_mq_queues.py | Python | mit | 5,325 |
#!/usr/bin/python
#
# Copyright (c) 2017 Obezimnaka Boms, <[email protected]>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_dnsrecordset_facts
version_added: "2.4"
short_description: Get DNS Record Set facts.
description:
    - Get facts for a specific DNS Record Set in a Zone, for all Record Sets of a given type in a Zone, or for all Record Sets in a Zone.
options:
relative_name:
description:
- Only show results for a Record Set.
resource_group:
description:
- Limit results by resource group. Required when filtering by name or type.
zone_name:
description:
- Limit results by zones. Required when filtering by name or type.
record_type:
description:
- Limit record sets by record type.
top:
description:
- Limit the maximum number of record sets to return
default: 100
extends_documentation_fragment:
- azure
- azure_tags
author:
- "Ozi Boms (@ozboms)"
'''
EXAMPLES = '''
- name: Get facts for one Record Set
azure_rm_dnsrecordset_facts:
resource_group: Testing
zone_name: example.com
relative_name: server10
record_type: A
- name: Get facts for all Type A Record Sets in a Zone
azure_rm_dnsrecordset_facts:
resource_group: Testing
zone_name: example.com
record_type: A
- name: Get all record sets in one zone
azure_rm_dnsrecordset_facts:
resource_group: Testing
zone_name: example.com
'''
RETURN = '''
azure_dnsrecordset:
description: List of record set dicts.
returned: always
type: list
example: [
{
"etag": "60ac0480-44dd-4881-a2ed-680d20b3978e",
"id": "/subscriptions/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX/resourceGroups/testing/providers/Microsoft.Network/dnszones/newzone.com/A/servera",
"name": "servera",
"properties": {
"ARecords": [
{
"ipv4Address": "10.4.5.7"
},
{
"ipv4Address": "2.4.5.8"
}
],
"TTL": 12900
},
"type": "Microsoft.Network/dnszones/A"
}
]
'''
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
try:
from msrestazure.azure_exceptions import CloudError
from azure.common import AzureMissingResourceHttpError, AzureHttpError
except:
# This is handled in azure_rm_common
pass
AZURE_OBJECT_CLASS = 'RecordSet'
class AzureRMRecordSetFacts(AzureRMModuleBase):
def __init__(self):
# define user inputs into argument
self.module_arg_spec = dict(
relative_name=dict(type='str'),
resource_group=dict(type='str'),
zone_name=dict(type='str'),
record_type=dict(type='str'),
top=dict(type='str', default='100')
)
# store the results of the module operation
self.results = dict(
changed=False,
ansible_facts=dict(azure_dnsrecordset=[])
)
self.relative_name = None
self.resource_group = None
self.zone_name = None
self.record_type = None
self.top = None
super(AzureRMRecordSetFacts, self).__init__(self.module_arg_spec)
def exec_module(self, **kwargs):
for key in self.module_arg_spec:
setattr(self, key, kwargs[key])
# create conditionals to catch errors when calling record facts
if self.relative_name and not self.resource_group:
self.fail("Parameter error: resource group required when filtering by name or record type.")
if self.relative_name and not self.zone_name:
self.fail("Parameter error: DNS Zone required when filtering by name or record type.")
# list the conditions for what to return based on input
if self.relative_name is not None:
# if there is a name listed, they want only facts about that specific Record Set itself
self.results['ansible_facts']['azure_dnsrecordset'] = self.get_item()
elif self.record_type:
# else, they just want all the record sets of a specific type
self.results['ansible_facts']['azure_dnsrecordset'] = self.list_type()
elif self.zone_name:
# if there is a zone name listed, then they want all the record sets in a zone
self.results['ansible_facts']['azure_dnsrecordset'] = self.list_zone()
return self.results
def get_item(self):
self.log('Get properties for {0}'.format(self.relative_name))
item = None
results = []
# try to get information for specific Record Set
try:
item = self.dns_client.record_sets.get(self.resource_group, self.zone_name, self.relative_name, self.record_type)
except CloudError:
pass
results = [self.serialize_obj(item, AZURE_OBJECT_CLASS)]
return results
def list_type(self):
self.log('Lists the record sets of a specified type in a DNS zone')
try:
response = self.dns_client.record_sets.list_by_type(self.resource_group, self.zone_name, self.record_type, top=int(self.top))
except AzureHttpError as exc:
self.fail("Failed to list for record type {0} - {1}".format(self.record_type, str(exc)))
results = []
for item in response:
results.append(self.serialize_obj(item, AZURE_OBJECT_CLASS))
return results
def list_zone(self):
self.log('Lists all record sets in a DNS zone')
try:
response = self.dns_client.record_sets.list_by_dns_zone(self.resource_group, self.zone_name, top=int(self.top))
except AzureHttpError as exc:
self.fail("Failed to list for zone {0} - {1}".format(self.zone_name, str(exc)))
results = []
for item in response:
results.append(self.serialize_obj(item, AZURE_OBJECT_CLASS))
return results
def main():
AzureRMRecordSetFacts()
if __name__ == '__main__':
main()
| veger/ansible | lib/ansible/modules/cloud/azure/azure_rm_dnsrecordset_facts.py | Python | gpl-3.0 | 6,511 |
#! /usr/bin/env python
import amqpav
from kombu import Connection
from kombu import Exchange
from kombu import Queue
import uuid
import datetime
import base64
import socket
import time
import random
# AV exchanges
av_exchange = Exchange('check', 'fanout', durable=True)
reply_exchange = Exchange('check-result', 'fanout', durable=True)
# client ID
client_id = 'test-1'
msg_id = ''
# connection to AV service
with Connection('amqp://guest:guest@prophet/antivir') as conn:
conn.connect()
producer = conn.Producer()
# create result queue
resultq = Queue(
client_id,
exchange=reply_exchange,
)
resultq(conn.channel()).declare()
# file for AV check
# eicar - EICAR test file for example
bin_data = open('eicar', 'rb').read()
# create message
#
# generate message ID
msg_id = str(uuid.uuid4())
# generate correlation ID
cor_id = str(uuid.uuid4())
message = amqpav.AVMessageRequest(
msg_id=msg_id,
correlation_id=cor_id,
created=str(datetime.datetime.now()),
interface='something',
content_type='application/octet-stream',
data=bin_data,
)
headers = message.headers()
# send message
producer.publish(
message.body(),
exchange=av_exchange,
headers=headers,
**message.properties()
)
print(' + Message sent')
print('Headers: {}'.format(headers))
print('Properties: {}'.format(message.properties()))
# sleep random time
time.sleep(random.randint(1, 8))
# stop flag
stop = False
# process reply
def p_reply(body, message):
response = amqpav.AVMessageResponse()
response.load(message)
print(' * Message received')
print('Response headers: {}'.format(response.headers()))
print('Response properties: {}'.format(response.properties()))
# get message's parent UUID
cid = response.correlation_id
if (msg_id == cid):
message.ack()
print('=== Message received.')
global stop
stop = True
else:
print('=== Message skipped.')
# wait for response
rq = Queue(client_id, exchange=reply_exchange)
# change URL for your server
with Connection('amqp://guest:guest@prophet/antivir') as conn:
with conn.Consumer(rq, callbacks=[p_reply], no_ack=False) as consumer:
while not stop:
try:
conn.drain_events(timeout=5)
except socket.timeout:
print('timeout')
consumer.recover()
| dvoraka/amqpav | examples/avclient.py | Python | gpl-2.0 | 2,556 |
# Copyright (c) 2017 pandas-gbq Authors All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
"""Helper methods for loading data into BigQuery"""
import io
from google.cloud import bigquery
from pandas_gbq.features import FEATURES
import pandas_gbq.schema
def encode_chunk(dataframe):
"""Return a file-like object of CSV-encoded rows.
Args:
dataframe (pandas.DataFrame): A chunk of a dataframe to encode
"""
csv_buffer = io.StringIO()
dataframe.to_csv(
csv_buffer,
index=False,
header=False,
encoding="utf-8",
float_format="%.17g",
date_format="%Y-%m-%d %H:%M:%S.%f",
)
# Convert to a BytesIO buffer so that unicode text is properly handled.
# See: https://github.com/pydata/pandas-gbq/issues/106
body = csv_buffer.getvalue()
body = body.encode("utf-8")
return io.BytesIO(body)
def split_dataframe(dataframe, chunksize=None):
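    # Yields (rows_remaining, chunk) pairs; with chunksize=None the whole frame
    # is emitted as a single chunk and 0 is reported as remaining.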
dataframe = dataframe.reset_index(drop=True)
if chunksize is None:
yield 0, dataframe
return
remaining_rows = len(dataframe)
total_rows = remaining_rows
start_index = 0
while start_index < total_rows:
end_index = start_index + chunksize
chunk = dataframe[start_index:end_index]
start_index += chunksize
remaining_rows = max(0, remaining_rows - chunksize)
yield remaining_rows, chunk
def load_chunks(
client,
dataframe,
destination_table_ref,
chunksize=None,
schema=None,
location=None,
):
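    # Generator: for each chunk, yields the remaining-row count from
    # split_dataframe and then loads that chunk into BigQuery.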
job_config = bigquery.LoadJobConfig()
job_config.write_disposition = "WRITE_APPEND"
job_config.source_format = "CSV"
job_config.allow_quoted_newlines = True
# Explicit schema? Use that!
if schema is not None:
schema = pandas_gbq.schema.remove_policy_tags(schema)
job_config.schema = pandas_gbq.schema.to_google_cloud_bigquery(schema)
# If not, let BigQuery determine schema unless we are encoding the CSV files ourselves.
elif not FEATURES.bigquery_has_from_dataframe_with_csv:
schema = pandas_gbq.schema.generate_bq_schema(dataframe)
schema = pandas_gbq.schema.remove_policy_tags(schema)
job_config.schema = pandas_gbq.schema.to_google_cloud_bigquery(schema)
chunks = split_dataframe(dataframe, chunksize=chunksize)
for remaining_rows, chunk in chunks:
yield remaining_rows
if FEATURES.bigquery_has_from_dataframe_with_csv:
client.load_table_from_dataframe(
chunk,
destination_table_ref,
job_config=job_config,
location=location,
).result()
else:
try:
chunk_buffer = encode_chunk(chunk)
client.load_table_from_file(
chunk_buffer,
destination_table_ref,
job_config=job_config,
location=location,
).result()
finally:
chunk_buffer.close()
| pydata/pandas-gbq | pandas_gbq/load.py | Python | bsd-3-clause | 3,099 |
"""
Sets up the terminal color scheme.
"""
import functools
import os
import sys
from django.utils import termcolors
def supports_color():
"""
Returns True if the running system's terminal supports color,
and False otherwise.
"""
plat = sys.platform
supported_platform = plat != 'Pocket PC' and (plat != 'win32' or 'ANSICON' in os.environ)
# isatty is not always implemented, #6223.
is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()
if not supported_platform or not is_a_tty:
return False
return True
class Style:
pass
def make_style(config_string=''):
"""
Create a Style object from the given config_string.
If config_string is empty django.utils.termcolors.DEFAULT_PALETTE is used.
"""
style = Style()
color_settings = termcolors.parse_color_setting(config_string)
# The nocolor palette has all available roles.
# Use that palette as the basis for populating
# the palette as defined in the environment.
for role in termcolors.PALETTES[termcolors.NOCOLOR_PALETTE]:
if color_settings:
format = color_settings.get(role, {})
style_func = termcolors.make_style(**format)
else:
def style_func(x):
return x
setattr(style, role, style_func)
# For backwards compatibility,
# set style for ERROR_OUTPUT == ERROR
style.ERROR_OUTPUT = style.ERROR
return style
@functools.lru_cache(maxsize=None)
def no_style():
"""
Returns a Style object with no color scheme.
"""
return make_style('nocolor')
def color_style():
"""
Returns a Style object from the Django color scheme.
"""
if not supports_color():
return no_style()
return make_style(os.environ.get('DJANGO_COLORS', ''))
| mattseymour/django | django/core/management/color.py | Python | bsd-3-clause | 1,824 |
from __future__ import print_function
from __future__ import division
__author__ = """Alex "O." Holcombe""" ## double-quotes will be silently removed, single quotes will be left, eg, O'Connor
import numpy as np
import itertools #to calculate all subsets
from copy import deepcopy
from math import atan, pi, cos, sin, sqrt, ceil
import time, sys, platform, os, StringIO, gc
from psychopy import visual, core
import random
#If you run this code stand-alone, it will do a demo of the basic stimulus it is designed to provide
#BEGIN helper functions from primes.py
def gcd(a,b):
"""Return greatest common divisor using Euclid's Algorithm."""
while b:
a, b = b, a % b
return a
def lcm(a,b):
"""Return lowest common multiple."""
return (a*b)/gcd(a,b)
def LCM(terms):
"Return lcm of a list of numbers."
return reduce(lambda a,b: lcm(a,b), terms)
#END helper functions from primes.py
def calcCondsPerNumTargets(numRings,numTargets):
#numRings is number of rings, each of which can have up to one target
#numTargets is list or array of numTarget conditions, e.g. 1,2,3 means the experiment includes 1, 2, and 3 targets
#Each target can be placed randomly in any of the rings.
#Want all possibilities to be covered equally often. That means each target number condition has to include all the combinations
# of places that number of targets can go.
    #So that some targetNum conditions don't have more trials than others, have to scale up each targetNum condition to the worst case.
#Actually it's worse than that. To make them fit evenly, have to use least common multiple
#3 rings choose 2 for targets, 3 rings choose 1 for target, have to have as many conditions as the maximum.
#To find maximum, determine length of each.
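    #Illustrative worked example: numRings=3, numTargets=[1,2,3] gives C(3,1)=3, C(3,2)=3, C(3,3)=1 possibilities,
    #so the least common multiple LCM(3,3,1)=3 conditions are needed per numTargets condition.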
ringNums = np.arange(numRings)
numPossibilitiesEach = list()
for k in numTargets:
numPossibilitiesCouldPutKtargets = len( list(itertools.combinations(ringNums,k)) )
#print(numPossibilitiesCouldPutKtargets)
numPossibilitiesEach.append( numPossibilitiesCouldPutKtargets )
m = max( numPossibilitiesEach ) #because the worst case (number of targets) requires this many, have to have this many for all. Actually,
leastCommonMultiple = LCM( numPossibilitiesEach ) #to have equal number of trials per numtargets, would have to use this figure for each
#print('biggest=',m, ' Least common multiple=', leastCommonMultiple)
return leastCommonMultiple
def accelerateComputer(slowFast, process_priority, disable_gc):
# process_priority = 'normal' 'high' or 'realtime'
if slowFast:
if process_priority == 'normal':
pass
elif process_priority == 'high':
core.rush(True)
elif process_priority == 'realtime': # Only makes a diff compared to 'high' on Windows.
core.rush(True, realtime = True)
else:
print('Invalid process priority:',process_priority,"Process running at normal.")
process_priority = 'normal'
if disable_gc:
gc.disable()
if slowFast==0: #turn off the speed-up
if disable_gc:
gc.enable()
core.rush(False)
def openMyStimWindow(monitorSpec,widthPix,heightPix,bgColor,allowGUI,units,fullscr,scrn,waitBlank): #make it a function because have to do it several times, want to be sure is identical each time
myWin = visual.Window(monitor=monitorSpec,size=(widthPix,heightPix),allowGUI=allowGUI,units=units,color=bgColor,colorSpace='rgb',fullscr=fullscr,screen=scrn,waitBlanking=waitBlank) #Holcombe lab monitor
if myWin is None:
print('ERROR: Failed to open window in openMyStimWindow!')
core.quit()
return myWin
def constructRingsAsGratings(myWin,numRings,radii,ringRadialMaskEachRing,numObjects,patchAngle,colors,stimColorIdxsOrder,gratingTexPix,blobToCueEachRing,ppLog):
#Originally to construct a grating formed of the colors in order of stimColorIdxsOrder
antialiasGrating = True
autoLogging = False
texEachRing=list() #texture which will draw the ring of objects via openGL texture on grating
cueTexEachRing=list() #making a separate grating for the cue, wherein everything background color except the location of the cue
ringsRadial=list(); #after making the rings of object, put them in this list
cueRings=list() #after making grating for each cue, put it in this cue
stimColorIdxsOrder= stimColorIdxsOrder[::-1] #reverse order of indices, because grating texture is rendered in reverse order than is blobs version
radialMaskEachRing=[[0,0,0,1,1,] ,[0,0,0,0,0,0,1,1,],[0,0,0,0,0,0,0,0,0,0,1,1,]]
numUniquePatches= len( max(stimColorIdxsOrder,key=len) )
numCycles =(1.0*numObjects) / numUniquePatches
angleSegment = 360./(numUniquePatches*numCycles)
if gratingTexPix % numUniquePatches >0: #gratingTexPix contains numUniquePatches. numCycles will control how many total objects there are around circle
ppLog.warn('Warning: could not exactly render a '+str(numUniquePatches)+'-segment pattern radially, will be off by '+str( (gratingTexPix%numUniquePatches)*1.0 /gratingTexPix ) )
if numObjects % numUniquePatches >0:
        msg= 'Warning: numUniquePatches ('+str(numUniquePatches)+') does not go evenly into numObjects'; ppLog.warn(msg)
#create texture for red-green-blue-red-green-blue etc. radial grating
for i in range(numRings):
#myTex.append(np.zeros([gratingTexPix,gratingTexPix,3])+[1,-1,1])
texEachRing.append( np.zeros([gratingTexPix,gratingTexPix,3])+bgColor[0] ) #start with all channels in all locs = bgColor
cueTexEachRing.append( np.ones([gratingTexPix,gratingTexPix,3])*bgColor[0] )
if patchAngle > angleSegment:
msg='Error: patchAngle requested ('+str(patchAngle)+') bigger than maximum possible ('+str(angleSegment)+') numUniquePatches='+str(numUniquePatches)+' numCycles='+str(numCycles);
print(msg); ppLog.error(msg)
oneCycleAngle = 360./numCycles
segmentSizeTexture = angleSegment/oneCycleAngle *gratingTexPix #I call it segment because includes spaces in between, that I'll write over subsequently
patchSizeTexture = patchAngle/oneCycleAngle *gratingTexPix
patchSizeTexture = round(patchSizeTexture) #best is odd number, even space on either size
patchFlankSize = (segmentSizeTexture-patchSizeTexture)/2.
patchAngleActual = patchSizeTexture / gratingTexPix * oneCycleAngle
if abs(patchAngleActual - patchAngle) > .01:
msg = 'Desired patchAngle = '+str(patchAngle)+' but closest can get with '+str(gratingTexPix)+' gratingTexPix is '+str(patchAngleActual);
ppLog.warn(msg)
for colrI in range(numUniquePatches): #for that portion of texture, set color
start = colrI*segmentSizeTexture
end = start + segmentSizeTexture
start = round(start) #don't round until after do addition, otherwise can fall short
end = round(end)
ringColr=list();
for i in range(numRings):
ringColr.append(colors[ stimColorIdxsOrder[i][colrI] ])
for colorChannel in range(3):
for i in range(numRings):
texEachRing[i][:, start:end, colorChannel] = ringColr[i][colorChannel];
for cycle in range(int(round(numCycles))):
base = cycle*gratingTexPix/numCycles
for i in range(numRings):
cueTexEachRing[i][:, base+start/numCycles:base+end/numCycles, colorChannel] = ringColr[1][colorChannel]
#draw bgColor area (emptySizeEitherSideOfPatch) by overwriting first and last entries of segment
for i in range(numRings):
texEachRing[i][:, start:start+patchFlankSize, :] = bgColor[0]; #one flank
texEachRing[i][:, end-1-patchFlankSize:end, :] = bgColor[0]; #other flank
for cycle in range(int(round(numCycles))):
base = cycle*gratingTexPix/numCycles
for i in range(numRings):
cueTexEachRing[i][:,base+start/numCycles:base+(start+patchFlankSize)/numCycles,:] =bgColor[0];
cueTexEachRing[i][:,base+(end-1-patchFlankSize)/numCycles:base+end/numCycles,:] =bgColor[0]
#color the segment to be cued white. First, figure out cue segment len
segmentLen = gratingTexPix/numCycles*1/numUniquePatches
    WhiteCueSizeAdj=0 # adjust the white cue marker wingAdd 20110923
if numObjects==3:WhiteCueSizeAdj=110
elif numObjects==6:WhiteCueSizeAdj=25
elif numObjects==12:WhiteCueSizeAdj=-15
elif numObjects==2:WhiteCueSizeAdj=200
for i in range(numRings): #color cue position white
if blobToCueEachRing[i] >=0: #-999 means dont cue anything
blobToCueCorrectForRingReversal = numObjects-1 - blobToCueEachRing[i] #grating seems to be laid out in opposite direction than blobs, this fixes postCueNumBlobsAway so positive is in direction of motion
if blobToCueCorrectForRingReversal==0 and numObjects==12: WhiteCueSizeAdj=0
cueStartEntry = blobToCueCorrectForRingReversal*segmentLen+WhiteCueSizeAdj
cueEndEntry = cueStartEntry + segmentLen-2*WhiteCueSizeAdj
cueTexEachRing[i][:, cueStartEntry:cueEndEntry, :] = -1*bgColor[0] #-1*bgColor is that what makes it white?
blackGrains = round( .25*(cueEndEntry-cueStartEntry) )#number of "pixels" of texture at either end of cue sector to make black. Need to update this to reflect patchAngle
cueTexEachRing[i][:, cueStartEntry:cueStartEntry+blackGrains, :] = bgColor[0]; #this one doesn't seem to do anything?
cueTexEachRing[i][:, cueEndEntry-1-blackGrains:cueEndEntry, :] = bgColor[0];
angRes = 100 #100 is default. I have not seen any effect. This is currently not printed to log file!
for i in range(numRings):
ringsRadial.append(visual.RadialStim(myWin, tex=texEachRing[i], color=[1,1,1],size=radii[i],#myTexInner is the actual colored pattern. radial grating used to make it an annulus
mask=ringRadialMaskEachRing[i], # this is a 1-D mask dictating the behaviour from the centre of the stimulus to the surround.
radialCycles=0, angularCycles=numObjects*1.0/numUniquePatches,
angularRes=angRes, interpolate=antialiasGrating, autoLog=autoLogging))
#the mask is radial and indicates that should show only .3-.4 as one moves radially, creating an annulus
#end preparation of colored rings
#draw cueing grating for tracking task. Have entire grating be empty except for one white sector
cueRings.append(visual.RadialStim(myWin, tex=cueTexEachRing[i], color=[1,1,1],size=radii[i], #cueTexInner is white. Only one sector of it shown by mask
mask = radialMaskEachRing[i], radialCycles=0, angularCycles=1, #only one cycle because no pattern actually repeats- trying to highlight only one sector
angularRes=angRes, interpolate=antialiasGrating, autoLog=autoLogging) )#depth doesn't seem to work, just always makes it invisible?
currentlyCuedBlobEachRing = blobToCueEachRing #this will mean that don't have to redraw
return ringsRadial,cueRings,currentlyCuedBlobEachRing
######### End constructRingAsGrating ###########################################################
#########################################
def gratingAngleToEuclidean(theta):
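    #Illustrative mapping: theta=0 (grating North, measured clockwise) -> 90 (Euclidean North);
    #theta=90 -> 0 (Euclidean East).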
euclidean = -1.0 * theta #because grating angles are clockwise and euclidean is counter-clockwise
euclidean += 90 #because gratings start with 0 at North and Euclidean 0 is East
return euclidean
def constructThickThinWedgeRingsTargetAndCue(myWin,initialAngle,radius,radialMask,radialMaskTarget,cueRadialMask,visibleWedge,numObjects,patchAngleThick,patchAngleThin,bgColor,
thickWedgeColor,thinWedgeColor,targetAngleOffset,targetRadialOffset,gratingTexPix,cueColor,objToCue,ppLog):
#Construct a grating formed of the colors in order of stimColorIdxsOrder
#Also construct a similar cueRing grating with same colors, but one blob potentially highlighted.
#cueRing Has different spacing than ringRadial, not sure why, I think because calculations tend to be off as it's
#always one cycle.
#radialMask doesn't seem to eliminate very-central part, bizarre
antialiasGrating = False #Don't set this to true because in present context, it's like imposing a radial Gaussian ramp on each object
autoLogging = False
numCycles = numObjects
segmentAngle = 360./numCycles
#create texture for red-green-blue-red-green-blue etc. radial grating
#2-D texture which will draw the ring of objects via openGL texture on grating
ringTex = np.zeros([gratingTexPix,gratingTexPix,3])+bgColor[0] #start with all channels in all locs = bgColor
cueTex = np.zeros([gratingTexPix,gratingTexPix,3])+bgColor[0] #start with all channels in all locs = bgColor
oneCycleAngle = 360./numCycles
def patchSizeForTexture(segmentAngle, patchAngle, oneCycleAngle, gratingTexPix):
segmentSizeTexture = segmentAngle/oneCycleAngle *gratingTexPix #I call it segment because includes spaces between objects, that I'll write over subsequently
if patchAngle > segmentAngle:
msg='Error: patchAngle requested ('+str(patchAngle)+') bigger than maximum possible ('+str(segmentAngle)+') numCycles='+str(numCycles)
print(msg); ppLog.error(msg)
patchSizeTexture = patchAngle*1.0/oneCycleAngle *gratingTexPix
patchSizeTexture = round(patchSizeTexture) #best is odd number, even space on either size
patchFlankSize =int( (segmentSizeTexture-patchSizeTexture)/2. )#this area will be drawn in bgColor
patchAngleActual = patchSizeTexture*1.0 / gratingTexPix * oneCycleAngle
if abs(patchAngleActual - patchAngle) > .01:
msg = 'Desired patchAngle = '+str(patchAngle)+' but closest can get with '+str(gratingTexPix)+' gratingTexPix is '+str(patchAngleActual);
ppLog.warn(msg)
return segmentSizeTexture, patchSizeTexture, patchFlankSize
#thick wedges. Create texture for visual.radialStim
segmentSizeTexture, patchSizeTexture, patchFlankSize = patchSizeForTexture(segmentAngle, patchAngleThick, oneCycleAngle, gratingTexPix)
start = 0 #identify starting texture position for this segment
end = int( round( start + segmentSizeTexture ) ) #don't round until after do addition, otherwise can fall short
angRes = 200 #100 is default. I have not seen an artifact at present when set to 100, two things drawn don't overlap exactly
#First draw the entire segment in patchColr, then erase sides (flankers) leaving only the patchAngle
ringTex[:, start:end, :] = thickWedgeColor[:]
#spaces in between objects are termed the flanks, should be bgColor,
ringTex[:, start:start+patchFlankSize, :] = bgColor[:] #one flank
ringTex[:, end-1-patchFlankSize:end, :] = bgColor[:] #other flank
#thin wedges. Create texture for visual.radialStim
segmentSizeTexture, thinWedgeSizeTexture, patchFlankSizeThinWedge = patchSizeForTexture(segmentAngle, patchAngleThin, oneCycleAngle, gratingTexPix)
#Instead of drawing the red and undisplaced blue with the same call to radialStim,
#We will draw the red with one call to radialStim, then the thinner blue sliver on top, using radialMask so it's only the sliver and leaves the
#remainder of the red showing.
#First draw the thick red contexts thickWedges
ringRadialThickWedges= visual.RadialStim(myWin, tex=ringTex, color=[1,1,1],size=radius,#ringTex is the actual colored pattern. radial grating used to make it an annulus
visibleWedge=visibleWedge,
ori = initialAngle, #essentially the phase of the grating
mask=radialMask, # this is a 1-D mask masking the centre, to create an annulus
radialCycles=0, angularCycles=numObjects,
angularRes=angRes, interpolate=antialiasGrating, autoLog=autoLogging)
#thinWedge, the usually-blue target
#First draw the entire segment in thinWedgeColor, then erase sides (flankers) leaving only the patchAngle (wedge angle)
thinRingTex = np.zeros([gratingTexPix,gratingTexPix,3])+bgColor[0] #start with all channels in all locs = bgColor
thinRingTex[:, start:end, :] = thinWedgeColor[:]
#spaces in between objects are termed the flanks, should be bgColor,
thinRingTex[:, start:start+patchFlankSize, :] = bgColor[:] #one flank
thinRingTex[:, end-1-patchFlankSize:end, :] = bgColor[:] #other flank
ringRadialThinWedges= visual.RadialStim(myWin, tex=thinRingTex, color=[1,1,1],size=radius,#ringTex is the actual colored pattern. radial grating used to make it an annulus
visibleWedge=visibleWedge,
ori = initialAngle, #essentially the phase of the grating
mask=radialMaskTarget, # this is a 1-D mask masking the centre, to create an annulus
radialCycles=0, angularCycles=numObjects,
angularRes=angRes, interpolate=antialiasGrating, autoLog=autoLogging)
#Draw target (task is to judge offset of thin wedge relative to thick wedge.
#So, overdraw a single segment of the grating by using visibleWedge
#angularPhase =
#I need to not show the part of the thick wedge that will be displaced, while showing enough of thick wedge to overdraw previous location of thin wedge
targetCorrectedForRingReversal = objToCue #numObjects-1 - objToCue #grating seems to be laid out in opposite direction than blobs, this fixes postCueNumBlobsAway so positive is in direction of motion
kludgeWiden= 5
visibleAngleStart = targetCorrectedForRingReversal*segmentAngle + (segmentAngle-patchAngleThick)/2 - kludgeWiden
visibleAngleEnd = (visibleAngleStart+kludgeWiden) + patchAngleThick + kludgeWiden
print('targetCorrectedForRingReversal = ',targetCorrectedForRingReversal,'targetRing initialAngle=', initialAngle, ' visibleAngleStart=',visibleAngleStart,' visibleAngleEnd=',visibleAngleEnd)
if targetAngleOffset >= 0:
visibleAngleEnd -= targetAngleOffset #don't show the part of the thick wedge that would be displaced
else: #shifted the other way, towards the start, so spillover on that side needs to be avoided by not drawing it
visibleAngleStart -= targetAngleOffset
#DRAW THE TARGET RING, like the above ringRadial except displaced
#Below call is identical to ringRadial except ori
#set visibleWedge so it only highlights a single thick wedge
targetRadial= visual.RadialStim(myWin, tex=thinRingTex, color=[1,1,1],size=radius+targetRadialOffset,#ringTex is the actual colored pattern. radial grating used to make it an annulus
visibleWedge=[visibleAngleStart,visibleAngleEnd],
ori = initialAngle+targetAngleOffset, #Always zero in the new version where the task is to judge the radial offset of the blue thin wedge
mask=radialMaskTarget, # this is a 1-D mask masking the centre, to create an annulus
radialCycles=0, angularCycles=numObjects,
angularRes=angRes, interpolate=antialiasGrating, autoLog=autoLogging)
#MAKE A COPY of the thick red ring to draw over undisplaced blue
ringRadialThickWedgesCopy= visual.RadialStim(myWin, tex=ringTex, color=[1,1,1],size=radius,#ringTex is the actual colored pattern. radial grating used to make it an annulus
visibleWedge= (visibleAngleStart,visibleAngleEnd),
ori=initialAngle,
mask=radialMask, # this is a 1-D mask masking the centre, to create an annulus
radialCycles=0, angularCycles=numObjects,
angularRes=angRes, interpolate=antialiasGrating, autoLog=autoLogging)
#Draw lines (alternative target)
lines=[]
#calculate estimated eccentricity that grating target would be at, if were using grating targets
#Find the center of the ones in radialMaskTarget, multiply by grating radius
oneIndices = np.where(radialMaskTarget==1)[0]
oneIndexMean = np.mean(oneIndices)
proportnOfRadius = oneIndexMean / len(radialMaskTarget)
proportnOfRadius += 0.5 * 1/len(radialMaskTarget) #Because grating mask doesn't work with centering, rather it's the beginning or something so need to add to get to center of wedge
eccentricity = proportnOfRadius* radius
eccentricity = eccentricity / 1.97 #Don't know why need to divide by almost 2
#print("oneIndexMean = ", oneIndexMean, "proportnOfRadius = ", proportnOfRadius, "eccentricity = ", eccentricity)
#Calculate appropriate line width in deg
wedgeThicknessFraction = len( np.where(radialMask)[0] ) * 1.0 / len(radialMask)
wedgeThickness = wedgeThicknessFraction*radius/2
targeti = targetCorrectedForRingReversal % numObjects # (targetCorrectedForRingReversal-1) % numObjects #dont know why have to subtract 1. Then have to mod numObjects so negative number gets turned into positive
targetFillColors = np.array([[.9,.9,.9],[-.8,-.8,-.8]]) # [-.3,-.3,-.3]
for i in xrange(0,numObjects):
lineHeight = wedgeThickness * 1.0# *1.0
lineWidth = lineHeight / 10
angleDeg = initialAngle
angleDeg+= (visibleAngleStart+visibleAngleEnd)/2 #because when gratings are drawn, there's this additional offset for which bit of the grating is visible
angleDeg += i/numObjects*360
tangentialOrientation = i/numObjects*360
if __name__ != "__main__": #not self-test
halfAngle = 360/numObjects/2 #For some reason target is offset by half the distance between two objects, even though that doesn't happen in helpersAOH self-test
tangentialOrientation += halfAngle
x = cos( gratingAngleToEuclidean(angleDeg)*pi/180 ) * eccentricity
y = sin( gratingAngleToEuclidean(angleDeg)*pi/180 ) * eccentricity
lineColor = targetFillColors[0]
if i == targeti:
print("line targeti=", targeti, " angleDeg=",angleDeg, "Euclidean angle=",gratingAngleToEuclidean(angleDeg) )
orientation = tangentialOrientation
if targetRadialOffset<0: #it's always one of two values, a negative one and a positive one
#orientation = tangentialOrientation + 90
lineColor = targetFillColors[1] #opposite color
else:
#orientation = tangentialOrientation + random.randint(0,1)*90
lineColor = targetFillColors[ random.randint(0,1) ]
#if orientation==tangentialOrientation: #make bigger because harder to see
# lineHeight *= 1.4 #for tangential, make longer
#else: lineHeight *=.8
#print("Drawing line ",i," at x=",x, " y=", y, "targetCorrectedForRingReversal=", targetCorrectedForRingReversal )
#thisLine = visual.Rect(myWin, width=lineWidth, height=lineHeight, pos=(x,y), ori=orientation, fillColor=lineColor, lineColor=None, autoLog=autoLogging)
thisLine = visual.Circle(myWin, radius=lineWidth, pos=(x,y), fillColor=lineColor, lineColor=None, autoLog=autoLogging)
lines.append(thisLine)
#CREATING CUE TEXTURE
#Both inner and outer cue arcs can be drawn in one go via a radial mask
#use visibleWedge so it only highlights a single thick wedge
#draw texture for cueRing
start = 0 #identify starting texture position for this segment
start = int( round( start+patchFlankSize ) )
end = int( round(start + segmentSizeTexture - patchFlankSize) )#don't round until after do addition, otherwise can fall short
cueTex[:, start:end, :] = cueColor[:]
#Actually because I'm only showing a tiny sliver via visibleAngle, could color the whole thing
cueTex[:, :, :] = cueColor[:]
#draw cue
visibleAngleStart = 0; visibleAngleEnd=360
if objToCue>=0:
        objToCueCorrectedForRingReversal = objToCue #numObjects-1 - objToCue #grating seems to be laid out in opposite direction than blobs, this fixes postCueNumBlobsAway so positive is in direction of motion
        visibleAngleStart = objToCueCorrectedForRingReversal*segmentAngle + (segmentAngle-patchAngleThick)/2
        visibleAngleEnd = visibleAngleStart + patchAngleThick
        print('objToCueCorrectedForRingReversal = ',objToCueCorrectedForRingReversal,' visibleAngleStart=',visibleAngleStart,' visibleAngleEnd=',visibleAngleEnd)
cueRing = visual.RadialStim(myWin, tex=cueTex, color=[1,1,1],size=radius, #cueTexInner is white. Only one sector of it shown by mask
visibleWedge=[visibleAngleStart,visibleAngleEnd],
ori = initialAngle,
mask = cueRadialMask, radialCycles=0, angularCycles=1, #only one cycle because no pattern actually repeats- trying to highlight only one sector
angularRes=angRes, interpolate=antialiasGrating, autoLog=autoLogging)
return ringRadialThickWedges,ringRadialThickWedgesCopy,ringRadialThinWedges,targetRadial,cueRing,lines
######### End constructThickThinWedgeRingsTargetAndCue ###########################################################
########################################################### ###########################################################
if __name__ == "__main__": #do self-tests
from psychopy import *
from psychopy import monitors, logging
monitorwidth = 38.5 #28.5 #monitor width in centimeters
viewdist = 57.; #cm
mon = monitors.Monitor("testMonitor",width=monitorwidth, distance=viewdist) #fetch the most recent calib for this monitor
bgColor = [0,0,0] # [-1,-1,-1]
allowGUI = True; units='deg'; fullscr=0; scrn=0; waitBlank=False
#mon.setSizePix( (widthPix,heightPix) )
widthPix = 800; heightPix = 600
myWin = openMyStimWindow(mon,widthPix,heightPix,bgColor,allowGUI,units,fullscr,scrn,waitBlank)
widthPix = myWin.size[0]; heightPix = myWin.size[1]
#Task will be to judge which thick wedge has the thin wedge offset within it
numObjects = 8
initialAngle =random.random()*360
gratingTexPix= 1024
objToCue=0
radius = 25.
visibleWedge = [0,360]
patchAngleThickWedges = 360/numObjects/2
thickWedgeColor = [1,-1,-1]
thinWedgeColor=[0,0,1]
cueColor=[0,1,1]
radialMask = np.array( [0,0,0,0,1,0,0,0,0] ) #This is for the larger wedge that the TargetSliver is embedded in
radialMaskThinWedge = np.array( [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0] ) #This is the sliver that's offset relative to the larger wedge, that you have to judge the offset of
wedgeRadiusFraction = np.where(radialMask)[0][0]*1.0 / len(radialMask)
print('wedgeRadiusFraction = ',wedgeRadiusFraction)
wedgeThicknessFraction = len( np.where(radialMask)[0] )*1.0 / len(radialMask)
print('wedgeThickness = ',wedgeThicknessFraction*radius)
wedgeCenterFraction = wedgeRadiusFraction + wedgeThicknessFraction/2.
targetSliverRadiusFraction = np.where(radialMaskThinWedge)[0][0]*1.0 / len(radialMaskThinWedge)
print('targetSliverRadiusFraction = ',targetSliverRadiusFraction)
targetSliverThicknessFraction = len( np.where(radialMaskThinWedge)[0] )*1.0 / len(radialMaskThinWedge)
targetSliverCenterFraction = targetSliverRadiusFraction + targetSliverThicknessFraction/2.
print('targetSliverThickness = ',targetSliverThicknessFraction*radius, ' targetSliverCenterFraction=',targetSliverCenterFraction)
#distance of cue arc
desiredArcDistanceFractionRadius = .23
cueInnerArcDesiredFraction = wedgeCenterFraction - desiredArcDistanceFractionRadius
cueOuterArcDesiredFraction = wedgeCenterFraction + desiredArcDistanceFractionRadius
if cueOuterArcDesiredFraction > 1:
        msg="Can't start outer arc at fraction="+str(cueOuterArcDesiredFraction)
logging.error(msg); print(msg)
fractionResolution = .02 #Quantisation of possible positions of cue arc
binsNeeded = 1.0 / fractionResolution
cueRadialMask = np.zeros( binsNeeded )
#For the cueRadialMask, want everything zero except just inside and outside of the wedges.
innerArcCenterPos = round( binsNeeded*cueInnerArcDesiredFraction )
outerArcCenterPos = round( binsNeeded*cueOuterArcDesiredFraction )
cueRadialMask[ innerArcCenterPos ] = 1
cueRadialMask[ outerArcCenterPos ] = 1
print('cueInnerArcDesiredFraction = ',cueInnerArcDesiredFraction, ' actual = ', innerArcCenterPos*1.0/len(cueRadialMask) )
print('cueOuterArcDesiredFraction = ',cueOuterArcDesiredFraction, ' actual = ', outerArcCenterPos*1.0/len(cueRadialMask) )
targetAngleOffset = 0; targetRadialOffset = -1
thickWedgesRing,thickWedgesRingCopy, thinWedgesRing, targetRing, cueRing, lines = \
constructThickThinWedgeRingsTargetAndCue(myWin,initialAngle,radius,radialMask,radialMaskThinWedge,cueRadialMask,visibleWedge,numObjects,
patchAngleThickWedges,patchAngleThickWedges,bgColor,thickWedgeColor,thinWedgeColor,targetAngleOffset,targetRadialOffset,
gratingTexPix,cueColor,objToCue,ppLog=logging)
keepGoing = True
while keepGoing:
thickWedgesRing.draw()
thinWedgesRing.draw()
cueRing.draw()
#Draw thin wedges at same time as thick wedges. But when time to draw target, draw over old position of target thin wedge and draw displaced version
#Now program the cue arcs and the target-displaced ring
myWin.flip()
for key in event.getKeys(): #check if pressed abort-type key
if key in ['escape','q']:
keepGoing = False
respcount = 1
else: #key in [
print('key =', key)
keepGoing = True #draw target superposed
while keepGoing:
#The thickWedgesRing, typically red, are drawn as a radial grating that occupies all 360 deg circular, with a texture to mask out everything else to create a ring
#The thinWedgesRing, typically blue, are centered in the red and one of these wedges will be later displaced to create a target.
#The targetRing is the displaced blue wedge. Actually a full circular radial grating, but visibleWedge set to subtend only the part where the target is.
#The thickWedgesRingCopy is to draw over the old, undisplaced blue wedge, only in the target area. It is thus a copy of the thickWedgesRing,
# with visibleWedge set to show only the target part
#The cueRing is two white arcs to bring attention to the target area.
thickWedgesRing.draw() #Draw red thick wedges
thinWedgesRing.draw() #Draw thin blue wedge centered in thick red wedges
#When time to draw target, draw over old position of target thin wedge and draw displaced version
thickWedgesRingCopy.draw()
targetRing.draw() #this is the particular blue patch offset. And drawing the rest in red, so that the undisplaced doesn't show through.
for line in lines:
line.draw()
myWin.flip()
for key in event.getKeys(): #check if pressed abort-type key
if key in ['escape','q']:
keepGoing = False
respcount = 1
else: #key in [
print('key =', key)
| alexholcombe/MOTcircular | experiment_specific/transient_attention/helpersAOHtargetFinalCueLocatn.py | Python | mit | 31,372 |
def foo():
global <weak_warning descr="Global variable 'bar' is undefined at the module level">bar</weak_warning>
bar = "something"
| siosio/intellij-community | python/testData/inspections/PyGlobalUndefinedInspection/reassignedAndAbsent.py | Python | apache-2.0 | 140 |
from django.core.exceptions import ImproperlyConfigured
from django.db.models.fields import CharField, DecimalField
from django.utils import six
from django.utils.translation import ugettext_lazy as _
from phonenumber_field.modelfields import PhoneNumberField
from oscar.core import validators
from oscar.forms import fields
from oscar.models.fields.autoslugfield import AutoSlugField
AutoSlugField = AutoSlugField
PhoneNumberField = PhoneNumberField
# https://github.com/django/django/blob/64200c14e0072ba0ffef86da46b2ea82fd1e019a/django/db/models/fields/subclassing.py#L31-L44
class Creator(object):
"""
    A descriptor that runs the field's to_python() when the attribute is assigned on the model instance.
"""
def __init__(self, field):
self.field = field
def __get__(self, obj, type=None):
if obj is None:
return self
return obj.__dict__[self.field.name]
def __set__(self, obj, value):
obj.__dict__[self.field.name] = self.field.to_python(value)
class ExtendedURLField(CharField):
description = _("URL")
def __init__(self, verbose_name=None, name=None,
verify_exists=None, **kwargs):
kwargs['max_length'] = kwargs.get('max_length', 200)
CharField.__init__(self, verbose_name, name, **kwargs)
# 'verify_exists' was deprecated in Django 1.4. To ensure backwards
# compatibility, it is still accepted here, but only passed
# on to the parent class if it was specified.
self.verify_exists = verify_exists
if verify_exists is not None:
validator = validators.ExtendedURLValidator(
verify_exists=verify_exists)
else:
validator = validators.ExtendedURLValidator()
self.validators.append(validator)
def formfield(self, **kwargs):
# As with CharField, this will cause URL validation to be performed
# twice.
defaults = {
'form_class': fields.ExtendedURLField,
'verify_exists': self.verify_exists
}
defaults.update(kwargs)
return super(ExtendedURLField, self).formfield(**defaults)
def deconstruct(self):
"""
deconstruct() is needed by Django's migration framework
"""
name, path, args, kwargs = super(ExtendedURLField, self).deconstruct()
# Add verify_exists to kwargs if it's not the default value.
if self.verify_exists is not None:
kwargs['verify_exists'] = self.verify_exists
# We have a default value for max_length; remove it in that case
if self.max_length == 200:
del kwargs['max_length']
return name, path, args, kwargs
class PositiveDecimalField(DecimalField):
"""
A simple subclass of ``django.db.models.fields.DecimalField`` that
restricts values to be non-negative.
"""
def formfield(self, **kwargs):
return super(PositiveDecimalField, self).formfield(min_value=0)
class UppercaseCharField(CharField):
"""
A simple subclass of ``django.db.models.fields.CharField`` that
restricts all text to be uppercase.
Defined with the with_metaclass helper so that to_python is called
https://docs.djangoproject.com/en/1.6/howto/custom-model-fields/#the-subfieldbase-metaclass # NOQA
"""
def contribute_to_class(self, cls, name, **kwargs):
super(UppercaseCharField, self).contribute_to_class(
cls, name, **kwargs)
setattr(cls, self.name, Creator(self))
def from_db_value(self, value, expression, connection, context):
return self.to_python(value)
def to_python(self, value):
val = super(UppercaseCharField, self).to_python(value)
if isinstance(val, six.string_types):
return val.upper()
else:
return val
class NullCharField(CharField):
"""
CharField that stores '' as None and returns None as ''
Useful when using unique=True and forms. Implies null==blank==True.
When a ModelForm with a CharField with null=True gets saved, the field will
be set to '': https://code.djangoproject.com/ticket/9590
This breaks usage with unique=True, as '' is considered equal to another
field set to ''.
"""
description = "CharField that stores '' as None and returns None as ''"
def __init__(self, *args, **kwargs):
if not kwargs.get('null', True) or not kwargs.get('blank', True):
raise ImproperlyConfigured(
"NullCharField implies null==blank==True")
kwargs['null'] = kwargs['blank'] = True
super(NullCharField, self).__init__(*args, **kwargs)
def contribute_to_class(self, cls, name, **kwargs):
super(NullCharField, self).contribute_to_class(cls, name, **kwargs)
setattr(cls, self.name, Creator(self))
def from_db_value(self, value, expression, connection, context):
return self.to_python(value)
def to_python(self, value):
val = super(NullCharField, self).to_python(value)
return val if val is not None else u''
def get_prep_value(self, value):
prepped = super(NullCharField, self).get_prep_value(value)
return prepped if prepped != u"" else None
def deconstruct(self):
"""
deconstruct() is needed by Django's migration framework
"""
name, path, args, kwargs = super(NullCharField, self).deconstruct()
del kwargs['null']
del kwargs['blank']
return name, path, args, kwargs
| vicky2135/lucious | src/oscar/models/fields/__init__.py | Python | bsd-3-clause | 5,511 |
import collections
import inspect
import json
class Named(object):
@property
def name(self):
name = Named.typename(self.__class__)
hex_hash = '%016x' % abs(hash(self))
return '%s//%s' % (name, hex_hash)
@staticmethod
def typename(typ):
module = typ.__module__
module = module + '.' if module not in [None, '__main__'] else ''
return module + typ.__name__
@staticmethod
def components(name):
name = name.name if isinstance(name, Named) else name
def subkey(s):
return tuple(s.split('//')) if '//' in s else s
return tuple(subkey(s) for s in name.split('.'))
@property
def sortkey(self):
return Named.components(self.name)
class ClassHierarchyRoot(object):
@classmethod
def subclasses(cls):
subs = cls.__subclasses__()
return set(subs) | {c for s in subs for c in s.subclasses()}
class Specced(Named):
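    # Records the constructor arguments (as a CallSpec) so instances can be
    # compared, hashed, and re-printed as the call that created them.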
def __new__(typ, *args, **kwargs):
spec = CallSpec(typ.__init__, *args, **kwargs)
o = object.__new__(typ, *args, **kwargs)
o._callspec = (Named.typename(typ), spec)
o._callspec_labels = set()
return o
def __repr__(self):
name, spec = self._callspec
args = ', '.join('%s=%r' % (k, v) for k, v in spec.items())
return '%s(%s)' % (name, args)
@property
def key(self):
return (self._callspec, frozenset(self._callspec_labels))
def __hash__(self):
return hash(self.key)
def __eq__(self, other):
return self.key == other.key
def __ne__(self, other):
return self.key != other.key
def __gt__(self, other): # Greater than means: more specific
return self.key > other.key # Should be the other way around
def __ge__(self, other):
return self.key >= other.key
def __lt__(self, other):
return self.key < other.key
def __le__(self, other):
return self.key <= other.key
class CallSpec(collections.OrderedDict):
"""Match names to arguments for the given function."""
def __init__(self, f, *varargs, **keywords):
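        # Illustrative example with a hypothetical function: given ``def f(a, b)``,
        # CallSpec(f, 1, b=2) produces OrderedDict([('a', 1), ('b', 2)]).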
spec = inspect.getargspec(f)
names = spec.args[1:] if inspect.ismethod(f) else spec.args
named = zip(names, varargs)
varargs = varargs[len(named):]
named += [(name, keywords[name]) for name in names if name in keywords]
for name in names:
if name in keywords:
del keywords[name]
if len(varargs) > 0:
if not spec.varargs:
raise ValueError('Varargs are not supported by %r' % f)
named += [(spec.varargs, varargs)]
if len(keywords) > 0:
if not spec.keywords:
msg = 'Extended keyword arguments are not supported by %r' % f
raise ValueError(msg)
named += [(spec.keywords, keywords)]
super(CallSpec, self).__init__(named)
def __hash__(self):
return hash(json.dumps(self, sort_keys=True))
def __eq__(self, other): # Self specifies the same arguments as other
return self <= other and not self < other
def __ne__(self, other):
return not self == other
def __gt__(self, other):
return other < self
def __ge__(self, other):
return other <= self
def __lt__(self, other):
return set(self.keys()) < set(other.keys()) and self <= other
def __le__(self, other): # Self is not more specific than other
if set(self.keys()) <= set(other.keys()):
for k in self.keys():
if self[k] != other[k]:
return False
return True
return False
| solidsnack/confit | confit/meta.py | Python | apache-2.0 | 3,756 |
"""Agrega alturas.codprov y alturas.cp
Revision ID: f5195fe91e09
Revises: fccbcd8362d7
Create Date: 2017-07-09 22:01:51.280360
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'f5195fe91e09'
down_revision = 'fccbcd8362d7'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('alturas', sa.Column('codprov', sa.String(length=1), nullable=True))
op.add_column('alturas', sa.Column('cp', sa.Integer(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('alturas', 'cp')
op.drop_column('alturas', 'codprov')
# ### end Alembic commands ###
| OpenDataCordoba/codigo-postal-argentino | alembic/versions/f5195fe91e09_agrega_alturas_codprov_y_alturas_cp.py | Python | gpl-2.0 | 796 |
#!/usr/bin/env python
# Copyright 2014-2019 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: James Smith <[email protected]>
#
# All diatomic bond lengths taken from:
# http://cccbdb.nist.gov/diatomicexpbondx.asp
"""
All output is deleted after the run to keep the directory neat. Comment out the
cleanup section to view output files.
"""
import time
from pyscf import gto, scf, mcscf, dmrgscf
from pyscf.shciscf import shci
t0 = time.time()
#
# Mean Field
#
mol = gto.M(verbose=4, atom="O 0 0 0; O 0 0 1.208", basis="ccpvdz")
mf = scf.RHF(mol).run()
#
# Multireference WF
#
ncas = 8
nelecas = 12
mc = mcscf.CASSCF(mf, ncas, nelecas)
e_CASSCF = mc.mc1step()[0]
# Create SHCI molecule for just variational opt.
# Active spaces chosen to reflect valence active space.
mc = shci.SHCISCF(mf, ncas, nelecas)
mc.fcisolver.mpiprefix = "mpirun -np 2"
mc.fcisolver.stochastic = True
mc.fcisolver.nPTiter = 0 # Turn off perturbative calc.
mc.fcisolver.sweep_iter = [0]
# Setting large epsilon1 thresholds highlights improvement from perturbation.
mc.fcisolver.sweep_epsilon = [5e-3]
e_noPT = mc.mc1step()[0]
# Run a single SHCI iteration with perturbative correction.
mc.fcisolver.stochastic = False # Turns on deterministic PT calc.
mc.fcisolver.epsilon2 = 1e-8
shci.writeSHCIConfFile(mc.fcisolver, [nelecas / 2, nelecas / 2], False)
shci.executeSHCI(mc.fcisolver)
e_PT = shci.readEnergy(mc.fcisolver)
# Comparison Calculations
del_PT = e_PT - e_noPT
del_shci = e_CASSCF - e_PT
print("\n\nEnergies for O2 give in E_h.")
print("=====================================")
print("SHCI Variational: %6.12f" % e_noPT)
# Prints the total energy including the perturbative component.
print("SHCI Perturbative: %6.12f" % e_PT)
print("Perturbative Change: %6.12f" % del_PT)
print("CASSCF Total Energy: %6.12f" % e_CASSCF)
print("E(CASSCF) - E(SHCI): %6.12f" % del_shci)
print("Total Time: ", time.time() - t0)
# File cleanup
mc.fcisolver.cleanup_dice_files()
| gkc1000/pyscf | pyscf/shciscf/examples/01_O2_PT.py | Python | apache-2.0 | 2,512 |
from .abc import ABCIE
from .academicearth import AcademicEarthCourseIE
from .addanime import AddAnimeIE
from .adultswim import AdultSwimIE
from .aftonbladet import AftonbladetIE
from .anitube import AnitubeIE
from .anysex import AnySexIE
from .aol import AolIE
from .allocine import AllocineIE
from .aparat import AparatIE
from .appletrailers import AppleTrailersIE
from .archiveorg import ArchiveOrgIE
from .ard import ARDIE, ARDMediathekIE
from .arte import (
ArteTvIE,
ArteTVPlus7IE,
ArteTVCreativeIE,
ArteTVConcertIE,
ArteTVFutureIE,
ArteTVDDCIE,
ArteTVEmbedIE,
)
from .audiomack import AudiomackIE
from .auengine import AUEngineIE
from .bambuser import BambuserIE, BambuserChannelIE
from .bandcamp import BandcampIE, BandcampAlbumIE
from .bbccouk import BBCCoUkIE
from .beeg import BeegIE
from .behindkink import BehindKinkIE
from .bild import BildIE
from .bilibili import BiliBiliIE
from .blinkx import BlinkxIE
from .bliptv import BlipTVIE, BlipTVUserIE
from .bloomberg import BloombergIE
from .br import BRIE
from .breakcom import BreakIE
from .brightcove import BrightcoveIE
from .byutv import BYUtvIE
from .c56 import C56IE
from .canal13cl import Canal13clIE
from .canalplus import CanalplusIE
from .canalc2 import Canalc2IE
from .cbs import CBSIE
from .cbsnews import CBSNewsIE
from .ceskatelevize import CeskaTelevizeIE
from .channel9 import Channel9IE
from .chilloutzone import ChilloutzoneIE
from .cinemassacre import CinemassacreIE
from .clipfish import ClipfishIE
from .cliphunter import CliphunterIE
from .clipsyndicate import ClipsyndicateIE
from .cloudy import CloudyIE
from .clubic import ClubicIE
from .cmt import CMTIE
from .cnet import CNETIE
from .cnn import (
CNNIE,
CNNBlogsIE,
)
from .collegehumor import CollegeHumorIE
from .comedycentral import ComedyCentralIE, ComedyCentralShowsIE
from .condenast import CondeNastIE
from .cracked import CrackedIE
from .criterion import CriterionIE
from .crunchyroll import (
CrunchyrollIE,
CrunchyrollShowPlaylistIE
)
from .cspan import CSpanIE
from .dailymotion import (
DailymotionIE,
DailymotionPlaylistIE,
DailymotionUserIE,
)
from .daum import DaumIE
from .dbtv import DBTVIE
from .deezer import DeezerPlaylistIE
from .dfb import DFBIE
from .dotsub import DotsubIE
from .dreisat import DreiSatIE
from .drtuber import DrTuberIE
from .drtv import DRTVIE
from .dump import DumpIE
from .defense import DefenseGouvFrIE
from .discovery import DiscoveryIE
from .divxstage import DivxStageIE
from .dropbox import DropboxIE
from .ebaumsworld import EbaumsWorldIE
from .ehow import EHowIE
from .eighttracks import EightTracksIE
from .einthusan import EinthusanIE
from .eitb import EitbIE
from .ellentv import (
EllenTVIE,
EllenTVClipsIE,
)
from .elpais import ElPaisIE
from .empflix import EMPFlixIE
from .engadget import EngadgetIE
from .eporner import EpornerIE
from .escapist import EscapistIE
from .everyonesmixtape import EveryonesMixtapeIE
from .exfm import ExfmIE
from .expotv import ExpoTVIE
from .extremetube import ExtremeTubeIE
from .facebook import FacebookIE
from .faz import FazIE
from .fc2 import FC2IE
from .firedrive import FiredriveIE
from .firstpost import FirstpostIE
from .firsttv import FirstTVIE
from .fivemin import FiveMinIE
from .fktv import (
FKTVIE,
FKTVPosteckeIE,
)
from .flickr import FlickrIE
from .fourtube import FourTubeIE
from .franceculture import FranceCultureIE
from .franceinter import FranceInterIE
from .francetv import (
PluzzIE,
FranceTvInfoIE,
FranceTVIE,
GenerationQuoiIE,
CultureboxIE,
)
from .freesound import FreesoundIE
from .freespeech import FreespeechIE
from .funnyordie import FunnyOrDieIE
from .gamekings import GamekingsIE
from .gameone import (
GameOneIE,
GameOnePlaylistIE,
)
from .gamespot import GameSpotIE
from .gamestar import GameStarIE
from .gametrailers import GametrailersIE
from .gdcvault import GDCVaultIE
from .generic import GenericIE
from .glide import GlideIE
from .globo import GloboIE
from .godtube import GodTubeIE
from .golem import GolemIE
from .googleplus import GooglePlusIE
from .googlesearch import GoogleSearchIE
from .gorillavid import GorillaVidIE
from .goshgay import GoshgayIE
from .grooveshark import GroovesharkIE
from .hark import HarkIE
from .heise import HeiseIE
from .helsinki import HelsinkiIE
from .hentaistigma import HentaiStigmaIE
from .hornbunny import HornBunnyIE
from .hostingbulk import HostingBulkIE
from .hotnewhiphop import HotNewHipHopIE
from .howcast import HowcastIE
from .howstuffworks import HowStuffWorksIE
from .huffpost import HuffPostIE
from .hypem import HypemIE
from .iconosquare import IconosquareIE
from .ign import IGNIE, OneUPIE
from .imdb import (
ImdbIE,
ImdbListIE
)
from .ina import InaIE
from .infoq import InfoQIE
from .instagram import InstagramIE, InstagramUserIE
from .internetvideoarchive import InternetVideoArchiveIE
from .iprima import IPrimaIE
from .ivi import (
IviIE,
IviCompilationIE
)
from .izlesene import IzleseneIE
from .jadorecettepub import JadoreCettePubIE
from .jeuxvideo import JeuxVideoIE
from .jove import JoveIE
from .jukebox import JukeboxIE
from .jpopsukitv import JpopsukiIE
from .kankan import KankanIE
from .keezmovies import KeezMoviesIE
from .khanacademy import KhanAcademyIE
from .kickstarter import KickStarterIE
from .keek import KeekIE
from .kontrtube import KontrTubeIE
from .krasview import KrasViewIE
from .ku6 import Ku6IE
from .la7 import LA7IE
from .laola1tv import Laola1TvIE
from .lifenews import LifeNewsIE
from .liveleak import LiveLeakIE
from .livestream import (
LivestreamIE,
LivestreamOriginalIE,
LivestreamShortenerIE,
)
from .lrt import LRTIE
from .lynda import (
LyndaIE,
LyndaCourseIE
)
from .m6 import M6IE
from .macgamestore import MacGameStoreIE
from .mailru import MailRuIE
from .malemotion import MalemotionIE
from .mdr import MDRIE
from .metacafe import MetacafeIE
from .metacritic import MetacriticIE
from .mgoon import MgoonIE
from .ministrygrid import MinistryGridIE
from .mit import TechTVMITIE, MITIE, OCWMITIE
from .mitele import MiTeleIE
from .mixcloud import MixcloudIE
from .mlb import MLBIE
from .mpora import MporaIE
from .moevideo import MoeVideoIE
from .mofosex import MofosexIE
from .mojvideo import MojvideoIE
from .moniker import MonikerIE
from .mooshare import MooshareIE
from .morningstar import MorningstarIE
from .motherless import MotherlessIE
from .motorsport import MotorsportIE
from .movieclips import MovieClipsIE
from .moviezine import MoviezineIE
from .movshare import MovShareIE
from .mtv import (
MTVIE,
MTVServicesEmbeddedIE,
MTVIggyIE,
)
from .muenchentv import MuenchenTVIE
from .musicplayon import MusicPlayOnIE
from .musicvault import MusicVaultIE
from .muzu import MuzuTVIE
from .myspace import MySpaceIE
from .myspass import MySpassIE
from .myvideo import MyVideoIE
from .naver import NaverIE
from .nba import NBAIE
from .nbc import (
NBCIE,
NBCNewsIE,
)
from .ndr import NDRIE
from .ndtv import NDTVIE
from .newgrounds import NewgroundsIE
from .newstube import NewstubeIE
from .nfb import NFBIE
from .nfl import NFLIE
from .nhl import NHLIE, NHLVideocenterIE
from .niconico import NiconicoIE, NiconicoPlaylistIE
from .ninegag import NineGagIE
from .noco import NocoIE
from .normalboots import NormalbootsIE
from .nosvideo import NosVideoIE
from .novamov import NovaMovIE
from .nowness import NownessIE
from .nowvideo import NowVideoIE
from .npo import (
NPOIE,
TegenlichtVproIE,
)
from .nrk import (
NRKIE,
NRKTVIE,
)
from .ntv import NTVIE
from .nytimes import NYTimesIE
from .nuvid import NuvidIE
from .oktoberfesttv import OktoberfestTVIE
from .ooyala import OoyalaIE
from .orf import (
ORFTVthekIE,
ORFOE1IE,
ORFFM4IE,
)
from .parliamentliveuk import ParliamentLiveUKIE
from .patreon import PatreonIE
from .pbs import PBSIE
from .phoenix import PhoenixIE
from .photobucket import PhotobucketIE
from .planetaplay import PlanetaPlayIE
from .played import PlayedIE
from .playfm import PlayFMIE
from .playvid import PlayvidIE
from .podomatic import PodomaticIE
from .pornhd import PornHdIE
from .pornhub import PornHubIE
from .pornotube import PornotubeIE
from .pornoxo import PornoXOIE
from .promptfile import PromptFileIE
from .prosiebensat1 import ProSiebenSat1IE
from .pyvideo import PyvideoIE
from .quickvid import QuickVidIE
from .radiofrance import RadioFranceIE
from .rai import RaiIE
from .rbmaradio import RBMARadioIE
from .redtube import RedTubeIE
from .reverbnation import ReverbNationIE
from .ringtv import RingTVIE
from .ro220 import Ro220IE
from .rottentomatoes import RottenTomatoesIE
from .roxwel import RoxwelIE
from .rtbf import RTBFIE
from .rtlnl import RtlXlIE
from .rtlnow import RTLnowIE
from .rts import RTSIE
from .rtve import RTVEALaCartaIE, RTVELiveIE
from .ruhd import RUHDIE
from .rutube import (
RutubeIE,
RutubeChannelIE,
RutubeMovieIE,
RutubePersonIE,
)
from .rutv import RUTVIE
from .ruv import RUVIE
from .sapo import SapoIE
from .savefrom import SaveFromIE
from .sbs import SBSIE
from .scivee import SciVeeIE
from .screencast import ScreencastIE
from .servingsys import ServingSysIE
from .sexykarma import SexyKarmaIE
from .shared import SharedIE
from .sharesix import ShareSixIE
from .sina import SinaIE
from .slideshare import SlideshareIE
from .slutload import SlutloadIE
from .smotri import (
SmotriIE,
SmotriCommunityIE,
SmotriUserIE,
SmotriBroadcastIE,
)
from .snotr import SnotrIE
from .sockshare import SockshareIE
from .sohu import SohuIE
from .soundcloud import (
SoundcloudIE,
SoundcloudSetIE,
SoundcloudUserIE,
SoundcloudPlaylistIE
)
from .soundgasm import SoundgasmIE
from .southpark import (
SouthParkIE,
SouthparkDeIE,
)
from .space import SpaceIE
from .spankwire import SpankwireIE
from .spiegel import SpiegelIE, SpiegelArticleIE
from .spiegeltv import SpiegeltvIE
from .spike import SpikeIE
from .sport5 import Sport5IE
from .sportbox import SportBoxIE
from .sportdeutschland import SportDeutschlandIE
from .srmediathek import SRMediathekIE
from .stanfordoc import StanfordOpenClassroomIE
from .steam import SteamIE
from .streamcloud import StreamcloudIE
from .streamcz import StreamCZIE
from .sunporno import SunPornoIE
from .swrmediathek import SWRMediathekIE
from .syfy import SyfyIE
from .sztvhu import SztvHuIE
from .tagesschau import TagesschauIE
from .tapely import TapelyIE
from .teachertube import (
TeacherTubeIE,
TeacherTubeUserIE,
)
from .teachingchannel import TeachingChannelIE
from .teamcoco import TeamcocoIE
from .techtalks import TechTalksIE
from .ted import TEDIE
from .telecinco import TelecincoIE
from .telemb import TeleMBIE
from .tenplay import TenPlayIE
from .testurl import TestURLIE
from .tf1 import TF1IE
from .theonion import TheOnionIE
from .theplatform import ThePlatformIE
from .thesixtyone import TheSixtyOneIE
from .thisav import ThisAVIE
from .tinypic import TinyPicIE
from .tlc import TlcIE, TlcDeIE
from .tnaflix import TNAFlixIE
from .thvideo import (
THVideoIE,
THVideoPlaylistIE
)
from .toutv import TouTvIE
from .toypics import ToypicsUserIE, ToypicsIE
from .traileraddict import TrailerAddictIE
from .trilulilu import TriluliluIE
from .trutube import TruTubeIE
from .tube8 import Tube8IE
from .tudou import TudouIE
from .tumblr import TumblrIE
from .turbo import TurboIE
from .tutv import TutvIE
from .tvigle import TvigleIE
from .tvp import TvpIE
from .tvplay import TVPlayIE
from .twitch import TwitchIE
from .ubu import UbuIE
from .udemy import (
UdemyIE,
UdemyCourseIE
)
from .unistra import UnistraIE
from .urort import UrortIE
from .ustream import UstreamIE, UstreamChannelIE
from .vbox7 import Vbox7IE
from .veehd import VeeHDIE
from .veoh import VeohIE
from .vesti import VestiIE
from .vevo import VevoIE
from .vgtv import VGTVIE
from .vh1 import VH1IE
from .vice import ViceIE
from .viddler import ViddlerIE
from .videobam import VideoBamIE
from .videodetective import VideoDetectiveIE
from .videolecturesnet import VideoLecturesNetIE
from .videofyme import VideofyMeIE
from .videomega import VideoMegaIE
from .videopremium import VideoPremiumIE
from .videott import VideoTtIE
from .videoweed import VideoWeedIE
from .vidme import VidmeIE
from .vidzi import VidziIE
from .vimeo import (
VimeoIE,
VimeoAlbumIE,
VimeoChannelIE,
VimeoGroupsIE,
VimeoLikesIE,
VimeoReviewIE,
VimeoUserIE,
VimeoWatchLaterIE,
)
from .vimple import VimpleIE
from .vine import (
VineIE,
VineUserIE,
)
from .viki import VikiIE
from .vk import VKIE
from .vodlocker import VodlockerIE
from .vporn import VpornIE
from .vrt import VRTIE
from .vube import VubeIE
from .vuclip import VuClipIE
from .vulture import VultureIE
from .walla import WallaIE
from .washingtonpost import WashingtonPostIE
from .wat import WatIE
from .wayofthemaster import WayOfTheMasterIE
from .wdr import (
WDRIE,
WDRMobileIE,
WDRMausIE,
)
from .weibo import WeiboIE
from .wimp import WimpIE
from .wistia import WistiaIE
from .worldstarhiphop import WorldStarHipHopIE
from .wrzuta import WrzutaIE
from .xbef import XBefIE
from .xboxclips import XboxClipsIE
from .xhamster import XHamsterIE
from .xnxx import XNXXIE
from .xvideos import XVideosIE
from .xtube import XTubeUserIE, XTubeIE
from .yahoo import (
YahooIE,
YahooSearchIE,
)
from .ynet import YnetIE
from .youjizz import YouJizzIE
from .youku import YoukuIE
from .youporn import YouPornIE
from .yourupload import YourUploadIE
from .youtube import (
YoutubeIE,
YoutubeChannelIE,
YoutubeFavouritesIE,
YoutubeHistoryIE,
YoutubePlaylistIE,
YoutubeRecommendedIE,
YoutubeSearchDateIE,
YoutubeSearchIE,
YoutubeSearchURLIE,
YoutubeShowIE,
YoutubeSubscriptionsIE,
YoutubeTopListIE,
YoutubeTruncatedURLIE,
YoutubeUserIE,
YoutubeWatchLaterIE,
)
from .zdf import ZDFIE
_ALL_CLASSES = [
klass
for name, klass in globals().items()
if name.endswith('IE') and name != 'GenericIE'
]
_ALL_CLASSES.append(GenericIE)
def gen_extractors():
""" Return a list of an instance of every supported extractor.
The order does matter; the first extractor matched is the one handling the URL.
"""
return [klass() for klass in _ALL_CLASSES]
def get_info_extractor(ie_name):
"""Returns the info extractor class with the given ie_name"""
return globals()[ie_name+'IE']
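# Illustrative usage sketch (not part of the original file). It assumes the
# package is importable as youtube_dl.extractor and relies on the standard
# InfoExtractor API (suitable() and IE_NAME); the sample URL is arbitrary.
if __name__ == '__main__':
    _sample_url = 'https://www.youtube.com/watch?v=BaW_jenozKc'
    # gen_extractors() order matters: the first extractor whose suitable()
    # accepts the URL is the one that would handle it.
    for _ie in gen_extractors():
        if _ie.suitable(_sample_url):
            print('%s would handle %s' % (_ie.IE_NAME, _sample_url))
            break
    # Look up an extractor class by its name without the trailing "IE".
    print(get_info_extractor('Youtube'))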
| kthordarson/youtube-dl-ruv | youtube_dl/extractor/__init__.py | Python | unlicense | 14,468 |
import numpy as np
import math
import time
import random
import sys
from rick.A_star_planning import *
from math import pi
import matplotlib.pyplot as plt
def compute_euclidean_path(pos_rob,pos_obj, points = 5): #pos_rob is a 1x3 matrix with (x, y, theta) & pos_obj is a 1x2 matrix with (x, y)
x = np.linspace(pos_rob[0], pos_obj[0], num=points)
y = np.linspace(pos_rob[1], pos_obj[1], num=points)
angle = math.atan2(pos_obj[1]-pos_rob[1], pos_obj[0]-pos_rob[0])
angle = math.degrees(angle)
    if angle < 0:
        angle = 360 + angle  # wrap into [0, 360); "360 - angle" would overshoot for negative angles
angle_vec = np.ones(points)
angle_vec.fill(angle)
path = np.array([x,y,angle_vec])
#print(path)
return path
## Now this function computes a piecewise euclidean path through the intermediate point pos_1
def compute_piecewise_path(pos_rob,pos_1,pos_obj,points=10):
x1=np.linspace(pos_rob[0],pos_1[0],num=round(points/2))
y1=np.linspace(pos_rob[1],pos_1[1],num=round(points/2))
x2=np.linspace(pos_1[0],pos_obj[0],num=round(points/2)+1)
y2=np.linspace(pos_1[1],pos_obj[1],num=round(points/2)+1)
x2=x2[1:]
y2=y2[1:]
x=np.concatenate((x1,x2))
y=np.concatenate((y1,y2))
angle1=math.atan2(pos_1[1]-pos_rob[1],pos_1[0]-pos_rob[0])
angle2=math.atan2(pos_obj[1]-pos_1[1],pos_obj[0]-pos_1[0])
angle1=math.degrees(angle1)
angle2=math.degrees(angle2)
    if angle1<0:
        angle1=360+angle1  # wrap into [0, 360)
    if angle2<0:
        angle2=360+angle2  # wrap into [0, 360)
angle_vec1 = np.ones(x1.shape)
angle_vec2=np.ones(x2.shape)
angle_vec1.fill(angle1)
angle_vec2.fill(angle2)
angle_vec=np.concatenate((angle_vec1,angle_vec2))
path = np.array([x,y,angle_vec])
plt.plot(path[0,:],path[1,:])
plt.axis([-100, 300, -100, 300])
plt.show()
return path
def compute_A_star_path(origin,goal,Map):
path=A_star(origin,goal,Map)
#path_array=np.array(path)
#print(path_array.shape)
path_rev=np.flip(path, axis=0)
return path_rev
def robot_control(pos_rob,target, K_x=1,K_y=1,K_an=1): #pos_rob is a 1x3 matrix with (x, y, theta) & target is a 1x2 matrix with (x, y)
# Radius and wheel width in cm
L = 14.5
R = 1.7
theta_star=np.arctan2(target[1]-pos_rob[1], target[0]-pos_rob[0])*180/np.pi
if theta_star<0:
theta_star=360-abs(theta_star)
theta=pos_rob[2]
err_theta=theta_star-theta
# GET wheel velocities through curvature
M_r2wheels= np.array([[1/R, -L/(2*R) ],[1/R, L/(2*R)]]) # --> (Vr,Vteta) = M * (w_rigth, w_left)
vel_wheels = np.ones(2)
distance_x = (target[0]-pos_rob[0])*np.sin(pos_rob[2]*pi/180) - (target[1]-pos_rob[1])*np.cos(pos_rob[2]*pi/180)
l= np.sqrt(np.power(target[0]-pos_rob[0],2)+np.power(target[1]-pos_rob[1],2))
#print("L is: ",l)()
C = -distance_x/np.power(l,2)
w = 2*R;
kt=0.05
#A = (1-(C*L)/2)/(1+(C*L)/2)
#vel_wheels[0] = w*L/(R*(1+A))
#vel_wheels[1] = vel_wheels[0]*A
if abs(err_theta)>60 and abs(err_theta)<300:
vel_robot=np.array([0,60])
# print("JUST SPINNING",abs(err_theta),theta_star,theta)
else:
vel_robot = np.array([w, w*C])
#print("velocidad del robot",vel_robot)
vel_wheels =np.matmul(M_r2wheels,vel_robot)
vel_wheels[0] = 180/pi * vel_wheels[0]
vel_wheels[1] = 180/pi * vel_wheels[1]
#print(vel_wheels)
if np.absolute(vel_wheels[0]) > 400 :
vel_wheels[0] = np.sign(vel_wheels[0])*400
if np.absolute(vel_wheels[1]) > 400:
vel_wheels[1] = np.sign(vel_wheels[1])*400
#print(vel_wheels)
return vel_wheels
def forward_localization(pos_rob, vel_wheels, Ts): # position of the robot (x, y, theta), vel_wheels 1x2: (vel_right, vel_left) and Ts (sampling time)
L = 14.5
R = 1.7
vel_wheels[0] = vel_wheels[0] * pi/180
vel_wheels[1] = vel_wheels[1] * pi/180
M_wheels2rob= np.array([[R/2,R/2],[-R/L,R/L]])
M_rob2w = np.array([[np.cos(pos_rob[2]*pi/180),0],[np.sin(pos_rob[2]*pi/180),0],[0,1]])
#print(M_rob2w)
vel_robot = np.matmul(M_wheels2rob,vel_wheels)
#print('vel_robot: ', vel_robot)
vel_world = np.matmul(M_rob2w,vel_robot)
new_pos_rob = np.zeros(3)
#new_pos_rob[0] = pos_rob[0] + Ts*vel_world[0]
#new_pos_rob[1] = pos_rob[1] + Ts*vel_world[1]
#new_pos_rob[2] = pos_rob[2] + Ts*vel_world[2]
incr_r = vel_robot[0]*Ts
incr_teta = vel_robot[1]*Ts * 180/pi
#print('radial increment:',incr_r,' angular increment: ',incr_teta)
new_pos_rob[0] = pos_rob[0] + incr_r*np.cos((pos_rob[2]+incr_teta/2)*pi/180)
new_pos_rob[1] = pos_rob[1] + incr_r*np.sin((pos_rob[2]+incr_teta/2)*pi/180)
new_pos_rob[2] = pos_rob[2] + incr_teta
#print('new pos: ', new_pos_rob)
if new_pos_rob[2] >360:
new_pos_rob[2] = new_pos_rob[2] - 360
elif new_pos_rob[2] < 0 :
new_pos_rob[2] = 360 + new_pos_rob[2]
#print(new_pos_rob)
return new_pos_rob
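# Worked example for the update above (hand-computed; uses the R = 1.7 cm wheel
# radius and L = 14.5 cm track width defined inside the function): with both
# wheels turning at 90 deg/s (= pi/2 rad/s) for Ts = 1 s,
#   incr_r    = (R/2) * (pi/2 + pi/2) * Ts ~= 2.67 cm of straight-line travel,
#   incr_teta = (R/L) * (pi/2 - pi/2)       = 0 deg of rotation,
# so a robot starting at (0, 0, 0 deg) ends up at roughly (2.67, 0, 0 deg).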
def odometry_localization(pos_rob, odom_r, odom_l, Ts): # position of the robot (x, y, theta), odometry increments of the right and left wheels (degrees) and Ts (sampling time)
L = 14.5
R = 1.7
M_wheels2rob= np.array([[R/2,R/2],[-R/L,R/L]])
M_rob2w = np.array([[np.cos(pos_rob[2]*pi/180),0],[np.sin(pos_rob[2]*pi/180),0],[0,1]])
#print(M_rob2w)
odom_r = odom_r*pi/180
odom_l = odom_l*pi/180
vel_wheels = np.array([odom_r,odom_l])
vel_robot = np.matmul(M_wheels2rob,vel_wheels)
#print('vel_robot: ', vel_robot)
vel_world = np.matmul(M_rob2w,vel_robot)
new_pos_rob = np.zeros(3)
#new_pos_rob[0] = pos_rob[0] + Ts*vel_world[0]
#new_pos_rob[1] = pos_rob[1] + Ts*vel_world[1]
#new_pos_rob[2] = pos_rob[2] + Ts*vel_world[2]
incr_r = vel_robot[0]
incr_teta = vel_robot[1] * 180/pi
#print(incr_teta)
#print('radial increment:',incr_r,' angular increment: ',incr_teta)
new_pos_rob[0] = pos_rob[0] + incr_r*np.cos((pos_rob[2]+incr_teta/2)*pi/180)
new_pos_rob[1] = pos_rob[1] + incr_r*np.sin((pos_rob[2]+incr_teta/2)*pi/180)
new_pos_rob[2] = pos_rob[2] + incr_teta
if new_pos_rob[2] >360:
new_pos_rob[2] = new_pos_rob[2] - 360
elif new_pos_rob[2] < 0 :
new_pos_rob[2] = 360 + new_pos_rob[2]
#print(new_pos_rob)
return new_pos_rob
def select_target(pos_rob,path):
#print("path inside select target",path)
#print(np.size(path))
    shortest_dist = 100000000000
    shd2 = 100000000000
output=0
num=2
if path.shape[0]<=num:
num=path.shape[0]
for i in range(num): #compute the euclidean distance for all the possible points to go
#distance2 = np.sqrt(np.power(path[0,i]-pos_rob[0],2)+np.power(path[1,i]-pos_rob[1],2))
#distance = np.absolute((path[0,i]-pos_rob[0])*np.sin(pos_rob[2]*pi/180) - (path[1,i]-pos_rob[1])*np.cos(pos_rob[2]*pi/180))
distance = np.absolute((path[i,0]-pos_rob[0])*np.sin(pos_rob[2]*pi/180) - (path[i,1]-pos_rob[1])*np.cos(pos_rob[2]*pi/180))
#distance= np.sqrt(np.power(path[i,0]-pos_rob[0],2)+np.power(path[i,1]-pos_rob[1],2))
if distance <= shortest_dist :
#print("distance",distance)
shortest_dist = distance
output = i
if output == path.shape[0]-1:
output = i-1
if shortest_dist<2:
new_path = path[(output+1):,:]
target = path[output+1,:]
else:
new_path = path[(output):,:]
target = path[output,:]
print('Point to go : ',target,'and new path',new_path.shape)
#print('new path : ',new_path)
return target , new_path
def kalman_filter(odom_r,odom_l,pos_rob,marker_list, marker_map,Ts,P):
L = 14.5
R = 1.7
#From degrees to radians
odom_l = odom_l*pi/180
odom_r = odom_r*pi/180
# get increments
incr_r = R/2*(odom_r+odom_l)
incr_teta = R/L*(odom_l-odom_r) * 180/pi
## A and B matrixes
increment_R = R/2*(odom_r + odom_l)
increment_teta = R/L*(odom_l-odom_r) * 180/pi # We want the increment in teta in degrees
A = np.identity(3)
A[0,2] = -increment_R*np.sin((pos_rob[2]+increment_teta/2)*pi/180)
A[1,2] = increment_R*np.cos((pos_rob[2]+increment_teta/2)*pi/180)
    c = np.cos((pos_rob[2]+increment_teta/2)*pi/180)
    s = np.sin((pos_rob[2]+increment_teta/2)*pi/180)
B = np.zeros([3,2])
B[0,0] = R/2*c+R*increment_R*R/(2*L)*s
B[0,1] = R/2*c-R*increment_R*R/(2*L)*s
B[1,0] = R/2*s-increment_R*R/(2*L)*c
B[1,1] = R/2*s+increment_R*R/(2*L)*c
B[2,0] = -R/L
B[2,1] = R/L
# H Matrix
marker_list=np.array(marker_list)
markers = []
for i in range (0,marker_list.shape[0]):
#print("marker list",marker_list)
if marker_list[i,0] < 900:
distance = np.power(marker_map[i,0]-pos_rob[0],2) + np.power(marker_map[i,1]-pos_rob[1],2)
if distance != 0:
markers.append(i)
#The size of the H array is related with the number of markers we see
#H = np.zeros([len(markers)*3,3])
H = np.zeros([len(markers)*2,3])
#R = np.zeros([3*len(markers),3*len(markers)])
R = np.zeros([2*len(markers),2*len(markers)])
for i in range(0,len(markers)):
distance = np.power(marker_map[markers[i],0]-pos_rob[0],2) + np.power(marker_map[markers[i],1]-pos_rob[1],2)
'''
H[i*3,0] = (marker_map[markers[i],1]-pos_rob[1])/distance
H[i*3,1] = -(marker_map[markers[i],0]-pos_rob[0])/distance
H[i*3,2] = -1
H[i*3+1,0] = (pos_rob[0]-marker_map[markers[i],0])/np.sqrt(distance)
H[i*3+1,1]= (pos_rob[1]-marker_map[markers[i],1])/np.sqrt(distance)
H[i*3+1,2] = 0
H[i*3+2,0] = 0
H[i*3+2,1] = 0
H[i*3+2,2] = -1
'''
H[i*2,0] = (marker_map[markers[i],1]-pos_rob[1])/distance
H[i*2,1] = -(marker_map[markers[i],0]-pos_rob[0])/distance
H[i*2,2] = -1
H[i*2+1,0] = (pos_rob[0]-marker_map[markers[i],0])/np.sqrt(distance)
H[i*2+1,1]= (pos_rob[1]-marker_map[markers[i],1])/np.sqrt(distance)
H[i*2+1,2] = 0
#Noise of the measuremenets
#R[i*3,i*3] = 1/np.power(10,5)
#R[i*3+1,i*3+1] = 1/np.power(10,6)
#R[i*3+2,i*3+2] = 1/np.power(10,6)
R[i*2,i*2] = 1/np.power(10,5)
R[i*2+1,i*2+1] = 1/np.power(10,6)
# Process noise
#print(H)
#noise variance of the encoders
noise_enc = 1/np.power(10,7)
var_noise_enc = np.power(noise_enc/Ts,2)
#noise variance of the model
Q = np.zeros([3,3])
Q[0,0] = 1/np.power(10,4)
Q[1,1] = 1/np.power(10,4)
Q[2,2] = 1/np.power(7.62,5)
# Kalman init
#Prediction step
    # P_pred = A P A' + var_noise_enc * B B' + Q (matrix products, not elementwise)
    P_pred = np.add(np.add(np.dot(A, np.dot(P, np.transpose(A))), var_noise_enc*np.dot(B, np.transpose(B))), Q)
pos_rob_pred = np.ones(3)
pos_rob_pred[0] = pos_rob[0] + incr_r*np.cos((pos_rob[2]+incr_teta/2)*pi/180)
pos_rob_pred[1] = pos_rob[1] + incr_r*np.sin((pos_rob[2]+incr_teta/2)*pi/180)
pos_rob_pred[2] = (pos_rob[2] + incr_teta)*pi/180
if pos_rob_pred[2] > pi:
pos_rob_pred[2] = pos_rob_pred[2]-(2*pi)
if pos_rob_pred[2] < -pi:
pos_rob_pred[2] = pos_rob_pred[2]+(2*pi)
#Measurements prediction & measurements
#meas_vec = np.zeros(len(markers)*3)
meas_vec = np.zeros(len(markers)*2)
#z = np.zeros(3*len(markers))
z = np.zeros(2*len(markers))
for i in range(0,len(markers)):
#z[i*3] = np.arctan2(marker_map[markers[i],1]-pos_rob_pred[1],marker_map[markers[i],0]-pos_rob_pred[0])- pos_rob_pred[2]
#z[i*3+1] = np.sqrt(np.power(marker_map[markers[i],0]-pos_rob_pred[0],2) + np.power(marker_map[markers[i],1]-pos_rob_pred[1],2))
#z[i*3+2] = marker_map[markers[i],2]- pos_rob_pred[2]
z[i*2] = np.arctan2(marker_map[markers[i],1]-pos_rob_pred[1],marker_map[markers[i],0]-pos_rob_pred[0])- pos_rob_pred[2]
z[i*2+1] = np.sqrt(np.power(marker_map[markers[i],0]-pos_rob_pred[0],2) + np.power(marker_map[markers[i],1]-pos_rob_pred[1],2))
'''
if z[i*3] > pi:
z[i*3] = z[i*3]-(2*pi)
if z[i*3] < -pi:
z[i*3] = z[i*3]+(2*pi)
if z[i*3+2] > pi:
z[i*3+2] = z[i*3+2]-(2*pi)
if z[i*3+2] < -pi:
z[i*3+2] = z[i*3+2]+(2*pi)
'''
if z[i*2] > pi:
z[i*2] = z[i*2]-(2*pi)
if z[i*2] < -pi:
z[i*2] = z[i*2]+(2*pi)
'''
meas_vec[i*3] = marker_list[markers[i],0]
meas_vec[i*3+1] = marker_list[markers[i],1]
meas_vec[i*3+2] = marker_list[markers[i],2]
'''
meas_vec[i*2] = marker_list[markers[i],0]
meas_vec[i*2+1] = marker_list[markers[i],1]
HPHR = np.add(np.dot(H,np.dot(P_pred,np.transpose(H))),R)
K = np.dot(P_pred,np.dot(np.transpose(H),np.linalg.inv(HPHR)))
IKH = np.add(np.identity(3),-np.dot(K,H))
P = np.add(np.dot(IKH,np.dot(P_pred,np.transpose(IKH))),np.dot(K,np.dot(R,np.transpose(K))))
#Kalman's state estimation :
pos_incr = np.dot(K,np.add(z,-meas_vec))
#print('expected: ',z)
#print('real: ', meas_vec)
#print('measurement error : ',pos_incr)
pos_rob = np.add(pos_rob_pred,-pos_incr)
pos_rob[2] = pos_rob[2]* 180/pi
if pos_rob[2] >360:
pos_rob[2] = pos_rob[2] - 360
elif pos_rob[2] < 0 :
pos_rob[2] = 360 + pos_rob[2]
#print(new_pos_rob)
return pos_rob,P
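# For reference, the filter above implements the standard EKF recursion with a
# Joseph-form covariance update (x: pose, z: predicted measurements, meas_vec:
# observed measurements):
#   P_pred = A P A' + var_noise_enc * B B' + Q
#   K      = P_pred H' (H P_pred H' + R)^-1
#   P      = (I - K H) P_pred (I - K H)' + K R K'
#   x      = x_pred - K (z - meas_vec)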
def create_fake_measurements(pos_rob, odom_l,odom_r , marker_map, num_mar = 4):
L = 14.5
R = 1.7
# ODOMETRY
#From degrees to radians
odom_l = odom_l*pi/180
odom_r = odom_r*pi/180
# get increments
incr_r = R/2*(odom_r+odom_l)
incr_teta = R/L*(odom_l-odom_r) * 180/pi
# REAL COMPUTATION OF THE STATE :
pos_rob_pred = np.zeros(3)
pos_rob_pred[0] = pos_rob[0] + incr_r*np.cos((pos_rob[2]+incr_teta/2)*pi/180)
pos_rob_pred[1] = pos_rob[1] + incr_r*np.sin((pos_rob[2]+incr_teta/2)*pi/180)
pos_rob_pred[2] = (pos_rob[2] + incr_teta)*pi/180
# Measurements
z = np.zeros([num_mar,3])
for i in range(num_mar):
z[i,0] = np.arctan2(marker_map[i,1]-pos_rob_pred[1],marker_map[i,0]-pos_rob_pred[0])- pos_rob_pred[2]
z[i,1] = np.sqrt(np.power(marker_map[i,0]-pos_rob_pred[0],2) + np.power(marker_map[i,1]-pos_rob_pred[1],2))
z[i,2] = marker_map[i,2]- pos_rob_pred[2]
if z[i,0] > pi:
z[i,0] = z[i,0]-(2*pi)
if z[i,0] < -pi:
z[i,0] = z[i,0]+(2*pi)
        if z[i,2] > pi:
            z[i,2] = z[i,2]-(2*pi)
        if z[i,2] < -pi:
            z[i,2] = z[i,2]+(2*pi)
pos_rob = pos_rob_pred
pos_rob[2] = pos_rob[2]* 180/pi
if pos_rob[2] >360:
pos_rob[2] = pos_rob[2] - 360
elif pos_rob[2] < 0 :
pos_rob[2] = 360 + pos_rob[2]
return pos_rob , z
def euclidian_path_planning_control(pos_rob,pos_obj, Ts, points=5,K_x=1,K_y = 1, K_an = 1 , iteration = 0, path = [] , odom_r = 0,odom_l= 0):
if iteration == 0 :
path = compute_euclidean_path(pos_rob,pos_obj,points)
target, new_path = select_target(pos_rob, path)
#Only Odometry
estim_rob_pos= odometry_localization(pos_rob,odom_r,odom_l,Ts)
vel_wheels = robot_control(estim_rob_pos, target, K_x,K_y,K_an)
return estim_rob_pos,vel_wheels,new_path
def piecewise_path_planning_control(pos_rob,pos1,pos_obj, Ts, points=5,K_x=1,K_y = 1, K_an = 1 , iteration = 0, path = [] , odom_r = 0,odom_l= 0):
if iteration == 0 :
path = compute_piecewise_path(pos_rob,pos1,pos_obj,points)
target, new_path = select_target(pos_rob, path)
#Only Odometry
estim_rob_pos= odometry_localization(pos_rob,odom_r,odom_l,Ts)
vel_wheels = robot_control(estim_rob_pos, target, K_x,K_y,K_an)
return estim_rob_pos,vel_wheels,new_path
def A_star_path_planning_control(pos_rob,pos_obj,Map,Ts,K_x=1,K_y = 1, K_an = 1 , iteration = 0, path = [] , odom_r = 0,odom_l= 0):
if iteration == 0 :
path =compute_A_star_path(pos_rob[0:2],pos_obj,Map)
target, new_path = select_target(pos_rob, path)
#Only Odometry
estim_rob_pos= odometry_localization(pos_rob,odom_r,odom_l,Ts)
vel_wheels = robot_control(estim_rob_pos, target, K_x,K_y,K_an)
return estim_rob_pos,vel_wheels,new_path
def A_star_control(pos_rob,pos_obj,Map,Ts,K_x=1,K_y = 1, K_an = 1 , iteration = 0, path = [] , odom_r = 0,odom_l= 0):
if iteration == 0 :
path =compute_A_star_path(pos_rob[0:2],pos_obj,Map)
target, new_path = select_target(pos_rob, path)
print("###########################################")
print("target: ", target)
print("pos rob: ", pos_rob)
print("###########################################")
#Only Odometry
estim_rob_pos= pos_rob
vel_wheels = robot_control(estim_rob_pos, target, K_x,K_y,K_an)
return vel_wheels,new_path
def A_star_kalman(pos_rob,pos_obj,Map, Ts, points=5,K_x=1,K_y = 1, K_an = 1 , iteration = 0, path = [] , odom_r = 0,odom_l= 0, P=np.identity(3), marker_list = [],marker_map=[], real_bot=[]):
if iteration == 0 :
path =compute_A_star_path(pos_rob[0:2],pos_obj,Map)
target, new_path = select_target(pos_rob, path)
estim_rob_pos, P = kalman_filter(odom_r,odom_l,pos_rob,marker_list, marker_map,Ts,P)
vel_wheels = robot_control(estim_rob_pos, target, K_x,K_y,K_an)
return estim_rob_pos,vel_wheels,new_path , P
def euclidian_kalman(pos_rob,pos_obj, Ts, points=5,K_x=1,K_y = 1, K_an = 1 , iteration = 0, path = [] , odom_r = 0,odom_l= 0, P=np.identity(3), marker_list = [],marker_map=[], real_bot=[]):
if iteration == 0 :
path = compute_euclidean_path(pos_rob,pos_obj,points)
#print(path.shape)
target, new_path = select_target(pos_rob, path)
real_robot_pos, marker_list = create_fake_measurements(real_bot, odom_l,odom_r , marker_map)
estim_rob_pos, P = kalman_filter(odom_r,odom_l,pos_rob,marker_list, marker_map,Ts,P)
vel_wheels = robot_control(estim_rob_pos, target, K_x,K_y,K_an)
return estim_rob_pos,vel_wheels,new_path , P , real_robot_pos
| TheCamusean/DLRCev3 | rick/rick/mc_please_github_donot_fuck_with_this_ones.py | Python | mit | 16,563 |
import threading
import RPi.GPIO as GPIO
import time
import random
import math
from . import switch_timer
from .. import utils
from .. import pins
from .. import queue_common
from .. import event
# For reading the color sensors
from color_sensors import ColorSensors
from leds import LedStrip
from sensor_targets import *
from .rpi_ws281x.python.neopixel import *
Command = queue_common.Command
COMMAND_SET_MODE = 1
MODE_QUIT = -1
# In this mode don't do anything
MODE_IDLE = 0
# In this mode just records the time between lifting and replacing the Cube.
MODE_TIME = 1
# In this mode generates a pattern and checks for that pattern on down.
MODE_PATTERN = 2
# In this mode use the test interface
MODE_TEST = 3
CLEAR_INDEX = 0
RED_INDEX = 1
GREEN_INDEX = 2
BLUE_INDEX = 3
RED = 0xaa0000
GREEN = 0x00aa00
BLUE = 0x0000aa
YELLOW = 0xcccc00
ORANGE = 0xbb4000
WHITE = 0xaaaaaa
LED_COLORS = [RED, GREEN, BLUE, YELLOW, ORANGE, WHITE]
LED_CODES = ['R', 'G', 'B', 'Y', 'O', 'W']
TEST_PATTERN = ['W', 'G', 'B', 'Y', 'O', 'R', 'G', 'B', 'G']
TEST_PATTERN2 = ['W', 'W', 'W', 'B', 'B', 'G', 'B', 'B', 'B']
ALL_RED = ['R', 'R', 'R', 'R', 'R', 'R', 'R', 'R', 'R']
LED_PATTERN_BRIGHTNESS = 20
# The LEDs and the color sensors are transposes of each other, so this
# mapping works both ways.
DISPLAY_TO_READ_INDEX = [0, 3, 6, 1, 4, 7, 2, 5, 8]
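# Worked example of the mapping above: laying the nine indices out as a 3x3
# grid (row-major), display position (row r, col c) corresponds to sensor
# position (row c, col r).  E.g. display index 1 (row 0, col 1) maps to read
# index 3 (row 1, col 0); because transposing twice is the identity, the same
# table converts in both directions: DISPLAY_TO_READ_INDEX[1] == 3 and
# DISPLAY_TO_READ_INDEX[3] == 1.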
LED_SENSOR_COLOR = Color(255, 235, 130)
LED_SENSOR_BRIGHTNESS = 15
# How long to flash each color to indicate the solve timer is going.
TIMER_BLINK_TIME = 0.5
# Time to show the pattern to solve for in seconds.
PATTERN_DISPLAY_TIME = 5.0
# How frequently to check the pattern while the cube is down in case we misread it.
CHECK_PATTERN_DELAY = 0.25
# Timeout for cancelling the game if not won yet in seconds.
RUNNING_TIMEOUT = 30 * 60
# The max attempts before the game is lost.
MAX_ATTEMPTS = 25
# All the target arrays in one big array
COLOR_TARGETS = COLOR_TARGETS_15
STATE_NOT_RUNNING = 0
STATE_WAITING_TO_START = 1
STATE_DISPLAYING_PATTERN = 2
STATE_WAITING_WRONG_PATTERN = 3
STATE_TIMING = 4
STATE_CUBE_NOT_DOWN = 5
# Does not solve a Rubik cube, but either times how long it took to solve or
# requires a specific pattern be created.
class RubikSolver(threading.Thread, queue_common.QueueCommon):
def __init__(self):
        self._state = STATE_NOT_RUNNING
self._mode = MODE_IDLE
self._source = event.SOURCE_MATCH
self._pattern = []
self._result = []
self._attempts = 0
self._start_time = 0
self._pattern_shown_time = 0
self._last_check_time = 0
self._timer = switch_timer.SwitchTimer()
self._stop_event = threading.Event()
self._color_sensors = None
self._led_strip = None
self._waiting_led_on = False
self._blink_color = None
threading.Thread.__init__(self)
queue_common.QueueCommon.__init__(self)
def handle_command(self, command):
if command.command == COMMAND_SET_MODE:
new_mode = command.data
self.set_mode(new_mode)
def set_mode(self, mode):
if mode != self._mode:
self.hide_pattern()
self._state = STATE_NOT_RUNNING
self._mode = mode
if self._mode == MODE_TIME:
self._source = event.SOURCE_TIMER
else:
self._source = event.SOURCE_MATCH
def generate_pattern(self):
if utils.in_test():
self._pattern = TEST_PATTERN2
return
self._pattern = []
for i in range(9):
self._pattern.append(LED_CODES[random.randint(0, 5)])
def show_pattern(self, pattern):
self._led_strip.set_brightness(LED_PATTERN_BRIGHTNESS)
for i in range(len(pattern)):
self._led_strip.set_led(i, LED_COLORS[LED_CODES.index(pattern[i])])
def hide_pattern(self):
self._led_strip.set_brightness(0)
self._led_strip.set_all_leds(0)
def set_led(self, led, color):
self._led_strip.set_brightness(LED_PATTERN_BRIGHTNESS)
self._led_strip.set_led(led, color)
def set_all_leds(self, color):
self._led_strip.set_brightness(LED_PATTERN_BRIGHTNESS)
self._led_strip.set_all_leds(color)
def is_all_same(self):
color_codes = self.read_colors()
color = color_codes[0]
for i in range(1, len(color_codes)):
if color_codes[i] != color:
return False
return True
def is_pattern_correct(self):
color_codes = self.read_colors()
print("Checking colors: expected " + str(self._pattern) + ", actual " + str(color_codes))
for i in range(len(color_codes)):
if color_codes[i] != self._pattern[DISPLAY_TO_READ_INDEX[i]]:
return False
return True
def read_colors(self):
self._led_strip.set_brightness(LED_SENSOR_BRIGHTNESS)
self._led_strip.set_all_leds(LED_SENSOR_COLOR)
time.sleep(0.05)
results = []
for i in range(9):
guess_index = guess_color(i, self._read_color(i))
if guess_index >= 0:
results.append(LED_CODES[guess_index])
else:
results.append('F')
self.hide_pattern()
time.sleep(0.05)
return results
def cube_is_down(self):
colors = self._read_color(4)
if colors[CLEAR_INDEX] <= 5:
return True
return False
def check_timeout(self):
if self._state != STATE_NOT_RUNNING:
if time.time() - self._start_time > RUNNING_TIMEOUT:
self._state = STATE_NOT_RUNNING
self._mode = MODE_IDLE
self.send_event(event.Event(self._source, event.EVENT_FAILURE, 3599.99))
def update_solver_state(self):
# Timeout protection
self.check_timeout()
if self._state == STATE_NOT_RUNNING:
self._start_time = time.time()
if self.cube_is_down():
self._state = STATE_WAITING_TO_START
else:
self._state = STATE_CUBE_NOT_DOWN
return
if self._state == STATE_CUBE_NOT_DOWN:
blink_count = int(math.floor((time.time() - self._start_time) / TIMER_BLINK_TIME))
turn_on_wait = (blink_count % 2) == 1
if turn_on_wait != self._waiting_led_on:
if turn_on_wait:
self.set_led(4, 0xbbbbbb)
else:
self.hide_pattern()
self._waiting_led_on = turn_on_wait
elif not turn_on_wait:
if self.cube_is_down():
self._state = STATE_WAITING_TO_START
return
if self._state == STATE_WAITING_TO_START:
if not self.cube_is_down():
self._start_time = time.time()
self._state = STATE_DISPLAYING_PATTERN
return
if self._state == STATE_DISPLAYING_PATTERN:
curr_time = self._update_time()
color_index = int(math.floor(curr_time / TIMER_BLINK_TIME))
if color_index < len(LED_COLORS):
self.set_all_leds(LED_COLORS[color_index])
else:
self.hide_pattern()
self._state = STATE_TIMING
return
if self._state == STATE_TIMING:
curr_time = self._update_time()
if self.cube_is_down() and self.is_all_same():
# Double check the cube is still down and we didn't try to read
# the ambient light or something
time.sleep(0.2)
if self.cube_is_down():
self._state = STATE_NOT_RUNNING
self.send_event(event.Event(self._source, event.EVENT_SUCCESS, curr_time))
self.set_mode(MODE_IDLE)
return
    # While in the pattern-matching game the state machine looks like this:
# 1. Not running
# 2. Waiting for the cube to be picked up
# 3. Showing the pattern
# 4. Waiting for the cube to be put down again
# 5. Checking the pattern, if wrong go back to 3 on pick up.
# 6. If correct, send the winning time and go back to idle mode.
def update_pattern_state(self):
# Timeout protection
self.check_timeout()
if self._state == STATE_NOT_RUNNING:
self.generate_pattern()
self._attempts = 0
# This isn't the actual start time but is used for the timeout
self._start_time = time.time()
self._state = STATE_WAITING_TO_START
return
if self._state == STATE_WAITING_TO_START:
if not self.cube_is_down():
self.show_pattern(self._pattern)
self._start_time = time.time()
self._pattern_shown_time = self._start_time
self._state = STATE_DISPLAYING_PATTERN
return
if self._state == STATE_DISPLAYING_PATTERN:
self._update_time()
if (time.time() - self._pattern_shown_time) > 5:
self.hide_pattern()
self._state = STATE_TIMING
return
if self._state == STATE_TIMING:
curr_time = self._update_time()
if self.cube_is_down():
if self.is_pattern_correct():
self._state = STATE_NOT_RUNNING
self.send_event(event.Event(self._source, event.EVENT_SUCCESS, curr_time))
self.set_mode(MODE_IDLE)
else:
self._last_check_time = curr_time
self._attempts += 1
if self._attempts >= MAX_ATTEMPTS:
self._state = STATE_NOT_RUNNING
self.send_event(event.Event(self._source, event.EVENT_FAILURE, curr_time))
self.set_mode(MODE_IDLE)
else:
self._state = STATE_WAITING_WRONG_PATTERN
return
if self._state == STATE_WAITING_WRONG_PATTERN:
curr_time = self._update_time()
if not self.cube_is_down():
self.show_pattern(self._pattern)
self._state = STATE_DISPLAYING_PATTERN
self._pattern_shown_time = time.time()
elif curr_time - self._last_check_time > CHECK_PATTERN_DELAY:
if self.is_pattern_correct():
self._state = STATE_NOT_RUNNING
self.send_event(event.Event(self._source, event.EVENT_SUCCESS, curr_time))
self.set_mode(MODE_IDLE)
self._last_check_time = curr_time
# Sends the current playtime as an event and returns the playtime.
def _update_time(self):
curr_time = time.time() - self._start_time
self.send_event(event.Event(self._source, event.EVENT_UPDATE, curr_time))
return curr_time
def _read_color(self, index):
self._color_sensors.set_sensor(index)
colors = self._color_sensors.getColors()
return colors
def _setup(self):
print("Setup of RubikSolver")
if self._led_strip is not None:
return
self._color_sensors = ColorSensors()
self._led_strip = LedStrip()
def _teardown(self):
print("Teardown of RubikSolver")
GPIO.remove_event_detect(pins.RUBIK_CUBE_SWITCH)
self._color_sensors.clear_active()
self.hide_pattern()
# Is this cleanup necessary?
del self._led_strip
self._led_strip = None
del self._color_sensors
self._color_sensors = None
def stop(self):
print("Stopping RubikSolver! Time is " + str(utils.curr_time_s()))
self._stop_event.set()
def run(self):
self._setup()
while self._mode != MODE_QUIT:
if self._mode == MODE_TEST:
print("test (p)attern, (g)uessing, or (c)ollect data? (q)uit")
value = utils.get_char()
if value == 'p':
self.test_pattern_display()
elif value == 'c':
csv = open("csv_colors.txt", 'w')
self.collect_color_data(csv)
csv.close()
elif value == 'g':
self.test_guessing()
elif value == 'q':
self._mode = MODE_QUIT
elif self._mode == MODE_IDLE:
time.sleep(0.1)
elif self._mode == MODE_PATTERN:
self.update_pattern_state()
time.sleep(0.05)
elif self._mode == MODE_TIME:
self.update_solver_state()
time.sleep(0.05)
self.check_queue()
self._teardown()
def test_pattern_display(self):
pattern = TEST_PATTERN
self.show_pattern(pattern)
utils.get_char()
self.hide_pattern()
self.generate_pattern()
        self.show_pattern(self._pattern)  # show the freshly generated pattern rather than the static test one
utils.get_char()
self.hide_pattern()
def test_guessing(self):
while True:
print("Check colors? Y/N")
value = utils.get_char()
if value.upper() != 'Y':
return
print("Colors: " + str(self.read_colors()))
def collect_color_data(self, csv):
self._setup()
print("Ready to collect color data. Press r to retry or any other key to continue.")
value = 'r'
while value == 'r':
self._led_strip.set_brightness(0)
time.sleep(.2)
self._led_strip.set_brightness(LED_SENSOR_BRIGHTNESS)
self._led_strip.set_all_leds(LED_SENSOR_COLOR)
value = utils.get_char()
value = 'y'
while value == 'y':
sums = [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]]
samples = 11
incr_brightness = False
brightness = -5 if incr_brightness else LED_SENSOR_BRIGHTNESS
csv.write("Sensor, Red/Green ratio, Red/Blue ratio, Green/Blue ratio, sample\n")
for i in range(9 * samples):
sensor = i % 9
if sensor is 0:
print("\nSAMPLE " + str(i / 9) + ":\n")
if incr_brightness:
brightness += 10
self._led_strip.set_brightness(brightness)
# self.color_sensors.set_sensor(sensor)
# colors = self.color_sensors.getColors()
colors = self._read_color(sensor)
red_green_ratio = colors[RED_INDEX] / float(colors[GREEN_INDEX])
red_blue_ratio = colors[RED_INDEX] / float(colors[BLUE_INDEX])
green_blue_ratio = colors[GREEN_INDEX] / float(colors[BLUE_INDEX])
sums[sensor][0] += red_green_ratio
sums[sensor][1] += red_blue_ratio
sums[sensor][2] += green_blue_ratio
guess = get_color_ratio_string(colors)
csv.write(str(sensor) + ", " + guess + ", " + str(i) + "\n")
print(str(sensor) + ": " + guess + " at Brightness=" + str(brightness))
time.sleep(.1)
print("\n\nAverages:\n")
for i in range(9):
print("Sensor " + str(i) + ": r/g=" + str(sums[i][0] / samples)
+ ", r/b=" + str(sums[i][1] / samples)
+ ", g/b=" + str(sums[i][2] / samples) + "\n")
print("\n[")
for i in range(9):
print("[" + str(round(sums[i][0] / samples, 3))
+ ", " + str(round(sums[i][1] / samples, 3))
+ ", " + str(round(sums[i][2] / samples, 3)) + "], # sensor " + str(i))
print("]")
print("\n\n Again? y/n")
value = utils.get_char()
def get_color_ratio_string(colors):
if colors[CLEAR_INDEX] < 1 or colors[GREEN_INDEX] < 1 or colors[BLUE_INDEX] < 1:
return "none, none, none"
red_green_ratio = colors[RED_INDEX] / float(colors[GREEN_INDEX])
red_blue_ratio = colors[RED_INDEX] / float(colors[BLUE_INDEX])
green_blue_ratio = colors[GREEN_INDEX] / float(colors[BLUE_INDEX])
ratio_string = str(red_green_ratio) + ", " + str(red_blue_ratio) + ", " + str(green_blue_ratio)
return ratio_string
def guess_color(sensor, colors):
if colors[GREEN_INDEX] < 1 or colors[BLUE_INDEX] < 1:
return -1 # Too dark
red_green_ratio = colors[RED_INDEX] / float(colors[GREEN_INDEX])
red_blue_ratio = colors[RED_INDEX] / float(colors[BLUE_INDEX])
green_blue_ratio = colors[GREEN_INDEX] / float(colors[BLUE_INDEX])
dist = 500
best_guess = -1
for color_index in range(6):
rg_target = COLOR_TARGETS[color_index][sensor][0]
rb_target = COLOR_TARGETS[color_index][sensor][1]
gb_target = COLOR_TARGETS[color_index][sensor][2]
curr_dist = math.sqrt((red_green_ratio - rg_target) ** 2
+ (red_blue_ratio - rb_target) ** 2
+ (green_blue_ratio - gb_target) ** 2)
if curr_dist < dist:
dist = curr_dist
best_guess = color_index
    if best_guess == -1:
print("Bad reading! r/g=" + str(red_green_ratio) + ", r/b=" + str(red_blue_ratio))
return 0
print("Guess is " + str(LED_CODES[best_guess]) + " at dist " + str(dist))
return best_guess
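# Worked example of the matching above (illustrative numbers only): a raw
# reading of red=60, green=30, blue=20 gives the ratio triple
# (r/g, r/b, g/b) = (2.0, 3.0, 1.5). guess_color() then returns whichever of
# the six colors has its COLOR_TARGETS[color][sensor] entry closest to that
# triple in Euclidean distance.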
def test_color_sensors():
bm017 = ColorSensors(True)
bm017.debug = True
bm017.readStatus()
bm017.isSDL_BM017There()
bm017.getColors()
bm017.readStatus()
bm017.disableDevice()
bm017.setIntegrationTimeAndGain(0x00, 0x03)
bm017.getColors()
bm017.readStatus()
bm017.readStatus()
# this will turn on the LED if LEDON is connected to INT and LEDVDD is connected to VDD_LED
bm017.setInterrupt(True)
time.sleep(5.0)
bm017.setInterrupt(False)
| RoboErik/RUBIK | Rubik/RubikSolver/rubik_solver.py | Python | apache-2.0 | 18,065 |
from pySecDec import make_package
make_package(
name = 'easy',
integration_variables = ['x','y'],
regulators = ['eps'],
requested_orders = [0],
polynomials_to_decompose = ['(x+y)^(-2+eps)'],
)
| mppmu/secdec | examples/easy_cuda/generate_easy.py | Python | gpl-3.0 | 197 |
"""
WSGI config for captain project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
import site
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "captain.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "captain.settings")
# Add the app dir to the python path so we can import manage.
wsgidir = os.path.dirname(__file__)
site.addsitedir(os.path.abspath(os.path.join(wsgidir, '../')))
# Manage adds /vendor to the Python path.
import manage
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| mozilla/captain | captain/wsgi.py | Python | mpl-2.0 | 1,653 |
"""
Django settings for erp project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
ROOT_PATH = os.path.realpath(os.path.join(os.path.realpath(__file__), '../..'))
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'dg@x1l%)dr)zb8lul#av4(12f=0x#6ep9meq1x#b+oz6o0cubf'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'extjs',
# 'apps.depot',
# 'django-groundwork',
'django_extensions',
'apps.myerp',
# 'apps.sims',
'apps.workflow',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
#STATICFILES_FINDERS = (
# 'django.contrib.staticfiles.finders.AppDirectoriesFinder',
#)
ROOT_URLCONF = 'erp.urls'
WSGI_APPLICATION = 'erp.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'shopping',
'USER': 'root',
'PASSWORD': '112358',
'PORT': 3306,
'HOST': '',
'OPTIONS': {
'autocommit': True,
},
}
}
TEMPLATE_LOADERS = (
'django.template.loaders.app_directories.Loader',
#'django.template.loaders.filesystem.Loader',
)
#TEMPLATE_DIRS = (
# os.path.join(BASE_DIR, 'apps/depot/templates'),
#)
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
#LANGUAGE_CODE = 'en-us'
LANGUAGE_CODE = 'zh-cn'
TIME_ZONE = 'UTC'
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
# STATIC_ROOT = '/360云盘/PythonProject/erp/static/'
# LOGIN_URL = '/sims/login/'
#LOGOUT_URL = '/sims/logout/'
# LOGIN_REDIRECT_URL = '/sims/index/'
INTERNAL_IPS = (
'127.0.0.1',
)
# Configuration for data stored as local files
DATA_DOCUMENTED_SETTINGS = {
'BASE_DIR': os.path.join(ROOT_PATH, 'apps/myerp/static/myerp/localdata/'),
'product_catagory_primary_file_name': 'product_catagory_primary.json',
'product_catagory_file_name': 'product_catagory.json',
} | HarrisonHDU/myerp | erp/settings.py | Python | mit | 3,334 |
#!/usr/bin/python
#
# (c) 2017, Dario Zanzico ([email protected])
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: docker_swarm_service
author: "Dario Zanzico (@dariko), Jason Witkowski (@jwitko)"
short_description: docker swarm service
description: |
Manage docker services. Allows live altering of already defined services
version_added: "2.7"
options:
name:
required: true
description:
- Service name
image:
required: true
description:
- Service image path and tag.
Maps docker service IMAGE parameter.
state:
required: true
default: present
description:
- Service state.
choices:
- present
- absent
args:
required: false
default: []
description:
- List comprised of the command and the arguments to be run inside
- the container
constraints:
required: false
default: []
description:
- List of the service constraints.
- Maps docker service --constraint option.
hostname:
required: false
default: ""
description:
- Container hostname
- Maps docker service --hostname option.
- Requires api_version >= 1.25
tty:
required: false
type: bool
default: False
description:
- Allocate a pseudo-TTY
- Maps docker service --tty option.
- Requires api_version >= 1.25
dns:
required: false
default: []
description:
- List of custom DNS servers.
- Maps docker service --dns option.
- Requires api_version >= 1.25
dns_search:
required: false
default: []
description:
- List of custom DNS search domains.
- Maps docker service --dns-search option.
- Requires api_version >= 1.25
dns_options:
required: false
default: []
description:
- List of custom DNS options.
- Maps docker service --dns-option option.
- Requires api_version >= 1.25
force_update:
required: false
type: bool
default: False
description:
- Force update even if no changes require it.
- Maps to docker service update --force option.
- Requires api_version >= 1.25
labels:
required: false
description:
- List of the service labels.
- Maps docker service --label option.
container_labels:
required: false
description:
- List of the service containers labels.
- Maps docker service --container-label option.
default: []
endpoint_mode:
required: false
description:
- Service endpoint mode.
- Maps docker service --endpoint-mode option.
default: vip
choices:
- vip
- dnsrr
env:
required: false
default: []
description:
- List of the service environment variables.
- Maps docker service --env option.
log_driver:
required: false
default: json-file
description:
- Configure the logging driver for a service
log_driver_options:
required: false
default: []
description:
- Options for service logging driver
limit_cpu:
required: false
default: 0.000
description:
- Service CPU limit. 0 equals no limit.
- Maps docker service --limit-cpu option.
reserve_cpu:
required: false
default: 0.000
description:
- Service CPU reservation. 0 equals no reservation.
- Maps docker service --reserve-cpu option.
limit_memory:
required: false
default: 0
description:
- Service memory limit in MB. 0 equals no limit.
- Maps docker service --limit-memory option.
reserve_memory:
required: false
default: 0
description:
- Service memory reservation in MB. 0 equals no reservation.
- Maps docker service --reserve-memory option.
mode:
required: false
default: replicated
description:
- Service replication mode.
- Maps docker service --mode option.
mounts:
required: false
description:
- List of dictionaries describing the service mounts.
- Every item must be a dictionary exposing the keys source, target, type (defaults to 'bind'), readonly (defaults to false)
- Maps docker service --mount option.
default: []
secrets:
required: false
description:
- List of dictionaries describing the service secrets.
- Every item must be a dictionary exposing the keys secret_id, secret_name, filename, uid (defaults to 0), gid (defaults to 0), mode (defaults to 0o444)
- Maps docker service --secret option.
default: []
configs:
required: false
description:
- List of dictionaries describing the service configs.
- Every item must be a dictionary exposing the keys config_id, config_name, filename, uid (defaults to 0), gid (defaults to 0), mode (defaults to 0o444)
- Maps docker service --config option.
default: null
networks:
required: false
default: []
description:
- List of the service networks names.
- Maps docker service --network option.
publish:
default: []
required: false
description:
- List of dictionaries describing the service published ports.
- Every item must be a dictionary exposing the keys published_port, target_port, protocol (defaults to 'tcp')
- Only used with api_version >= 1.25
- If api_version >= 1.32 and docker python library >= 3.0.0 attribute 'mode' can be set to 'ingress' or 'host' (default 'ingress').
replicas:
required: false
default: -1
description:
- Number of containers instantiated in the service. Valid only if ``mode=='replicated'``.
- If set to -1, and service is not present, service replicas will be set to 1.
- If set to -1, and service is present, service replicas will be unchanged.
- Maps docker service --replicas option.
restart_policy:
required: false
default: none
description:
- Restart condition of the service.
- Maps docker service --restart-condition option.
choices:
- none
- on-failure
- any
restart_policy_attempts:
required: false
default: 0
description:
- Maximum number of service restarts.
- Maps docker service --restart-max-attempts option.
restart_policy_delay:
required: false
default: 0
description:
- Delay between restarts.
- Maps docker service --restart-delay option.
restart_policy_window:
required: false
default: 0
description:
- Restart policy evaluation window.
- Maps docker service --restart-window option.
update_delay:
required: false
default: 10
description:
- Rolling update delay
- Maps docker service --update-delay option
update_parallelism:
required: false
default: 1
description:
- Rolling update parallelism
- Maps docker service --update-parallelism option
update_failure_action:
required: false
default: continue
description:
- Action to take in case of container failure
- Maps to docker service --update-failure-action option
choices:
- continue
- pause
update_monitor:
required: false
default: 5000000000
description:
- Time to monitor updated tasks for failures, in nanoseconds.
- Maps to docker service --update-monitor option
update_max_failure_ratio:
required: false
default: 0.00
description:
- Fraction of tasks that may fail during an update before the failure action is invoked
- Maps to docker service --update-max-failure-ratio
update_order:
required: false
default: null
description:
- Specifies the order of operations when rolling out an updated task.
- Maps to docker service --update-order
- Requires docker api version >= 1.29
user:
required: false
default: root
description:
- username or UID.
- "If set to C(null) the image provided value (or the one already
set for the service) will be used"
extends_documentation_fragment:
- docker
requirements:
- "docker-py >= 2.0"
- "Please note that the L(docker-py,https://pypi.org/project/docker-py/) Python
module has been superseded by L(docker,https://pypi.org/project/docker/)
(see L(here,https://github.com/docker/docker-py/issues/1310) for details).
Version 2.1.0 or newer is only available with the C(docker) module."
'''
RETURN = '''
ansible_swarm_service:
returned: always
type: dict
description:
- Dictionary of variables representing the current state of the service.
Matches the module parameters format.
- Note that facts are not part of registered vars but accessible directly.
sample: '{
"args": [
"sleep",
"3600"
],
"constraints": [],
"container_labels": {},
"endpoint_mode": "vip",
"env": [
"ENVVAR1=envvar1"
],
"force_update": False,
"image": "alpine",
"labels": {},
"limit_cpu": 0.0,
"limit_memory": 0,
"log_driver": "json-file",
"log_driver_options": {},
"mode": "replicated",
"mounts": [
{
"source": "/tmp/",
"target": "/remote_tmp/",
"type": "bind"
}
],
"secrets": [],
"configs": [],
"networks": [],
"publish": [],
"replicas": 1,
"reserve_cpu": 0.0,
"reserve_memory": 0,
"restart_policy": "any",
"restart_policy_attempts": 5,
"restart_policy_delay": 0,
"restart_policy_window": 30,
"update_delay": 10,
"update_parallelism": 1,
"update_failure_action": "continue",
"update_monitor": 5000000000
"update_max_failure_ratio": 0,
"update_order": "stop-first"
}'
changes:
returned: always
description:
- List of changed service attributes if a service has been altered,
        [] otherwise
type: list
sample: ['container_labels', 'replicas']
rebuilt:
returned: always
description:
- True if the service has been recreated (removed and created)
type: bool
sample: True
'''
EXAMPLES = '''
- name: define myservice
docker_swarm_service:
name: myservice
image: "alpine"
args:
- "sleep"
- "3600"
mounts:
- source: /tmp/
target: /remote_tmp/
type: bind
env:
- "ENVVAR1=envvar1"
log_driver: fluentd
log_driver_options:
fluentd-address: "127.0.0.1:24224"
fluentd-async-connect: true
tag: "{{.Name}}/{{.ID}}"
restart_policy: any
restart_policy_attempts: 5
restart_policy_window: 30
register: dss_out1
- name: change myservice.env
docker_swarm_service:
name: myservice
image: "alpine"
args:
- "sleep"
- "7200"
mounts:
- source: /tmp/
target: /remote_tmp/
type: bind
env:
- "ENVVAR1=envvar1"
restart_policy: any
restart_policy_attempts: 5
restart_policy_window: 30
register: dss_out2
- name: test for changed myservice facts
fail:
msg: unchanged service
when: "{{ dss_out1 == dss_out2 }}"
- name: change myservice.image
docker_swarm_service:
name: myservice
image: "alpine:edge"
args:
- "sleep"
- "7200"
mounts:
- source: /tmp/
target: /remote_tmp/
type: bind
env:
- "ENVVAR1=envvar1"
restart_policy: any
restart_policy_attempts: 5
restart_policy_window: 30
register: dss_out3
- name: test for changed myservice facts
fail:
msg: unchanged service
when: "{{ dss_out2 == dss_out3 }}"
- name: remove mount
docker_swarm_service:
name: myservice
image: "alpine:edge"
args:
- "sleep"
- "7200"
env:
- "ENVVAR1=envvar1"
restart_policy: any
restart_policy_attempts: 5
restart_policy_window: 30
register: dss_out4
- name: test for changed myservice facts
fail:
msg: unchanged service
when: "{{ dss_out3 == dss_out4 }}"
- name: keep service as it is
docker_swarm_service:
name: myservice
image: "alpine:edge"
args:
- "sleep"
- "7200"
env:
- "ENVVAR1=envvar1"
restart_policy: any
restart_policy_attempts: 5
restart_policy_window: 30
register: dss_out5
- name: test for changed service facts
fail:
msg: changed service
when: "{{ dss_out5 != dss_out5 }}"
- name: remove myservice
docker_swarm_service:
name: myservice
state: absent
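# Illustrative addition (not from the original examples): publishing ports
# uses the documented publish list of dicts with published_port, target_port,
# protocol (defaults to tcp) and, for api_version >= 1.32 with docker >= 3.0.0,
# an optional mode ('ingress' or 'host').
- name: publish port 80 of myservice on the routing mesh
  docker_swarm_service:
    name: myservice
    image: "nginx:alpine"
    publish:
      - published_port: 8080
        target_port: 80
        protocol: tcp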
'''
import time
from ansible.module_utils.docker_common import (
DockerBaseClass,
AnsibleDockerClient,
docker_version,
DifferenceTracker,
)
from ansible.module_utils.basic import human_to_bytes
from ansible.module_utils._text import to_text
try:
from distutils.version import LooseVersion
from docker import types
except Exception:
# missing docker-py handled in ansible.module_utils.docker
pass
class DockerService(DockerBaseClass):
def __init__(self):
super(DockerService, self).__init__()
self.constraints = []
self.image = ""
self.args = []
self.endpoint_mode = "vip"
self.dns = []
self.hostname = ""
self.tty = False
self.dns_search = []
self.dns_options = []
self.env = []
self.force_update = None
self.log_driver = "json-file"
self.log_driver_options = {}
self.labels = {}
self.container_labels = {}
self.limit_cpu = 0.000
self.limit_memory = 0
self.reserve_cpu = 0.000
self.reserve_memory = 0
self.mode = "replicated"
self.user = "root"
self.mounts = []
self.configs = None
self.secrets = []
self.constraints = []
self.networks = []
self.publish = []
self.replicas = -1
self.service_id = False
self.service_version = False
self.restart_policy = None
self.restart_policy_attempts = None
self.restart_policy_delay = None
self.restart_policy_window = None
self.update_delay = None
self.update_parallelism = 1
self.update_failure_action = "continue"
self.update_monitor = 5000000000
self.update_max_failure_ratio = 0.00
self.update_order = None
def get_facts(self):
return {
'image': self.image,
'mounts': self.mounts,
'configs': self.configs,
'networks': self.networks,
'args': self.args,
'tty': self.tty,
'dns': self.dns,
'dns_search': self.dns_search,
'dns_options': self.dns_options,
'hostname': self.hostname,
'env': self.env,
'force_update': self.force_update,
'log_driver': self.log_driver,
'log_driver_options': self.log_driver_options,
'publish': self.publish,
'constraints': self.constraints,
'labels': self.labels,
'container_labels': self.container_labels,
'mode': self.mode,
'replicas': self.replicas,
'endpoint_mode': self.endpoint_mode,
'restart_policy': self.restart_policy,
'limit_cpu': self.limit_cpu,
'limit_memory': self.limit_memory,
'reserve_cpu': self.reserve_cpu,
'reserve_memory': self.reserve_memory,
'restart_policy_delay': self.restart_policy_delay,
'restart_policy_attempts': self.restart_policy_attempts,
'restart_policy_window': self.restart_policy_window,
'update_delay': self.update_delay,
'update_parallelism': self.update_parallelism,
'update_failure_action': self.update_failure_action,
'update_monitor': self.update_monitor,
'update_max_failure_ratio': self.update_max_failure_ratio,
'update_order': self.update_order}
@staticmethod
def from_ansible_params(ap, old_service):
s = DockerService()
s.constraints = ap['constraints']
s.image = ap['image']
s.args = ap['args']
s.endpoint_mode = ap['endpoint_mode']
s.dns = ap['dns']
s.dns_search = ap['dns_search']
s.dns_options = ap['dns_options']
s.hostname = ap['hostname']
s.tty = ap['tty']
s.env = ap['env']
s.log_driver = ap['log_driver']
s.log_driver_options = ap['log_driver_options']
s.labels = ap['labels']
s.container_labels = ap['container_labels']
s.limit_cpu = ap['limit_cpu']
s.reserve_cpu = ap['reserve_cpu']
s.mode = ap['mode']
s.networks = ap['networks']
s.restart_policy = ap['restart_policy']
s.restart_policy_attempts = ap['restart_policy_attempts']
s.restart_policy_delay = ap['restart_policy_delay']
s.restart_policy_window = ap['restart_policy_window']
s.update_delay = ap['update_delay']
s.update_parallelism = ap['update_parallelism']
s.update_failure_action = ap['update_failure_action']
s.update_monitor = ap['update_monitor']
s.update_max_failure_ratio = ap['update_max_failure_ratio']
s.update_order = ap['update_order']
s.user = ap['user']
if ap['force_update']:
s.force_update = int(str(time.time()).replace('.', ''))
if ap['replicas'] == -1:
if old_service:
s.replicas = old_service.replicas
else:
s.replicas = 1
else:
s.replicas = ap['replicas']
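        # limit_memory / reserve_memory arrive as human-readable strings
        # (e.g. '512MB'); convert them to a plain byte count for the docker API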
for param_name in ['reserve_memory', 'limit_memory']:
if ap.get(param_name):
try:
setattr(s, param_name, human_to_bytes(ap[param_name]))
except ValueError as exc:
raise Exception("Failed to convert %s to bytes: %s" % (param_name, exc))
s.publish = []
for param_p in ap['publish']:
service_p = {}
service_p['protocol'] = param_p.get('protocol', 'tcp')
service_p['mode'] = param_p.get('mode', None)
service_p['published_port'] = int(param_p['published_port'])
service_p['target_port'] = int(param_p['target_port'])
if service_p['protocol'] not in ['tcp', 'udp']:
raise ValueError("got publish.protocol '%s', valid values:'tcp', 'udp'" %
service_p['protocol'])
if service_p['mode'] not in [None, 'ingress', 'host']:
raise ValueError("got publish.mode '%s', valid values:'ingress', 'host'" %
service_p['mode'])
s.publish.append(service_p)
s.mounts = []
for param_m in ap['mounts']:
service_m = {}
service_m['readonly'] = bool(param_m.get('readonly', False))
service_m['type'] = param_m.get('type', 'bind')
service_m['source'] = param_m['source']
service_m['target'] = param_m['target']
s.mounts.append(service_m)
s.configs = None
if ap['configs']:
s.configs = []
for param_m in ap['configs']:
service_c = {}
service_c['config_id'] = param_m['config_id']
service_c['config_name'] = str(param_m['config_name'])
service_c['filename'] = param_m.get('filename', service_c['config_name'])
service_c['uid'] = int(param_m.get('uid', "0"))
service_c['gid'] = int(param_m.get('gid', "0"))
service_c['mode'] = param_m.get('mode', 0o444)
s.configs.append(service_c)
s.secrets = []
for param_m in ap['secrets']:
service_s = {}
service_s['secret_id'] = param_m['secret_id']
service_s['secret_name'] = str(param_m['secret_name'])
service_s['filename'] = param_m.get('filename', service_s['secret_name'])
service_s['uid'] = int(param_m.get('uid', "0"))
service_s['gid'] = int(param_m.get('gid', "0"))
service_s['mode'] = param_m.get('mode', 0o444)
s.secrets.append(service_s)
return s
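    # compare() returns a 4-tuple (changed, differences, needs_rebuild,
    # force_update). needs_rebuild is set for changes (mode, networks) that
    # cannot be applied in place and require the service to be removed and
    # created again.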
def compare(self, os):
differences = DifferenceTracker()
needs_rebuild = False
force_update = False
if self.endpoint_mode != os.endpoint_mode:
differences.add('endpoint_mode', parameter=self.endpoint_mode, active=os.endpoint_mode)
if self.env != os.env:
differences.add('env', parameter=self.env, active=os.env)
if self.log_driver != os.log_driver:
differences.add('log_driver', parameter=self.log_driver, active=os.log_driver)
if self.log_driver_options != os.log_driver_options:
differences.add('log_opt', parameter=self.log_driver_options, active=os.log_driver_options)
if self.mode != os.mode:
needs_rebuild = True
differences.add('mode', parameter=self.mode, active=os.mode)
if self.mounts != os.mounts:
differences.add('mounts', parameter=self.mounts, active=os.mounts)
if self.configs != os.configs:
differences.add('configs', parameter=self.configs, active=os.configs)
if self.secrets != os.secrets:
differences.add('secrets', parameter=self.secrets, active=os.secrets)
if self.networks != os.networks:
differences.add('networks', parameter=self.networks, active=os.networks)
needs_rebuild = True
if self.replicas != os.replicas:
differences.add('replicas', parameter=self.replicas, active=os.replicas)
if self.args != os.args:
differences.add('args', parameter=self.args, active=os.args)
if self.constraints != os.constraints:
differences.add('constraints', parameter=self.constraints, active=os.constraints)
if self.labels != os.labels:
differences.add('labels', parameter=self.labels, active=os.labels)
if self.limit_cpu != os.limit_cpu:
differences.add('limit_cpu', parameter=self.limit_cpu, active=os.limit_cpu)
if self.limit_memory != os.limit_memory:
differences.add('limit_memory', parameter=self.limit_memory, active=os.limit_memory)
if self.reserve_cpu != os.reserve_cpu:
differences.add('reserve_cpu', parameter=self.reserve_cpu, active=os.reserve_cpu)
if self.reserve_memory != os.reserve_memory:
differences.add('reserve_memory', parameter=self.reserve_memory, active=os.reserve_memory)
if self.container_labels != os.container_labels:
differences.add('container_labels', parameter=self.container_labels, active=os.container_labels)
if self.publish != os.publish:
differences.add('publish', parameter=self.publish, active=os.publish)
if self.restart_policy != os.restart_policy:
differences.add('restart_policy', parameter=self.restart_policy, active=os.restart_policy)
if self.restart_policy_attempts != os.restart_policy_attempts:
differences.add('restart_policy_attempts', parameter=self.restart_policy_attempts, active=os.restart_policy_attempts)
if self.restart_policy_delay != os.restart_policy_delay:
differences.add('restart_policy_delay', parameter=self.restart_policy_delay, active=os.restart_policy_delay)
if self.restart_policy_window != os.restart_policy_window:
differences.add('restart_policy_window', parameter=self.restart_policy_window, active=os.restart_policy_window)
if self.update_delay != os.update_delay:
differences.add('update_delay', parameter=self.update_delay, active=os.update_delay)
if self.update_parallelism != os.update_parallelism:
differences.add('update_parallelism', parameter=self.update_parallelism, active=os.update_parallelism)
if self.update_failure_action != os.update_failure_action:
differences.add('update_failure_action', parameter=self.update_failure_action, active=os.update_failure_action)
if self.update_monitor != os.update_monitor:
differences.add('update_monitor', parameter=self.update_monitor, active=os.update_monitor)
if self.update_max_failure_ratio != os.update_max_failure_ratio:
differences.add('update_max_failure_ratio', parameter=self.update_max_failure_ratio, active=os.update_max_failure_ratio)
if self.update_order != os.update_order:
differences.add('update_order', parameter=self.update_order, active=os.update_order)
if self.image != os.image.split('@')[0]:
differences.add('image', parameter=self.image, active=os.image.split('@')[0])
if self.user and self.user != os.user:
differences.add('user', parameter=self.user, active=os.user)
if self.dns != os.dns:
differences.add('dns', parameter=self.dns, active=os.dns)
if self.dns_search != os.dns_search:
differences.add('dns_search', parameter=self.dns_search, active=os.dns_search)
if self.dns_options != os.dns_options:
differences.add('dns_options', parameter=self.dns_options, active=os.dns_options)
if self.hostname != os.hostname:
differences.add('hostname', parameter=self.hostname, active=os.hostname)
if self.tty != os.tty:
differences.add('tty', parameter=self.tty, active=os.tty)
if self.force_update:
force_update = True
return not differences.empty or force_update, differences, needs_rebuild, force_update
def __str__(self):
return str({
'mode': self.mode,
'env': self.env,
'endpoint_mode': self.endpoint_mode,
'mounts': self.mounts,
'configs': self.configs,
'secrets': self.secrets,
'networks': self.networks,
'replicas': self.replicas})
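    # Translate this DockerService into the objects the docker Python SDK
    # expects for create_service()/update_service(): an UpdateConfig, a
    # TaskTemplate, the network attachments, an EndpointSpec, a ServiceMode
    # and the service labels.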
def generate_docker_py_service_description(self, name, docker_networks):
mounts = []
for mount_config in self.mounts:
mounts.append(
types.Mount(target=mount_config['target'],
source=mount_config['source'],
type=mount_config['type'],
read_only=mount_config['readonly'])
)
configs = None
if self.configs:
configs = []
for config_config in self.configs:
configs.append(
types.ConfigReference(
config_id=config_config['config_id'],
config_name=config_config['config_name'],
filename=config_config.get('filename'),
uid=config_config.get('uid'),
gid=config_config.get('gid'),
mode=config_config.get('mode')
)
)
secrets = []
for secret_config in self.secrets:
secrets.append(
types.SecretReference(
secret_id=secret_config['secret_id'],
secret_name=secret_config['secret_name'],
filename=secret_config.get('filename'),
uid=secret_config.get('uid'),
gid=secret_config.get('gid'),
mode=secret_config.get('mode')
)
)
cspec = types.ContainerSpec(
image=self.image,
user=self.user,
dns_config=types.DNSConfig(nameservers=self.dns, search=self.dns_search, options=self.dns_options),
args=self.args,
env=self.env,
tty=self.tty,
hostname=self.hostname,
labels=self.container_labels,
mounts=mounts,
secrets=secrets,
configs=configs
)
log_driver = types.DriverConfig(name=self.log_driver, options=self.log_driver_options)
placement = types.Placement(constraints=self.constraints)
restart_policy = types.RestartPolicy(
condition=self.restart_policy,
delay=self.restart_policy_delay,
max_attempts=self.restart_policy_attempts,
window=self.restart_policy_window)
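        # The docker API expresses CPU limits/reservations in NanoCPUs
        # (units of 1e-9 CPUs), so e.g. limit_cpu=0.5 becomes 500000000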
resources = types.Resources(
cpu_limit=int(self.limit_cpu * 1000000000.0),
mem_limit=self.limit_memory,
cpu_reservation=int(self.reserve_cpu * 1000000000.0),
mem_reservation=self.reserve_memory
)
update_policy = types.UpdateConfig(
parallelism=self.update_parallelism,
delay=self.update_delay,
failure_action=self.update_failure_action,
monitor=self.update_monitor,
max_failure_ratio=self.update_max_failure_ratio,
order=self.update_order
)
task_template = types.TaskTemplate(
container_spec=cspec,
log_driver=log_driver,
restart_policy=restart_policy,
placement=placement,
resources=resources,
force_update=self.force_update)
if self.mode == 'global':
self.replicas = None
mode = types.ServiceMode(self.mode, replicas=self.replicas)
networks = []
for network_name in self.networks:
network_id = None
try:
network_id = list(filter(lambda n: n['name'] == network_name, docker_networks))[0]['id']
except Exception:
pass
if network_id:
networks.append({'Target': network_id})
else:
raise Exception("no docker networks named: %s" % network_name)
ports = {}
for port in self.publish:
if port['mode']:
ports[int(port['published_port'])] = (int(port['target_port']), port['protocol'], port['mode'])
else:
ports[int(port['published_port'])] = (int(port['target_port']), port['protocol'])
endpoint_spec = types.EndpointSpec(mode=self.endpoint_mode, ports=ports)
return update_policy, task_template, networks, endpoint_spec, mode, self.labels
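# Wraps the docker API client: get_service() reads the current state of a
# service into a DockerService, create/update/remove_service apply a
# DockerService, and run() implements the module's present/absent logic.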
class DockerServiceManager():
def get_networks_names_ids(self):
return [{'name': n['Name'], 'id': n['Id']} for n in self.client.networks()]
def get_service(self, name):
raw_data = self.client.services(filters={'name': name})
if len(raw_data) == 0:
return None
raw_data = raw_data[0]
networks_names_ids = self.get_networks_names_ids()
ds = DockerService()
task_template_data = raw_data['Spec']['TaskTemplate']
update_config_data = raw_data['Spec']['UpdateConfig']
ds.image = task_template_data['ContainerSpec']['Image']
ds.user = task_template_data['ContainerSpec'].get('User', 'root')
ds.env = task_template_data['ContainerSpec'].get('Env', [])
ds.args = task_template_data['ContainerSpec'].get('Args', [])
ds.update_delay = update_config_data['Delay']
ds.update_parallelism = update_config_data['Parallelism']
ds.update_failure_action = update_config_data['FailureAction']
ds.update_monitor = update_config_data['Monitor']
ds.update_max_failure_ratio = update_config_data['MaxFailureRatio']
if 'Order' in update_config_data:
ds.update_order = update_config_data['Order']
dns_config = task_template_data['ContainerSpec'].get('DNSConfig', None)
if dns_config:
if 'Nameservers' in dns_config.keys():
ds.dns = dns_config['Nameservers']
if 'Search' in dns_config.keys():
ds.dns_search = dns_config['Search']
if 'Options' in dns_config.keys():
ds.dns_options = dns_config['Options']
ds.hostname = task_template_data['ContainerSpec'].get('Hostname', '')
ds.tty = task_template_data['ContainerSpec'].get('TTY', False)
if 'Placement' in task_template_data.keys():
ds.constraints = task_template_data['Placement'].get('Constraints', [])
restart_policy_data = task_template_data.get('RestartPolicy', None)
if restart_policy_data:
ds.restart_policy = restart_policy_data.get('Condition')
ds.restart_policy_delay = restart_policy_data.get('Delay')
ds.restart_policy_attempts = restart_policy_data.get('MaxAttempts')
ds.restart_policy_window = restart_policy_data.get('Window')
raw_data_endpoint = raw_data.get('Endpoint', None)
if raw_data_endpoint:
raw_data_endpoint_spec = raw_data_endpoint.get('Spec', None)
if raw_data_endpoint_spec:
ds.endpoint_mode = raw_data_endpoint_spec.get('Mode', 'vip')
for port in raw_data_endpoint_spec.get('Ports', []):
ds.publish.append({
'protocol': port['Protocol'],
'mode': port.get('PublishMode', None),
'published_port': int(port['PublishedPort']),
'target_port': int(port['TargetPort'])})
if 'Resources' in task_template_data.keys():
if 'Limits' in task_template_data['Resources'].keys():
if 'NanoCPUs' in task_template_data['Resources']['Limits'].keys():
ds.limit_cpu = float(task_template_data['Resources']['Limits']['NanoCPUs']) / 1000000000
if 'MemoryBytes' in task_template_data['Resources']['Limits'].keys():
ds.limit_memory = int(task_template_data['Resources']['Limits']['MemoryBytes'])
if 'Reservations' in task_template_data['Resources'].keys():
if 'NanoCPUs' in task_template_data['Resources']['Reservations'].keys():
ds.reserve_cpu = float(task_template_data['Resources']['Reservations']['NanoCPUs']) / 1000000000
if 'MemoryBytes' in task_template_data['Resources']['Reservations'].keys():
ds.reserve_memory = int(
task_template_data['Resources']['Reservations']['MemoryBytes'])
ds.labels = raw_data['Spec'].get('Labels', {})
if 'LogDriver' in task_template_data.keys():
ds.log_driver = task_template_data['LogDriver'].get('Name', 'json-file')
ds.log_driver_options = task_template_data['LogDriver'].get('Options', {})
ds.container_labels = task_template_data['ContainerSpec'].get('Labels', {})
mode = raw_data['Spec']['Mode']
if 'Replicated' in mode.keys():
ds.mode = to_text('replicated', encoding='utf-8')
ds.replicas = mode['Replicated']['Replicas']
elif 'Global' in mode.keys():
ds.mode = 'global'
else:
raise Exception("Unknown service mode: %s" % mode)
for mount_data in raw_data['Spec']['TaskTemplate']['ContainerSpec'].get('Mounts', []):
ds.mounts.append({
'source': mount_data['Source'],
'type': mount_data['Type'],
'target': mount_data['Target'],
'readonly': mount_data.get('ReadOnly', False)})
        for config_data in raw_data['Spec']['TaskTemplate']['ContainerSpec'].get('Configs', []):
            if ds.configs is None:
                ds.configs = []  # default is None; become a list only when the service has configs
            ds.configs.append({
'config_id': config_data['ConfigID'],
'config_name': config_data['ConfigName'],
'filename': config_data['File'].get('Name'),
'uid': int(config_data['File'].get('UID')),
'gid': int(config_data['File'].get('GID')),
'mode': config_data['File'].get('Mode')
})
for secret_data in raw_data['Spec']['TaskTemplate']['ContainerSpec'].get('Secrets', []):
ds.secrets.append({
'secret_id': secret_data['SecretID'],
'secret_name': secret_data['SecretName'],
'filename': secret_data['File'].get('Name'),
'uid': int(secret_data['File'].get('UID')),
'gid': int(secret_data['File'].get('GID')),
'mode': secret_data['File'].get('Mode')
})
for raw_network_data in raw_data['Spec']['TaskTemplate'].get('Networks', raw_data['Spec'].get('Networks', [])):
network_name = [network_name_id['name'] for network_name_id in networks_names_ids if
network_name_id['id'] == raw_network_data['Target']]
if len(network_name) == 0:
ds.networks.append(raw_network_data['Target'])
else:
ds.networks.append(network_name[0])
ds.service_version = raw_data['Version']['Index']
ds.service_id = raw_data['ID']
return ds
def update_service(self, name, old_service, new_service):
update_policy, task_template, networks, endpoint_spec, mode, labels = new_service.generate_docker_py_service_description(
name, self.get_networks_names_ids())
self.client.update_service(
old_service.service_id,
old_service.service_version,
name=name,
endpoint_spec=endpoint_spec,
networks=networks,
mode=mode,
update_config=update_policy,
task_template=task_template,
labels=labels)
def create_service(self, name, service):
update_policy, task_template, networks, endpoint_spec, mode, labels = service.generate_docker_py_service_description(
name, self.get_networks_names_ids())
self.client.create_service(
name=name,
endpoint_spec=endpoint_spec,
mode=mode,
networks=networks,
update_config=update_policy,
task_template=task_template,
labels=labels)
def remove_service(self, name):
self.client.remove_service(name)
def __init__(self, client):
self.client = client
self.diff_tracker = DifferenceTracker()
def test_parameter_versions(self):
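        # Several module options map onto docker API features that only exist
        # from a certain API version on; fail early with a clear message
        # instead of letting the daemon reject the request.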
parameters_versions = [
{'param': 'dns', 'attribute': 'dns', 'min_version': '1.25'},
{'param': 'dns_options', 'attribute': 'dns_options', 'min_version': '1.25'},
{'param': 'dns_search', 'attribute': 'dns_search', 'min_version': '1.25'},
{'param': 'hostname', 'attribute': 'hostname', 'min_version': '1.25'},
{'param': 'tty', 'attribute': 'tty', 'min_version': '1.25'},
{'param': 'secrets', 'attribute': 'secrets', 'min_version': '1.25'},
{'param': 'configs', 'attribute': 'configs', 'min_version': '1.30'},
{'param': 'update_order', 'attribute': 'update_order', 'min_version': '1.29'}]
params = self.client.module.params
empty_service = DockerService()
for pv in parameters_versions:
if (params[pv['param']] != getattr(empty_service, pv['attribute']) and
(LooseVersion(self.client.version()['ApiVersion']) <
LooseVersion(pv['min_version']))):
self.client.module.fail_json(
msg=('%s parameter supported only with api_version>=%s'
% (pv['param'], pv['min_version'])))
for publish_def in self.client.module.params.get('publish', []):
if 'mode' in publish_def.keys():
if LooseVersion(self.client.version()['ApiVersion']) < LooseVersion('1.25'):
self.client.module.fail_json(msg='publish.mode parameter supported only with api_version>=1.25')
if LooseVersion(docker_version) < LooseVersion('3.0.0'):
self.client.module.fail_json(msg='publish.mode parameter requires docker python library>=3.0.0')
def run(self):
self.test_parameter_versions()
module = self.client.module
try:
current_service = self.get_service(module.params['name'])
except Exception as e:
return module.fail_json(
msg="Error looking for service named %s: %s" %
(module.params['name'], e))
try:
new_service = DockerService.from_ansible_params(module.params, current_service)
except Exception as e:
return module.fail_json(
msg="Error parsing module parameters: %s" % e)
changed = False
msg = 'noop'
rebuilt = False
differences = DifferenceTracker()
facts = {}
if current_service:
if module.params['state'] == 'absent':
if not module.check_mode:
self.remove_service(module.params['name'])
msg = 'Service removed'
changed = True
else:
changed, differences, need_rebuild, force_update = new_service.compare(current_service)
if changed:
self.diff_tracker.merge(differences)
if need_rebuild:
if not module.check_mode:
self.remove_service(module.params['name'])
self.create_service(module.params['name'],
new_service)
msg = 'Service rebuilt'
rebuilt = True
else:
if not module.check_mode:
self.update_service(module.params['name'],
current_service,
new_service)
msg = 'Service updated'
rebuilt = False
else:
if force_update:
if not module.check_mode:
self.update_service(module.params['name'],
current_service,
new_service)
msg = 'Service forcefully updated'
rebuilt = False
changed = True
else:
msg = 'Service unchanged'
facts = new_service.get_facts()
else:
if module.params['state'] == 'absent':
msg = 'Service absent'
else:
if not module.check_mode:
                    self.create_service(module.params['name'], new_service)
msg = 'Service created'
changed = True
facts = new_service.get_facts()
return msg, changed, rebuilt, differences.get_legacy_docker_diffs(), facts
def main():
argument_spec = dict(
name=dict(required=True),
image=dict(type='str'),
state=dict(default="present", choices=['present', 'absent']),
mounts=dict(default=[], type='list'),
configs=dict(default=None, type='list'),
secrets=dict(default=[], type='list'),
networks=dict(default=[], type='list'),
args=dict(default=[], type='list'),
env=dict(default=[], type='list'),
force_update=dict(default=False, type='bool'),
log_driver=dict(default="json-file", type='str'),
log_driver_options=dict(default={}, type='dict'),
publish=dict(default=[], type='list'),
constraints=dict(default=[], type='list'),
tty=dict(default=False, type='bool'),
dns=dict(default=[], type='list'),
dns_search=dict(default=[], type='list'),
dns_options=dict(default=[], type='list'),
hostname=dict(default="", type='str'),
labels=dict(default={}, type='dict'),
container_labels=dict(default={}, type='dict'),
mode=dict(default="replicated"),
replicas=dict(default=-1, type='int'),
endpoint_mode=dict(default='vip', choices=['vip', 'dnsrr']),
restart_policy=dict(default='none', choices=['none', 'on-failure', 'any']),
limit_cpu=dict(default=0, type='float'),
limit_memory=dict(default=0, type='str'),
reserve_cpu=dict(default=0, type='float'),
reserve_memory=dict(default=0, type='str'),
restart_policy_delay=dict(default=0, type='int'),
restart_policy_attempts=dict(default=0, type='int'),
restart_policy_window=dict(default=0, type='int'),
update_delay=dict(default=10, type='int'),
update_parallelism=dict(default=1, type='int'),
update_failure_action=dict(default='continue', choices=['continue', 'pause']),
update_monitor=dict(default=5000000000, type='int'),
update_max_failure_ratio=dict(default=0, type='float'),
update_order=dict(default=None, type='str'),
user=dict(default='root'))
required_if = [
('state', 'present', ['image'])
]
client = AnsibleDockerClient(
argument_spec=argument_spec,
required_if=required_if,
supports_check_mode=True,
min_docker_version='2.0.0',
)
dsm = DockerServiceManager(client)
msg, changed, rebuilt, changes, facts = dsm.run()
results = dict(
msg=msg,
changed=changed,
rebuilt=rebuilt,
changes=changes,
ansible_docker_service=facts,
)
if client.module._diff:
before, after = dsm.diff_tracker.get_before_after()
results['diff'] = dict(before=before, after=after)
client.module.exit_json(**results)
if __name__ == '__main__':
main()
| orgito/ansible | lib/ansible/modules/cloud/docker/docker_swarm_service.py | Python | gpl-3.0 | 46,290 |
#!/usr/bin/env python
# (C) Copyright IBM Corporation 2004, 2005
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# on the rights to use, copy, modify, merge, publish, distribute, sub
# license, and/or sell copies of the Software, and to permit persons to whom
# the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice (including the next
# paragraph) shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
# IBM AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# Authors:
# Ian Romanick <[email protected]>
# Jeremy Kolb <[email protected]>
import gl_XML, glX_XML, glX_proto_common, license
import sys, getopt, copy, string
def convertStringForXCB(str):
tmp = ""
special = [ "ARB" ]
i = 0
while i < len(str):
if str[i:i+3] in special:
tmp = '%s_%s' % (tmp, string.lower(str[i:i+3]))
i = i + 2;
elif str[i].isupper():
tmp = '%s_%s' % (tmp, string.lower(str[i]))
else:
tmp = '%s%s' % (tmp, str[i])
i += 1
return tmp
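# E.g. convertStringForXCB("GetString") returns "_get_string"; the caller in
# printSingleFunction prefixes "xcb_glx" to form the XCB request name
# "xcb_glx_get_string".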
def hash_pixel_function(func):
"""Generate a 'unique' key for a pixel function. The key is based on
the parameters written in the command packet. This includes any
padding that might be added for the original function and the 'NULL
image' flag."""
h = ""
hash_pre = ""
hash_suf = ""
for param in func.parameterIterateGlxSend():
if param.is_image():
[dim, junk, junk, junk, junk] = param.get_dimensions()
d = (dim + 1) & ~1
hash_pre = "%uD%uD_" % (d - 1, d)
if param.img_null_flag:
hash_suf = "_NF"
h += "%u" % (param.size())
if func.pad_after(param):
h += "4"
n = func.name.replace("%uD" % (dim), "")
n = "__glx_%s_%uD%uD" % (n, d - 1, d)
h = hash_pre + h + hash_suf
return [h, n]
class glx_pixel_function_stub(glX_XML.glx_function):
"""Dummy class used to generate pixel "utility" functions that are
shared by multiple dimension image functions. For example, these
objects are used to generate shared functions used to send GLX
protocol for TexImage1D and TexImage2D, TexSubImage1D and
TexSubImage2D, etc."""
def __init__(self, func, name):
# The parameters to the utility function are the same as the
# parameters to the real function except for the added "pad"
# parameters.
self.name = name
self.images = []
self.parameters = []
self.parameters_by_name = {}
for _p in func.parameterIterator():
p = copy.copy(_p)
self.parameters.append(p)
self.parameters_by_name[ p.name ] = p
if p.is_image():
self.images.append(p)
p.height = "height"
if p.img_yoff == None:
p.img_yoff = "yoffset"
if p.depth:
if p.extent == None:
p.extent = "extent"
if p.img_woff == None:
p.img_woff = "woffset"
pad_name = func.pad_after(p)
if pad_name:
pad = copy.copy(p)
pad.name = pad_name
self.parameters.append(pad)
self.parameters_by_name[ pad.name ] = pad
self.return_type = func.return_type
self.glx_rop = ~0
self.glx_sop = 0
self.glx_vendorpriv = 0
self.glx_doubles_in_order = func.glx_doubles_in_order
self.vectorequiv = None
self.output = None
self.can_be_large = func.can_be_large
self.reply_always_array = func.reply_always_array
self.dimensions_in_reply = func.dimensions_in_reply
self.img_reset = None
self.server_handcode = 0
self.client_handcode = 0
self.ignore = 0
self.count_parameter_list = func.count_parameter_list
self.counter_list = func.counter_list
self.offsets_calculated = 0
return
class PrintGlxProtoStubs(glX_proto_common.glx_print_proto):
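    # Emits the client-side GLX protocol stubs: one __indirect_gl* function
    # per GLX-capable entry point, plus the shared helper functions printed
    # by printRealHeader().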
def __init__(self):
glX_proto_common.glx_print_proto.__init__(self)
self.name = "glX_proto_send.py (from Mesa)"
self.license = license.bsd_license_template % ( "(C) Copyright IBM Corporation 2004, 2005", "IBM")
self.last_category = ""
self.generic_sizes = [3, 4, 6, 8, 12, 16, 24, 32]
self.pixel_stubs = {}
self.debug = 0
return
def printRealHeader(self):
print ''
print '#include <GL/gl.h>'
print '#include "indirect.h"'
print '#include "glxclient.h"'
print '#include "indirect_size.h"'
print '#include "glapi.h"'
print '#include "glthread.h"'
print '#include <GL/glxproto.h>'
print '#ifdef USE_XCB'
print '#include <X11/Xlib-xcb.h>'
print '#include <xcb/xcb.h>'
print '#include <xcb/glx.h>'
print '#endif /* USE_XCB */'
print ''
print '#define __GLX_PAD(n) (((n) + 3) & ~3)'
print ''
self.printFastcall()
self.printNoinline()
print ''
print '#ifndef __GNUC__'
print '# define __builtin_expect(x, y) x'
print '#endif'
print ''
print '/* If the size and opcode values are known at compile-time, this will, on'
print ' * x86 at least, emit them with a single instruction.'
print ' */'
print '#define emit_header(dest, op, size) \\'
print ' do { union { short s[2]; int i; } temp; \\'
print ' temp.s[0] = (size); temp.s[1] = (op); \\'
print ' *((int *)(dest)) = temp.i; } while(0)'
print ''
print """NOINLINE CARD32
__glXReadReply( Display *dpy, size_t size, void * dest, GLboolean reply_is_always_array )
{
xGLXSingleReply reply;
(void) _XReply(dpy, (xReply *) & reply, 0, False);
if (size != 0) {
if ((reply.length > 0) || reply_is_always_array) {
const GLint bytes = (reply_is_always_array)
? (4 * reply.length) : (reply.size * size);
const GLint extra = 4 - (bytes & 3);
_XRead(dpy, dest, bytes);
if ( extra < 4 ) {
_XEatData(dpy, extra);
}
}
else {
(void) memcpy( dest, &(reply.pad3), size);
}
}
return reply.retval;
}
NOINLINE void
__glXReadPixelReply( Display *dpy, struct glx_context * gc, unsigned max_dim,
GLint width, GLint height, GLint depth, GLenum format, GLenum type,
void * dest, GLboolean dimensions_in_reply )
{
xGLXSingleReply reply;
GLint size;
(void) _XReply(dpy, (xReply *) & reply, 0, False);
if ( dimensions_in_reply ) {
width = reply.pad3;
height = reply.pad4;
depth = reply.pad5;
if ((height == 0) || (max_dim < 2)) { height = 1; }
if ((depth == 0) || (max_dim < 3)) { depth = 1; }
}
size = reply.length * 4;
if (size != 0) {
void * buf = Xmalloc( size );
if ( buf == NULL ) {
_XEatData(dpy, size);
__glXSetError(gc, GL_OUT_OF_MEMORY);
}
else {
const GLint extra = 4 - (size & 3);
_XRead(dpy, buf, size);
if ( extra < 4 ) {
_XEatData(dpy, extra);
}
__glEmptyImage(gc, 3, width, height, depth, format, type,
buf, dest);
Xfree(buf);
}
}
}
#define X_GLXSingle 0
NOINLINE FASTCALL GLubyte *
__glXSetupSingleRequest( struct glx_context * gc, GLint sop, GLint cmdlen )
{
xGLXSingleReq * req;
Display * const dpy = gc->currentDpy;
(void) __glXFlushRenderBuffer(gc, gc->pc);
LockDisplay(dpy);
GetReqExtra(GLXSingle, cmdlen, req);
req->reqType = gc->majorOpcode;
req->contextTag = gc->currentContextTag;
req->glxCode = sop;
return (GLubyte *)(req) + sz_xGLXSingleReq;
}
NOINLINE FASTCALL GLubyte *
__glXSetupVendorRequest( struct glx_context * gc, GLint code, GLint vop, GLint cmdlen )
{
xGLXVendorPrivateReq * req;
Display * const dpy = gc->currentDpy;
(void) __glXFlushRenderBuffer(gc, gc->pc);
LockDisplay(dpy);
GetReqExtra(GLXVendorPrivate, cmdlen, req);
req->reqType = gc->majorOpcode;
req->glxCode = code;
req->vendorCode = vop;
req->contextTag = gc->currentContextTag;
return (GLubyte *)(req) + sz_xGLXVendorPrivateReq;
}
const GLuint __glXDefaultPixelStore[9] = { 0, 0, 0, 0, 0, 0, 0, 0, 1 };
#define zero (__glXDefaultPixelStore+0)
#define one (__glXDefaultPixelStore+8)
#define default_pixel_store_1D (__glXDefaultPixelStore+4)
#define default_pixel_store_1D_size 20
#define default_pixel_store_2D (__glXDefaultPixelStore+4)
#define default_pixel_store_2D_size 20
#define default_pixel_store_3D (__glXDefaultPixelStore+0)
#define default_pixel_store_3D_size 36
#define default_pixel_store_4D (__glXDefaultPixelStore+0)
#define default_pixel_store_4D_size 36
"""
for size in self.generic_sizes:
self.print_generic_function(size)
return
def printBody(self, api):
self.pixel_stubs = {}
generated_stubs = []
for func in api.functionIterateGlx():
if func.client_handcode: continue
# If the function is a pixel function with a certain
# GLX protocol signature, create a fake stub function
# for it. For example, create a single stub function
# that is used to implement both glTexImage1D and
# glTexImage2D.
if func.glx_rop != 0:
do_it = 0
for image in func.get_images():
if image.img_pad_dimensions:
do_it = 1
break
if do_it:
[h, n] = hash_pixel_function(func)
self.pixel_stubs[ func.name ] = n
if h not in generated_stubs:
generated_stubs.append(h)
fake_func = glx_pixel_function_stub( func, n )
self.printFunction(fake_func, fake_func.name)
self.printFunction(func, func.name)
if func.glx_sop and func.glx_vendorpriv:
self.printFunction(func, func.glx_vendorpriv_names[0])
self.printGetProcAddress(api)
return
def printGetProcAddress(self, api):
procs = {}
for func in api.functionIterateGlx():
for n in func.entry_points:
if func.has_different_protocol(n):
procs[n] = func.static_glx_name(n)
print """
#ifdef GLX_SHARED_GLAPI
static const struct proc_pair
{
const char *name;
_glapi_proc proc;
} proc_pairs[%d] = {""" % len(procs)
names = procs.keys()
names.sort()
for i in xrange(len(names)):
comma = ',' if i < len(names) - 1 else ''
print ' { "%s", (_glapi_proc) gl%s }%s' % (names[i], procs[names[i]], comma)
print """};
static int
__indirect_get_proc_compare(const void *key, const void *memb)
{
const struct proc_pair *pair = (const struct proc_pair *) memb;
return strcmp((const char *) key, pair->name);
}
_glapi_proc
__indirect_get_proc_address(const char *name)
{
const struct proc_pair *pair;
/* skip "gl" */
name += 2;
pair = (const struct proc_pair *) bsearch((const void *) name,
(const void *) proc_pairs, ARRAY_SIZE(proc_pairs), sizeof(proc_pairs[0]),
__indirect_get_proc_compare);
return (pair) ? pair->proc : NULL;
}
#endif /* GLX_SHARED_GLAPI */
"""
return
def printFunction(self, func, name):
footer = '}\n'
if func.glx_rop == ~0:
print 'static %s' % (func.return_type)
print '%s( unsigned opcode, unsigned dim, %s )' % (func.name, func.get_parameter_string())
print '{'
else:
if func.has_different_protocol(name):
if func.return_type == "void":
ret_string = ''
else:
ret_string = "return "
func_name = func.static_glx_name(name)
print '#define %s %d' % (func.opcode_vendor_name(name), func.glx_vendorpriv)
print '%s gl%s(%s)' % (func.return_type, func_name, func.get_parameter_string())
print '{'
print ' struct glx_context * const gc = __glXGetCurrentContext();'
print ''
print '#if defined(GLX_DIRECT_RENDERING) && !defined(GLX_USE_APPLEGL)'
print ' if (gc->isDirect) {'
print ' const _glapi_proc *const disp_table = GET_DISPATCH();'
print ' PFNGL%sPROC p =' % (name.upper())
print ' (PFNGL%sPROC) disp_table[%d];' % (name.upper(), func.offset)
print ' %sp(%s);' % (ret_string, func.get_called_parameter_string())
print ' } else'
print '#endif'
print ' {'
footer = '}\n}\n'
else:
print '#define %s %d' % (func.opcode_name(), func.opcode_value())
print '%s __indirect_gl%s(%s)' % (func.return_type, name, func.get_parameter_string())
print '{'
if func.glx_rop != 0 or func.vectorequiv != None:
if len(func.images):
self.printPixelFunction(func)
else:
self.printRenderFunction(func)
elif func.glx_sop != 0 or func.glx_vendorpriv != 0:
self.printSingleFunction(func, name)
pass
else:
print "/* Missing GLX protocol for %s. */" % (name)
print footer
return
def print_generic_function(self, n):
size = (n + 3) & ~3
print """static FASTCALL NOINLINE void
generic_%u_byte( GLint rop, const void * ptr )
{
struct glx_context * const gc = __glXGetCurrentContext();
const GLuint cmdlen = %u;
emit_header(gc->pc, rop, cmdlen);
(void) memcpy((void *)(gc->pc + 4), ptr, %u);
gc->pc += cmdlen;
if (__builtin_expect(gc->pc > gc->limit, 0)) { (void) __glXFlushRenderBuffer(gc, gc->pc); }
}
""" % (n, size + 4, size)
return
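    # E.g. print_generic_function(4) emits generic_4_byte(): a command length
    # of 8 bytes, i.e. the 4-byte render header followed by the 4-byte payload
    # copied from ptr.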
def common_emit_one_arg(self, p, pc, adjust, extra_offset):
if p.is_array():
src_ptr = p.name
else:
src_ptr = "&" + p.name
if p.is_padding:
print '(void) memset((void *)(%s + %u), 0, %s);' \
% (pc, p.offset + adjust, p.size_string() )
elif not extra_offset:
print '(void) memcpy((void *)(%s + %u), (void *)(%s), %s);' \
% (pc, p.offset + adjust, src_ptr, p.size_string() )
else:
print '(void) memcpy((void *)(%s + %u + %s), (void *)(%s), %s);' \
% (pc, p.offset + adjust, extra_offset, src_ptr, p.size_string() )
def common_emit_args(self, f, pc, adjust, skip_vla):
extra_offset = None
for p in f.parameterIterateGlxSend( not skip_vla ):
if p.name != f.img_reset:
self.common_emit_one_arg(p, pc, adjust, extra_offset)
if p.is_variable_length():
temp = p.size_string()
if extra_offset:
extra_offset += " + %s" % (temp)
else:
extra_offset = temp
return
def pixel_emit_args(self, f, pc, large):
"""Emit the arguments for a pixel function. This differs from
common_emit_args in that pixel functions may require padding
be inserted (i.e., for the missing width field for
TexImage1D), and they may also require a 'NULL image' flag
be inserted before the image data."""
if large:
adjust = 8
else:
adjust = 4
for param in f.parameterIterateGlxSend():
if not param.is_image():
self.common_emit_one_arg(param, pc, adjust, None)
if f.pad_after(param):
print '(void) memcpy((void *)(%s + %u), zero, 4);' % (pc, (param.offset + param.size()) + adjust)
else:
[dim, width, height, depth, extent] = param.get_dimensions()
if f.glx_rop == ~0:
dim_str = "dim"
else:
dim_str = str(dim)
if param.is_padding:
print '(void) memset((void *)(%s + %u), 0, %s);' \
% (pc, (param.offset - 4) + adjust, param.size_string() )
if param.img_null_flag:
if large:
print '(void) memcpy((void *)(%s + %u), zero, 4);' % (pc, (param.offset - 4) + adjust)
else:
print '(void) memcpy((void *)(%s + %u), (void *)((%s == NULL) ? one : zero), 4);' % (pc, (param.offset - 4) + adjust, param.name)
pixHeaderPtr = "%s + %u" % (pc, adjust)
pcPtr = "%s + %u" % (pc, param.offset + adjust)
if not large:
if param.img_send_null:
condition = '(compsize > 0) && (%s != NULL)' % (param.name)
else:
condition = 'compsize > 0'
print 'if (%s) {' % (condition)
print ' (*gc->fillImage)(gc, %s, %s, %s, %s, %s, %s, %s, %s, %s);' % (dim_str, width, height, depth, param.img_format, param.img_type, param.name, pcPtr, pixHeaderPtr)
print '} else {'
print ' (void) memcpy( %s, default_pixel_store_%uD, default_pixel_store_%uD_size );' % (pixHeaderPtr, dim, dim)
print '}'
else:
print '__glXSendLargeImage(gc, compsize, %s, %s, %s, %s, %s, %s, %s, %s, %s);' % (dim_str, width, height, depth, param.img_format, param.img_type, param.name, pcPtr, pixHeaderPtr)
return
def large_emit_begin(self, f, op_name = None):
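        # Emit C code that flushes the render buffer and writes the 8-byte
        # header used by the large-command path: the total command length at
        # pc + 0 and the render opcode at pc + 4.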
if not op_name:
op_name = f.opcode_real_name()
print 'const GLint op = %s;' % (op_name)
print 'const GLuint cmdlenLarge = cmdlen + 4;'
print 'GLubyte * const pc = __glXFlushRenderBuffer(gc, gc->pc);'
print '(void) memcpy((void *)(pc + 0), (void *)(&cmdlenLarge), 4);'
print '(void) memcpy((void *)(pc + 4), (void *)(&op), 4);'
return
def common_func_print_just_start(self, f, name):
print ' struct glx_context * const gc = __glXGetCurrentContext();'
# The only reason that single and vendor private commands need
        # a variable called 'dpy' is because they use the SyncHandle
# macro. For whatever brain-dead reason, that macro is hard-
# coded to use a variable called 'dpy' instead of taking a
# parameter.
# FIXME Simplify the logic related to skip_condition and
# FIXME condition_list in this function. Basically, remove
# FIXME skip_condition, and just append the "dpy != NULL" type
# FIXME condition to condition_list from the start. The only
# FIXME reason it's done in this confusing way now is to
# FIXME minimize the diffs in the generated code.
if not f.glx_rop:
for p in f.parameterIterateOutputs():
if p.is_image() and (p.img_format != "GL_COLOR_INDEX" or p.img_type != "GL_BITMAP"):
print ' const __GLXattribute * const state = gc->client_state_private;'
break
print ' Display * const dpy = gc->currentDpy;'
skip_condition = "dpy != NULL"
elif f.can_be_large:
skip_condition = "gc->currentDpy != NULL"
else:
skip_condition = None
if f.return_type != 'void':
print ' %s retval = (%s) 0;' % (f.return_type, f.return_type)
if name != None and name not in f.glx_vendorpriv_names:
print '#ifndef USE_XCB'
self.emit_packet_size_calculation(f, 0)
if name != None and name not in f.glx_vendorpriv_names:
print '#endif'
condition_list = []
for p in f.parameterIterateCounters():
condition_list.append( "%s >= 0" % (p.name) )
# 'counter' parameters cannot be negative
print " if (%s < 0) {" % p.name
print " __glXSetError(gc, GL_INVALID_VALUE);"
if f.return_type != 'void':
print " return 0;"
else:
print " return;"
print " }"
if skip_condition:
condition_list.append( skip_condition )
if len( condition_list ) > 0:
if len( condition_list ) > 1:
skip_condition = "(%s)" % (string.join( condition_list, ") && (" ))
else:
skip_condition = "%s" % (condition_list.pop(0))
print ' if (__builtin_expect(%s, 1)) {' % (skip_condition)
return 1
else:
return 0
def printSingleFunction(self, f, name):
self.common_func_print_just_start(f, name)
if self.debug:
print ' printf( "Enter %%s...\\n", "gl%s" );' % (f.name)
if name not in f.glx_vendorpriv_names:
# XCB specific:
print '#ifdef USE_XCB'
if self.debug:
print ' printf("\\tUsing XCB.\\n");'
print ' xcb_connection_t *c = XGetXCBConnection(dpy);'
print ' (void) __glXFlushRenderBuffer(gc, gc->pc);'
xcb_name = 'xcb_glx%s' % convertStringForXCB(name)
iparams=[]
extra_iparams = []
output = None
for p in f.parameterIterator():
if p.is_output:
output = p
if p.is_image():
if p.img_format != "GL_COLOR_INDEX" or p.img_type != "GL_BITMAP":
extra_iparams.append("state->storePack.swapEndian")
else:
extra_iparams.append("0")
# Hardcode this in. lsb_first param (apparently always GL_FALSE)
# also present in GetPolygonStipple, but taken care of above.
if xcb_name == "xcb_glx_read_pixels":
extra_iparams.append("0")
else:
iparams.append(p.name)
xcb_request = '%s(%s)' % (xcb_name, ", ".join(["c", "gc->currentContextTag"] + iparams + extra_iparams))
if f.needs_reply():
print ' %s_reply_t *reply = %s_reply(c, %s, NULL);' % (xcb_name, xcb_name, xcb_request)
if output:
if output.is_image():
[dim, w, h, d, junk] = output.get_dimensions()
if f.dimensions_in_reply:
w = "reply->width"
h = "reply->height"
d = "reply->depth"
if dim < 2:
h = "1"
else:
print ' if (%s == 0) { %s = 1; }' % (h, h)
if dim < 3:
d = "1"
else:
print ' if (%s == 0) { %s = 1; }' % (d, d)
print ' __glEmptyImage(gc, 3, %s, %s, %s, %s, %s, %s_data(reply), %s);' % (w, h, d, output.img_format, output.img_type, xcb_name, output.name)
else:
if f.reply_always_array:
print ' (void)memcpy(%s, %s_data(reply), %s_data_length(reply) * sizeof(%s));' % (output.name, xcb_name, xcb_name, output.get_base_type_string())
else:
print ' if (%s_data_length(reply) == 0)' % (xcb_name)
print ' (void)memcpy(%s, &reply->datum, sizeof(reply->datum));' % (output.name)
print ' else'
print ' (void)memcpy(%s, %s_data(reply), %s_data_length(reply) * sizeof(%s));' % (output.name, xcb_name, xcb_name, output.get_base_type_string())
if f.return_type != 'void':
print ' retval = reply->ret_val;'
print ' free(reply);'
else:
print ' ' + xcb_request + ';'
print '#else'
# End of XCB specific.
if f.parameters != []:
pc_decl = "GLubyte const * pc ="
else:
pc_decl = "(void)"
if name in f.glx_vendorpriv_names:
print ' %s __glXSetupVendorRequest(gc, %s, %s, cmdlen);' % (pc_decl, f.opcode_real_name(), f.opcode_vendor_name(name))
else:
print ' %s __glXSetupSingleRequest(gc, %s, cmdlen);' % (pc_decl, f.opcode_name())
self.common_emit_args(f, "pc", 0, 0)
images = f.get_images()
for img in images:
if img.is_output:
o = f.command_fixed_length() - 4
print ' *(int32_t *)(pc + %u) = 0;' % (o)
if img.img_format != "GL_COLOR_INDEX" or img.img_type != "GL_BITMAP":
print ' * (int8_t *)(pc + %u) = state->storePack.swapEndian;' % (o)
if f.img_reset:
print ' * (int8_t *)(pc + %u) = %s;' % (o + 1, f.img_reset)
return_name = ''
if f.needs_reply():
if f.return_type != 'void':
return_name = " retval"
return_str = " retval = (%s)" % (f.return_type)
else:
return_str = " (void)"
got_reply = 0
for p in f.parameterIterateOutputs():
if p.is_image():
[dim, w, h, d, junk] = p.get_dimensions()
if f.dimensions_in_reply:
print " __glXReadPixelReply(dpy, gc, %u, 0, 0, 0, %s, %s, %s, GL_TRUE);" % (dim, p.img_format, p.img_type, p.name)
else:
print " __glXReadPixelReply(dpy, gc, %u, %s, %s, %s, %s, %s, %s, GL_FALSE);" % (dim, w, h, d, p.img_format, p.img_type, p.name)
got_reply = 1
else:
if f.reply_always_array:
aa = "GL_TRUE"
else:
aa = "GL_FALSE"
# gl_parameter.size() returns the size
# of the entire data item. If the
# item is a fixed-size array, this is
# the size of the whole array. This
# is not what __glXReadReply wants. It
# wants the size of a single data
# element in the reply packet.
# Dividing by the array size (1 for
# non-arrays) gives us this.
s = p.size() / p.get_element_count()
print " %s __glXReadReply(dpy, %s, %s, %s);" % (return_str, s, p.name, aa)
got_reply = 1
# If a reply wasn't read to fill an output parameter,
# read a NULL reply to get the return value.
if not got_reply:
print " %s __glXReadReply(dpy, 0, NULL, GL_FALSE);" % (return_str)
elif self.debug:
# Only emit the extra glFinish call for functions
# that don't already require a reply from the server.
print ' __indirect_glFinish();'
if self.debug:
print ' printf( "Exit %%s.\\n", "gl%s" );' % (name)
print ' UnlockDisplay(dpy); SyncHandle();'
if name not in f.glx_vendorpriv_names:
print '#endif /* USE_XCB */'
print ' }'
print ' return%s;' % (return_name)
return
def printPixelFunction(self, f):
if self.pixel_stubs.has_key( f.name ):
# Normally gl_function::get_parameter_string could be
# used. However, this call needs to have the missing
# dimensions (e.g., a fake height value for
# glTexImage1D) added in.
p_string = ""
for param in f.parameterIterateGlxSend():
if param.is_padding:
continue
p_string += ", " + param.name
if param.is_image():
[dim, junk, junk, junk, junk] = param.get_dimensions()
if f.pad_after(param):
p_string += ", 1"
print ' %s(%s, %u%s );' % (self.pixel_stubs[f.name] , f.opcode_name(), dim, p_string)
return
if self.common_func_print_just_start(f, None):
trailer = " }"
else:
trailer = None
if f.can_be_large:
print 'if (cmdlen <= gc->maxSmallRenderCommandSize) {'
print ' if ( (gc->pc + cmdlen) > gc->bufEnd ) {'
print ' (void) __glXFlushRenderBuffer(gc, gc->pc);'
print ' }'
if f.glx_rop == ~0:
opcode = "opcode"
else:
opcode = f.opcode_real_name()
print 'emit_header(gc->pc, %s, cmdlen);' % (opcode)
self.pixel_emit_args( f, "gc->pc", 0 )
print 'gc->pc += cmdlen;'
print 'if (gc->pc > gc->limit) { (void) __glXFlushRenderBuffer(gc, gc->pc); }'
if f.can_be_large:
print '}'
print 'else {'
self.large_emit_begin(f, opcode)
self.pixel_emit_args(f, "pc", 1)
print '}'
if trailer: print trailer
return
def printRenderFunction(self, f):
# There is a class of GL functions that take a single pointer
# as a parameter. This pointer points to a fixed-size chunk
# of data, and the protocol for this functions is very
# regular. Since they are so regular and there are so many
# of them, special case them with generic functions. On
# x86, this saves about 26KB in the libGL.so binary.
if f.variable_length_parameter() == None and len(f.parameters) == 1:
p = f.parameters[0]
if p.is_pointer():
cmdlen = f.command_fixed_length()
if cmdlen in self.generic_sizes:
print ' generic_%u_byte( %s, %s );' % (cmdlen, f.opcode_real_name(), p.name)
return
if self.common_func_print_just_start(f, None):
trailer = " }"
else:
trailer = None
if self.debug:
print 'printf( "Enter %%s...\\n", "gl%s" );' % (f.name)
if f.can_be_large:
print 'if (cmdlen <= gc->maxSmallRenderCommandSize) {'
print ' if ( (gc->pc + cmdlen) > gc->bufEnd ) {'
print ' (void) __glXFlushRenderBuffer(gc, gc->pc);'
print ' }'
print 'emit_header(gc->pc, %s, cmdlen);' % (f.opcode_real_name())
self.common_emit_args(f, "gc->pc", 4, 0)
print 'gc->pc += cmdlen;'
print 'if (__builtin_expect(gc->pc > gc->limit, 0)) { (void) __glXFlushRenderBuffer(gc, gc->pc); }'
if f.can_be_large:
print '}'
print 'else {'
self.large_emit_begin(f)
self.common_emit_args(f, "pc", 8, 1)
p = f.variable_length_parameter()
print ' __glXSendLargeCommand(gc, pc, %u, %s, %s);' % (p.offset + 8, p.name, p.size_string())
print '}'
if self.debug:
print '__indirect_glFinish();'
print 'printf( "Exit %%s.\\n", "gl%s" );' % (f.name)
if trailer: print trailer
return
class PrintGlxProtoInit_c(gl_XML.gl_print_base):
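    # Emits indirect_init.c: __glXNewIndirectAPI() allocates a dispatch table,
    # points every entry at NoOp and then overwrites the entries for which
    # GLX protocol support exists with the __indirect_gl* functions.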
def __init__(self):
gl_XML.gl_print_base.__init__(self)
self.name = "glX_proto_send.py (from Mesa)"
self.license = license.bsd_license_template % ( \
"""Copyright 1998-1999 Precision Insight, Inc., Cedar Park, Texas.
(C) Copyright IBM Corporation 2004""", "PRECISION INSIGHT, IBM")
return
def printRealHeader(self):
print """/**
* \\file indirect_init.c
* Initialize indirect rendering dispatch table.
*
* \\author Kevin E. Martin <[email protected]>
* \\author Brian Paul <[email protected]>
* \\author Ian Romanick <[email protected]>
*/
#include "indirect_init.h"
#include "indirect.h"
#include "glapi.h"
#include <assert.h>
/**
* No-op function used to initialize functions that have no GLX protocol
* support.
*/
static int NoOp(void)
{
return 0;
}
/**
* Create and initialize a new GL dispatch table. The table is initialized
* with GLX indirect rendering protocol functions.
*/
struct _glapi_table * __glXNewIndirectAPI( void )
{
_glapi_proc *table;
unsigned entries;
unsigned i;
int o;
entries = _glapi_get_dispatch_table_size();
table = (_glapi_proc *) Xmalloc(entries * sizeof(_glapi_proc));
/* first, set all entries to point to no-op functions */
for (i = 0; i < entries; i++) {
table[i] = (_glapi_proc) NoOp;
}
/* now, initialize the entries we understand */"""
def printRealFooter(self):
print """
return (struct _glapi_table *) table;
}
"""
return
def printBody(self, api):
for [name, number] in api.categoryIterate():
if number != None:
preamble = '\n /* %3u. %s */\n' % (int(number), name)
else:
preamble = '\n /* %s */\n' % (name)
for func in api.functionIterateByCategory(name):
if func.client_supported_for_indirect():
if preamble:
print preamble
preamble = None
if func.is_abi():
print ' table[{offset}] = (_glapi_proc) __indirect_gl{name};'.format(name = func.name, offset = func.offset)
else:
print ' o = _glapi_get_proc_offset("gl{0}");'.format(func.name)
print ' assert(o > 0);'
print ' table[o] = (_glapi_proc) __indirect_gl{0};'.format(func.name)
return
class PrintGlxProtoInit_h(gl_XML.gl_print_base):
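    # Emits the matching header (include guard _INDIRECT_H_): extern
    # prototypes for the __indirect_gl* functions and the shared helpers.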
def __init__(self):
gl_XML.gl_print_base.__init__(self)
self.name = "glX_proto_send.py (from Mesa)"
self.license = license.bsd_license_template % ( \
"""Copyright 1998-1999 Precision Insight, Inc., Cedar Park, Texas.
(C) Copyright IBM Corporation 2004""", "PRECISION INSIGHT, IBM")
self.header_tag = "_INDIRECT_H_"
self.last_category = ""
return
def printRealHeader(self):
print """/**
* \\file
* Prototypes for indirect rendering functions.
*
* \\author Kevin E. Martin <[email protected]>
* \\author Ian Romanick <[email protected]>
*/
"""
self.printFastcall()
self.printNoinline()
print """
#include <X11/Xfuncproto.h>
#include "glxclient.h"
extern _X_HIDDEN NOINLINE CARD32 __glXReadReply( Display *dpy, size_t size,
void * dest, GLboolean reply_is_always_array );
extern _X_HIDDEN NOINLINE void __glXReadPixelReply( Display *dpy,
struct glx_context * gc, unsigned max_dim, GLint width, GLint height,
GLint depth, GLenum format, GLenum type, void * dest,
GLboolean dimensions_in_reply );
extern _X_HIDDEN NOINLINE FASTCALL GLubyte * __glXSetupSingleRequest(
struct glx_context * gc, GLint sop, GLint cmdlen );
extern _X_HIDDEN NOINLINE FASTCALL GLubyte * __glXSetupVendorRequest(
struct glx_context * gc, GLint code, GLint vop, GLint cmdlen );
"""
def printBody(self, api):
for func in api.functionIterateGlx():
params = func.get_parameter_string()
print 'extern _X_HIDDEN %s __indirect_gl%s(%s);' % (func.return_type, func.name, params)
for n in func.entry_points:
if func.has_different_protocol(n):
asdf = func.static_glx_name(n)
if asdf not in func.static_entry_points:
print 'extern _X_HIDDEN %s gl%s(%s);' % (func.return_type, asdf, params)
                        # give it an easy-to-remember name
if func.client_handcode:
print '#define gl_dispatch_stub_%s gl%s' % (n, asdf)
else:
print 'GLAPI %s GLAPIENTRY gl%s(%s);' % (func.return_type, asdf, params)
break
print ''
print '#ifdef GLX_SHARED_GLAPI'
print 'extern _X_HIDDEN void (*__indirect_get_proc_address(const char *name))(void);'
print '#endif'
def show_usage():
print "Usage: %s [-f input_file_name] [-m output_mode] [-d]" % sys.argv[0]
print " -m output_mode Output mode can be one of 'proto', 'init_c' or 'init_h'."
print " -d Enable extra debug information in the generated code."
sys.exit(1)
if __name__ == '__main__':
file_name = "gl_API.xml"
try:
(args, trail) = getopt.getopt(sys.argv[1:], "f:m:d")
except Exception,e:
show_usage()
debug = 0
mode = "proto"
for (arg,val) in args:
if arg == "-f":
file_name = val
elif arg == "-m":
mode = val
elif arg == "-d":
debug = 1
if mode == "proto":
printer = PrintGlxProtoStubs()
elif mode == "init_c":
printer = PrintGlxProtoInit_c()
elif mode == "init_h":
printer = PrintGlxProtoInit_h()
else:
show_usage()
printer.debug = debug
api = gl_XML.parse_GL_API( file_name, glX_XML.glx_item_factory() )
printer.Print( api )
| guorendong/iridium-browser-ubuntu | third_party/mesa/src/src/mapi/glapi/gen/glX_proto_send.py | Python | bsd-3-clause | 33,001 |
##############################################################################
#
# Copyright (c) 2002 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Interface Package Interfaces
"""
__docformat__ = 'restructuredtext'
from zope.interface.interface import Attribute
from zope.interface.interface import Interface
from zope.interface.declarations import implementer
__all__ = [
'IAdapterRegistration',
'IAdapterRegistry',
'IAttribute',
'IComponentLookup',
'IComponentRegistry',
'IComponents',
'IDeclaration',
'IElement',
'IHandlerRegistration',
'IInterface',
'IInterfaceDeclaration',
'IMethod',
'IObjectEvent',
'IRegistered',
'IRegistration',
'IRegistrationEvent',
'ISpecification',
'ISubscriptionAdapterRegistration',
'IUnregistered',
'IUtilityRegistration',
]
class IElement(Interface):
"""
Objects that have basic documentation and tagged values.
Known derivatives include :class:`IAttribute` and its derivative
:class:`IMethod`; these have no notion of inheritance.
:class:`IInterface` is also a derivative, and it does have a
notion of inheritance, expressed through its ``__bases__`` and
ordered in its ``__iro__`` (both defined by
:class:`ISpecification`).
"""
# Note that defining __doc__ as an Attribute hides the docstring
# from introspection. When changing it, also change it in the Sphinx
# ReST files.
__name__ = Attribute('__name__', 'The object name')
__doc__ = Attribute('__doc__', 'The object doc string')
###
# Tagged values.
#
# Direct values are established in this instance. Others may be
# inherited. Although ``IElement`` itself doesn't have a notion of
# inheritance, ``IInterface`` *does*. It might have been better to
# make ``IInterface`` define new methods
# ``getIndirectTaggedValue``, etc, to include inheritance instead
# of overriding ``getTaggedValue`` to do that, but that ship has sailed.
# So to keep things nice and symmetric, we define the ``Direct`` methods here.
###
def getTaggedValue(tag):
"""Returns the value associated with *tag*.
Raise a `KeyError` if the tag isn't set.
If the object has a notion of inheritance, this searches
through the inheritance hierarchy and returns the nearest result.
If there is no such notion, this looks only at this object.
.. versionchanged:: 4.7.0
This method should respect inheritance if present.
"""
def queryTaggedValue(tag, default=None):
"""
As for `getTaggedValue`, but instead of raising a `KeyError`, returns *default*.
.. versionchanged:: 4.7.0
This method should respect inheritance if present.
"""
def getTaggedValueTags():
"""
Returns a collection of all tags in no particular order.
If the object has a notion of inheritance, this
includes all the inherited tagged values. If there is
no such notion, this looks only at this object.
.. versionchanged:: 4.7.0
This method should respect inheritance if present.
"""
def setTaggedValue(tag, value):
"""
        Associates *value* with *tag* directly in this object.
"""
def getDirectTaggedValue(tag):
"""
As for `getTaggedValue`, but never includes inheritance.
.. versionadded:: 5.0.0
"""
def queryDirectTaggedValue(tag, default=None):
"""
As for `queryTaggedValue`, but never includes inheritance.
.. versionadded:: 5.0.0
"""
def getDirectTaggedValueTags():
"""
As for `getTaggedValueTags`, but includes only tags directly
set on this object.
.. versionadded:: 5.0.0
"""
class IAttribute(IElement):
"""Attribute descriptors"""
interface = Attribute('interface',
'Stores the interface instance in which the '
'attribute is located.')
class IMethod(IAttribute):
"""Method attributes"""
def getSignatureInfo():
"""Returns the signature information.
This method returns a dictionary with the following string keys:
- positional
A sequence of the names of positional arguments.
- required
A sequence of the names of required arguments.
- optional
A dictionary mapping argument names to their default values.
- varargs
The name of the varargs argument (or None).
- kwargs
The name of the kwargs argument (or None).
"""
def getSignatureString():
"""Return a signature string suitable for inclusion in documentation.
This method returns the function signature string. For example, if you
have ``def func(a, b, c=1, d='f')``, then the signature string is ``"(a, b,
c=1, d='f')"``.
"""
class ISpecification(Interface):
"""Object Behavioral specifications"""
def providedBy(object):
"""Test whether the interface is implemented by the object
        Return true if the object asserts that it implements the
interface, including asserting that it implements an extended
interface.
"""
def implementedBy(class_):
"""Test whether the interface is implemented by instances of the class
        Return true if the class asserts that its instances implement the
interface, including asserting that they implement an extended
interface.
"""
def isOrExtends(other):
"""Test whether the specification is or extends another
"""
def extends(other, strict=True):
"""Test whether a specification extends another
The specification extends other if it has other as a base
interface or if one of its bases extends other.
If strict is false, then the specification extends itself.
"""
def weakref(callback=None):
"""Return a weakref to the specification
This method is, regrettably, needed to allow weakrefs to be
computed to security-proxied specifications. While the
zope.interface package does not require zope.security or
zope.proxy, it has to be able to coexist with it.
"""
__bases__ = Attribute("""Base specifications
A tuple of specifications from which this specification is
directly derived.
""")
__sro__ = Attribute("""Specification-resolution order
A tuple of the specification and all of its ancestor
specifications from most specific to least specific. The specification
itself is the first element.
(This is similar to the method-resolution order for new-style classes.)
""")
__iro__ = Attribute("""Interface-resolution order
A tuple of the specification's ancestor interfaces from
most specific to least specific. The specification itself is
included if it is an interface.
(This is similar to the method-resolution order for new-style classes.)
""")
def get(name, default=None):
"""Look up the description for a name
If the named attribute is not defined, the default is
returned.
"""
class IInterface(ISpecification, IElement):
"""Interface objects
Interface objects describe the behavior of an object by containing
useful information about the object. This information includes:
- Prose documentation about the object. In Python terms, this
is called the "doc string" of the interface. In this element,
you describe how the object works in prose language and any
other useful information about the object.
- Descriptions of attributes. Attribute descriptions include
the name of the attribute and prose documentation describing
the attribute's usage.
- Descriptions of methods. Method descriptions can include:
- Prose "doc string" documentation about the method and its
usage.
- A description of the method's arguments; how many arguments
are expected, optional arguments and their default values,
the position of arguments in the signature, whether the
method accepts arbitrary arguments and whether the method
accepts arbitrary keyword arguments.
- Optional tagged data. Interface objects (and their attributes and
methods) can have optional, application specific tagged data
associated with them. Example uses for this are examples,
security assertions, pre/post conditions, and other possible
information you may want to associate with an Interface or its
attributes.
Not all of this information is mandatory. For example, you may
only want the methods of your interface to have prose
documentation and not describe the arguments of the method in
exact detail. Interface objects are flexible and let you give or
take any of these components.
Interfaces are created with the Python class statement using
either `zope.interface.Interface` or another interface, as in::
from zope.interface import Interface
class IMyInterface(Interface):
'''Interface documentation'''
def meth(arg1, arg2):
'''Documentation for meth'''
# Note that there is no self argument
class IMySubInterface(IMyInterface):
'''Interface documentation'''
def meth2():
'''Documentation for meth2'''
You use interfaces in two ways:
- You assert that your objects implement the interfaces.
There are several ways that you can declare that an object
provides an interface:
1. Call `zope.interface.implementer` on your class definition.
2. Call `zope.interface.directlyProvides` on your object.
3. Call `zope.interface.classImplements` to declare that instances
of a class implement an interface.
For example::
from zope.interface import classImplements
classImplements(some_class, some_interface)
This approach is useful when it is not an option to modify
the class source. Note that this doesn't affect what the
class itself implements, but only what its instances
implement.
- You query interface meta-data. See the IInterface methods and
attributes for details.
"""
def names(all=False):
"""Get the interface attribute names
Return a collection of the names of the attributes, including
methods, included in the interface definition.
Normally, only directly defined attributes are included. If
a true positional or keyword argument is given, then
attributes defined by base classes will be included.
"""
def namesAndDescriptions(all=False):
"""Get the interface attribute names and descriptions
Return a collection of the names and descriptions of the
attributes, including methods, as name-value pairs, included
in the interface definition.
Normally, only directly defined attributes are included. If
a true positional or keyword argument is given, then
attributes defined by base classes will be included.
"""
def __getitem__(name):
"""Get the description for a name
If the named attribute is not defined, a `KeyError` is raised.
"""
def direct(name):
"""Get the description for the name if it was defined by the interface
If the interface doesn't define the name, returns None.
"""
def validateInvariants(obj, errors=None):
"""Validate invariants
Validate the object against the defined invariants. If errors is None,
raises first Invalid error; if errors is a list, appends all errors
to list, then raises Invalid with the errors as the first element
of the "args" tuple."""
def __contains__(name):
"""Test whether the name is defined by the interface"""
def __iter__():
"""Return an iterator over the names defined by the interface
The names iterated include all of the names defined by the
interface directly and indirectly by base interfaces.
"""
__module__ = Attribute("""The name of the module defining the interface""")
class IDeclaration(ISpecification):
"""Interface declaration
Declarations are used to express the interfaces implemented by
classes or provided by objects.
"""
def __contains__(interface):
"""Test whether an interface is in the specification
Return true if the given interface is one of the interfaces in
the specification and false otherwise.
"""
def __iter__():
"""Return an iterator for the interfaces in the specification
"""
def flattened():
"""Return an iterator of all included and extended interfaces
An iterator is returned for all interfaces either included in
or extended by interfaces included in the specifications
without duplicates. The interfaces are in "interface
resolution order". The interface resolution order is such that
base interfaces are listed after interfaces that extend them
and, otherwise, interfaces are included in the order that they
were defined in the specification.
"""
def __sub__(interfaces):
"""Create an interface specification with some interfaces excluded
The argument can be an interface or an interface
specification. The interface or interfaces given in a
specification are subtracted from the interface specification.
Removing an interface that is not in the specification does
not raise an error. Doing so has no effect.
Removing an interface also removes sub-interfaces of the interface.
"""
def __add__(interfaces):
"""Create an interface specification with some interfaces added
The argument can be an interface or an interface
specification. The interface or interfaces given in a
specification are added to the interface specification.
Adding an interface that is already in the specification does
not raise an error. Doing so has no effect.
"""
def __nonzero__():
"""Return a true value of the interface specification is non-empty
"""
class IInterfaceDeclaration(Interface):
"""
Declare and check the interfaces of objects.
The functions defined in this interface are used to declare the
interfaces that objects provide and to query the interfaces that
have been declared.
Interfaces can be declared for objects in two ways:
- Interfaces are declared for instances of the object's class
- Interfaces are declared for the object directly.
The interfaces declared for an object are, therefore, the union of
interfaces declared for the object directly and the interfaces
declared for instances of the object's class.
Note that we say that a class implements the interfaces provided
by its instances. An instance can also provide interfaces
directly. The interfaces provided by an object are the union of
the interfaces provided directly and the interfaces implemented by
the class.
This interface is implemented by :mod:`zope.interface`.
"""
###
# Defining interfaces
###
Interface = Attribute("The base class used to create new interfaces")
def taggedValue(key, value):
"""
Attach a tagged value to an interface while defining the interface.
This is a way of executing :meth:`IElement.setTaggedValue` from
the definition of the interface. For example::
class IFoo(Interface):
taggedValue('key', 'value')
.. seealso:: `zope.interface.taggedValue`
"""
def invariant(checker_function):
"""
Attach an invariant checker function to an interface while defining it.
Invariants can later be validated against particular implementations by
calling :meth:`IInterface.validateInvariants`.
For example::
def check_range(ob):
if ob.max < ob.min:
raise ValueError("max value is less than min value")
class IRange(Interface):
min = Attribute("The min value")
max = Attribute("The max value")
invariant(check_range)
.. seealso:: `zope.interface.invariant`
"""
def interfacemethod(method):
"""
A decorator that transforms a method specification into an
implementation method.
This is used to override methods of ``Interface`` or provide new methods.
Definitions using this decorator will not appear in :meth:`IInterface.names()`.
It is possible to have an implementation method and a method specification
of the same name.
For example::
class IRange(Interface):
@interfacemethod
def __adapt__(self, obj):
if isinstance(obj, range):
# Return the builtin ``range`` as-is
return obj
return super(type(IRange), self).__adapt__(obj)
You can use ``super`` to call the parent class functionality. Note that
the zero-argument version (``super().__adapt__``) works on Python 3.6 and above, but
prior to that the two-argument version must be used, and the class must be explicitly
passed as the first argument.
.. versionadded:: 5.1.0
.. seealso:: `zope.interface.interfacemethod`
"""
###
# Querying interfaces
###
def providedBy(ob):
"""
Return the interfaces provided by an object.
This is the union of the interfaces directly provided by an
object and interfaces implemented by its class.
The value returned is an `IDeclaration`.
.. seealso:: `zope.interface.providedBy`
"""
def implementedBy(class_):
"""
Return the interfaces implemented for a class's instances.
The value returned is an `IDeclaration`.
.. seealso:: `zope.interface.implementedBy`
"""
###
# Declaring interfaces
###
def classImplements(class_, *interfaces):
"""
Declare additional interfaces implemented for instances of a class.
The arguments after the class are one or more interfaces or
interface specifications (`IDeclaration` objects).
The interfaces given (including the interfaces in the
specifications) are added to any interfaces previously
declared.
Consider the following example::
class C(A, B):
...
classImplements(C, I1, I2)
Instances of ``C`` provide ``I1``, ``I2``, and whatever interfaces
instances of ``A`` and ``B`` provide. This is equivalent to::
@implementer(I1, I2)
class C(A, B):
pass
.. seealso:: `zope.interface.classImplements`
.. seealso:: `zope.interface.implementer`
"""
def classImplementsFirst(cls, interface):
"""
See :func:`zope.interface.classImplementsFirst`.
"""
def implementer(*interfaces):
"""
Create a decorator for declaring interfaces implemented by a
factory.
A callable is returned that makes an implements declaration on
objects passed to it.
.. seealso:: :meth:`classImplements`
"""
def classImplementsOnly(class_, *interfaces):
"""
Declare the only interfaces implemented by instances of a class.
The arguments after the class are one or more interfaces or
interface specifications (`IDeclaration` objects).
The interfaces given (including the interfaces in the
specifications) replace any previous declarations.
Consider the following example::
class C(A, B):
...
classImplements(C, IA, IB, IC)
classImplementsOnly(C, I1, I2)
Instances of ``C`` provide only ``I1`` and ``I2``, regardless of
what interfaces instances of ``A`` and ``B`` implement.
.. seealso:: `zope.interface.classImplementsOnly`
"""
def implementer_only(*interfaces):
"""
Create a decorator for declaring the only interfaces implemented.
A callable is returned that makes an implements declaration on
objects passed to it.
.. seealso:: `zope.interface.implementer_only`
"""
def directlyProvidedBy(object):
"""
Return the interfaces directly provided by the given object.
The value returned is an `IDeclaration`.
.. seealso:: `zope.interface.directlyProvidedBy`
"""
def directlyProvides(object, *interfaces):
"""
Declare interfaces declared directly for an object.
The arguments after the object are one or more interfaces or
interface specifications (`IDeclaration` objects).
.. caution::
The interfaces given (including the interfaces in the
specifications) *replace* interfaces previously
declared for the object. See :meth:`alsoProvides` to add
additional interfaces.
Consider the following example::
class C(A, B):
...
ob = C()
directlyProvides(ob, I1, I2)
The object ``ob`` provides ``I1``, ``I2``, and whatever interfaces
have been declared for instances of ``C``.
To remove directly provided interfaces, use `directlyProvidedBy` and
subtract the unwanted interfaces. For example::
directlyProvides(ob, directlyProvidedBy(ob)-I2)
removes I2 from the interfaces directly provided by
``ob``. The object, ``ob`` no longer directly provides ``I2``,
although it might still provide ``I2`` if its class
implements ``I2``.
To add directly provided interfaces, use `directlyProvidedBy` and
include additional interfaces. For example::
directlyProvides(ob, directlyProvidedBy(ob), I2)
adds I2 to the interfaces directly provided by ob.
.. seealso:: `zope.interface.directlyProvides`
"""
def alsoProvides(object, *interfaces):
"""
Declare additional interfaces directly for an object.
For example::
alsoProvides(ob, I1)
is equivalent to::
directlyProvides(ob, directlyProvidedBy(ob), I1)
.. seealso:: `zope.interface.alsoProvides`
"""
def noLongerProvides(object, interface):
"""
Remove an interface from the list of an object's directly provided
interfaces.
For example::
noLongerProvides(ob, I1)
is equivalent to::
directlyProvides(ob, directlyProvidedBy(ob) - I1)
with the exception that if ``I1`` is an interface that is
provided by ``ob`` through the class's implementation,
`ValueError` is raised.
.. seealso:: `zope.interface.noLongerProvides`
"""
def implements(*interfaces):
"""
Declare interfaces implemented by instances of a class.
.. deprecated:: 5.0
This only works for Python 2. The `implementer` decorator
is preferred for all versions.
This function is called in a class definition (Python 2.x only).
The arguments are one or more interfaces or interface
specifications (`IDeclaration` objects).
The interfaces given (including the interfaces in the
specifications) are added to any interfaces previously
declared.
Previous declarations include declarations for base classes
unless implementsOnly was used.
This function is provided for convenience. It provides a more
convenient way to call `classImplements`. For example::
implements(I1)
is equivalent to calling::
classImplements(C, I1)
after the class has been created.
Consider the following example (Python 2.x only)::
class C(A, B):
implements(I1, I2)
Instances of ``C`` implement ``I1``, ``I2``, and whatever interfaces
instances of ``A`` and ``B`` implement.
"""
def implementsOnly(*interfaces):
"""
Declare the only interfaces implemented by instances of a class.
.. deprecated:: 5.0
This only works for Python 2. The `implementer_only` decorator
is preferred for all versions.
This function is called in a class definition (Python 2.x only).
The arguments are one or more interfaces or interface
specifications (`IDeclaration` objects).
Previous declarations including declarations for base classes
are overridden.
This function is provided for convenience. It provides a more
convenient way to call `classImplementsOnly`. For example::
implementsOnly(I1)
is equivalent to calling::
classImplementsOnly(C, I1)
after the class has been created.
Consider the following example (Python 2.x only)::
class C(A, B):
implementsOnly(I1, I2)
Instances of ``C`` implement ``I1``, ``I2``, regardless of what
instances of ``A`` and ``B`` implement.
"""
def classProvides(*interfaces):
"""
Declare interfaces provided directly by a class.
.. deprecated:: 5.0
This only works for Python 2. The `provider` decorator
is preferred for all versions.
This function is called in a class definition.
The arguments are one or more interfaces or interface
specifications (`IDeclaration` objects).
The given interfaces (including the interfaces in the
specifications) are used to create the class's direct-object
interface specification. An error will be raised if the
class already has a direct interface specification. In other words, it is
an error to call this function more than once in a class
definition.
Note that the given interfaces have nothing to do with the
interfaces implemented by instances of the class.
This function is provided for convenience. It provides a more
convenient way to call `directlyProvides` for a class. For example::
classProvides(I1)
is equivalent to calling::
directlyProvides(theclass, I1)
after the class has been created.
"""
def provider(*interfaces):
"""
A class decorator version of `classProvides`.
.. seealso:: `zope.interface.provider`
"""
def moduleProvides(*interfaces):
"""
Declare interfaces provided by a module.
This function is used in a module definition.
The arguments are one or more interfaces or interface
specifications (`IDeclaration` objects).
The given interfaces (including the interfaces in the
specifications) are used to create the module's direct-object
interface specification. An error will be raised if the module
already has an interface specification. In other words, it is
an error to call this function more than once in a module
definition.
This function is provided for convenience. It provides a more
convenient way to call `directlyProvides` for a module. For example::
moduleProvides(I1)
is equivalent to::
directlyProvides(sys.modules[__name__], I1)
.. seealso:: `zope.interface.moduleProvides`
"""
def Declaration(*interfaces):
"""
Create an interface specification.
The arguments are one or more interfaces or interface
specifications (`IDeclaration` objects).
A new interface specification (`IDeclaration`) with the given
interfaces is returned.
.. seealso:: `zope.interface.Declaration`
"""
class IAdapterRegistry(Interface):
"""Provide an interface-based registry for adapters
This registry registers objects that are in some sense "from" a
sequence of specifications to an interface and a name.
No specific semantics are assumed for the registered objects,
however, the most common application will be to register factories
that adapt objects providing required specifications to a provided
interface.
"""
def register(required, provided, name, value):
"""Register a value
A value is registered for a *sequence* of required specifications, a
provided interface, and a name, which must be text.
"""
def registered(required, provided, name=u''):
"""Return the component registered for the given interfaces and name
name must be text.
Unlike the lookup method, this method won't retrieve
components registered for more specific required interfaces or
less specific provided interfaces.
If no component was registered exactly for the given
interfaces and name, then None is returned.
"""
def lookup(required, provided, name='', default=None):
"""Lookup a value
A value is looked up based on a *sequence* of required
specifications, a provided interface, and a name, which must be
text.
"""
def queryMultiAdapter(objects, provided, name=u'', default=None):
"""Adapt a sequence of objects to a named, provided, interface
"""
def lookup1(required, provided, name=u'', default=None):
"""Lookup a value using a single required interface
A value is looked up based on a single required
specification, a provided interface, and a name, which must be
text.
"""
def queryAdapter(object, provided, name=u'', default=None):
"""Adapt an object using a registered adapter factory.
"""
def adapter_hook(provided, object, name=u'', default=None):
"""Adapt an object using a registered adapter factory.
name must be text.
"""
def lookupAll(required, provided):
"""Find all adapters from the required to the provided interfaces
An iterable object is returned that provides name-value two-tuples.
"""
def names(required, provided):
"""Return the names for which there are registered objects
"""
def subscribe(required, provided, subscriber, name=u''):
"""Register a subscriber
A subscriber is registered for a *sequence* of required
specifications, a provided interface, and a name.
Multiple subscribers may be registered for the same (or
equivalent) interfaces.
"""
def subscriptions(required, provided, name=u''):
"""Get a sequence of subscribers
Subscribers for a *sequence* of required interfaces, and a provided
interface are returned.
"""
def subscribers(objects, provided, name=u''):
"""Get a sequence of subscription adapters
"""
# begin formerly in zope.component
class ComponentLookupError(LookupError):
"""A component could not be found."""
class Invalid(Exception):
"""A component doesn't satisfy a promise."""
class IObjectEvent(Interface):
"""An event related to an object.
The object that generated this event is not necessarily the object
referred to by location.
"""
object = Attribute("The subject of the event.")
@implementer(IObjectEvent)
class ObjectEvent(object):
def __init__(self, object):
self.object = object
class IComponentLookup(Interface):
"""Component Manager for a Site
This object manages the components registered at a particular site. The
definition of a site is intentionally vague.
"""
adapters = Attribute(
"Adapter Registry to manage all registered adapters.")
utilities = Attribute(
"Adapter Registry to manage all registered utilities.")
def queryAdapter(object, interface, name=u'', default=None):
"""Look for a named adapter to an interface for an object
If a matching adapter cannot be found, returns the default.
"""
def getAdapter(object, interface, name=u''):
"""Look for a named adapter to an interface for an object
If a matching adapter cannot be found, a `ComponentLookupError`
is raised.
"""
def queryMultiAdapter(objects, interface, name=u'', default=None):
"""Look for a multi-adapter to an interface for multiple objects
If a matching adapter cannot be found, returns the default.
"""
def getMultiAdapter(objects, interface, name=u''):
"""Look for a multi-adapter to an interface for multiple objects
If a matching adapter cannot be found, a `ComponentLookupError`
is raised.
"""
def getAdapters(objects, provided):
"""Look for all matching adapters to a provided interface for objects
Return an iterable of name-adapter pairs for adapters that
provide the given interface.
"""
def subscribers(objects, provided):
"""Get subscribers
Subscribers are returned that provide the provided interface
and that depend on and are computed from the sequence of
required objects.
"""
def handle(*objects):
"""Call handlers for the given objects
Handlers registered for the given objects are called.
"""
def queryUtility(interface, name='', default=None):
"""Look up a utility that provides an interface.
If one is not found, returns default.
"""
def getUtilitiesFor(interface):
"""Look up the registered utilities that provide an interface.
Returns an iterable of name-utility pairs.
"""
def getAllUtilitiesRegisteredFor(interface):
"""Return all registered utilities for an interface
This includes overridden utilities.
An iterable of utility instances is returned. No names are
returned.
"""
class IRegistration(Interface):
"""A registration-information object
"""
registry = Attribute("The registry having the registration")
name = Attribute("The registration name")
info = Attribute("""Information about the registration
This is information deemed useful to people browsing the
configuration of a system. It could, for example, include
commentary or information about the source of the configuration.
""")
class IUtilityRegistration(IRegistration):
"""Information about the registration of a utility
"""
factory = Attribute("The factory used to create the utility. Optional.")
component = Attribute("The object registered")
provided = Attribute("The interface provided by the component")
class _IBaseAdapterRegistration(IRegistration):
"""Information about the registration of an adapter
"""
factory = Attribute("The factory used to create adapters")
required = Attribute("""The adapted interfaces
This is a sequence of interfaces adapted by the registered
factory. The factory will be called with a sequence of objects, as
positional arguments, that provide these interfaces.
""")
provided = Attribute("""The interface provided by the adapters.
This interface is implemented by the factory
""")
class IAdapterRegistration(_IBaseAdapterRegistration):
"""Information about the registration of an adapter
"""
class ISubscriptionAdapterRegistration(_IBaseAdapterRegistration):
"""Information about the registration of a subscription adapter
"""
class IHandlerRegistration(IRegistration):
handler = Attribute("An object called used to handle an event")
required = Attribute("""The handled interfaces
This is a sequence of interfaces handled by the registered
handler. The handler will be called with a sequence of objects, as
positional arguments, that provide these interfaces.
""")
class IRegistrationEvent(IObjectEvent):
"""An event that involves a registration"""
@implementer(IRegistrationEvent)
class RegistrationEvent(ObjectEvent):
"""There has been a change in a registration
"""
def __repr__(self):
return "%s event:\n%r" % (self.__class__.__name__, self.object)
class IRegistered(IRegistrationEvent):
"""A component or factory was registered
"""
@implementer(IRegistered)
class Registered(RegistrationEvent):
pass
class IUnregistered(IRegistrationEvent):
"""A component or factory was unregistered
"""
@implementer(IUnregistered)
class Unregistered(RegistrationEvent):
"""A component or factory was unregistered
"""
pass
class IComponentRegistry(Interface):
"""Register components
"""
def registerUtility(component=None, provided=None, name=u'',
info=u'', factory=None):
"""Register a utility
:param factory:
Factory for the component to be registered.
:param component:
The registered component
:param provided:
This is the interface provided by the utility. If the
component provides a single interface, then this
argument is optional and the component-implemented
interface will be used.
:param name:
The utility name.
:param info:
An object that can be converted to a string to provide
information about the registration.
Only one of *component* and *factory* can be used.
An `IRegistered` event is generated with an `IUtilityRegistration`.
"""
def unregisterUtility(component=None, provided=None, name=u'',
factory=None):
"""Unregister a utility
:returns:
A boolean is returned indicating whether the registry was
changed. If the given *component* is None and there is no
component registered, or if the given *component* is not
None and is not registered, then the function returns
False, otherwise it returns True.
:param factory:
Factory for the component to be unregistered.
:param component:
The registered component. The given component can be
None, in which case any component registered to provide
the given provided interface with the given name is
unregistered.
:param provided:
This is the interface provided by the utility. If the
component is not None and provides a single interface,
then this argument is optional and the
component-implemented interface will be used.
:param name:
The utility name.
Only one of *component* and *factory* can be used.
An `IUnregistered` event is generated with an `IUtilityRegistration`.
"""
def registeredUtilities():
"""Return an iterable of `IUtilityRegistration` instances.
These registrations describe the current utility registrations
in the object.
"""
def registerAdapter(factory, required=None, provided=None, name=u'',
info=u''):
"""Register an adapter factory
:param factory:
The object used to compute the adapter
:param required:
This is a sequence of specifications for objects to be
adapted. If omitted, then the value of the factory's
``__component_adapts__`` attribute will be used. The
``__component_adapts__`` attribute is
normally set in class definitions using
the `.adapter`
decorator. If the factory doesn't have a
``__component_adapts__`` attribute, then this
argument is required.
:param provided:
This is the interface provided by the adapter and
implemented by the factory. If the factory
implements a single interface, then this argument is
optional and the factory-implemented interface will be
used.
:param name:
The adapter name.
:param info:
An object that can be converted to a string to provide
information about the registration.
An `IRegistered` event is generated with an `IAdapterRegistration`.
"""
def unregisterAdapter(factory=None, required=None,
provided=None, name=u''):
"""Unregister an adapter factory
:returns:
A boolean is returned indicating whether the registry was
changed. If the given component is None and there is no
component registered, or if the given component is not
None and is not registered, then the function returns
False, otherwise it returns True.
:param factory:
This is the object used to compute the adapter. The
factory can be None, in which case any factory
registered to implement the given provided interface
for the given required specifications with the given
name is unregistered.
:param required:
This is a sequence of specifications for objects to be
adapted. If the factory is not None and the required
arguments is omitted, then the value of the factory's
__component_adapts__ attribute will be used. The
__component_adapts__ attribute is normally
set in class definitions using the adapts function, or for
callables using the adapter decorator. If the factory
is None or doesn't have a __component_adapts__
attribute, then this argument is required.
:param provided:
This is the interface provided by the adapter and
implemented by the factory. If the factory is not
None and implements a single interface, then this
argument is optional and the factory-implemented
interface will be used.
:param name:
The adapter name.
An `IUnregistered` event is generated with an `IAdapterRegistration`.
"""
def registeredAdapters():
"""Return an iterable of `IAdapterRegistration` instances.
These registrations describe the current adapter registrations
in the object.
"""
def registerSubscriptionAdapter(factory, required=None, provides=None,
name=u'', info=''):
"""Register a subscriber factory
:param factory:
The object used to compute the adapter
:param required:
This is a sequence of specifications for objects to be
adapted. If omitted, then the value of the factory's
``__component_adapts__`` attribute will be used. The
``__component_adapts__`` attribute is
normally set using the adapter
decorator. If the factory doesn't have a
``__component_adapts__`` attribute, then this
argument is required.
:param provided:
This is the interface provided by the adapter and
implemented by the factory. If the factory implements
a single interface, then this argument is optional and
the factory-implemented interface will be used.
:param name:
The adapter name.
Currently, only the empty string is accepted. Other
strings will be accepted in the future when support for
named subscribers is added.
:param info:
An object that can be converted to a string to provide
information about the registration.
An `IRegistered` event is generated with an
`ISubscriptionAdapterRegistration`.
"""
def unregisterSubscriptionAdapter(factory=None, required=None,
provides=None, name=u''):
"""Unregister a subscriber factory.
:returns:
A boolean is returned indicating whether the registry was
changed. If the given component is None and there is no
component registered, or if the given component is not
None and is not registered, then the function returns
False, otherwise it returns True.
:param factory:
This is the object used to compute the adapter. The
factory can be None, in which case any factories
registered to implement the given provided interface
for the given required specifications with the given
name are unregistered.
:param required:
This is a sequence of specifications for objects to be
adapted. If omitted, then the value of the factory's
``__component_adapts__`` attribute will be used. The
``__component_adapts__`` attribute is
normally set using the adapter
decorator. If the factory doesn't have a
``__component_adapts__`` attribute, then this
argument is required.
:param provided:
This is the interface provided by the adapter and
implemented by the factory. If the factory is not
None and implements a single interface, then this argument
is optional and the factory-implemented interface will
be used.
:param name:
The adapter name.
Currently, only the empty string is accepted. Other
strings will be accepted in the future when support for
named subscribers is added.
An `IUnregistered` event is generated with an
`ISubscriptionAdapterRegistration`.
"""
def registeredSubscriptionAdapters():
"""Return an iterable of `ISubscriptionAdapterRegistration` instances.
These registrations describe the current subscription adapter
registrations in the object.
"""
def registerHandler(handler, required=None, name=u'', info=''):
"""Register a handler.
A handler is a subscriber that doesn't compute an adapter
but performs some function when called.
:param handler:
The object used to handle some event represented by
the objects passed to it.
:param required:
This is a sequence of specifications for objects to be
adapted. If omitted, then the value of the factory's
``__component_adapts__`` attribute will be used. The
``__component_adapts__`` attribute is
normally set using the adapter
decorator. If the factory doesn't have a
``__component_adapts__`` attribute, then this
argument is required.
:param name:
The handler name.
Currently, only the empty string is accepted. Other
strings will be accepted in the future when support for
named handlers is added.
:param info:
An object that can be converted to a string to provide
information about the registration.
An `IRegistered` event is generated with an `IHandlerRegistration`.
"""
def unregisterHandler(handler=None, required=None, name=u''):
"""Unregister a handler.
A handler is a subscriber that doesn't compute an adapter
but performs some function when called.
:returns: A boolean is returned indicating whether the registry was
changed.
:param handler:
This is the object used to handle some event
represented by the objects passed to it. The handler
can be None, in which case any handlers registered for
the given required specifications with the given name are
unregistered.
:param required:
This is a sequence of specifications for objects to be
adapted. If omitted, then the value of the factory's
``__component_adapts__`` attribute will be used. The
``__component_adapts__`` attribute is
normally set using the adapter
decorator. If the factory doesn't have a
``__component_adapts__`` attribute, then this
argument is required.
:param name:
The handler name.
Currently, only the empty string is accepted. Other
strings will be accepted in the future when support for
named handlers is added.
An `IUnregistered` event is generated with an `IHandlerRegistration`.
"""
def registeredHandlers():
"""Return an iterable of `IHandlerRegistration` instances.
These registrations describe the current handler registrations
in the object.
"""
class IComponents(IComponentLookup, IComponentRegistry):
"""Component registration and access
"""
# end formerly in zope.component
| mdworks2016/work_development | Python/05_FirstPython/Chapter9_WebApp/fppython_develop/lib/python3.7/site-packages/zope/interface/interfaces.py | Python | apache-2.0 | 50,271 |
# Copyright 2016 Bridgewater Associates
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. module: security_monkey.watchers.vpc.endpoint
:platform: Unix
.. version:: $$VERSION$$
.. moduleauthor:: Bridgewater OSS <[email protected]>
"""
from security_monkey.watcher import Watcher
from security_monkey.watcher import ChangeItem
from security_monkey.constants import TROUBLE_REGIONS
from security_monkey.exceptions import BotoConnectionIssue
from security_monkey import app
from boto.vpc import regions
class Endpoint(Watcher):
index = 'endpoint'
i_am_singular = 'Endpoint'
i_am_plural = 'Endpoints'
def __init__(self, accounts=None, debug=False):
super(Endpoint, self).__init__(accounts=accounts, debug=debug)
def slurp(self):
"""
:returns: item_list - list of endpoints.
:returns: exception_map - A dict where the keys are a tuple containing the
location of the exception and the value is the actual exception
"""
self.prep_for_slurp()
item_list = []
exception_map = {}
from security_monkey.common.sts_connect import connect
for account in self.accounts:
for region in regions():
app.logger.debug(
"Checking {}/{}/{}".format(self.index, account, region.name))
try:
conn = connect(account, 'boto3.ec2.client', region=region)
all_vpc_endpoints_resp = self.wrap_aws_rate_limited_call(
conn.describe_vpc_endpoints
)
all_vpc_endpoints = all_vpc_endpoints_resp.get(
'VpcEndpoints', [])
except Exception as e:
if region.name not in TROUBLE_REGIONS:
exc = BotoConnectionIssue(
str(e), self.index, account, region.name)
self.slurp_exception(
(self.index, account, region.name), exc, exception_map)
continue
app.logger.debug("Found {} {}".format(
len(all_vpc_endpoints), self.i_am_plural))
for endpoint in all_vpc_endpoints:
endpoint_name = endpoint.get('VpcEndpointId')
if self.check_ignore_list(endpoint_name):
continue
service = endpoint.get('ServiceName', '').split('.')[-1]
config = {
"id": endpoint.get('VpcEndpointId'),
"policy_document": endpoint.get('PolicyDocument', {}),
"service_name": endpoint.get('ServiceName'),
"service": service,
"route_table_ids": endpoint.get('RouteTableIds', []),
"creation_time_stamp": str(endpoint.get('CreationTimestamp')),
"state": endpoint.get('State'),
"vpc_id": endpoint.get('VpcId'),
}
item = EndpointItem(
region=region.name, account=account, name=endpoint_name, config=config, source_watcher=self)
item_list.append(item)
return item_list, exception_map
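# A minimal usage sketch (the account name is hypothetical; in practice the
# Security Monkey scheduler drives watchers):
#
#   watcher = Endpoint(accounts=['my-aws-account'])
#   item_list, exception_map = watcher.slurp()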
class EndpointItem(ChangeItem):
def __init__(self, region=None, account=None, name=None, config=None, source_watcher=None):
super(EndpointItem, self).__init__(
index=Endpoint.index,
region=region,
account=account,
name=name,
new_config=config if config else {},
source_watcher=source_watcher)
| Netflix/security_monkey | security_monkey/watchers/vpc/endpoint.py | Python | apache-2.0 | 4,242 |
import urllib3
import cloudshare
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
''' IMPORTS '''
# Disable insecure warnings
urllib3.disable_warnings()
''' CONSTANTS '''
''' CLIENT CLASS '''
class Client():
def __init__(self, hostname: str, api_id: str = None, api_key: str = None):
self.hostname = hostname
self.apiId = api_id
self.apiKey = api_key
def send_request(self, method: str, path: str, queryParams: dict = None, content: dict = None):
res = cloudshare.req(
hostname=self.hostname,
method=method,
path=path,
apiId=self.apiId,
apiKey=self.apiKey,
queryParams=queryParams,
content=content
)
return res
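# A minimal usage sketch of the Client wrapper above (hostname and credentials
# are placeholders):
#
#   client = Client(hostname='use.cloudshare.com', api_id='<API_ID>', api_key='<API_KEY>')
#   res = client.send_request('GET', 'ping')
#   if res.status == 200:
#       print(res.content)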
''' HELPER FUNCTIONS '''
def test_module_command(client, args):
res = client.send_request(
'GET',
'ping'
)
if res.status == 200:
if "result" in res.content and res.content['result'] == "Pong":
return_results('ok')
else:
return_error(res.content)
else:
return_error(res.content)
def get_projects_command(client, args):
queryParams = {
"WhereUserIsProjectManager": True if args.get('WhereUserIsProjectManager', 'false') == 'true' else False,
"WhereUserIsProjectMember": True if args.get('WhereUserIsProjectMember', 'false') == 'true' else False,
"WhereUserCanCreateClass": True if args.get('WhereUserCanCreateClass', 'false') == 'true' else False
}
res = client.send_request(
'GET',
'projects',
queryParams=queryParams
)
if res.status == 200:
md = tableToMarkdown('CloudShare Projects:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Projects",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error getting projects - {res.content}")
def get_project_command(client, args):
projectId = args.get('projectId')
res = client.send_request(
'GET',
f'projects/{projectId}'
)
if res.status == 200:
md = tableToMarkdown(f'CloudShare Project {projectId}:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Projects",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error getting project - {res.content}")
def get_project_policies_command(client, args):
projectId = args.get('projectId')
res = client.send_request(
'GET',
f'projects/{projectId}/policies'
)
if res.status == 200:
policies = {
"id": projectId,
"Policies": res.content
}
md = tableToMarkdown(f'CloudShare Project Policies for {projectId}:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Projects",
outputs_key_field='id',
outputs=policies if policies else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error getting project policies - {res.content}")
def get_project_blueprints_command(client, args):
projectId = args.get('projectId')
queryParams = {k: v for k, v in args.items() if k != 'projectId'}
res = client.send_request(
'GET',
f'projects/{projectId}/blueprints',
queryParams=queryParams
)
if res.status == 200:
blueprints = {
"id": projectId,
"Blueprints": res.content if res.content else None
}
md = tableToMarkdown(f'CloudShare Project Blueprints for {projectId}:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Projects",
outputs_key_field='id',
outputs=blueprints if blueprints else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error getting project blueprints - {res.content}")
def get_project_blueprint_command(client, args):
projectId = args.get('projectId')
blueprintId = args.get('blueprintId', None)
res = client.send_request(
'GET',
f'projects/{projectId}/blueprints/{blueprintId}'
)
if res.status == 200:
blueprints = {
"id": projectId,
"Blueprints": res.content if res.content else None
}
md = tableToMarkdown(f'CloudShare Blueprint ID {blueprintId} for Project {projectId}:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Projects",
outputs_key_field='id',
outputs=blueprints if blueprints else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error getting project blueprint - {res.content}")
def get_classes_command(client, args):
res = client.send_request(
'GET',
'class'
)
if res.status == 200:
md = tableToMarkdown('CloudShare classes:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Classes",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error retrieving classes - {res.content}")
def get_class_command(client, args):
classId = args.get('classId')
res = client.send_request(
'GET',
f'class/{classId}'
)
if res.status == 200:
md = tableToMarkdown(f'CloudShare class {classId}:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Classes",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error finding class - {res.content}")
def delete_class_command(client, args):
classId = args.get('classId')
res = client.send_request(
'DELETE',
f'class/{classId}'
)
if res.status == 200:
return_results("Class {classId} deleted successfully")
else:
return_error(f"Error deleteing class {classId} - {res.content}")
def delete_class_environments_command(client, args):
classId = args.get('classId')
res = client.send_request(
'DELETE',
'class/actions/deleteallenvironments',
content={"id": classId}
)
if res.status == 200:
results = {
"failed": res[0].get('failed', []),
"succeed": res[0].get('succeed', [])
}
for k, v in results.items():
md = tableToMarkdown(f'CloudShare class {classId} environments deletion ({k}):', v)
command_results = CommandResults(
outputs_prefix="CloudShare.Classes.Actions.Delete.{k}",
outputs_key_field='id',
outputs=v if v else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error deleteing class {classId} environments - {res.content}")
def get_classes_countries_command(client, args):
res = client.send_request(
'GET',
'class/actions/countries',
queryParams={"fullCountriesList": True}
)
if res.status == 200:
md = tableToMarkdown('CloudShare classes countries:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Classes.Countries",
outputs_key_field='code',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error retrieving countries - {res.content}")
def get_classes_customfields_command(client, args):
projectId = args.get('projectId')
res = client.send_request(
'GET',
'class/actions/customfields',
queryParams={"projectId": projectId}
)
if res.status == 200:
md = tableToMarkdown(f'CloudShare project {projectId} classes custom fields:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Classes.CustomFields",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error retrieving custom fields - {res.content}")
def get_classes_detailed_command(client, args):
classId = args.get('classId')
res = client.send_request(
'GET',
'class/actions/getdetailed',
queryParams={"classId": classId}
)
if res.status == 200:
res.content['id'] = classId
md = tableToMarkdown(f'CloudShare class {classId} details:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Classes",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error retrieving details - {res.content}")
def get_classes_instructors_command(client, args):
policyId = args.get('policyId')
res = client.send_request(
'GET',
'class/actions/instructors',
queryParams={"policyId": policyId}
)
if res.status == 200:
md = tableToMarkdown(f'CloudShare class instructors under policy {policyId}:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Classes.Instructors",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error retrieving details - {res.content}")
def create_class_command(client, args):
res = client.send_request(
'POST',
'class',
content={k: True if v == 'true' else False if v == 'false' else v for k, v in args.items()}
)
if res.status == 200:
res.content.update(args)
md = tableToMarkdown('CloudShare create new class:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Classes",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error creating new class - {res.content}")
def send_class_invitations_command(client, args):
classId = args.get('classId')
studentIds = args.get('studentIds').replace(" ", "").split(",")
res = client.send_request(
'POST',
'class/actions/sendinvitations',
queryParams={"isMultiple": True},
content={
"classId": classId,
"studentIds": studentIds
}
)
if res.status == 200:
return_results(f"Invitations sent for class {classId} successfully.")
else:
return_error(f"Error sending invitations - {res.content}")
def suspend_class_environments_command(client, args):
classId = args.get('classId')
res = client.send_request(
'PUT',
'class/actions/suspendallenvironments',
content={"id": classId}
)
if res.status == 200:
results = {
"failed": res[0].get('failed', []),
"succeed": res[0].get('succeed', [])
}
for k, v in results.items():
md = tableToMarkdown(f'CloudShare class {classId} environments suspension ({k}):', v)
command_results = CommandResults(
outputs_prefix="CloudShare.Classes.Actions.Suspend.{k}",
outputs_key_field='id',
outputs=v if v else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error suspending class {classId} environments - {res.content}")
def modify_class_command(client, args):
classId = args.get('classId')
res = client.send_request(
'PUT',
f'class/{classId}',
content={k: True if v == 'true' else False if v == 'false' else v for k, v in args.items() if k != 'classId'}
)
if res.status == 200:
md = tableToMarkdown(f'CloudShare modify class {classId}:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Classes",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error modifying class {classId} - {res.content}")
def get_students_command(client, args):
classId = args.get('classId')
res = client.send_request(
'GET',
f'class/{classId}/students',
queryParams={
"isFull": True if args.get('isFull', 'false') == 'true' else False
}
)
if res.status == 200:
md = tableToMarkdown(f'CloudShare students for class {classId}:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Students",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error retrieving students for class {classId} - {res.content}")
def get_student_command(client, args):
classId = args.get('classId')
studentId = args.get('studentId')
res = client.send_request(
'GET',
f'class/{classId}/students/{studentId}'
)
if res.status == 200:
md = tableToMarkdown(f'CloudShare student {studentId} for class {classId}:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Students",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error retrieving students for class {classId} - {res.content}")
def delete_student_command(client, args):
classId = args.get('classId')
studentId = args.get('studentId')
res = client.send_request(
'DELETE',
f'class/{classId}/students/{studentId}'
)
if res.status == 200:
return_results("Successfully deleted student {studentId} from class {classId}")
else:
return_error(f"Error deleting student {studentId} from class {classId} - {res.content}")
def register_student_command(client, args):
classId = args.get('classId')
res = client.send_request(
'POST',
f'class/{classId}/students',
content={k: v for k, v in args.items() if k != 'classId'}
)
if res.status == 200:
results = {"id": v for k, v in res.contents.items() if k == 'studentId'}
md = tableToMarkdown(f'CloudShare registered student for class {classId}:', results)
command_results = CommandResults(
outputs_prefix="CloudShare.Students",
outputs_key_field='id',
outputs=results if results else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error retrieving students for class {classId} - {res.content}")
def modify_student_command(client, args):
classId = args.get('classId')
studentId = args.get('studentId')
res = client.send_request(
'PUT',
f'class/{classId}/students/{studentId}',
content={k: v for k, v in args.items() if k != 'classId' and k != 'studentId'}
)
if res.status == 200:
return_results(f"Student {studentId} modified in class {classId} successfully")
else:
return_error(f"Error modifying student {studentId} for class {classId} - {res.content}")
def get_regions_command(client, args):
res = client.send_request(
'GET',
'regions'
)
if res.status == 200:
md = tableToMarkdown('CloudShare regions:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Regions",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error retrieving regions - {res.content}")
def get_timezones_command(client, args):
res = client.send_request(
'GET',
'timezones'
)
if res.status == 200:
md = tableToMarkdown('CloudShare timezones:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Timezones",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error retrieving timezones - {res.content}")
def get_envs_command(client, args):
owned = True if args.get('owned', 'false') == 'true' else False
visible = True if args.get('visible', 'false') == 'true' else False
owner_email = args.get('ownerEmail', None)
class_id = args.get('classId', None)
brief = True if args.get('brief', 'false') == 'true' else False
queryParams = dict()
if owned or visible:
owned_visible = list()
if owned:
owned_visible.append('allowned')
if visible:
owned_visible.append('allvisible')
queryParams['criteria'] = ','.join(owned_visible) if owned_visible else None
if owner_email:
queryParams['ownerEmail'] = owner_email
if class_id:
queryParams['classId'] = class_id
if brief:
queryParams['brief'] = brief
res = client.send_request(
'GET',
'envs',
queryParams=queryParams
)
if res.status == 200:
md = tableToMarkdown('CloudShare Environments:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Environments",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error getting environments - {res.content}")
def get_env_resources_command(client, args):
envId = args.get('envId')
res = client.send_request(
'GET',
'envs/actions/getenvresources',
queryParams={"envId": envId}
)
if res.status == 200:
md = tableToMarkdown(f'CloudShare Environment {envId} Resources:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.EnvironmentResources",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error getting environments - {res.content}")
def get_env_extended_command(client, args):
envId = args.get('envId')
res = client.send_request(
'GET',
'envs/actions/getextended',
queryParams={"envId": envId}
)
if res.status == 200:
md = tableToMarkdown(f'CloudShare Environment {envId}:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Environments",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error getting extended environment {envId} - {res.content}")
def get_env_extended_vanity_command(client, args):
machineVanity = args.get('machineVanity')
res = client.send_request(
'GET',
'envs/actions/getextendedbymachinevanity',
queryParams={"machineVanity": machineVanity}
)
if res.status == 200:
md = tableToMarkdown(f'CloudShare Environment for machine vanity {machineVanity}:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Environments",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error getting extended environment - {res.content}")
def get_env_extended_token_command(client, args):
sponsoredLoginToken = args.get('sponsoredLoginToken')
res = client.send_request(
'GET',
'envs/actions/getextendedbytoken',
queryParams={"sponsoredLoginToken": sponsoredLoginToken}
)
if res.status == 200:
md = tableToMarkdown('CloudShare Environment:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Environments",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error getting extended environment - {res.content}")
def get_env_multiple_resources_command(client, args):
res = client.send_request(
'GET',
'envs/actions/getmultipleenvsresources',
queryParams={k: v for k, v in args.items()}
)
if res.status == 200:
        md = tableToMarkdown(f"CloudShare Environment Resources from {args.get('starttime')} to {args.get('endtime')}:", res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.EnvironmentResources",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error getting environment resources - {res.content}")
def extend_env_command(client, args):
envId = args.get('envId')
res = client.send_request(
'PUT',
'envs/actions/extend',
queryParams={"envId": envId}
)
if res.status == 200:
return_results(f"Postpone environment {envId} suspend successful")
else:
return_error(f"Error postponing environment {envId} suspension- {res.content}")
def postpone_env_suspend_command(client, args):
envId = args.get('envId')
res = client.send_request(
'PUT',
'envs/actions/postponeinactivity',
queryParams={"envId": envId}
)
if res.status == 200:
return_results(f"Extend environment {envId} successful")
else:
return_error(f"Error extended environment {envId} - {res.content}")
def resume_env_command(client, args):
envId = args.get('envId')
res = client.send_request(
'PUT',
'envs/actions/resume',
queryParams={"envId": envId}
)
if res.status == 200:
return_results(f"Environment {envId} resumed successfully")
else:
return_error(f"Error resuming environment {envId} - {res.content}")
def revert_env_command(client, args):
envId = args.get('envId')
snapshotId = args.get('snapshotId')
res = client.send_request(
'PUT',
'envs/actions/revert',
queryParams={"envId": envId, "snapshotId": snapshotId}
)
if res.status == 200:
return_results(f"Environment {envId} reverted to snapshot {snapshotId} successfully")
else:
return_error(f"Error reverting environment {envId} to snapshot {snapshotId} - {res.content}")
def suspend_env_command(client, args):
envId = args.get('envId')
res = client.send_request(
'PUT',
'envs/actions/suspend',
queryParams={"envId": envId}
)
if res.status == 200:
return_results(f"Environment {envId} suspended successfully")
else:
return_error(f"Error suspending environment {envId} - {res.content}")
def get_env_command(client, args):
envID = args.get('envID')
res = client.send_request(
'GET',
f'envs/{envID}',
queryParams={k: v for k, v in args.items()}
)
if res.status == 200:
        md = tableToMarkdown(f'CloudShare Environment {envID}:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Environments",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error suspending environment {envID} - {res.content}")
def delete_env_command(client, args):
envID = args.get('envID')
res = client.send_request(
'DELETE',
f'envs/{envID}'
)
if res.status == 200:
return_results(f"CloudShare Environment {envID} deleted successfully")
else:
return_error(f"Error deleting environment {envID} - {res.content}")
def create_env_command(client, args):
res = client.send_request(
'POST',
'envs',
content={k: v for k, v in args.items()}
)
if res.status == 200:
res.content['id'] = res.content.get('environmentId')
md = tableToMarkdown('CloudShare Environment Created:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Environments",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error creating environment - {res.content}")
def modify_env_command(client, args):
envId = args.get('envId')
res = client.send_request(
'PUT',
'envs',
content={"envId": envId}
)
if res.status == 200:
res.content['id'] = res.content.get('environmentId')
        md = tableToMarkdown(f'CloudShare Environment {envId} Modified:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Environments",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error creating environment - {res.content}")
def delete_vm_command(client, args):
VmID = args.get('VmID')
res = client.send_request(
'DELETE',
f'vms/{VmID}'
)
if res.status == 200:
return_results(f"CloudShare VM {VmID} deleted successfully")
else:
return_error(f"Error deleting VM {VmID} - {res.content}")
def vm_check_execution_status_command(client, args):
vmID = args.get('vmID')
executionId = args.get('executionId')
res = client.send_request(
'GET',
'vms/actions/checkexecutionstatus',
queryParams={"vmID": vmID, "executionId": executionId}
)
if res.status == 200:
        md = tableToMarkdown(f'VM {vmID} execution {executionId} status:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.VM.Executions",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error retrieving {vmID} execution {executionId} status - {res.content}")
def vm_get_remote_command(client, args):
VmID = args.get('VmID')
res = client.send_request(
'GET',
'vms/actions/getremoteaccessfile',
queryParams={k: v for k, v in args.items()}
)
if res.status == 200:
res.content['VmID'] = VmID
        md = tableToMarkdown(f'VM {VmID} remote file:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.VM.Remote",
outputs_key_field='VmID',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error retrieving {VmID} remote file - {res.content}")
def vm_execute_command(client, args):
vmId = args.get('vmId')
res = client.send_request(
'POST',
'vms/actions/executepath',
content={"vmId": vmId}
)
if res.status == 200:
        md = tableToMarkdown(f'VM {vmId} execute task:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.VM.Execute",
outputs_key_field='executionId',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error executing command on VM {vmId} - {res.content}")
def vm_modify_hardware_command(client, args):
vmID = args.get('vmID')
res = client.send_request(
'PUT',
'vms/actions/editvmhardware',
content={"vmID": vmID}
)
if res.status == 200:
res.content['id'] = vmID
        md = tableToMarkdown(f'Modify VM {vmID} hardware:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.VM.Modify",
outputs_key_field='vmID',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error modifying VM {vmID} - {res.content}")
def reboot_vm_command(client, args):
VmID = args.get('VmID')
res = client.send_request(
'PUT',
'vms/actions/reboot',
queryParams={"VmID": VmID}
)
if res.status == 200:
return_results(f"Revert of VM {VmID} successful")
else:
return_error(f"Error reverting VM {VmID} - {res.content}")
def revert_vm_command(client, args):
VmID = args.get('VmID')
res = client.send_request(
'PUT',
'vms/actions/revert',
queryParams={"VmID": VmID}
)
if res.status == 200:
return_results(f"Reboot of VM {VmID} successful")
else:
return_error(f"Error reverting VM {VmID} - {res.content}")
def get_cloud_folders_command(client, args):
res = client.send_request(
'GET',
'cloudfolders/actions/getall'
)
if res.status == 200:
md = tableToMarkdown('CloudShare folders:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Folders",
outputs_key_field=['host', 'path'],
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error retrieving folders - {res.content}")
def get_env_cloud_folders_command(client, args):
EnvId = args.get('EnvId')
res = client.send_request(
'PUT',
'cloudfolders/actions/mount',
queryParams={"EnvId": EnvId}
)
if res.status == 200:
        md = tableToMarkdown(f'CloudShare folders for env {EnvId}:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.EnvFolders",
outputs_key_field=['name', 'token'],
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error retrieving env {EnvId} folders - {res.content}")
def generate_password_folder_command(client, args):
res = client.send_request(
'PUT',
'cloudfolders/actions/regeneratecloudfolderspassword'
)
if res.status == 200:
md = tableToMarkdown('CloudShare password for folders:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.FoldersPassword",
outputs_key_field='newFtpUri',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error generating password - {res.content}")
def unmount_env_folders_command(client, args):
EnvId = args.get('EnvId')
res = client.send_request(
'PUT',
'cloudfolders/actions/unmount',
queryParams={"EnvId": EnvId}
)
if res.status == 200:
return_results(f"Unmounted env {EnvId} folders successfully")
else:
return_error(f"Error unmounting env {EnvId} folders - {res.content}")
def get_templates_command(client, args):
queryParams = {k: v for k, v in args.items()}
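    # 'skip' and 'take' arrive as strings from the command arguments, but the API expects integers.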
if "skip" in queryParams:
queryParams['skip'] = int(queryParams['skip'])
if "take" in queryParams:
queryParams['take'] = int(queryParams['take'])
res = client.send_request(
'GET',
'templates',
queryParams=queryParams
)
if res.status == 200:
md = tableToMarkdown('CloudShare env templates:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Templates",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error retrieving templates - {res.content}")
def get_snapshot_command(client, args):
snapshotID = args.get('snapshotID')
res = client.send_request(
'GET',
f'snapshots/{snapshotID}'
)
if res.status == 200:
        md = tableToMarkdown(f'CloudShare snapshot {snapshotID}:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Snapshots",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error retrieving snapshot {snapshotID} - {res.content}")
def get_env_snapshots_command(client, args):
envId = args.get('envId')
res = client.send_request(
'GET',
'snapshots/actions/getforenv',
queryParams={"envId": envId}
)
if res.status == 200:
        md = tableToMarkdown(f'CloudShare snapshots for env {envId}:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Snapshots",
outputs_key_field='id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error retrieving snapshots for env {envId} - {res.content}")
def mark_default_snapshot_command(client, args):
snapshotID = args.get('snapshotID')
res = client.send_request(
'PUT',
'snapshots/actions/markdefault',
queryParams={"id": snapshotID}
)
if res.status == 200:
return_results("Snapshot {snapshotID} set as default successfully")
else:
return_error(f"Error setting snapshot {snapshotID} as default - {res.content}")
def take_snapshot_env_command(client, args):
envId = args.get('envId')
content = {k: v for k, v in args.items()}
res = client.send_request(
        method='POST',
path='snapshots/actions/takesnapshot',
content=content
)
if res.status == 200:
return_results("Snapshot of env {envId} taken successfully")
else:
return_error(f"Error taking snapshot of {envId} - {res.content}")
def get_teams_command(client, args):
res = client.send_request(
'GET',
'teams'
)
if res.status == 200:
md = tableToMarkdown('CloudShare teams:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Teams",
outputs_key_field='Id',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error retrieving teams - {res.content}")
def invite_user_poc_command(client, args):
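    # Coerce 'true'/'false' string arguments into real booleans so the request body carries proper JSON types.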
content = {k: True if v == 'true' else False if v == 'false' else v for k, v in args.items()}
res = client.send_request(
method='POST',
path='invitations/actions/invitetopoc',
content=content
)
if res.status == 200:
md = tableToMarkdown('CloudShare invite:', res.content)
command_results = CommandResults(
outputs_prefix="CloudShare.Invites",
outputs_key_field='invitationDetailsUrl',
outputs=res.content if res.content else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error retrieving teams - {res.content}")
def get_poc_invitations_command(client, args):
res = client.send_request(
method='GET',
path='ProofOfConceptInvitations/Rows',
queryParams={k: v for k, v in args.items()}
)
if res.status == 200:
rows = res.content.get('rows')
md = tableToMarkdown('CloudShare POC invites:', rows)
command_results = CommandResults(
outputs_prefix="CloudShare.POCInvites",
outputs_key_field='id',
outputs=rows if rows else None,
readable_output=md
)
return_results(command_results)
else:
return_error(f"Error retrieving invitations - {res.content}")
''' MAIN FUNCTION '''
def main() -> None:
params = demisto.params()
args = demisto.args()
hostname = params.get('hostname')
api_id = params.get('api_id')
api_key = params.get('api_key')
handle_proxy()
command = demisto.command()
demisto.debug(f'Command being called is {command}')
try:
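        # Map each integration command name to the function that implements it.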
commands = {
'cloudshare-get-envs': get_envs_command,
'cloudshare-get-projects': get_projects_command,
'cloudshare-get-project': get_project_command,
'cloudshare-get-project-policies': get_project_policies_command,
'cloudshare-get-project-blueprints': get_project_blueprints_command,
'cloudshare-get-project-blueprint': get_project_blueprint_command,
'cloudshare-get-classes': get_classes_command,
'cloudshare-get-class': get_class_command,
'cloudshare-delete-class': delete_class_command,
'cloudshare-delete-class-environemtns': delete_class_environments_command, # This is here for maintaining BC
'cloudshare-delete-class-environments': delete_class_environments_command,
'cloudshare-get-classes-countries': get_classes_countries_command,
'cloudshare-get-classes-customfields': get_classes_customfields_command,
'cloudshare-get-classes-detailed': get_classes_detailed_command,
'cloudshare-get-classes-instructors': get_classes_instructors_command,
'cloudshare-create-class': create_class_command,
'cloudshare-send-class-invitations': send_class_invitations_command,
'cloudshare-suspend-class-environments': suspend_class_environments_command,
'cloudshare-modify-class': modify_class_command,
'cloudshare-get-students': get_students_command,
'cloudshare-get-student': get_student_command,
'cloudshare-delete-student': delete_student_command,
'cloudshare-register-student': register_student_command,
'cloudshare-modify-student': modify_student_command,
'cloudshare-get-regions': get_regions_command,
'cloudshare-get-timezones': get_timezones_command,
'cloudshare-get-env-resource': get_env_resources_command,
'cloudshare-get-env-extended': get_env_extended_command,
'cloudshare-get-env-extended-vanity': get_env_extended_vanity_command,
'cloudshare-get-env-extended-token': get_env_extended_token_command,
'cloudshare-get-env-multiple-resources': get_env_multiple_resources_command,
'cloudshare-extend-env': extend_env_command,
'cloudshare-postpone-env-suspend': postpone_env_suspend_command,
'cloudshare-resume-env': resume_env_command,
'cloudshare-revert-env': revert_env_command,
'cloudshare-suspend-env': suspend_env_command,
'cloudshare-get-env': get_env_command,
'cloudshare-delete-env': delete_env_command,
'cloudshare-create-env': create_env_command,
'cloudshare-modify-env': modify_env_command,
'cloudshare-delete-vm': delete_vm_command,
'cloudshare-check-vm-execution-status': vm_check_execution_status_command,
'cloudshare-get-vm-remote-access-file': vm_get_remote_command,
'cloudshare-execute-vm-command': vm_execute_command,
'cloudshare-modify-vm-hardware': vm_modify_hardware_command,
'cloudshare-reboot-vm': reboot_vm_command,
'cloudshare-revert-vm': revert_vm_command,
'cloudshare-get-cloud-folders': get_cloud_folders_command,
'cloudshare-get-env-cloud-folders': get_env_cloud_folders_command,
'cloudshare-generate-cloud-folder-password': generate_password_folder_command,
'cloudshare-unmount-env-folders': unmount_env_folders_command,
'cloudshare-get-templates': get_templates_command,
'cloudshare-get-snapshot': get_snapshot_command,
'cloudshare-get-env-snapshots': get_env_snapshots_command,
'cloudshare-mark-default-snapshot': mark_default_snapshot_command,
'cloudshare-take-snapshot-env': take_snapshot_env_command,
'cloudshare-get-teams': get_teams_command,
'cloudshare-invite-user-poc': invite_user_poc_command,
'cloudshare-get-poc-invitations': get_poc_invitations_command
}
client = Client(
hostname,
api_id=api_id,
api_key=api_key
)
if demisto.command() == 'test-module':
# This is the call made when pressing the integration Test button.
test_module_command(client, args)
else:
commands[command](client, args)
# Log exceptions and return errors
except Exception as e:
demisto.error(traceback.format_exc()) # print the traceback
return_error(f'Failed to execute {demisto.command()} command.\nError:\n{str(e)}')
''' ENTRY POINT '''
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
| demisto/content | Packs/CloudShare/Integrations/CloudShare/CloudShare.py | Python | mit | 42,863 |
import os
from setuptools import setup
def read(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as buf:
return buf.read()
conf = dict(
name='colorcut',
version='0.1',
description='Detect colored points and cut images',
long_description=read('README.md'),
author='insomnialab',
author_email='[email protected]',
url='https://github.com/insomnia-lab/SplitByColor',
license='AGPL',
packages=['colorcut'],
install_requires=[
'numpy'
],
zip_safe=True,
entry_points={'console_scripts': [
'colordetect=colorcut.cdect:main',
'imagecut=colorcut.cut:main'
]},
classifiers=[
"License :: OSI Approved :: GNU Affero General Public License v3",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 2",
"Development Status :: 4 - Beta"
])
if __name__ == '__main__':
setup(**conf)
| boyska/SplitByColor | setup.py | Python | gpl-3.0 | 1,033 |
import copy
import numbers
from collections import Hashable
from functools import partial
from bson import ObjectId, json_util
from bson.dbref import DBRef
from bson.son import SON
import pymongo
import six
from mongoengine import signals
from mongoengine.base.common import get_document
from mongoengine.base.datastructures import (BaseDict, BaseList,
EmbeddedDocumentList,
SemiStrictDict, StrictDict)
from mongoengine.base.fields import ComplexBaseField
from mongoengine.common import _import_class
from mongoengine.errors import (FieldDoesNotExist, InvalidDocumentError,
LookUpError, OperationError, ValidationError)
import collections
__all__ = ('BaseDocument',)
NON_FIELD_ERRORS = '__all__'
class BaseDocument(object):
__slots__ = ('_changed_fields', '_initialised', '_created', '_data',
'_dynamic_fields', '_auto_id_field', '_db_field_map',
'__weakref__')
_dynamic = False
_dynamic_lock = True
STRICT = False
def __init__(self, *args, **values):
"""
Initialise a document or embedded document
        :param __auto_convert: If True, try to convert the supplied values to their field types
:param values: A dictionary of values for the document
"""
self._initialised = False
self._created = True
if args:
# Combine positional arguments with named arguments.
# We only want named arguments.
field = iter(self._fields_ordered)
# If its an automatic id field then skip to the first defined field
if getattr(self, '_auto_id_field', False):
next(field)
for value in args:
name = next(field)
if name in values:
raise TypeError(
'Multiple values for keyword argument "%s"' % name)
values[name] = value
__auto_convert = values.pop('__auto_convert', True)
# 399: set default values only to fields loaded from DB
__only_fields = set(values.pop('__only_fields', values))
_created = values.pop('_created', True)
signals.pre_init.send(self.__class__, document=self, values=values)
# Check if there are undefined fields supplied to the constructor,
# if so raise an Exception.
if not self._dynamic and (self._meta.get('strict', True) or _created):
_undefined_fields = set(values.keys()) - set(
list(self._fields.keys()) + ['id', 'pk', '_cls', '_text_score'])
if _undefined_fields:
msg = (
'The fields "{0}" do not exist on the document "{1}"'
).format(_undefined_fields, self._class_name)
raise FieldDoesNotExist(msg)
if self.STRICT and not self._dynamic:
self._data = StrictDict.create(allowed_keys=self._fields_ordered)()
else:
self._data = SemiStrictDict.create(
allowed_keys=self._fields_ordered)()
self._dynamic_fields = SON()
# Assign default values to instance
for key, field in self._fields.items():
if self._db_field_map.get(key, key) in __only_fields:
continue
value = getattr(self, key, None)
setattr(self, key, value)
if '_cls' not in values:
self._cls = self._class_name
# Set passed values after initialisation
if self._dynamic:
dynamic_data = {}
for key, value in values.items():
if key in self._fields or key == '_id':
setattr(self, key, value)
elif self._dynamic:
dynamic_data[key] = value
else:
FileField = _import_class('FileField')
for key, value in values.items():
if key == '__auto_convert':
continue
key = self._reverse_db_field_map.get(key, key)
if key in self._fields or key in ('id', 'pk', '_cls'):
if __auto_convert and value is not None:
field = self._fields.get(key)
if field and not isinstance(field, FileField):
value = field.to_python(value)
setattr(self, key, value)
else:
self._data[key] = value
# Set any get_<field>_display methods
self.__set_field_display()
if self._dynamic:
self._dynamic_lock = False
for key, value in dynamic_data.items():
setattr(self, key, value)
# Flag initialised
self._initialised = True
self._created = _created
signals.post_init.send(self.__class__, document=self)
def __delattr__(self, *args, **kwargs):
"""Handle deletions of fields"""
field_name = args[0]
if field_name in self._fields:
default = self._fields[field_name].default
if isinstance(default, collections.Callable):
default = default()
setattr(self, field_name, default)
else:
super(BaseDocument, self).__delattr__(*args, **kwargs)
def __setattr__(self, name, value):
# Handle dynamic data only if an initialised dynamic document
if self._dynamic and not self._dynamic_lock:
if not hasattr(self, name) and not name.startswith('_'):
DynamicField = _import_class('DynamicField')
field = DynamicField(db_field=name)
field.name = name
self._dynamic_fields[name] = field
self._fields_ordered += (name,)
if not name.startswith('_'):
value = self.__expand_dynamic_values(name, value)
# Handle marking data as changed
if name in self._dynamic_fields:
self._data[name] = value
if hasattr(self, '_changed_fields'):
self._mark_as_changed(name)
try:
self__created = self._created
except AttributeError:
self__created = True
if (
self._is_document and
not self__created and
name in self._meta.get('shard_key', tuple()) and
self._data.get(name) != value
):
msg = 'Shard Keys are immutable. Tried to update %s' % name
raise OperationError(msg)
try:
self__initialised = self._initialised
except AttributeError:
self__initialised = False
# Check if the user has created a new instance of a class
if (self._is_document and self__initialised and
self__created and name == self._meta.get('id_field')):
super(BaseDocument, self).__setattr__('_created', False)
super(BaseDocument, self).__setattr__(name, value)
def __getstate__(self):
data = {}
for k in ('_changed_fields', '_initialised', '_created',
'_dynamic_fields', '_fields_ordered'):
if hasattr(self, k):
data[k] = getattr(self, k)
data['_data'] = self.to_mongo()
return data
def __setstate__(self, data):
if isinstance(data['_data'], SON):
data['_data'] = self.__class__._from_son(data['_data'])._data
for k in ('_changed_fields', '_initialised', '_created', '_data',
'_dynamic_fields'):
if k in data:
setattr(self, k, data[k])
if '_fields_ordered' in data:
if self._dynamic:
setattr(self, '_fields_ordered', data['_fields_ordered'])
else:
_super_fields_ordered = type(self)._fields_ordered
setattr(self, '_fields_ordered', _super_fields_ordered)
dynamic_fields = data.get('_dynamic_fields') or SON()
for k in list(dynamic_fields.keys()):
setattr(self, k, data['_data'].get(k))
def __iter__(self):
return iter(self._fields_ordered)
def __getitem__(self, name):
"""Dictionary-style field access, return a field's value if present.
"""
try:
if name in self._fields_ordered:
return getattr(self, name)
except AttributeError:
pass
raise KeyError(name)
def __setitem__(self, name, value):
"""Dictionary-style field access, set a field's value.
"""
# Ensure that the field exists before settings its value
if not self._dynamic and name not in self._fields:
raise KeyError(name)
return setattr(self, name, value)
def __contains__(self, name):
try:
val = getattr(self, name)
return val is not None
except AttributeError:
return False
def __len__(self):
return len(self._data)
def __repr__(self):
try:
u = self.__str__()
except (UnicodeEncodeError, UnicodeDecodeError):
u = '[Bad Unicode data]'
repr_type = str if u is None else type(u)
return repr_type('<%s: %s>' % (self.__class__.__name__, u))
def __str__(self):
# TODO this could be simpler?
if hasattr(self, '__unicode__'):
if six.PY3:
return self.__unicode__()
else:
return six.text_type(self).encode('utf-8')
return six.text_type('%s object' % self.__class__.__name__)
def __eq__(self, other):
if isinstance(other, self.__class__) and hasattr(other, 'id') and other.id is not None:
return self.id == other.id
if isinstance(other, DBRef):
return self._get_collection_name() == other.collection and self.id == other.id
if self.id is None:
return self is other
return False
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
if getattr(self, 'pk', None) is None:
# For new object
return super(BaseDocument, self).__hash__()
else:
return hash(self.pk)
def clean(self):
"""
Hook for doing document level data cleaning before validation is run.
Any ValidationError raised by this method will not be associated with
a particular field; it will have a special-case association with the
field defined by NON_FIELD_ERRORS.
"""
pass
def get_text_score(self):
"""
Get text score from text query
"""
if '_text_score' not in self._data:
raise InvalidDocumentError('This document is not originally built from a text query')
return self._data['_text_score']
def to_mongo(self, use_db_field=True, fields=None):
"""
Return as SON data ready for use with MongoDB.
"""
if not fields:
fields = []
data = SON()
data['_id'] = None
data['_cls'] = self._class_name
# only root fields ['test1.a', 'test2'] => ['test1', 'test2']
root_fields = set([f.split('.')[0] for f in fields])
for field_name in self:
if root_fields and field_name not in root_fields:
continue
value = self._data.get(field_name, None)
field = self._fields.get(field_name)
if field is None and self._dynamic:
field = self._dynamic_fields.get(field_name)
if value is not None:
f_inputs = field.to_mongo.__code__.co_varnames
ex_vars = {}
if fields and 'fields' in f_inputs:
key = '%s.' % field_name
embedded_fields = [
i.replace(key, '') for i in fields
if i.startswith(key)]
ex_vars['fields'] = embedded_fields
if 'use_db_field' in f_inputs:
ex_vars['use_db_field'] = use_db_field
value = field.to_mongo(value, **ex_vars)
# Handle self generating fields
if value is None and field._auto_gen:
value = field.generate()
self._data[field_name] = value
if value is not None:
if use_db_field:
data[field.db_field] = value
else:
data[field.name] = value
# Only add _cls if allow_inheritance is True
if not self._meta.get('allow_inheritance'):
data.pop('_cls')
return data
def validate(self, clean=True):
"""Ensure that all fields' values are valid and that required fields
are present.
"""
# Ensure that each field is matched to a valid value
errors = {}
if clean:
try:
self.clean()
except ValidationError as error:
errors[NON_FIELD_ERRORS] = error
# Get a list of tuples of field names and their current values
fields = [(self._fields.get(name, self._dynamic_fields.get(name)),
self._data.get(name)) for name in self._fields_ordered]
EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
GenericEmbeddedDocumentField = _import_class(
'GenericEmbeddedDocumentField')
for field, value in fields:
if value is not None:
try:
if isinstance(field, (EmbeddedDocumentField,
GenericEmbeddedDocumentField)):
field._validate(value, clean=clean)
else:
field._validate(value)
except ValidationError as error:
errors[field.name] = error.errors or error
except (ValueError, AttributeError, AssertionError) as error:
errors[field.name] = error
elif field.required and not getattr(field, '_auto_gen', False):
errors[field.name] = ValidationError('Field is required',
field_name=field.name)
if errors:
pk = 'None'
if hasattr(self, 'pk'):
pk = self.pk
elif self._instance and hasattr(self._instance, 'pk'):
pk = self._instance.pk
message = 'ValidationError (%s:%s) ' % (self._class_name, pk)
raise ValidationError(message, errors=errors)
def to_json(self, *args, **kwargs):
"""Converts a document to JSON.
:param use_db_field: Set to True by default but enables the output of the json structure with the field names
and not the mongodb store db_names in case of set to False
"""
use_db_field = kwargs.pop('use_db_field', True)
return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs)
@classmethod
def from_json(cls, json_data, created=False):
"""Converts json data to an unsaved document instance"""
return cls._from_son(json_util.loads(json_data), created=created)
def __expand_dynamic_values(self, name, value):
"""Expand any dynamic values to their correct types / values."""
if not isinstance(value, (dict, list, tuple)):
return value
# If the value is a dict with '_cls' in it, turn it into a document
is_dict = isinstance(value, dict)
if is_dict and '_cls' in value:
cls = get_document(value['_cls'])
return cls(**value)
if is_dict:
value = {
k: self.__expand_dynamic_values(k, v)
for k, v in list(value.items())
}
else:
value = [self.__expand_dynamic_values(name, v) for v in value]
# Convert lists / values so we can watch for any changes on them
EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
if (isinstance(value, (list, tuple)) and
not isinstance(value, BaseList)):
if issubclass(type(self), EmbeddedDocumentListField):
value = EmbeddedDocumentList(value, self, name)
else:
value = BaseList(value, self, name)
elif isinstance(value, dict) and not isinstance(value, BaseDict):
value = BaseDict(value, self, name)
return value
def _mark_as_changed(self, key):
"""Mark a key as explicitly changed by the user."""
if not key:
return
if not hasattr(self, '_changed_fields'):
return
if '.' in key:
key, rest = key.split('.', 1)
key = self._db_field_map.get(key, key)
key = '%s.%s' % (key, rest)
else:
key = self._db_field_map.get(key, key)
if key not in self._changed_fields:
levels, idx = key.split('.'), 1
while idx <= len(levels):
if '.'.join(levels[:idx]) in self._changed_fields:
break
idx += 1
else:
self._changed_fields.append(key)
# remove lower level changed fields
level = '.'.join(levels[:idx]) + '.'
remove = self._changed_fields.remove
for field in self._changed_fields[:]:
if field.startswith(level):
remove(field)
def _clear_changed_fields(self):
"""Using _get_changed_fields iterate and remove any fields that
are marked as changed.
"""
for changed in self._get_changed_fields():
parts = changed.split('.')
data = self
for part in parts:
if isinstance(data, list):
try:
data = data[int(part)]
except IndexError:
data = None
elif isinstance(data, dict):
data = data.get(part, None)
else:
data = getattr(data, part, None)
if hasattr(data, '_changed_fields'):
if getattr(data, '_is_document', False):
continue
data._changed_fields = []
self._changed_fields = []
def _nestable_types_changed_fields(self, changed_fields, key, data, inspected):
# Loop list / dict fields as they contain documents
# Determine the iterator to use
if not hasattr(data, 'items'):
iterator = enumerate(data)
else:
iterator = iter(data.items())
for index, value in iterator:
list_key = '%s%s.' % (key, index)
# don't check anything lower if this key is already marked
# as changed.
if list_key[:-1] in changed_fields:
continue
if hasattr(value, '_get_changed_fields'):
changed = value._get_changed_fields(inspected)
changed_fields += ['%s%s' % (list_key, k)
for k in changed if k]
elif isinstance(value, (list, tuple, dict)):
self._nestable_types_changed_fields(
changed_fields, list_key, value, inspected)
def _get_changed_fields(self, inspected=None):
"""Return a list of all fields that have explicitly been changed.
"""
EmbeddedDocument = _import_class('EmbeddedDocument')
DynamicEmbeddedDocument = _import_class('DynamicEmbeddedDocument')
ReferenceField = _import_class('ReferenceField')
SortedListField = _import_class('SortedListField')
changed_fields = []
changed_fields += getattr(self, '_changed_fields', [])
inspected = inspected or set()
if hasattr(self, 'id') and isinstance(self.id, Hashable):
if self.id in inspected:
return changed_fields
inspected.add(self.id)
for field_name in self._fields_ordered:
db_field_name = self._db_field_map.get(field_name, field_name)
key = '%s.' % db_field_name
data = self._data.get(field_name, None)
field = self._fields.get(field_name)
if hasattr(data, 'id'):
if data.id in inspected:
continue
if isinstance(field, ReferenceField):
continue
elif (
isinstance(data, (EmbeddedDocument, DynamicEmbeddedDocument)) and
db_field_name not in changed_fields
):
# Find all embedded fields that have been changed
changed = data._get_changed_fields(inspected)
changed_fields += ['%s%s' % (key, k) for k in changed if k]
elif (isinstance(data, (list, tuple, dict)) and
db_field_name not in changed_fields):
if (hasattr(field, 'field') and
isinstance(field.field, ReferenceField)):
continue
elif isinstance(field, SortedListField) and field._ordering:
# if ordering is affected whole list is changed
if any([field._ordering in d._changed_fields for d in data]):
changed_fields.append(db_field_name)
continue
self._nestable_types_changed_fields(
changed_fields, key, data, inspected)
return changed_fields
def _delta(self):
"""Returns the delta (set, unset) of the changes for a document.
Gets any values that have been explicitly changed.
"""
# Handles cases where not loaded from_son but has _id
doc = self.to_mongo()
set_fields = self._get_changed_fields()
unset_data = {}
parts = []
if hasattr(self, '_changed_fields'):
set_data = {}
# Fetch each set item from its path
for path in set_fields:
parts = path.split('.')
d = doc
new_path = []
for p in parts:
if isinstance(d, (ObjectId, DBRef)):
break
elif isinstance(d, list) and p.lstrip('-').isdigit():
if p[0] == '-':
p = str(len(d) + int(p))
try:
d = d[int(p)]
except IndexError:
d = None
elif hasattr(d, 'get'):
d = d.get(p)
new_path.append(p)
path = '.'.join(new_path)
set_data[path] = d
else:
set_data = doc
if '_id' in set_data:
del set_data['_id']
# Determine if any changed items were actually unset.
for path, value in list(set_data.items()):
if value or isinstance(value, (numbers.Number, bool)):
continue
            # If we've set a value that isn't the default value, don't unset it.
default = None
if (self._dynamic and len(parts) and parts[0] in
self._dynamic_fields):
del set_data[path]
unset_data[path] = 1
continue
elif path in self._fields:
default = self._fields[path].default
else: # Perform a full lookup for lists / embedded lookups
d = self
parts = path.split('.')
db_field_name = parts.pop()
for p in parts:
if isinstance(d, list) and p.lstrip('-').isdigit():
if p[0] == '-':
p = str(len(d) + int(p))
d = d[int(p)]
elif (hasattr(d, '__getattribute__') and
not isinstance(d, dict)):
real_path = d._reverse_db_field_map.get(p, p)
d = getattr(d, real_path)
else:
d = d.get(p)
if hasattr(d, '_fields'):
field_name = d._reverse_db_field_map.get(db_field_name,
db_field_name)
if field_name in d._fields:
default = d._fields.get(field_name).default
else:
default = None
if default is not None:
if isinstance(default, collections.Callable):
default = default()
if default != value:
continue
del set_data[path]
unset_data[path] = 1
return set_data, unset_data
@classmethod
def _get_collection_name(cls):
"""Return the collection name for this class. None for abstract
class.
"""
return cls._meta.get('collection', None)
@classmethod
def _from_son(cls, son, _auto_dereference=True, only_fields=None, created=False):
"""Create an instance of a Document (subclass) from a PyMongo
SON.
"""
if not only_fields:
only_fields = []
# Get the class name from the document, falling back to the given
# class if unavailable
class_name = son.get('_cls', cls._class_name)
# Convert SON to a dict, making sure each key is a string
data = {str(key): value for key, value in son.items()}
# Return correct subclass for document type
if class_name != cls._class_name:
cls = get_document(class_name)
changed_fields = []
errors_dict = {}
fields = cls._fields
if not _auto_dereference:
fields = copy.copy(fields)
for field_name, field in fields.items():
field._auto_dereference = _auto_dereference
if field.db_field in data:
value = data[field.db_field]
try:
data[field_name] = (value if value is None
else field.to_python(value))
if field_name != field.db_field:
del data[field.db_field]
except (AttributeError, ValueError) as e:
errors_dict[field_name] = e
if errors_dict:
errors = '\n'.join(['%s - %s' % (k, v)
for k, v in list(errors_dict.items())])
msg = ('Invalid data to create a `%s` instance.\n%s'
% (cls._class_name, errors))
raise InvalidDocumentError(msg)
# In STRICT documents, remove any keys that aren't in cls._fields
if cls.STRICT:
data = {k: v for k, v in data.items() if k in cls._fields}
obj = cls(__auto_convert=False, _created=created, __only_fields=only_fields, **data)
obj._changed_fields = changed_fields
if not _auto_dereference:
obj._fields = fields
return obj
@classmethod
def _build_index_specs(cls, meta_indexes):
"""Generate and merge the full index specs."""
geo_indices = cls._geo_indices()
unique_indices = cls._unique_with_indexes()
index_specs = [cls._build_index_spec(spec) for spec in meta_indexes]
def merge_index_specs(index_specs, indices):
"""Helper method for merging index specs."""
if not indices:
return index_specs
# Create a map of index fields to index spec. We're converting
# the fields from a list to a tuple so that it's hashable.
spec_fields = {
tuple(index['fields']): index for index in index_specs
}
# For each new index, if there's an existing index with the same
# fields list, update the existing spec with all data from the
# new spec.
for new_index in indices:
candidate = spec_fields.get(tuple(new_index['fields']))
if candidate is None:
index_specs.append(new_index)
else:
candidate.update(new_index)
return index_specs
# Merge geo indexes and unique_with indexes into the meta index specs.
index_specs = merge_index_specs(index_specs, geo_indices)
index_specs = merge_index_specs(index_specs, unique_indices)
return index_specs
@classmethod
def _build_index_spec(cls, spec):
"""Build a PyMongo index spec from a MongoEngine index spec."""
if isinstance(spec, six.string_types):
spec = {'fields': [spec]}
elif isinstance(spec, (list, tuple)):
spec = {'fields': list(spec)}
elif isinstance(spec, dict):
spec = dict(spec)
index_list = []
direction = None
# Check to see if we need to include _cls
allow_inheritance = cls._meta.get('allow_inheritance')
include_cls = (
allow_inheritance and
not spec.get('sparse', False) and
spec.get('cls', True) and
'_cls' not in spec['fields']
)
# 733: don't include cls if index_cls is False unless there is an explicit cls with the index
include_cls = include_cls and (spec.get('cls', False) or cls._meta.get('index_cls', True))
if 'cls' in spec:
spec.pop('cls')
for key in spec['fields']:
# If inherited spec continue
if isinstance(key, (list, tuple)):
continue
# ASCENDING from +
# DESCENDING from -
# TEXT from $
# HASHED from #
# GEOSPHERE from (
# GEOHAYSTACK from )
# GEO2D from *
direction = pymongo.ASCENDING
if key.startswith('-'):
direction = pymongo.DESCENDING
elif key.startswith('$'):
direction = pymongo.TEXT
elif key.startswith('#'):
direction = pymongo.HASHED
elif key.startswith('('):
direction = pymongo.GEOSPHERE
elif key.startswith(')'):
direction = pymongo.GEOHAYSTACK
elif key.startswith('*'):
direction = pymongo.GEO2D
if key.startswith(('+', '-', '*', '$', '#', '(', ')')):
key = key[1:]
# Use real field name, do it manually because we need field
# objects for the next part (list field checking)
parts = key.split('.')
if parts in (['pk'], ['id'], ['_id']):
key = '_id'
else:
fields = cls._lookup_field(parts)
parts = []
for field in fields:
try:
if field != '_id':
field = field.db_field
except AttributeError:
pass
parts.append(field)
key = '.'.join(parts)
index_list.append((key, direction))
# Don't add cls to a geo index
if include_cls and direction not in (
pymongo.GEO2D, pymongo.GEOHAYSTACK, pymongo.GEOSPHERE):
index_list.insert(0, ('_cls', 1))
if index_list:
spec['fields'] = index_list
return spec
@classmethod
def _unique_with_indexes(cls, namespace=''):
"""Find unique indexes in the document schema and return them."""
unique_indexes = []
for field_name, field in list(cls._fields.items()):
sparse = field.sparse
# Generate a list of indexes needed by uniqueness constraints
if field.unique:
unique_fields = [field.db_field]
# Add any unique_with fields to the back of the index spec
if field.unique_with:
if isinstance(field.unique_with, six.string_types):
field.unique_with = [field.unique_with]
# Convert unique_with field names to real field names
unique_with = []
for other_name in field.unique_with:
parts = other_name.split('.')
# Lookup real name
parts = cls._lookup_field(parts)
name_parts = [part.db_field for part in parts]
unique_with.append('.'.join(name_parts))
# Unique field should be required
parts[-1].required = True
sparse = (not sparse and
parts[-1].name not in cls.__dict__)
unique_fields += unique_with
# Add the new index to the list
fields = [
('%s%s' % (namespace, f), pymongo.ASCENDING)
for f in unique_fields
]
index = {'fields': fields, 'unique': True, 'sparse': sparse}
unique_indexes.append(index)
if field.__class__.__name__ == 'ListField':
field = field.field
# Grab any embedded document field unique indexes
if (field.__class__.__name__ == 'EmbeddedDocumentField' and
field.document_type != cls):
field_namespace = '%s.' % field_name
doc_cls = field.document_type
unique_indexes += doc_cls._unique_with_indexes(field_namespace)
return unique_indexes
@classmethod
def _geo_indices(cls, inspected=None, parent_field=None):
inspected = inspected or []
geo_indices = []
inspected.append(cls)
geo_field_type_names = ('EmbeddedDocumentField', 'GeoPointField',
'PointField', 'LineStringField',
'PolygonField')
geo_field_types = tuple([_import_class(field)
for field in geo_field_type_names])
for field in list(cls._fields.values()):
if not isinstance(field, geo_field_types):
continue
if hasattr(field, 'document_type'):
field_cls = field.document_type
if field_cls in inspected:
continue
if hasattr(field_cls, '_geo_indices'):
geo_indices += field_cls._geo_indices(
inspected, parent_field=field.db_field)
elif field._geo_index:
field_name = field.db_field
if parent_field:
field_name = '%s.%s' % (parent_field, field_name)
geo_indices.append({
'fields': [(field_name, field._geo_index)]
})
return geo_indices
@classmethod
def _lookup_field(cls, parts):
"""Given the path to a given field, return a list containing
the Field object associated with that field and all of its parent
Field objects.
Args:
parts (str, list, or tuple) - path to the field. Should be a
string for simple fields existing on this document or a list
of strings for a field that exists deeper in embedded documents.
Returns:
A list of Field instances for fields that were found or
strings for sub-fields that weren't.
Example:
>>> user._lookup_field('name')
[<mongoengine.fields.StringField at 0x1119bff50>]
>>> user._lookup_field('roles')
[<mongoengine.fields.EmbeddedDocumentListField at 0x1119ec250>]
>>> user._lookup_field(['roles', 'role'])
[<mongoengine.fields.EmbeddedDocumentListField at 0x1119ec250>,
<mongoengine.fields.StringField at 0x1119ec050>]
>>> user._lookup_field('doesnt_exist')
raises LookUpError
>>> user._lookup_field(['roles', 'doesnt_exist'])
[<mongoengine.fields.EmbeddedDocumentListField at 0x1119ec250>,
'doesnt_exist']
"""
# TODO this method is WAY too complicated. Simplify it.
# TODO don't think returning a string for embedded non-existent fields is desired
ListField = _import_class('ListField')
DynamicField = _import_class('DynamicField')
if not isinstance(parts, (list, tuple)):
parts = [parts]
fields = []
field = None
for field_name in parts:
# Handle ListField indexing:
if field_name.isdigit() and isinstance(field, ListField):
fields.append(field_name)
continue
# Look up first field from the document
if field is None:
if field_name == 'pk':
# Deal with "primary key" alias
field_name = cls._meta['id_field']
if field_name in cls._fields:
field = cls._fields[field_name]
elif cls._dynamic:
field = DynamicField(db_field=field_name)
elif cls._meta.get('allow_inheritance') or cls._meta.get('abstract', False):
# 744: in case the field is defined in a subclass
for subcls in cls.__subclasses__():
try:
field = subcls._lookup_field([field_name])[0]
except LookUpError:
continue
if field is not None:
break
else:
raise LookUpError('Cannot resolve field "%s"' % field_name)
else:
raise LookUpError('Cannot resolve field "%s"' % field_name)
else:
ReferenceField = _import_class('ReferenceField')
GenericReferenceField = _import_class('GenericReferenceField')
# If previous field was a reference, throw an error (we
# cannot look up fields that are on references).
if isinstance(field, (ReferenceField, GenericReferenceField)):
raise LookUpError('Cannot perform join in mongoDB: %s' %
'__'.join(parts))
# If the parent field has a "field" attribute which has a
# lookup_member method, call it to find the field
# corresponding to this iteration.
if hasattr(getattr(field, 'field', None), 'lookup_member'):
new_field = field.field.lookup_member(field_name)
# If the parent field is a DynamicField or if it's part of
# a DynamicDocument, mark current field as a DynamicField
# with db_name equal to the field name.
elif cls._dynamic and (isinstance(field, DynamicField) or
getattr(getattr(field, 'document_type', None), '_dynamic', None)):
new_field = DynamicField(db_field=field_name)
# Else, try to use the parent field's lookup_member method
# to find the subfield.
elif hasattr(field, 'lookup_member'):
new_field = field.lookup_member(field_name)
# Raise a LookUpError if all the other conditions failed.
else:
raise LookUpError(
'Cannot resolve subfield or operator {} '
'on the field {}'.format(field_name, field.name)
)
# If current field still wasn't found and the parent field
# is a ComplexBaseField, add the name current field name and
# move on.
if not new_field and isinstance(field, ComplexBaseField):
fields.append(field_name)
continue
elif not new_field:
raise LookUpError('Cannot resolve field "%s"' % field_name)
field = new_field # update field to the new field type
fields.append(field)
return fields
@classmethod
def _translate_field_name(cls, field, sep='.'):
"""Translate a field attribute name to a database field name.
"""
parts = field.split(sep)
parts = [f.db_field for f in cls._lookup_field(parts)]
return '.'.join(parts)
def __set_field_display(self):
"""For each field that specifies choices, create a
get_<field>_display method.
"""
fields_with_choices = [(n, f) for n, f in list(self._fields.items())
if f.choices]
for attr_name, field in fields_with_choices:
setattr(self,
'get_%s_display' % attr_name,
partial(self.__get_field_display, field=field))
def __get_field_display(self, field):
"""Return the display value for a choice field"""
value = getattr(self, field.name)
if field.choices and isinstance(field.choices[0], (list, tuple)):
return dict(field.choices).get(value, value)
return value
| Pablo126/SSBW | Tarea4/tarea4/lib/python3.5/site-packages/mongoengine/base/document.py | Python | gpl-3.0 | 41,762 |
#!/usr/bin/env python
# Copyright 2012 Google Inc. All Rights Reserved.
"""Client actions related to plist files."""
import cStringIO
import types
from grr.client import actions
from grr.client import vfs
from grr.lib import plist as plist_lib
from grr.lib import rdfvalue
from grr.parsers import binplist
class PlistQuery(actions.ActionPlugin):
"""Parses the plist request specified and returns the results.
PlistQuery allows you to obtain data from a plist, optionally only if it
matches the given filter.
Querying for a plist is done in two steps. First, its contents are
retrieved.
For plists where the top level element is a dict, you can use the key
parameter of the PlistRequest to specify a path into the dict to retrieve.
  When specifying a key, the requested key values are placed under a dictionary
key called "key".
  Whether you've specified a key or not, the query parameter allows you to
  filter the retrieved values so that only matching results are returned.
"""
in_rdfvalue = rdfvalue.PlistRequest
out_rdfvalue = rdfvalue.RDFValueArray
MAX_PLIST_SIZE = 1024 * 1024 * 100 # 100 MB
def Run(self, args):
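    """Reads the plist, applies the filter query and replies with the matching values."""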
self.context = args.context
self.filter_query = args.query
with vfs.VFSOpen(args.pathspec, progress_callback=self.Progress) as fd:
data = fd.Read(self.MAX_PLIST_SIZE)
plist = binplist.readPlist(cStringIO.StringIO(data))
# Create the query parser
parser = plist_lib.PlistFilterParser(self.filter_query).Parse()
filter_imp = plist_lib.PlistFilterImplementation
matcher = parser.Compile(filter_imp)
if self.context:
# Obtain the values for the context using the value expander
value_expander = filter_imp.FILTERS["ValueExpander"]
iterator = value_expander().Expand(plist, self.context)
else:
# If we didn't get a context, the context is the whole plist
iterator = [plist]
reply = rdfvalue.RDFValueArray()
for item in iterator:
# As we're setting the context manually, we need to account for types
if isinstance(item, types.ListType):
for sub_item in item:
partial_plist = plist_lib.PlistValueToPlainValue(sub_item)
if matcher.Matches(partial_plist):
reply.Append(sub_item)
else:
partial_plist = plist_lib.PlistValueToPlainValue(item)
if matcher.Matches(partial_plist):
reply.Append(partial_plist)
self.SendReply(reply)
| wandec/grr | client/client_actions/plist.py | Python | apache-2.0 | 2,452 |
from ores.task_tracker.null_task_tracker import NullTaskTracker
def test_null_task_tracker():
task_tracker = NullTaskTracker()
assert task_tracker.lock('fooo', 'value') is True
assert task_tracker.get_in_progress_task('fooo') is None
assert task_tracker.release('fooo') is True
| wiki-ai/ores | tests/task_tracker/tests/test_null_task_tracker.py | Python | mit | 296 |
# coding=utf-8
#
# Copyright 2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from f5.bigip.resource import UnnamedResource
from f5.sdk_exception import UnsupportedOperation
class Login_Enforcement(UnnamedResource):
"""BIG-IP® ASM Login Enforcement resource."""
def __init__(self, policy):
super(Login_Enforcement, self).__init__(policy)
self._meta_data['required_json_kind'] = 'tm:asm:policies:login-enforcement:login-enforcementstate'
self._meta_data['required_load_parameters'] = set()
self._meta_data['object_has_stats'] = False
self._meta_data['minimum_version'] = '11.6.0'
def update(self, **kwargs):
"""Update is not supported for Login Enforcement resource
:raises: UnsupportedOperation
"""
raise UnsupportedOperation(
"%s does not support the update method" % self.__class__.__name__
)
| F5Networks/f5-common-python | f5/bigip/tm/asm/policies/login_enforcement.py | Python | apache-2.0 | 1,423 |
import sys
import pickle
import math
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
from subprocess import Popen, PIPE
from collections import defaultdict
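# tree() builds a recursive defaultdict, used below as an arbitrarily nested store of results
# keyed as values[command][epochs][batch_size][threads][rank][sync].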
def tree(): return defaultdict(tree)
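# Compiles and runs a make target with the given variable overrides and returns its stdout split into tokens.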
def run_command_with_params_and_get_output(command, args):
# Compile first
Popen(["make", command+"_compile"] + args, stdout=PIPE).communicate()[0]
print(" ".join(["make", command+"_run"] + args))
ret_val = Popen(["make", command+"_run"] + args, stdout=PIPE).communicate()[0].strip().split()
print(ret_val)
return ret_val
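# Collects every stored value for the cartesian product of the given parameter ranges.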
def get_values(v, command_range, epoch_range, batch_size_range, thread_range, rank_range, sync_range):
values = []
for c in command_range:
for e in epoch_range:
for b in batch_size_range:
for t in thread_range:
for r in rank_range:
for s in sync_range:
values.append(v[c][e][b][t][r][s])
return values
def draw_epoch_loss_graph(should_load_from_file, epoch_range, batch_size_range, thread_range, rank_range, sync_range, commands, gammas):
total_iter = len(batch_size_range) * len(thread_range) * len(rank_range) * len(sync_range) * len(commands)
cur_iter = 0
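    # Nested result stores: per-epoch losses, overall wall-clock times and gradient-only times for each configuration.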
loss_values = tree()
overall_time_values = tree()
gradient_time_values = tree()
if not should_load_from_file:
for b in batch_size_range:
for t in thread_range:
for r in rank_range:
for s in sync_range:
for ii, c in enumerate(commands):
print("Iteration %d of %d" % (cur_iter, total_iter))
cur_iter += 1
output = run_command_with_params_and_get_output(c, ["N_EPOCHS="+str(epoch_range), "BATCH_SIZE="+str(b), "NTHREAD="+str(t), "SHOULD_SYNC="+\
str(s), "SHOULD_PRINT_LOSS_TIME_EVERY_EPOCH=1", "START_GAMMA="+str(gammas[ii])])
values = [float(x) for x in output]
losses = [values[i] for i in range(0, len(values), 3)]
overall_times = [values[i] for i in range(1, len(values), 3)]
gradient_times = [values[i] for i in range(2, len(values), 3)]
loss_values[c][epoch_range][b][t][r][s] = losses
overall_time_values[c][epoch_range][b][t][r][s] = overall_times
gradient_time_values[c][epoch_range][b][t][r][s] = gradient_times
else:
with open('objs3.pickle') as f:
loss_values, overall_time_values, gradient_time_values = pickle.load(f)
with open('objs3.pickle', "w") as f:
pickle.dump([loss_values, overall_time_values, gradient_time_values], f)
for b in batch_size_range:
for t in thread_range:
for r in rank_range:
title = "Epoch_Loss_batch=%d_thread=%d_rank=%d" % (b, t, r)
plt.figure()
plt.title(title, fontsize=12)
plt.xlabel("Epoch")
plt.ylabel("Loss")
for s in sync_range:
for c in commands:
losses = loss_values[c][epoch_range][b][t][r][s]
epochs = list(range(1, epoch_range+1))
plt.plot(epochs, losses, label=c+" sync="+str(s), marker='o')
plt.yscale('log')
plt.xscale('log')
plt.legend(loc="upper right", fontsize=8)
plt.savefig(title + ".png")
plt.clf()
def draw_time_loss_graph(should_load_from_file, epoch_range, batch_size_range, thread_range, sync_range, commands, n_rep, gammas=None):
total_iter = len(batch_size_range) * len(thread_range) * len(sync_range) * len(commands) * n_rep
cur_iter = 0
loss_values = tree()
overall_time_values = tree()
gradient_time_values = tree()
if not should_load_from_file:
for b in batch_size_range:
for t in thread_range:
for s in sync_range:
for ii, c in enumerate(commands):
for n in range(n_rep):
print("Iteration %d of %d" % (cur_iter, total_iter))
cur_iter += 1
params = ["N_EPOCHS="+str(epoch_range), "BATCH_SIZE="+str(b), "NTHREAD="+str(t), "SHOULD_SYNC="+ str(s), "SHOULD_PRINT_LOSS_TIME_EVERY_EPOCH=1"]
if gammas != None:
params.append("START_GAMMA="+str(gammas[ii]))
output = run_command_with_params_and_get_output(c, params)
values = [float(x) for x in output]
if n == 0:
losses = [values[i] for i in range(0, len(values), 3)]
overall_times = [values[i] for i in range(1, len(values), 3)]
gradient_times = [values[i] for i in range(2, len(values), 3)]
else:
losses = [losses[i/3] + values[i] for i in range(0, len(values), 3)]
overall_times = [overall_times[i/3] + values[i] for i in range(1, len(values), 3)]
gradient_times = [gradient_times[i/3] + values[i] for i in range(2, len(values), 3)]
losses = [x / float(n_rep) for x in losses]
overall_times = [x / float(n_rep) for x in overall_times]
gradient_times = [x / float(n_rep) for x in gradient_times]
loss_values[c][epoch_range][b][t][0][s] = losses
overall_time_values[c][epoch_range][b][t][0][s] = overall_times
gradient_time_values[c][epoch_range][b][t][0][s] = gradient_times
else:
with open('objs2.pickle') as f:
loss_values, overall_time_values, gradient_time_values = pickle.load(f)
with open('objs2.pickle', "w") as f:
pickle.dump([loss_values, overall_time_values, gradient_time_values], f)
for b in batch_size_range:
for t in thread_range:
title = "Overall_Time_Loss_batch=%d_thread=%d" % (b, t)
plt.figure()
plt.title(title, fontsize=12)
plt.xlabel("Time")
plt.ylabel("Loss")
for s in sync_range:
for i, c in enumerate(commands):
times = overall_time_values[c][epoch_range][b][t][0][s]
losses = loss_values[c][epoch_range][b][t][0][s]
name = c
if 'hog' not in c:
name += " sync="+str(s)
                    if gammas is not None:
name += " gamma="+str(gammas[i])
if 'hog' in c:
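                        # Hogwild runs presumably ignore SHOULD_SYNC, so they are plotted only once (when s is truthy) to avoid duplicate curves.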
if s:
plt.plot(times, losses, label=name)
else:
plt.plot(times, losses, label=name)
plt.yscale('log')
plt.xscale('log')
plt.legend(loc="upper right", fontsize=8)
plt.savefig(title + ".png")
plt.clf()
for b in batch_size_range:
for t in thread_range:
title = "Gradient_Time_Loss_batch=%d_thread=%d" % (b, t)
plt.figure()
plt.title(title, fontsize=12)
plt.xlabel("Time")
plt.ylabel("Loss")
for s in sync_range:
for i, c in enumerate(commands):
times = gradient_time_values[c][epoch_range][b][t][0][s]
losses = loss_values[c][epoch_range][b][t][0][s]
name = c
if 'hog' not in c:
name += " sync="+str(s)
                    if gammas is not None:
name += " gamma="+str(gammas[i])
if 'hog' in c:
if s:
plt.plot(times, losses, label=name)
else:
plt.plot(times, losses, label=name)
plt.yscale('log')
plt.xscale('log')
plt.legend(loc="upper right", fontsize=8)
plt.savefig(title + ".png")
plt.clf()
for b in batch_size_range:
for t in thread_range:
title = "Gradient_Epoch_Loss_batch=%d_thread=%d" % (b, t)
plt.figure()
plt.title(title, fontsize=12)
plt.xlabel("Epoch")
plt.ylabel("Loss")
for s in sync_range:
for i, c in enumerate(commands):
                    epochs = list(range(1, epoch_range + 1))  # 1-based epochs, matching the per-epoch output and safe on the log-scaled x-axis
losses = loss_values[c][epoch_range][b][t][0][s]
name = c
if 'hog' not in c:
name += " sync="+str(s)
                    if gammas is not None:
name += " gamma="+str(gammas[i])
if 'hog' in c:
if s:
plt.plot(epochs, losses, label=name)
else:
plt.plot(epochs, losses, label=name)
plt.yscale('log')
plt.xscale('log')
plt.legend(loc="upper right", fontsize=8)
plt.savefig(title + ".png")
plt.clf()
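    # The remaining plots compare each variant against a Hogwild baseline, so bail out early if no 'hog' command was benchmarked.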
hog_command = [(x,i) for i,x in enumerate(commands) if 'hog' in x]
if len(hog_command) != 0:
hog_command, hog_index = hog_command[0]
else:
return
for b in batch_size_range:
for t in thread_range:
title = "Gradient_Time_Loss_Ratios_batch=%d_thread=%d" % (b, t)
            if gammas is not None:
title += "hog_gamma="+str(gammas[hog_index])
plt.figure()
plt.title(title, fontsize=12)
plt.xlabel("Time")
plt.ylabel("hog_loss/cyc_loss")
for s in sync_range:
for ind,c in enumerate(commands):
if c == hog_command:
continue
hog_times = gradient_time_values[hog_command][epoch_range][b][t][0][s]
cyc_times = gradient_time_values[c][epoch_range][b][t][0][s]
hog_losses = loss_values[hog_command][epoch_range][b][t][0][s]
cyc_losses = loss_values[c][epoch_range][b][t][0][s]
# Compute cyc losses -- the best loss cyc achieved by hog's time
cyc_losses_aligned = []
for i1, t1 in enumerate(hog_times):
                        best_loss = float('inf')
for i2, t2 in enumerate(cyc_times):
if t2 > t1:
break
best_loss = min(best_loss, cyc_losses[i2])
cyc_losses_aligned.append(best_loss)
loss_ratio = [hog_losses[i] / cyc_losses_aligned[i] for i in range(len(hog_losses))]
name = c + " sync="+str(s)
                    if gammas is not None:
name += " gamma="+str(gammas[ind])
plt.plot(hog_times, loss_ratio, label=name)
plt.legend(loc="upper right", fontsize=8)
plt.savefig(title + ".png")
plt.clf()
for b in batch_size_range:
for t in thread_range:
title = "Overall_Time_Loss_Ratios_batch=%d_thread=%d" % (b, t)
plt.figure()
plt.title(title, fontsize=12)
plt.xlabel("Time")
plt.ylabel("hog_loss/cyc_loss")
for s in sync_range:
for i, c in enumerate(commands):
if hog_command == c:
continue
hog_times = overall_time_values[hog_command][epoch_range][b][t][0][s]
cyc_times = overall_time_values[c][epoch_range][b][t][0][s]
hog_losses = loss_values[hog_command][epoch_range][b][t][0][s]
cyc_losses = loss_values[c][epoch_range][b][t][0][s]
# Compute cyc losses -- the best loss cyc achieved by hog's time
cyc_losses_aligned = []
for i1, t1 in enumerate(hog_times):
                        best_loss = float('inf')
for i2, t2 in enumerate(cyc_times):
if t2 > t1:
break
best_loss = min(best_loss, cyc_losses[i2])
cyc_losses_aligned.append(best_loss)
loss_ratio = [hog_losses[i] / cyc_losses_aligned[i] for i in range(len(hog_losses))]
                    plt.plot(hog_times, loss_ratio, label=c+" sync="+str(s))
plt.legend(loc="upper right", fontsize=8)
plt.savefig(title + ".png")
plt.clf()
def draw_all_graphs(load_previous, epoch_range, batch_size_range, thread_range, rank_range,
sync_range, commands, average_over_n_rep):
total_iter = len(epoch_range) * len(batch_size_range) * len(thread_range) * len(rank_range) * len(sync_range) * len(commands) * average_over_n_rep
average_losses = tree()
average_gradient_times = tree()
average_total_times = tree()
if not load_previous:
cur_iter = 0
# Collect all relevant data
for epoch in epoch_range:
for batch_size in batch_size_range:
for thread in thread_range:
for rank in rank_range:
for sync in sync_range:
for command in commands:
avg_overall_time, avg_gradient_time, avg_loss = 0, 0, 0
for i in range(average_over_n_rep):
print("Iteration %d of %d" % (cur_iter, total_iter))
cur_iter += 1
# Run command with all params
output = run_command_with_params_and_get_output(command, ["N_EPOCHS="+str(epoch), "BATCH_SIZE="+str(batch_size), "NTHREAD="+str(thread), "K="+str(rank), "SHOULD_SYNC="+str(sync)])
# overall elapsed, gradient time, loss
overall_time = float(output[0])
gradient_time = float(output[1])
loss = float(output[2])
avg_overall_time += overall_time
avg_gradient_time += gradient_time
avg_loss += loss
avg_overall_time /= average_over_n_rep
avg_gradient_time /= average_over_n_rep
avg_loss /= average_over_n_rep
average_losses[command][epoch][batch_size][thread][rank][sync] = avg_loss
average_gradient_times[command][epoch][batch_size][thread][rank][sync] = avg_gradient_time
average_total_times[command][epoch][batch_size][thread][rank][sync] = avg_overall_time
else:
with open('objs.pickle') as f:
average_losses, average_gradient_times, average_total_times = pickle.load(f)
with open('objs.pickle', 'w') as f:
        pickle.dump([average_losses, average_gradient_times, average_total_times], f)
    # Reminder: arrays are of the form [command][epoch][batch_size][thread][rank][sync]
for (time_data, label) in [(average_gradient_times, "Gradient Time"), (average_total_times, "Overall Time")]:
for r in rank_range:
for b in batch_size_range:
f, plots = plt.subplots(1, len(thread_range), sharex=True, sharey=True)
title = "Epoch_%s_Plot_Batch=%d_Rank=%d" % (label, b, r)
f.suptitle(title, fontsize=12)
for index, t in enumerate(thread_range):
plots[index].set_title("%d threads" % t)
for s in sync_range:
for c in commands:
plots[index].set_xlabel("Epoch")
plots[index].tick_params(axis='both', which='major', labelsize=5)
plots[index].tick_params(axis='both', which='minor', labelsize=5)
plots[index].set_ylabel(label)
times = get_values(time_data, [c], epoch_range, [b], [t], [r], [s])
epochs = epoch_range
low = min(times)
high = max(times)
if 'hog' in c:
if s == 0:
plots[index].plot(epochs, times, label=c)
else:
plots[index].plot(epochs, times, label=c+" sync="+str(s))
#plots[index].set_ylim([math.ceil(low-0.5*(high-low)), math.ceil(high+0.5*(high-low))])
plots[index].legend(loc="upper left", fontsize=5)
#f.subplots_adjust(hspace=0)
f.tight_layout()
f.subplots_adjust(top=.85)
f.savefig(title+".png")
f.clf()
for (time_data, label) in [(average_gradient_times, "Gradient Time"), (average_total_times, "Overall Time")]:
for r in rank_range:
for b in batch_size_range:
f, plots = plt.subplots(1, len(epoch_range), sharex=True, sharey=True)
title = "Thread_%s_Plot_Batch=%d_Rank=%d" % (label, b, r)
f.suptitle(title, fontsize=12)
for index, e in enumerate(epoch_range):
plots[index].set_title("%d epoch" % e)
for s in sync_range:
for c in commands:
plots[index].tick_params(axis='both', which='major', labelsize=8)
plots[index].tick_params(axis='both', which='minor', labelsize=8)
plots[index].set_xlabel("Thread")
plots[index].set_ylabel(label)
times = get_values(time_data, [c], [e], [b], thread_range, [r], [s])
threads = thread_range
low = min(times)
high = max(times)
if 'hog' in c:
if s == 0:
plots[index].plot(threads, times, label=c)
else:
plots[index].plot(threads, times, label=c+" sync="+str(s))
#plots[index].set_ylim([math.ceil(low-0.5*(high-low)), math.ceil(high+0.5*(high-low))])
plots[index].legend(loc="upper left", fontsize=5)
#f.subplots_adjust(hspace=0)
f.tight_layout()
f.subplots_adjust(top=.85)
f.savefig(title+".png")
f.clf()
########################################
# TIME RATIOS OVER 1 THREAD
########################################
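    # Speedup is measured relative to the single-thread run, so these plots require thread_range to include 1.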
if 1 in thread_range:
for r in rank_range:
for b in batch_size_range:
for e in epoch_range:
for s in sync_range:
for c in commands:
if 'hog' in c and not s:
continue
title = ""
if 'hog' in c:
title = "Overall_Speedup_Over_Serial_%s_Batch=%d_Epoch=%d_Rank=%d" % (c, b, e, r)
else:
title = "Overall_Speedup_Over_Serial_%s_Sync=%d_Batch=%d_Epoch=%d_Rank=%d" % (c, s, b, e, r)
plt.figure()
plt.title(title, fontsize=12)
plt.ylabel("Serial_Time/Time_With_N_Threads")
plt.xlabel("N")
base_time = average_total_times[c][e][b][1][r][s]
time_values = get_values(average_total_times, [c], [e], [b], thread_range, [r], [s])
time_ratios = [float(base_time)/x for x in time_values]
plt.plot(thread_range, time_ratios)
plt.legend(loc="upper left", fontsize=5)
plt.savefig(title+".png")
plt.clf()
for r in rank_range:
for b in batch_size_range:
for e in epoch_range:
for s in sync_range:
for c in commands:
if 'hog' in c and not s:
continue
title = ""
if 'hog' in c:
title = "Gradient_Speedup_Over_Serial_%s_Batch=%d_Epoch=%d_Rank=%d" % (c, b, e, r)
else:
title = "Gradient_Speedup_Over_Serial_%s_Sync=%d_Batch=%d_Epoch=%d_Rank=%d" % (c, s, b, e, r)
plt.figure()
plt.title(title, fontsize=12)
plt.ylabel("Serial_Time/Time_With_N_Threads")
plt.xlabel("N")
base_time = average_gradient_times[c][e][b][1][r][s]
                            time_values = get_values(average_gradient_times, [c], [e], [b], thread_range, [r], [s])  # use gradient times here so the gradient-speedup plot is consistent with base_time
time_ratios = [float(base_time)/x for x in time_values]
plt.plot(thread_range, time_ratios)
plt.legend(loc="upper left", fontsize=5)
plt.savefig(title+".png")
plt.clf()
########################################
# TIME RATIOS OVER HOG PER EPOCH
########################################
hog_command = [x for x in commands if 'hog' in x][0]
for t in thread_range:
for r in rank_range:
for b in batch_size_range:
title = "Overall_Time_Ratios_Over_Hog_Per_Epoch_Batch=%d_Thread=%d_Rank=%d" % (b, t, r)
plt.figure()
plt.title(title, fontsize=12)
plt.ylabel("Hog Time / Cyclades Time")
plt.xlabel("Epoch")
for s in sync_range:
for c in commands:
if 'hog' not in c:
baseline_hog_times = get_values(average_total_times, [hog_command], epoch_range, [b], [t], [r], [s])
times = get_values(average_total_times, [c], epoch_range, [b], [t], [r], [s])
ratio_times = [float(baseline_hog_times[i]) / float(times[i]) for i in range(len(times))]
plt.plot(epoch_range, ratio_times, label=c+"_sync="+str(s))
plt.legend(loc="upper left", fontsize=5)
plt.savefig(title+".png")
plt.clf()
hog_command = [x for x in commands if 'hog' in x][0]
for t in thread_range:
for r in rank_range:
for b in batch_size_range:
title = "Gradient_Time_Ratios_Over_Hog_Per_Epoch_Batch=%d_Thread=%d_Rank=%d" % (b, t, r)
plt.figure()
plt.title(title, fontsize=12)
plt.ylabel("Hog Time / Cyclades Time")
plt.xlabel("Epoch")
for s in sync_range:
for c in commands:
if 'hog' not in c:
                            baseline_hog_times = get_values(average_gradient_times, [hog_command], epoch_range, [b], [t], [r], [s])  # gradient-time ratio, so the Hogwild baseline uses gradient times too
times = get_values(average_gradient_times, [c], epoch_range, [b], [t], [r], [s])
ratio_times = [float(baseline_hog_times[i]) / float(times[i]) for i in range(len(times))]
plt.plot(epoch_range, ratio_times, label=c+"_sync="+str(s))
plt.legend(loc="upper left", fontsize=5)
plt.savefig(title+".png")
plt.clf()
#####################################
# TIME RATIOS OVER HOG PER THREAD
#####################################
for e in epoch_range:
for r in rank_range:
for b in batch_size_range:
title = "Overall_Time_Ratios_Over_Hog_Per_Thread_Batch=%d_Epoch=%d_Rank=%d" % (b, e, r)
plt.figure()
plt.title(title, fontsize=12)
plt.ylabel("Hog Time / Cyclades Time")
plt.xlabel("Thread")
for s in sync_range:
for c in commands:
if 'hog' not in c:
baseline_hog_times = get_values(average_total_times, [hog_command], [e], [b], thread_range, [r], [s])
times = get_values(average_total_times, [c], [e], [b], thread_range, [r], [s])
ratio_times = [float(baseline_hog_times[i]) / float(times[i]) for i in range(len(times))]
plt.plot(thread_range, ratio_times, label=c+"_sync="+str(s))
plt.legend(loc="upper left", fontsize=5)
plt.savefig(title+".png")
plt.clf()
for e in epoch_range:
for r in rank_range:
for b in batch_size_range:
title = "Gradient_Time_Ratios_Over_Hog_Per_Thread_Batch=%d_Epoch=%d_Rank=%d" % (b, e, r)
plt.figure()
plt.title(title, fontsize=12)
plt.ylabel("Hog Time / Cyclades Time")
plt.xlabel("Thread")
for s in sync_range:
for c in commands:
if 'hog' not in c:
                            baseline_hog_times = get_values(average_gradient_times, [hog_command], [e], [b], thread_range, [r], [s])  # gradient-time ratio, so the Hogwild baseline uses gradient times too
times = get_values(average_gradient_times, [c], [e], [b], thread_range, [r], [s])
ratio_times = [float(baseline_hog_times[i]) / float(times[i]) for i in range(len(times))]
plt.plot(thread_range, ratio_times, label=c+"_sync="+str(s))
plt.legend(loc="upper left", fontsize=5)
plt.savefig(title+".png")
plt.clf()
#draw_time_loss_graph(1, 200, [500], [1, 8, 16], [30], [0, 1], ["cyc_word_embeddings_cyc", "cyc_word_embeddings_hog"])
#draw_time_loss_graph(1, 500, [4250], [1, 4, 8], [30], [1], ["cyc_word_embeddings_cyc_sgd", "cyc_word_embeddings_hog_sgd"])
#draw_time_loss_graph(0, 500, [4250], [4, 8], [30], [1], ["cyc_word_embeddings_cyc_sag", "cyc_word_embeddings_hog_sag"])
#draw_time_loss_graph(0, 1000, [300], [1, 8], [200], [1], ["cyc_word_embeddings_cyc_sag", "cyc_word_embeddings_hog_sag"])
#draw_epoch_loss_graph(0, 100, [300], [8], [2], [1], ["cyc_word_embeddings_cyc"], [.9])
#draw_time_loss_graph(1, 500, [4250], [8], [30], [1], ["cyc_word_embeddings_cyc_sgd", "cyc_word_embeddings_cyc_sag"], [5e-4, 9e-5])
# NH2010
draw_time_loss_graph(0, 500, [1000], [1, 4, 8, 16, 32], [1], ["cyc_matrix_inverse_cyc_svrg", "cyc_matrix_inverse_hog_svrg"], 3, [.2, .2])
# DBLP
draw_time_loss_graph(0, 50, [10000], [1, 4, 8, 16, 32], [1], ["cyc_matrix_inverse_cyc_svrg", "cyc_matrix_inverse_hog_svrg"], 3, [.1, .1])
| pxinghao/dimmwitted | SVRG/run_full_benchmark.py | Python | apache-2.0 | 28,082 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2018-09-24 12:19
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('models', '0036_auto_20180115_1849'),
]
operations = [
migrations.AddField(
model_name='initiative',
name='main_ods',
field=models.CharField(blank=True, choices=[('1', '1 Fin de la pobreza'), ('2', '2 Hambre cero'), ('3', '3 Salud y bienestar'), ('4', '4 Educación de calidad'), ('5', '5 Igualdad de género'), ('6', '6 Agua limpia y saneamiento'), ('7', '7 Energía asequible y no contaminante'), ('8', '8 Trabajo decente y crecimiento económico'), ('9', '9 Industria, innovación e infraestructura'), ('10', '10 Reducción de las desigualdades'), ('11', '11 Ciudades y comunidades sostenibles'), ('12', '12 Producción y consumo responsables'), ('13', '13 Acción por el clima'), ('14', '14 Vida submarina'), ('15', '15 Vida de ecosistemas terrestres'), ('16', '16 Paz, justicia e instituciones sólidas'), ('17', '17 Alianzas para lograr los objetivos')], help_text='Indícanos que Objetivo de Desarrollo Sostenible (ODS) crees que cumple o trabaja principalmente tu iniciativa.', max_length=2, null=True, verbose_name='ODS principal'),
),
migrations.AlterField(
model_name='event',
name='topic',
field=models.CharField(choices=[('DC', 'Desarrollo comunitario'), ('AU', 'Arte urbano'), ('CL', 'Cultura libre'), ('DS', 'Deporte / Salud / Cuidados'), ('ID', 'Igualdad / Derechos / Memoria'), ('EC', 'Ecología / Consumo'), ('OE', 'Otras economías'), ('EE', 'Educación expandida'), ('CT', 'Ciencia / Tecnología'), ('MS', 'Movilidad sostenible'), ('PG', 'Política y gobernanza'), ('UP', 'Urbanismo / Patrimonio'), ('PC', 'Periodismo comunitario'), ('IN', 'Infancia')], default='DC', help_text='El tema de la actividad', max_length=2, verbose_name='Temática del evento'),
),
migrations.AlterField(
model_name='initiative',
name='topic',
field=models.CharField(choices=[('DC', 'Desarrollo comunitario'), ('AU', 'Arte urbano'), ('CL', 'Cultura libre'), ('DS', 'Deporte / Salud / Cuidados'), ('ID', 'Igualdad / Derechos / Memoria'), ('EC', 'Ecología / Consumo'), ('OE', 'Otras economías'), ('EE', 'Educación expandida'), ('CT', 'Ciencia / Tecnología'), ('MS', 'Movilidad sostenible'), ('PG', 'Política y gobernanza'), ('UP', 'Urbanismo / Patrimonio'), ('PC', 'Periodismo comunitario'), ('IN', 'Infancia')], default='DC', help_text='El tema de la iniciativa', max_length=2, verbose_name='Tema'),
),
]
| Ale-/civics | apps/models/migrations/0037_auto_20180924_1219.py | Python | gpl-3.0 | 2,703 |
'''
purpose: Creating Face Parameters
'''
import os
import metautil.json_util
class DefaultFaceParameters():
def __init__(self):
'''
        parameter: [name, pose_frame, pose_sdk_multiplier, control_opposite_parameter (if any), mirror_parameter (if any), side, connection]
control_opposite_parameter means if this parameter has a value, the control_opposite_parameter must equal 0
'''
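        # Example entry: ["jaw_open", 39, 1.0, "jaw_clench", None, "center", "jaw_open"] places the
        # jaw_open pose on frame 39 with a 1.0 SDK multiplier, forces jaw_clench to 0 while jaw_open
        # has a value, has no mirror (it is a center parameter) and drives the jaw_open connection.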
self.pose_data = {}
face_regions = {}
###### Phonemes #####
phonemes = ["p", 1, 1.0, None, None, "center", "p"],\
["td", 2, 1.0, None, None, "center", "td"],\
["k", 3, 1.0, None, None, "center", "k"],\
["flap", 4, 1.0, None, None, "center", "flap"],\
["fph", 5, 1.0, None, None, "center", "fph"],\
["th", 6, 1.0, None, None, "center", "th"],\
["ss", 7, 1.0, None, None, "center", "ss"],\
["shch", 8, 1.0, None, None, "center", "shch"], \
["rr", 9, 1.0, None, None, "center", "rr"], \
["er", 10, 1.0, None, None, "center", "er"], \
["y", 11, 1.0, None, None, "center", "y"], \
["ww", 12, 1.0, None, None, "center", "ww"], \
["h", 13, 1.0, None, None, "center", "h"], \
["ee", 14, 1.0, None, None, "center", "ee"], \
["ei", 15, 1.0, None, None, "center", "ei"], \
["eh", 16, 1.0, None, None, "center", "eh"], \
["ah", 17, 1.0, None, None, "center", "ah"], \
["ao", 18, 1.0, None, None, "center", "ao"], \
["oo", 19, 1.0, None, None, "center", "oo"]
face_regions['phonemes'] = phonemes
###### Tongue #####
tongue = ["tongue_throat", 20,1.0, None, None,"center", "tongue_throat"], \
["tongue_roof", 21,1.0, None, None,"center", "tongue_roof"], \
["tongue_teeth", 22,1.0, None, None,"center", "tongue_teeth"],\
["tongue_up", 23, 1.0, "tongue_down", None, "center", "tongue_up"], \
["tongue_out", 24, 1.0, "tongue_in", None, "center", "tongue_out"], \
["tongue_curl_up", 25, 1.0, "tongue_curl_down", None, "center", "tongue_curl_up"], \
["tongue_curl_down", 26, 1.0, "tongue_curl_up", None, "center", "tongue_curl_down"], \
["tongue_right", 27, 1.5, None, "tongue_left", "center", "tongue_right"], \
["tongue_left", 28, 1.5, None, "tongue_right", "center", "tongue_left"], \
["tongue_down", 29, 1.0, "tongue_up", None, "center", "tongue_down"], \
["tongue_in", 30, 1.0, "tongue_out", None, "center", "tongue_in"], \
["tongue_twist_right", 31, 1.0, None, "tongue_twist_left", "center", "tongue_twist_right"], \
["tongue_twist_left", 32, 1.0, None, "tongue_twist_right", "center", "tongue_twist_left"],\
        ["tongue_wide", 33, 1.0, None, None, "center", "tongue_wide"]
face_regions['tongue'] = tongue
###### jaw #####
jaw = ["jaw_stretch", 35, 1.0, "jaw_squash", None, "center", "jaw_stretch"], \
["jaw_squash", 36, 1.0, "jaw_stretch", None, "center", "jaw_squash"], \
["left_jaw_squash_tilt", 37, 1.0, None, 'right_jaw_squash_tilt', "center", "left_jaw_squash_tilt"], \
["right_jaw_squash_tilt", 38, 1.0, None, 'left_jaw_squash_tilt', "center", "right_jaw_squash_tilt"],\
["jaw_open", 39, 1.0, "jaw_clench", None, "center", "jaw_open"], \
["jaw_clench", 40, 1.0, "jaw_open", None, "center", "jaw_clench"], \
["jaw_down", 41, 1.0, "jaw_up", None, "center", "jaw_down"], \
["jaw_up", 42, 1.0, "jaw_down", None, "center", "jaw_up"], \
["right_jaw_tilt", 43, 1.0, None, "left_jaw_tilt", "center", "right_jaw_tilt"], \
["left_jaw_tilt", 44, 1.0, None, "right_jaw_tilt", "center", "left_jaw_tilt"], \
["jaw_forward", 45, 1.0, "jaw_back", None, "center", "jaw_forward"], \
["jaw_back", 46, 1.0, "jaw_forward", None, "center", "jaw_back"], \
["left_jaw", 47, 1.0, None, "right_jaw", "center", "left_jaw"], \
["right_jaw", 48, 1.0, None, "left_jaw", "center", "right_jaw"], \
["chin_up", 49, 1.0, "chin_down", None, "center", "chin_up"], \
["chin_down", 50, 1.0, "chin_up", None, "center", "chin_down"]
face_regions['jaw'] = jaw
###### Mouth #####
mouth = ["right_mouth_stretch", 55, 1.0, "right_mouth_oo", "left_mouth_stretch", "right", "right_mouth_stretch"], \
["left_mouth_stretch", 56, 1.0, "left_mouth_oo", "right_mouth_stretch", "left", "left_mouth_stretch"], \
["right_mouth_oo", 57, 1.0, "right_mouth_stretch", "left_mouth_oo", "right", "right_mouth_oo"], \
["left_mouth_oo", 58, 1.0, "left_mouth_stretch", "right_mouth_oo", "left", "left_mouth_oo"], \
["right_mouth_up", 59, 1.0, "right_mouth_down", "left_mouth_up", "right", "right_mouth_up"], \
["left_mouth_up", 60, 1.0, "left_mouth_down", "right_mouth_up", "left", "left_mouth_up"], \
["right_mouth_down", 61, 1.0, "right_mouth_up", "left_mouth_down", "right", "right_mouth_down"], \
["left_mouth_down", 62, 1.0, "left_mouth_up", "right_mouth_down", "left", "left_mouth_down"], \
["right_upper_lip_outer_up", 63, 1.0, "right_upper_lip_outer_down", "left_upper_lip_outer_up", "right", "right_upper_lip_outer_up"], \
["center_upper_lip_up", 64, 1.0, "center_upper_lip_down", None, "center", "center_upper_lip_up"], \
["left_upper_lip_outer_up", 65, 1.0, "left_upper_lip_outer_down", "right_upper_lip_outer_up", "left", "left_upper_lip_outer_up"], \
["right_upper_lip_inner_up", 66, 1.0, "right_upper_lip_inner_down", "left_upper_lip_inner_up", "right", "right_upper_lip_inner_up"], \
["left_upper_lip_inner_up", 67, 1.0, "left_upper_lip_inner_down", 'right_upper_lip_inner_up', "left", "left_upper_lip_inner_up"], \
["right_upper_lip_outer_down", 68, 1.0, "right_upper_lip_outer_up", "left_upper_lip_outer_down", "right", "right_upper_lip_outer_down"], \
["center_upper_lip_down", 69, 1.0, "center_upper_lip_up", None, "center", "center_upper_lip_down"], \
["left_upper_lip_outer_down", 70, 1.0, "left_upper_lip_outer_up", "right_upper_lip_outer_down", "left", "left_upper_lip_outer_down"], \
["right_upper_lip_inner_down", 71, 1.0, "right_upper_lip_inner_up", "left_upper_lip_inner_down", "right", "right_upper_lip_inner_down"], \
["left_upper_lip_inner_down", 72, 1.0, "left_upper_lip_inner_up", "right_upper_lip_inner_down", "left", "left_upper_lip_inner_down"], \
["right_lower_lip_inner_down", 73, 1.0, "right_lower_lip_inner_up", "left_lower_lip_inner_down", "right", "right_lower_lip_inner_down"], \
["center_lower_lip_down", 74, 1.0, "center_lower_lip_up", None, "center", "center_lower_lip_down"], \
["left_lower_lip_inner_down", 75, 1.0, "left_lower_lip_inner_up", "right_lower_lip_inner_down", "left", "left_lower_lip_inner_down"], \
["right_lower_lip_outer_down", 76, 1.0, "right_lower_lip_outer_up", "left_lower_lip_outer_down", "right", "right_lower_lip_outer_down"], \
["left_lower_lip_outer_down", 77, 1.0, "left_lower_lip_outer_up", "right_lower_lip_outer_down", "left", "left_lower_lip_outer_down"], \
["right_lower_lip_inner_up", 78, 1.0, "right_lower_lip_inner_down", "left_lower_lip_inner_up", "right", "right_lower_lip_inner_up"], \
["center_lower_lip_up", 79, 1.0, "center_lower_lip_down", None, "center", "center_lower_lip_up"], \
["left_lower_lip_inner_up", 80, 1.0, "left_lower_lip_inner_down", "right_lower_lip_inner_up", "left", "left_lower_lip_inner_up"], \
["right_lower_lip_outer_up", 81, 1.0, "right_lower_lip_outer_down", "left_lower_lip_outer_up", "right", "right_lower_lip_outer_up"], \
["left_lower_lip_outer_up", 82, 1.0, "left_lower_lip_outer_down", "right_lower_lip_outer_up", "left", "left_lower_lip_outer_up"], \
["right_upper_corner_adjust_up", 83, 1.0, "right_upper_corner_adjust_down", "left_upper_corner_adjust_up", "right", "right_upper_corner_adjust_up"], \
["left_upper_corner_adjust_up", 84, 1.0, "left_upper_corner_adjust_down", "right_upper_corner_adjust_up", "left", "left_upper_corner_adjust_up"], \
["right_upper_corner_adjust_down", 85, 1.0, "right_upper_corner_adjust_up", "left_upper_corner_adjust_down", "right", "right_upper_corner_adjust_down"], \
["left_upper_corner_adjust_down", 86, 1.0, "left_upper_corner_adjust_up", "right_upper_corner_adjust_down", "left", "left_upper_corner_adjust_down"], \
["right_lower_corner_adjust_down", 87, 1.0, "right_lower_corner_adjust_up", "left_lower_corner_adjust_down", "right", "right_lower_corner_adjust_down"], \
["left_lower_corner_adjust_down", 88, 1.0, "left_lower_corner_adjust_up", "right_lower_corner_adjust_down", "left", "left_lower_corner_adjust_down"], \
["right_lower_corner_adjust_up", 89, 1.0, "right_lower_corner_adjust_down", "left_lower_corner_adjust_up", "right", "right_lower_corner_adjust_up"], \
["left_lower_corner_adjust_up", 90, 1.0, "left_lower_corner_adjust_down", "right_lower_corner_adjust_up", "left", "left_lower_corner_adjust_up"], \
["lips_up", 91, 1.0, "lips_down", None, "center", "lips_up"], \
["lips_down", 92, 1.0, "lips_up", None, "center", "lips_down"], \
["upper_lip_forward", 93, 1.0, "upper_lip_back", None, "center", "upper_lip_forward"], \
["lower_lip_forward", 94, 1.0, "lower_lip_back", None, 'center', "lower_lip_forward"], \
["upper_lip_back", 95, 1.0, "upper_lip_forward", None, "center", "upper_lip_back"], \
["lower_lip_back", 96, 1.0, "lower_lip_forward", None, "center", "lower_lip_back"], \
["left_lips", 97, 1.0, "right_lips", "right_lips", "left", "left_lips"], \
["right_lips", 98, 1.0, "left_lips", "left_lips", "right", "right_lips"], \
["upper_lip_funnel", 99, 1.0, "upper_lip_curl", None, "center", "upper_lip_funnel"], \
["lower_lip_funnel", 100, 1.0, "lower_lip_curl", None, "center", "lower_lip_funnel"], \
["upper_lip_curl", 101, 1.0, "upper_lip_funnel", None, "center", "upper_lip_curl"], \
["lower_lip_curl", 102, 1.0, "lower_lip_funnel", None, "center", "lower_lip_curl"], \
["right_pucker", 103, 1.0, None, "left_pucker", "right", "right_pucker", ], \
["left_pucker", 104, 1.0, None, "right_pucker", "left", "left_pucker"]
face_regions['mouth'] = mouth
# ############# CHEEK #################
cheeks = ["right_blow", 108, 1.0, "right_suck", "left_blow", "right", "right_blow"], \
["left_blow", 109, 1.0, "left_suck", "right_blow", "left", "left_blow"], \
["right_suck", 110, 1.0, "right_blow", "left_suck", "right", "right_suck"], \
["left_suck", 111, 1.0, "left_blow", "right_suck", "left", "left_suck"], \
["right_nasolabial_out", 112, 1.0, None, "left_nasolabial_out", "right", "right_nasolabial_out"], \
["left_nasolabial_out", 113, 1.0, None, "right_nasolabial_out", "left", "left_nasolabial_out"],\
["right_cheek_outer_up", 114, 1.0, "right_cheek_outer_down", "left_cheek_outer_up", "right", "right_cheek_outer_up"], \
["right_cheek_inner_up", 115, 1.0, "right_cheek_inner_down", "left_cheek_inner_up", "right", "right_cheek_inner_up"], \
["left_cheek_outer_up", 116, 1.0, "left_cheek_outer_down", "right_cheek_outer_up", "left", "left_cheek_outer_up"], \
        ["left_cheek_inner_up", 117, 1.0, "left_cheek_inner_down", "right_cheek_inner_up", "left", "left_cheek_inner_up"], \
["right_cheek_inner_down", 118, 1.0, "right_cheek_inner_up", "left_cheek_inner_down", "right", "right_cheek_inner_down"], \
["right_cheek_outer_down", 119, 1.0, "right_cheek_outer_up", "left_cheek_outer_down", "right", "right_cheek_outer_down"], \
        ["left_cheek_inner_down", 120, 1.0, "left_cheek_inner_up", "right_cheek_inner_down", "left", "left_cheek_inner_down"], \
["left_cheek_outer_down", 121, 1.0, "left_cheek_outer_up", "right_cheek_outer_down", "left", "left_cheek_outer_down"]
face_regions['cheeks'] = cheeks
###### nose #####
nose = ["nose_tip_up", 125, 1.0, "nose_tip_down", None, "center", "nose_tip_up"],\
["nose_tip_down", 126, 1.0, "nose_tip_up", None, "center", "nose_tip_down"], \
["nose_tip_right", 127, 1.0, "nose_tip_left", "nose_tip_left", "center", "nose_tip_right"], \
["nose_tip_left", 128, 1.0, "nose_tip_right", "nose_tip_right", "center", "nose_tip_left"],\
["right_nose_flare", 129, 1.0, "right_nose_suck", "left_nose_flare", "right", "right_nose_flare"], \
["left_nose_flare", 130, 1.0, "left_nose_suck", "right_nose_flare", "left", "left_nose_flare"], \
["right_nose_sneer", 131, 1.0, "right_nose_down", "left_nose_sneer", "right", "right_nose_sneer"], \
["left_nose_sneer", 132, 1.0, "left_nose_down", "right_nose_sneer", "left", "left_nose_sneer"], \
["right_nose_suck", 133, 1.0, "right_nose_flare", "left_nose_suck", "right", "right_nose_suck"], \
["left_nose_suck", 134, 1.0, "left_nose_flare", "right_nose_suck", "left", "left_nose_suck"], \
["right_nose_down", 135, 1.0, "right_nose_sneer", "left_nose_down", "right", "right_nose_down"], \
["left_nose_down", 136, 1.0, "left_nose_sneer", "right_nose_down", "left", "left_nose_down"]
face_regions['nose'] = nose
# ################# EYES ################
eyes = ["right_eye_right", 138, 1.0, "right_eye_left", "left_eye_left", "right", "right_eye_right"], \
["left_eye_right", 139, 1.0, "left_eye_left", "right_eye_left", "left", "left_eye_right"], \
["right_eye_left", 140, 1.0, "right_eye_right", "left_eye_right", "right", "right_eye_left"], \
["left_eye_left", 141, 1.0, "left_eye_right", "right_eye_right", "left", "left_eye_left"], \
["right_eye_up", 142, 1.0, "right_eye_down", "left_eye_up", "right", "right_eye_up"], \
["left_eye_up", 143, 1.0, "left_eye_down", "right_eye_up", "left", "left_eye_up"], \
["right_eye_down", 144, 1.0, "right_eye_up", "left_eye_down", "right", "right_eye_down"], \
["left_eye_down", 145, 1.0, "left_eye_up", "right_eye_down", "left", "left_eye_down"]
face_regions['eyes'] = eyes
# ############# BROWS #################
######## Pose Name Frame, multiplier, opposite pose, mirror_pose, side, stage_position
brows = ["right_outer_brow_down", 147, 1.0, "right_outer_brow_up", "left_outer_brow_down", 'right', "right_outer_brow_down"],\
["right_inner_brow_down", 148, 1.0, "right_inner_brow_up", "left_inner_brow_down", "right", "right_inner_brow_down"],\
["left_inner_brow_down", 149, 1.0, "left_inner_brow_up", "right_inner_brow_down", "left", "left_inner_brow_down"],\
["left_outer_brow_down", 150, 1.0, "left_outer_brow_up", "right_outer_brow_down", "left", "left_outer_brow_down"],\
["right_outer_brow_up", 151, 1.0, "right_outer_brow_down", "left_outer_brow_up", "right", "right_outer_brow_up"],\
["right_inner_brow_up", 152, 1.0, "right_inner_brow_down", "left_inner_brow_up", "right", "right_inner_brow_up"],\
["left_inner_brow_up", 153, 1.0, "left_inner_brow_down", "right_inner_brow_up", "left", "left_inner_brow_up"],\
["left_outer_brow_up", 154, 1.0, "left_outer_brow_down", "right_outer_brow_up", "left", "left_outer_brow_up"], \
["right_middle_brow_down", 155, 1.0, "right_middle_brow_up", "left_middle_brow_down", "right", "right_middle_brow_down"], \
["right_middle_brow_up", 156, 1.0, "right_middle_brow_down", "left_middle_brow_up", "right", "right_middle_brow_up"], \
["left_middle_brow_down", 157, 1.0, "left_middle_brow_up", "right_middle_brow_down", "left", "left_middle_brow_down"], \
["left_middle_brow_up", 158, 1.0, "left_middle_brow_down", "right_middle_brow_up", "left", "left_middle_brow_up"], \
["right_outer_brow_tilt_sad", 159, 1.0, "right_outer_brow_tilt_anger", "left_outer_brow_tilt_sad", "right", "right_outer_brow_tilt_sad"],\
["right_inner_brow_tilt_sad", 160, 1.0, "right_inner_brow_tilt_anger", "left_inner_brow_tilt_sad", "right", "right_inner_brow_tilt_sad"],\
["left_inner_brow_tilt_sad", 161, 1.0, "left_inner_brow_tilt_anger", "right_inner_brow_tilt_sad", "left", "left_inner_brow_tilt_sad"],\
["left_outer_brow_tilt_sad", 162, 1.0, "left_outer_brow_tilt_anger", "right_outer_brow_tilt_sad", "left", "left_outer_brow_tilt_sad"],\
["right_outer_brow_tilt_anger", 163, 1.0, "right_outer_brow_tilt_sad", "left_outer_brow_tilt_anger", "right", "right_outer_brow_tilt_anger"],\
["right_inner_brow_tilt_anger", 164, 1.0, "right_inner_brow_tilt_sad", "left_inner_brow_tilt_anger", "right", "right_inner_brow_tilt_anger"],\
["left_inner_brow_tilt_anger", 165, 1.0, "left_inner_brow_tilt_sad", "right_inner_brow_tilt_anger", "left", "left_inner_brow_tilt_anger"],\
["left_outer_brow_tilt_anger", 166, 1.0, "left_outer_brow_tilt_sad", "right_outer_brow_tilt_anger", "left", "left_outer_brow_tilt_anger"],\
["right_brow_squeeze", 167, 1.0, None, "left_brow_squeeze", "right", "right_brow_squeeze"],\
["left_brow_squeeze", 168, 1.0, None, "right_brow_squeeze", "left", "left_brow_squeeze"]
face_regions['brows'] = brows
# ############# EYELIDS #################
eyelids = ["right_upper_eyelid_down", 170, 1.0, "right_upper_eyelid_up", "left_upper_eyelid_down", "right", "right_upper_eyelid_down"], \
["left_upper_eyelid_down", 171, 1.0, "left_upper_eyelid_up", "right_upper_eyelid_down", "left", "left_upper_eyelid_down"], \
["right_lower_eyelid_down", 172, 1.0, "right_lower_eyelid_up", "left_lower_eyelid_down", "right", "right_lower_eyelid_down"], \
["left_lower_eyelid_down", 173, 1.0, "left_lower_eyelid_up", "right_lower_eyelid_down", "left", "left_lower_eyelid_down"], \
["right_upper_eyelid_up", 174, 1.0, "right_upper_eyelid_down", "left_upper_eyelid_up", "right", "right_upper_eyelid_up"], \
["left_upper_eyelid_up", 175, 1.0, "left_upper_eyelid_down", "right_upper_eyelid_up", "left", "left_upper_eyelid_up"], \
["right_lower_eyelid_up", 176, 1.0, "right_lower_eyelid_down", "left_lower_eyelid_up", "right", "right_lower_eyelid_up"], \
["left_lower_eyelid_up", 177, 1.0, "left_lower_eyelid_down", "right_lower_eyelid_up", "left", "left_lower_eyelid_up"], \
["right_upper_eyelid_twist_sad", 178, 1.0, "right_upper_eyelid_twist_anger", "left_upper_eyelid_twist_sad", "right", "right_upper_eyelid_twist_sad"], \
["left_upper_eyelid_twist_sad", 179, 1.0, "left_upper_eyelid_twist_anger", "right_upper_eyelid_twist_sad", "left", "left_upper_eyelid_twist_sad"], \
["right_upper_eyelid_twist_anger", 180, 1.0, "right_upper_eyelid_twist_sad", "left_upper_eyelid_twist_anger", "right", "right_upper_eyelid_twist_anger"], \
["left_upper_eyelid_twist_anger", 181, 1.0, "left_upper_eyelid_twist_sad", "right_upper_eyelid_twist_anger", "left", "left_upper_eyelid_twist_anger"], \
["right_lower_eyelid_twist_sad", 182, 1.0, "right_lower_eyelid_twist_anger", "left_lower_eyelid_twist_sad", "right", "right_lower_eyelid_twist_sad"], \
["left_lower_eyelid_twist_sad", 183, 1.0, "left_lower_eyelid_twist_anger", "right_lower_eyelid_twist_sad", "left", "left_lower_eyelid_twist_sad"],\
["right_lower_eyelid_twist_anger", 184, 1.0, "right_lower_eyelid_twist_sad", "left_lower_eyelid_twist_anger", "right", "right_lower_eyelid_twist_anger"], \
["left_lower_eyelid_twist_anger", 185, 1.0, "left_lower_eyelid_twist_sad", "right_lower_eyelid_twist_anger", "left", "left_lower_eyelid_twist_anger"]
face_regions['eyelids'] = eyelids
custom_parameter = ["custom_parameter_01", 188, 1.0, None, None, "center", "custom_parameter_01"], \
["custom_parameter_02", 171, 1.0,None, None, "center", "custom_parameter_02"], \
["custom_parameter_03", 171, 1.0,None, None, "center", "custom_parameter_03"], \
["custom_parameter_04", 171, 1.0,None, None, "center", "custom_parameter_04"], \
["custom_parameter_05", 171, 1.0,None, None, "center", "custom_parameter_05"], \
["custom_parameter_06", 171, 1.0,None, None, "center", "custom_parameter_06"], \
["custom_parameter_07", 171, 1.0,None, None, "center", "custom_parameter_07"], \
["custom_parameter_08", 171, 1.0,None, None, "center", "custom_parameter_08"], \
["custom_parameter_09", 171, 1.0,None, None, "center", "custom_parameter_09"], \
["custom_parameter_10", 171, 1.0,None, None, "center", "custom_parameter_10"]
face_regions['custom_parameter'] = custom_parameter
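        # Flatten each region's tuples into pose_data[region][name] dicts keyed by frame, multiplier, opposite/mirror parameter, side and connection.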
for region in face_regions:
self.pose_data[region] = {}
for parameter in face_regions[region]:
self.pose_data[region][parameter[0]] = {}
self.pose_data[region][parameter[0]]['frame'] = parameter[1]
self.pose_data[region][parameter[0]]['multiplier'] = parameter[2]
self.pose_data[region][parameter[0]]['opposite_parameter'] = parameter[3]
self.pose_data[region][parameter[0]]['mirror_parameter'] = parameter[4]
self.pose_data[region][parameter[0]]['side'] = parameter[5]
self.pose_data[region][parameter[0]]['connection'] = parameter[6]
    def export(self, path, file_name='default_face_parameters'):
        '''Export Default Face Parameters to a JSON file.'''
        if not os.path.isdir(path):
            os.makedirs(path)
        metautil.json_util.write_json(self.pose_data, path, file_name)
class SolveParameters():
'''
    parameter: [name, pose_frame, pose_sdk_multiplier, control_opposite_parameter (if any), mirror_parameter (if any), side, connection]
control_opposite_parameter means if this parameter has a value, the control_opposite_parameter must equal 0
'''
def __init__(self):
self.pose_data = {}
face_regions = {}
# ############# HEAD #################
head = ["left_twist_head_solve", 22, 1.0, None, 'right_twist_head_solve', "center", "left_twist_head_solve"],\
["right_twist_head_solve", 21, 1.0, None, 'left_twist_head_solve', "center", "right_twist_head_solve"],\
["back_head_solve", 20, 1.0, "forward_head_solve", None, "center", "back_head_solve"],\
["forward_head_solve", 19, 1.0, "back_head_solve", None, "center", "forward_head_solve"],\
["left_head_solve", 18, 1.0, None, "right_head_solve", "center", "left_head_solve"],\
["right_head_solve", 17, 1.0, None, "left_head_solve", "center","right_head_solve"], \
["left_twist_neck_2_solve", 16, 1.0, None, "right_twist_neck_2_solve", "center", "left_twist_neck_2_solve"], \
["right_twist_neck_2_solve", 15, 1.0, None, "left_twist_neck_2_solve", "center", "right_twist_neck_2_solve"], \
["back_neck_2_solve", 14, 1.0, "forward_neck_2_solve",None, "center", "back_neck_2_solve"],\
["forward_neck_2_solve", 13, 1.0, "back_neck_2_solve", None, "center", "forward_neck_2_solve"], \
["left_neck_2_solve", 12, 1.0, None, "right_neck_2_solve", "center", "left_neck_2_solve"], \
["right_neck_2_solve", 11, 1.0, None, "left_neck_2_solve", "center", "right_neck_2_solve"], \
["left_twist_neck_1_solve", 10, 1.0, None, 'right_twist_neck_1_solve', "center", "left_twist_neck_1_solve"], \
["right_twist_neck_1_solve", 9, 1.0, None, 'left_twist_neck_1_solve', "center", "right_twist_neck_1_solve"], \
["back_neck_1_solve", 8, 1.0, "forward_neck_1_solve", None, "center", "back_neck_1_solve"], \
["forward_neck_1_solve", 7, 1.0, "back_neck_1_solve", None, "center", "forward_neck_1_solve"], \
["left_neck_1_solve", 6, 1.0, None, "right_neck_1_solve", "center", "left_neck_1_solve"], \
["right_neck_1_solve", 5, 1.0, None, "left_neck_1_solve", "center", "right_neck_1_solve"], \
["back_spine_3_solve", 4, 1.0, "forward_spine_3_solve", None, "center", "back_spine_3_solve"],\
["forward_spine_3_solve", 3, 1.0, "back_spine_3_solve", None, "center", "forward_spine_3_solve"],\
["left_spine_3_solve", 2, 1.0, None, "right_spine_3_solve", "center", "left_spine_3_solve"],\
["right_spine_3_solve", 1, 1.0, None, "left_spine_3_solve", "center", "right_spine_3_solve"]
face_regions['head'] = head
eyelids = ["left_eyelid_lower_inner_up_solve", 73, 1.0, None, 'right_eyelid_lower_inner_up_solve', "left", "left_eyelid_lower_inner_up_solve"], \
["right_eyelid_lower_inner_up_solve", 74, 1.0, None, 'left_eyelid_lower_inner_up_solve', "right", "right_eyelid_lower_inner_up_solve"], \
["left_eyelid_upper_inner_down_solve", 75, 1.0, None, 'right_eyelid_upper_inner_down_solve', "left", "left_eyelid_upper_inner_down_solve"], \
["right_eyelid_upper_inner_down_solve", 76, 1.0, None, 'left_eyelid_upper_inner_down_solve', "right", "right_eyelid_upper_inner_down_solve"], \
["left_upper_eyelid_extend_down_solve", 77, 1.0, None, 'right_upper_eyelid_extend_down_solve', "left", "left_upper_eyelid_extend_down_solve"],\
["right_upper_eyelid_extend_down_solve", 78, 1.0, None, 'left_upper_eyelid_extend_down_solve', "right", "right_upper_eyelid_extend_down_solve"]
face_regions['eyelids'] = eyelids
cheeks = ["right_cheek_out_solve", 38, 1.0, None, 'left_cheek_out_solve', "right", "right_cheek_out_solve"], \
["left_cheek_out_solve", 39, 1.0, None, 'right_cheek_out_solve', "left", "left_cheek_out_solve"],\
["left_buccinator_01_solve", 40, 1.0, None, 'right_buccinator_01_solve', "left", "left_buccinator_01_solve"],\
["right_buccinator_01_solve", 41, 1.0, None, 'left_buccinator_01_solve', "right", "right_buccinator_01_solve"],\
["left_buccinator_lower_01_solve", 42, 1.0, None, 'right_buccinator_lower_01_solve', "left", "left_buccinator_lower_01_solve"],\
["right_buccinator_lower_01_solve", 43, 1.0, None, 'left_buccinator_lower_01_solve', "right", "right_buccinator_lower_01_solve"],\
["left_buccinator_out_solve", 44, 1.0, None, 'right_buccinator_out_solve', "left", "left_buccinator_out_solve"],\
["right_buccinator_out_solve", 45, 1.0, None, 'left_buccinator_out_solve', "right", "right_buccinator_out_solve"],\
["left_buccinator_upper_02_out_solve", 46, 1.0, None, 'right_buccinator_upper_02_out_solve', "left", "left_buccinator_upper_02_out_solve"],\
["right_buccinator_upper_02_out_solve", 47, 1.0, None, 'left_buccinator_upper_02_out_solve', "right", "right_buccinator_upper_02_out_solve"],\
["left_cheekbone_01_out_solve", 48, 1.0, None, 'right_cheekbone_01_out_solve', "left", "left_cheekbone_01_out_solve"],\
["right_cheekbone_01_out_solve", 49, 1.0, None, 'left_cheekbone_01_out_solve', "right", "right_cheekbone_01_out_solve"],\
["left_cheekbone_02_out_solve", 50, 1.0, None, 'right_cheekbone_02_out_solve', "left", "left_cheekbone_02_out_solve"],\
["right_cheekbone_02_out_solve", 51, 1.0, None, 'left_cheekbone_02_out_solve', "right", "right_cheekbone_02_out_solve"],\
["left_lower_nasolabial_solve", 52, 1.0, None, 'right_lower_nasolabial_solve', "left", "left_lower_nasolabial_solve"],\
["right_lower_nasolabial_solve", 53, 1.0, None, 'left_lower_nasolabial_solve', "right", "right_lower_nasolabial_solve"]
face_regions['cheeks'] = cheeks
nose = ["nose_out_solve", 56, 1.0, None, None, "center", "nose_out_solve"],\
["left_nose_solve", 57, 1.0, None, 'right_nose_solve', "left", "left_nose_solve"],\
["right_nose_solve", 58, 1.0, None, 'left_nose_solve', "right", "right_nose_solve"],\
["left_nose_bridge_out", 59, 1.0, None, 'right_nose_bridge_out', "left", "left_nose_bridge_out"],\
["right_nose_bridge_out", 60, 1.0, None, 'left_nose_bridge_out', "right", "right_nose_bridge_out"]
face_regions['nose'] = nose
brows = ["left_brow_inner_up_solve", 63, 1.0, None, 'right_brow_inner_up_solve', "left", "left_brow_inner_up_solve"],\
["right_brow_inner_up_solve", 64, 1.0, None, 'left_brow_inner_up_solve', "right", "right_brow_inner_up_solve"],\
["right_inner_brow_out_solve", 65, 1.0, None, 'left_inner_brow_out_solve', "right", "right_inner_brow_out_solve"], \
["left_inner_brow_out_solve", 66, 1.0, None, 'right_inner_brow_out_solve', "left", "left_inner_brow_out_solve"],\
["left_brow_middle_up_solve", 67, 1.0, None, 'right_brow_middle_up_solve', "left", "left_brow_middle_up_solve"],\
["right_brow_middle_up_solve", 68, 1.0, None, 'left_brow_middle_up_solve', "right", "right_brow_middle_up_solve"],\
["left_brow_outer_up_solve", 69, 1.0, None, 'right_brow_outer_up_solve', "left", "left_brow_outer_up_solve"],\
["right_brow_outer_up_solve", 70, 1.0, None, 'left_brow_outer_up_solve', "right", "right_brow_outer_up_solve"]
face_regions['brows'] = brows
jaw = ["jaw_down_solve", 25, 1.0, None, None, "center", "jaw_down_solve"],\
["jaw_open_solve", 26, 1.0, None, None, "center", "jaw_open_solve"],\
["jaw_open_left_solve", 27, 1.0, None, 'jaw_open_right_solve', "left", "jaw_open_left_solve"],\
["jaw_open_right_solve", 28, 1.0, None, 'jaw_open_left_solve', "right", "jaw_open_right_solve"],\
["jaw_open_slide_left_solve", 29, 1.0, None, 'jaw_open_slide_right_solve', "left", "jaw_open_slide_left_solve"],\
["jaw_open_slide_right_solve", 30, 1.0, None, 'jaw_open_slide_left_solve', "right", "jaw_open_slide_right_solve"],\
["center_chin_solve", 31, 1.0, None, None, "center", "center_chin_solve"]
face_regions['jaw'] = jaw
lips = ["left_upper_lip_corner_down_solve", 34, 1.0, None, 'right_upper_lip_corner_down_solve', "left", "left_upper_lip_corner_down_solve"], \
["right_upper_lip_corner_down_solve", 35, 1.0, None, 'left_upper_lip_corner_down_solve', "right", "right_upper_lip_corner_down_solve"]
face_regions['lips'] = lips
for region in face_regions:
self.pose_data[region] = {}
for parameter in face_regions[region]:
self.pose_data[region][parameter[0]] = {}
self.pose_data[region][parameter[0]]['frame'] = parameter[1]
self.pose_data[region][parameter[0]]['multiplier'] = parameter[2]
self.pose_data[region][parameter[0]]['opposite_parameter'] = parameter[3]
self.pose_data[region][parameter[0]]['mirror_parameter'] = parameter[4]
self.pose_data[region][parameter[0]]['side'] = parameter[5]
self.pose_data[region][parameter[0]]['connection'] = parameter[6]
def export(self, path, file_name = 'solve_parameters'):
''' Export Cinematics Face Parameters to a JSON File'''
if not os.path.isdir(path):
os.makedirs(path)
metautil.json_util.write_json(path, file_name, self.pose_data)
| deathglitch/metarigging | python/face/face_parameters_export.py | Python | mit | 29,430 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2012, Jean-Rémy Bancel <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Chromagon Project nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Jean-Rémy Bancel BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Frontend script using Chrome Cache parsing library
"""
import argparse
import textwrap
import chromagnon.cacheParse
def main():
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=textwrap.dedent('''
\033[32mChromagnon Chrome Cache Parser\033[0m
\033[4mTwo input methods\033[0m
            1) A list of urls (useful if you want to analyse only known urls).
The entry corresponding to the url is found in the hash table,
so it is fast.
            2) Parse the whole cache by not specifying urls. Useful to get
an exhaustive idea of what is in the cache. Can be slow if the
cache has numerous entries.
\033[4mThree output methods\033[0m
1) Terminal output. Displays main details of each cache entry.
Stderr is used for error messages.
2) Export to files. The entries are exported to the given directory
(specified with \033[1m-o\033[0m option). It is browsable with a web browser.
3) Output a csv file to terminal compliant with log2timeline.
\033[1m-l2t\033[0m flag.
'''),
epilog=textwrap.dedent('''
\033[4mExamples\033[0m
- Export the whole cache to browse it
> python chromagnonCache.py ~/.cache/chromium/Default/Cache/ -o /tmp/export
- Export the whole cache and read it with log2timeline
> python chromagnonCache.py ~/.cache/chromium/Default/Cache/ -l2t > l2t.csv
> log2timeline -f l2t_csv l2t.csv
            - Displaying information about a url
> python chromagnonCache.py ~/.cache/chromium/Default/Cache/ -u "http://test.com"
'''))
parser.add_argument("Cache Directory", action='store',
help="Chrome Cache Directory")
parser.add_argument("-o", "-output", action='store',
default=None,
help="Export cached data to that directory \
(created if it doesn't exist)")
parser.add_argument("-l2t", "-log2timeline", action='store_true',
default=False,
help="Use csv log2timeline format for output")
parser.add_argument("-u", "-urls", action='store', nargs="+",
help="Use given urls as input")
args = parser.parse_args()
# Getting data
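    # args.u is None when no -u urls were given, in which case the whole cache is parsed.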
cache = chromagnon.cacheParse.parse(
args.__getattribute__("Cache Directory"), args.u)
# Export or display
    if args.o is None:
if args.l2t:
chromagnon.cacheParse.exportTol2t(cache)
else:
for entry in cache:
print entry
print "-"*80
else:
chromagnon.cacheParse.exportToHTML(cache, args.o)
if __name__ == "__main__":
main()
| JRBANCEL/Chromagnon | chromagnonCache.py | Python | bsd-3-clause | 4,390 |
# coding: utf-8
from __future__ import unicode_literals
import itertools
import json
import re
from .common import InfoExtractor, SearchInfoExtractor
from ..compat import (
compat_urllib_parse,
compat_urlparse,
)
from ..utils import (
clean_html,
unescapeHTML,
ExtractorError,
int_or_none,
mimetype2ext,
)
from .nbc import NBCSportsVPlayerIE
class YahooIE(InfoExtractor):
IE_DESC = 'Yahoo screen and movies'
_VALID_URL = r'(?P<url>(?P<host>https?://(?:[a-zA-Z]{2}\.)?[\da-zA-Z_-]+\.yahoo\.com)/(?:[^/]+/)*(?P<display_id>.+)?-(?P<id>[0-9]+)(?:-[a-z]+)?\.html)'
_TESTS = [
{
'url': 'http://screen.yahoo.com/julian-smith-travis-legg-watch-214727115.html',
'info_dict': {
'id': '2d25e626-2378-391f-ada0-ddaf1417e588',
'ext': 'mp4',
'title': 'Julian Smith & Travis Legg Watch Julian Smith',
'description': 'Julian and Travis watch Julian Smith',
'duration': 6863,
},
},
{
'url': 'http://screen.yahoo.com/wired/codefellas-s1-ep12-cougar-lies-103000935.html',
'md5': 'd6e6fc6e1313c608f316ddad7b82b306',
'info_dict': {
'id': 'd1dedf8c-d58c-38c3-8963-e899929ae0a9',
'ext': 'mp4',
'title': 'Codefellas - The Cougar Lies with Spanish Moss',
'description': 'md5:66b627ab0a282b26352136ca96ce73c1',
'duration': 151,
},
},
{
'url': 'https://screen.yahoo.com/community/community-sizzle-reel-203225340.html?format=embed',
'md5': '60e8ac193d8fb71997caa8fce54c6460',
'info_dict': {
'id': '4fe78544-8d48-39d8-97cd-13f205d9fcdb',
'ext': 'mp4',
'title': "Yahoo Saves 'Community'",
'description': 'md5:4d4145af2fd3de00cbb6c1d664105053',
'duration': 170,
}
},
{
'url': 'https://tw.screen.yahoo.com/election-2014-askmayor/敢問市長-黃秀霜批賴清德-非常高傲-033009720.html',
'md5': '3a09cf59349cfaddae1797acc3c087fc',
'info_dict': {
'id': 'cac903b3-fcf4-3c14-b632-643ab541712f',
'ext': 'mp4',
'title': '敢問市長/黃秀霜批賴清德「非常高傲」',
'description': '直言台南沒捷運 交通居五都之末',
'duration': 396,
}
},
{
'url': 'https://uk.screen.yahoo.com/editor-picks/cute-raccoon-freed-drain-using-091756545.html',
'md5': '0b51660361f0e27c9789e7037ef76f4b',
'info_dict': {
'id': 'b3affa53-2e14-3590-852b-0e0db6cd1a58',
'ext': 'mp4',
'title': 'Cute Raccoon Freed From Drain\u00a0Using Angle Grinder',
'description': 'md5:f66c890e1490f4910a9953c941dee944',
'duration': 97,
}
},
{
'url': 'https://ca.sports.yahoo.com/video/program-makes-hockey-more-affordable-013127711.html',
'md5': '57e06440778b1828a6079d2f744212c4',
'info_dict': {
'id': 'c9fa2a36-0d4d-3937-b8f6-cc0fb1881e73',
'ext': 'mp4',
'title': 'Program that makes hockey more affordable not offered in Manitoba',
'description': 'md5:c54a609f4c078d92b74ffb9bf1f496f4',
'duration': 121,
}
}, {
'url': 'https://ca.finance.yahoo.com/news/hackers-sony-more-trouble-well-154609075.html',
'md5': '226a895aae7e21b0129e2a2006fe9690',
'info_dict': {
'id': 'e624c4bc-3389-34de-9dfc-025f74943409',
'ext': 'mp4',
'title': '\'The Interview\' TV Spot: War',
'description': 'The Interview',
'duration': 30,
}
}, {
'url': 'http://news.yahoo.com/video/china-moses-crazy-blues-104538833.html',
'md5': '88e209b417f173d86186bef6e4d1f160',
'info_dict': {
'id': 'f885cf7f-43d4-3450-9fac-46ac30ece521',
'ext': 'mp4',
'title': 'China Moses Is Crazy About the Blues',
'description': 'md5:9900ab8cd5808175c7b3fe55b979bed0',
'duration': 128,
}
}, {
'url': 'https://in.lifestyle.yahoo.com/video/connect-dots-dark-side-virgo-090247395.html',
'md5': 'd9a083ccf1379127bf25699d67e4791b',
'info_dict': {
'id': '52aeeaa3-b3d1-30d8-9ef8-5d0cf05efb7c',
'ext': 'mp4',
'title': 'Connect the Dots: Dark Side of Virgo',
'description': 'md5:1428185051cfd1949807ad4ff6d3686a',
'duration': 201,
}
}, {
'url': 'https://www.yahoo.com/movies/v/true-story-trailer-173000497.html',
'md5': '989396ae73d20c6f057746fb226aa215',
'info_dict': {
'id': '071c4013-ce30-3a93-a5b2-e0413cd4a9d1',
'ext': 'mp4',
'title': '\'True Story\' Trailer',
'description': 'True Story',
'duration': 150,
},
}, {
'url': 'https://gma.yahoo.com/pizza-delivery-man-surprised-huge-tip-college-kids-195200785.html',
'only_matching': True,
}, {
'note': 'NBC Sports embeds',
'url': 'http://sports.yahoo.com/blogs/ncaab-the-dagger/tyler-kalinoski-s-buzzer-beater-caps-davidson-s-comeback-win-185609842.html?guid=nbc_cbk_davidsonbuzzerbeater_150313',
'info_dict': {
'id': '9CsDKds0kvHI',
'ext': 'flv',
'description': 'md5:df390f70a9ba7c95ff1daace988f0d8d',
'title': 'Tyler Kalinoski hits buzzer-beater to lift Davidson',
}
}, {
'url': 'https://tw.news.yahoo.com/-100120367.html',
'only_matching': True,
}, {
# Query result is embedded in webpage, but explicit request to video API fails with geo restriction
'url': 'https://screen.yahoo.com/community/communitary-community-episode-1-ladders-154501237.html',
'md5': '4fbafb9c9b6f07aa8f870629f6671b35',
'info_dict': {
'id': '1f32853c-a271-3eef-8cb6-f6d6872cb504',
'ext': 'mp4',
'title': 'Communitary - Community Episode 1: Ladders',
'description': 'md5:8fc39608213295748e1e289807838c97',
'duration': 1646,
},
}, {
# it uses an alias to get the video_id
'url': 'https://www.yahoo.com/movies/the-stars-of-daddys-home-have-very-different-212843197.html',
'info_dict': {
'id': '40eda9c8-8e5f-3552-8745-830f67d0c737',
'ext': 'mp4',
'title': 'Will Ferrell & Mark Wahlberg Are Pro-Spanking',
'description': 'While they play feuding fathers in \'Daddy\'s Home,\' star Will Ferrell & Mark Wahlberg share their true feelings on parenthood.',
},
},
]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
display_id = mobj.group('display_id') or self._match_id(url)
page_id = mobj.group('id')
url = mobj.group('url')
host = mobj.group('host')
webpage = self._download_webpage(url, display_id)
# Look for iframed media first
iframe_m = re.search(r'<iframe[^>]+src="(/video/.+?-\d+\.html\?format=embed.*?)"', webpage)
if iframe_m:
iframepage = self._download_webpage(
host + iframe_m.group(1), display_id, 'Downloading iframe webpage')
items_json = self._search_regex(
r'mediaItems: (\[.+?\])$', iframepage, 'items', flags=re.MULTILINE, default=None)
if items_json:
items = json.loads(items_json)
video_id = items[0]['id']
return self._get_info(video_id, display_id, webpage)
# Look for NBCSports iframes
nbc_sports_url = NBCSportsVPlayerIE._extract_url(webpage)
if nbc_sports_url:
return self.url_result(nbc_sports_url, 'NBCSportsVPlayer')
# Query result is often embedded in webpage as JSON. Sometimes explicit requests
# to video API results in a failure with geo restriction reason therefore using
# embedded query result when present sounds reasonable.
config_json = self._search_regex(
r'window\.Af\.bootstrap\[[^\]]+\]\s*=\s*({.*?"applet_type"\s*:\s*"td-applet-videoplayer".*?});(?:</script>|$)',
webpage, 'videoplayer applet', default=None)
if config_json:
config = self._parse_json(config_json, display_id, fatal=False)
if config:
sapi = config.get('models', {}).get('applet_model', {}).get('data', {}).get('sapi')
if sapi:
return self._extract_info(display_id, sapi, webpage)
items_json = self._search_regex(
r'mediaItems: ({.*?})$', webpage, 'items', flags=re.MULTILINE,
default=None)
if items_json is None:
alias = self._search_regex(
r'"aliases":{"video":"(.*?)"', webpage, 'alias', default=None)
if alias is not None:
alias_info = self._download_json(
'https://www.yahoo.com/_td/api/resource/VideoService.videos;video_aliases=["%s"]' % alias,
display_id, 'Downloading alias info')
video_id = alias_info[0]['id']
else:
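                # No embedded media items and no alias: fall back to scraping a content/video id from the page markup.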
CONTENT_ID_REGEXES = [
r'YUI\.namespace\("Media"\)\.CONTENT_ID\s*=\s*"([^"]+)"',
r'root\.App\.Cache\.context\.videoCache\.curVideo = \{"([^"]+)"',
r'"first_videoid"\s*:\s*"([^"]+)"',
r'%s[^}]*"ccm_id"\s*:\s*"([^"]+)"' % re.escape(page_id),
r'<article[^>]data-uuid=["\']([^"\']+)',
r'yahoo://article/view\?.*\buuid=([^&"\']+)',
]
video_id = self._search_regex(
CONTENT_ID_REGEXES, webpage, 'content ID')
else:
items = json.loads(items_json)
info = items['mediaItems']['query']['results']['mediaObj'][0]
            # The 'meta' field is not always in the video webpage; we request it
            # from another page.
video_id = info['id']
return self._get_info(video_id, display_id, webpage)
def _extract_info(self, display_id, query, webpage):
info = query['query']['results']['mediaObj'][0]
meta = info.get('meta')
video_id = info.get('id')
if not meta:
msg = info['status'].get('msg')
if msg:
raise ExtractorError(
'%s returned error: %s' % (self.IE_NAME, msg), expected=True)
raise ExtractorError('Unable to extract media object meta')
formats = []
for s in info['streams']:
format_info = {
'width': int_or_none(s.get('width')),
'height': int_or_none(s.get('height')),
'tbr': int_or_none(s.get('bitrate')),
}
host = s['host']
path = s['path']
if host.startswith('rtmp'):
format_info.update({
'url': host,
'play_path': path,
'ext': 'flv',
})
else:
if s.get('format') == 'm3u8_playlist':
format_info['protocol'] = 'm3u8_native'
format_info['ext'] = 'mp4'
format_url = compat_urlparse.urljoin(host, path)
format_info['url'] = format_url
formats.append(format_info)
self._sort_formats(formats)
closed_captions = self._html_search_regex(
r'"closedcaptions":(\[[^\]]+\])', webpage, 'closed captions',
default='[]')
cc_json = self._parse_json(closed_captions, video_id, fatal=False)
subtitles = {}
if cc_json:
for closed_caption in cc_json:
lang = closed_caption['lang']
if lang not in subtitles:
subtitles[lang] = []
subtitles[lang].append({
'url': closed_caption['url'],
'ext': mimetype2ext(closed_caption['content_type']),
})
return {
'id': video_id,
'display_id': display_id,
'title': unescapeHTML(meta['title']),
'formats': formats,
'description': clean_html(meta['description']),
'thumbnail': meta['thumbnail'] if meta.get('thumbnail') else self._og_search_thumbnail(webpage),
'duration': int_or_none(meta.get('duration')),
'subtitles': subtitles,
}
def _get_info(self, video_id, display_id, webpage):
region = self._search_regex(
r'\\?"region\\?"\s*:\s*\\?"([^"]+?)\\?"',
webpage, 'region', fatal=False, default='US')
data = compat_urllib_parse.urlencode({
'protocol': 'http',
'region': region,
})
query_url = (
'https://video.media.yql.yahoo.com/v1/video/sapi/streams/'
'{id}?{data}'.format(id=video_id, data=data))
query_result = self._download_json(
query_url, display_id, 'Downloading video info')
return self._extract_info(display_id, query_result, webpage)
class YahooSearchIE(SearchInfoExtractor):
IE_DESC = 'Yahoo screen search'
_MAX_RESULTS = 1000
IE_NAME = 'screen.yahoo:search'
_SEARCH_KEY = 'yvsearch'
def _get_n_results(self, query, n):
"""Get a specified number of results for a query"""
entries = []
for pagenum in itertools.count(0):
result_url = 'http://video.search.yahoo.com/search/?p=%s&fr=screen&o=js&gs=0&b=%d' % (compat_urllib_parse.quote_plus(query), pagenum * 30)
info = self._download_json(result_url, query,
note='Downloading results page ' + str(pagenum + 1))
m = info['m']
results = info['results']
for (i, r) in enumerate(results):
if (pagenum * 30) + i >= n:
break
mobj = re.search(r'(?P<url>screen\.yahoo\.com/.*?-\d*?\.html)"', r)
e = self.url_result('http://' + mobj.group('url'), 'Yahoo')
entries.append(e)
if (pagenum * 30 + i >= n) or (m['last'] >= (m['total'] - 1)):
break
return {
'_type': 'playlist',
'id': query,
'entries': entries,
}
| lzambella/Qyoutube-dl | youtube_dl/extractor/yahoo.py | Python | gpl-3.0 | 15,027 |
from numpy import *
import matplotlib.pyplot as plt
#Define array for results
wav_array = zeros((900,900), dtype=float32)
#Define wave sources
pa = (300,450)
pb = (600,450)
#Loop over all cells in array to calculate values
for i in range(900):
for j in range(900):
adist = sin( sqrt((i-pa[0])**2 + (j-pa[1])**2))
bdist = sin( sqrt((i-pb[0])**2 + (j-pb[1])**2))
wav_array[i][j] = (adist+bdist)/2
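
# A vectorized equivalent of the double loop above (a sketch; the plotting
# below still uses wav_array from the loop). numpy broadcasting over an open
# grid gives the same result without per-cell Python iteration.
def wave_array_vectorized(size=900, a=(300, 450), b=(600, 450)):
    i, j = ogrid[0:size, 0:size]
    adist = sin(sqrt((i - a[0])**2 + (j - a[1])**2))
    bdist = sin(sqrt((i - b[0])**2 + (j - b[1])**2))
    return (adist + bdist) / 2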
#Show the results
plt.imshow(wav_array)
plt.clim(-1,1);
plt.set_cmap('gray')
plt.axis('off')
plt.show() | AdamHarries/CUDA_Talk | code/waves_normal.py | Python | unlicense | 506 |
from sms import *
| cheddarfinancial/quarry-platform | chassis/sms/__init__.py | Python | bsd-3-clause | 18 |
# -*- coding: utf-8 -*-
#
# 2020-06-04 Cornelius Kölbel <[email protected]>
# Initial Code
#
# This program is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public
# License, version 3, as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the
# GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
__doc__ = """This is the SMSClass to send SMS via a script.
"""
from privacyidea.lib.smsprovider.SMSProvider import (ISMSProvider, SMSError)
from privacyidea.lib import _
from privacyidea.lib.framework import get_app_config_value
import subprocess
import logging
import traceback
log = logging.getLogger(__name__)
SCRIPT_BACKGROUND = "background"
SCRIPT_WAIT = "wait"
class ScriptSMSProvider(ISMSProvider):
def __init__(self, db_smsprovider_object=None, smsgateway=None, directory=None):
"""
        Create a new SMS Provider object from a DB SMS provider object
:param db_smsprovider_object: The database object
:param smsgateway: The SMS gateway object from the database table
SMS gateway. The options can be accessed via
self.smsgateway.option_dict
:param directory: The directory where the SMS sending scripts are located.
:type directory: str
:return: An SMS provider object
"""
self.config = db_smsprovider_object or {}
self.smsgateway = smsgateway
self.script_directory = directory or get_app_config_value("PI_SCRIPT_SMSPROVIDER_DIRECTORY",
"/etc/privacyidea/scripts")
def submit_message(self, phone, message):
"""
send a message to a phone using an external script
:param phone: the phone number
:param message: the message to submit to the phone
:return:
"""
log.debug("submitting message {0!s} to {1!s}".format(message, phone))
if not self.smsgateway:
# this should not happen. We now always use sms gateway definitions.
log.warning("Missing smsgateway definition!")
raise SMSError(-1, "Missing smsgateway definition!")
script = self.smsgateway.option_dict.get("script")
background = self.smsgateway.option_dict.get("background")
script_name = self.script_directory + "/" + script
proc_args = [script_name]
proc_args.append(phone)
# As the message can contain blanks... it is passed via stdin
rcode = 0
try:
log.info("Starting script {script!r}.".format(script=script_name))
p = subprocess.Popen(proc_args, cwd=self.script_directory, universal_newlines=True, stdin=subprocess.PIPE)
p.communicate(message)
if background == SCRIPT_WAIT:
rcode = p.wait()
except Exception as e:
log.warning("Failed to execute script {0!r}: {1!r}".format(
script_name, e))
log.warning(traceback.format_exc())
if background == SCRIPT_WAIT:
raise SMSError(-1, "Failed to start script for sending SMS.")
if rcode:
log.warning("Script {script!r} failed to execute with error code {error!r}".format(script=script_name,
error=rcode))
raise SMSError(-1, "Error during execution of the script.")
else:
log.info("SMS delivered to {0!s}.".format(phone))
return True
@classmethod
def parameters(cls):
"""
Return a dictionary, that describes the parameters and options for the
SMS provider.
Parameters are required keys to values.
:return: dict
"""
params = {"options_allowed": False,
"parameters": {
"script": {
"required": True,
"description": _("The script in script directory PI_SCRIPT_SMSPROVIDER_DIRECTORY to call. "
"Expects phone as the parameter and the message from stdin.")
},
"background": {
"required": True,
"description": _("Wait for script to complete or run script in background. This will "
"either return the HTTP request early or could also block the request."),
"values": [SCRIPT_BACKGROUND, SCRIPT_WAIT]}
}
}
return params
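
if __name__ == "__main__":  # pragma: no cover
    # Illustrative sketch, not part of the original module: gateway scripts in
    # PI_SCRIPT_SMSPROVIDER_DIRECTORY are called with the phone number as their
    # single argument and receive the message text on stdin. Running this file
    # directly emulates such a script, which documents the calling convention.
    import sys
    demo_phone = sys.argv[1] if len(sys.argv) > 1 else "+15551234567"
    demo_message = sys.stdin.read()
    print("SMS to {0!s}: {1!s}".format(demo_phone, demo_message))
    # A non-zero exit code marks the SMS as failed when background == "wait".
    sys.exit(0)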
| privacyidea/privacyidea | privacyidea/lib/smsprovider/ScriptSMSProvider.py | Python | agpl-3.0 | 5,045 |
INSTALLED_APPS = (
'geelweb.django.editos',
)
SECRET_KEY = '$-muc7nzd^9g9z^f+^8@^inpbpixz=)d8k%g%qsgrw*3+)^1t_'
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
# Insert your TEMPLATE_CONTEXT_PROCESSORS here or use this
# list if you haven't customized them:
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
],
},
},
]
| geelweb/django-editos | tests/test_settings.py | Python | mit | 1,471 |
from lacuna.building import MyBuilding
class citadelofknope(MyBuilding):
path = 'citadelofknope'
def __init__( self, client, body_id:int = 0, building_id:int = 0 ):
super().__init__( client, body_id, building_id )
| tmtowtdi/MontyLacuna | lib/lacuna/buildings/permanent/citadelofknope.py | Python | mit | 233 |
from django.utils.translation import ugettext_lazy as _
STATUS_NEW = 0
STATUS_SUCCESS = 1
STATUS_FAILED = 2
STATUSES = (
(STATUS_NEW, _('new')),
(STATUS_SUCCESS, _('success')),
(STATUS_FAILED, _('failed')),
)
GITHUB_STATES = {
STATUS_NEW: 'pending',
STATUS_SUCCESS: 'success',
STATUS_FAILED: 'failure',
}
GITHUB_DESCRIPTION_OK = 'coviolations.io mark commit as safe'
GITHUB_DESCRIPTION_FAIL = 'coviolations.io mark commit as unsafe'
| nvbn/coviolations_web | tasks/const.py | Python | mit | 462 |
"""Test for the tutorials.utils package"""
import datetime
import unittest
from mock import patch
from django.template import Template
from ..utils import send_email_message
today = datetime.date.today()
class TestSendEmailMessage(unittest.TestCase):
@patch('django.core.mail.message.EmailMessage.send')
@patch('pycon.tutorials.utils.get_template')
def test_send_email_message(self, get_template, send_mail):
# send_email_message comes up with the expected template names
# and calls send_mail with the expected arguments
test_template = Template("test template")
get_template.return_value = test_template
context = {'a': 1, 'b': 2}
send_email_message("TESTNAME", "from_address", [1, 2], [], context)
args, kwargs = get_template.call_args_list[0]
expected_template_name = "tutorials/email/TESTNAME/subject.txt"
self.assertEqual(expected_template_name, args[0])
args, kwargs = get_template.call_args_list[1]
expected_template_name = "tutorials/email/TESTNAME/body.txt"
self.assertEqual(expected_template_name, args[0])
self.assertEqual(1, send_mail.call_count)
| pyconjp/pyconjp-website | pycon/tutorials/tests/test_utils.py | Python | bsd-3-clause | 1,188 |
""" Simple Python class to access the Tesla JSON API
https://github.com/gglockner/teslajson
The Tesla JSON API is described at:
https://tesla-api.timdorr.com
Example:
import teslajson
c = teslajson.Connection('youremail', 'yourpassword')
v = c.vehicles[0]
v.wake_up()
v.data_request('charge_state')
v.command('charge_start')
"""
import requests_oauthlib
import string
import random
import base64
import hashlib
import re
import oauthlib.oauth2
class Connection(object):
"""Connection to Tesla Motors API"""
def __init__(self,
email='',
password='',
mfa='',
mfa_id='',
**kwargs):
"""Initialize connection object
Sets the vehicles field, a list of Vehicle objects
associated with your account
Required parameters:
email: your login for teslamotors.com
password: your password for teslamotors.com
Optional parameters:
mfa: multifactor passcode
mfa_id: multifactor id (if you have multiple MFA devices)
"""
self.email = email
self.password = password
self.mfa = {
'passcode': mfa,
'factor_id': mfa_id
}
self.uri = {
'sso': "https://auth.tesla.com",
'api': "https://owner-api.teslamotors.com"
}
self.uri['data'] = self.uri['api'] + "/api/1/"
self.uri['oauth'] = self.uri['sso'] + "/oauth2/v3/"
self.api = {
'client_id': "81527cff06843c8634fdc09e8ac0abefb46ac849f38fe1e431c2ef2106796384",
'client_secret': "c7257eb71a564034f9419ee651c7d0e5f7aa6bfbd18bafb5c5c033b093bb2fa3"
}
self.session = {}
self.fetch_token(**kwargs)
# Get vehicles
self.vehicles = [Vehicle(v, self) for v in self.request('GET', 'vehicles')]
def fetch_sso_token(self, **kwargs):
redirect_uri = self.uri['sso'] + "/void/callback"
# Step 1: Obtain the login page
self.session['sso'] = requests_oauthlib.OAuth2Session(
redirect_uri = redirect_uri,
client_id='ownerapi',
**kwargs)
self.__randchars = string.ascii_lowercase+string.digits
code_verifier = self.__randstr(86)
hexdigest = hashlib.sha256(code_verifier.encode('utf-8')).hexdigest()
code_challenge = base64.urlsafe_b64encode(hexdigest.encode('utf-8')).decode('utf-8')
login_uri = self.uri['oauth']+"authorize"
self.session['sso'].params = {
'client_id': 'ownerapi',
'code_challenge': code_challenge,
'code_challenge_method': 'S256',
'redirect_uri': redirect_uri,
'response_type': 'code',
'scope': 'openid email offline_access',
'state': self.__randstr(24) }
r = self.session['sso'].get(login_uri)
r.raise_for_status()
login_data = dict(re.findall(
'<input type="hidden" name="([^"]*)" value="([^"]*)"', r.text))
# Step 2: Obtain an authorization code
login_data['identity'] = self.email
login_data['credential'] = self.password
r = self.session['sso'].post(login_uri, data=login_data, allow_redirects=False)
r.raise_for_status()
# Handle MFA
if (re.search('passcode', r.text)):
if not self.mfa['passcode']:
raise RuntimeError('MFA passcode is required')
self.mfa['transaction_id'] = login_data['transaction_id']
if not self.mfa['factor_id']:
r = self.session['sso'].get(self.uri['oauth']+"authorize/mfa/factors",
params=self.mfa)
r.raise_for_status()
self.mfa['factor_id'] = r.json()['data'][0]['id']
r = self.session['sso'].post(self.uri['oauth']+"authorize/mfa/verify",
json=self.mfa)
r.raise_for_status()
if not r.json()['data']['valid']:
raise RuntimeError('Invalid MFA passcode')
r = self.session['sso'].post(login_uri,
data={'transaction_id': login_data['transaction_id']},
allow_redirects=False)
r.raise_for_status()
m = re.search('code=([^&]*)',r.headers['location'])
authorization_code = m.group(1)
# Step 3: Exchange authorization code for bearer token
self.session['sso'].params = None
self.session['sso'].token_url = self.uri['oauth']+"token"
self.session['sso'].fetch_token(
self.session['sso'].token_url,
code=authorization_code, code_verifier=code_verifier,
include_client_id=True)
def fetch_api_token(self, **kwargs):
# Step 4: Exchange bearer token for access token
# (Create the main oauth2 session by calling the super initializer)
client = oauthlib.oauth2.BackendApplicationClient(
client_id=self.api['client_id'], token_type='Bearer')
client.grant_type = 'urn:ietf:params:oauth:grant-type:jwt-bearer'
self.session['api'] = requests_oauthlib.OAuth2Session(client=client, **kwargs)
self.session['api'].token_url = self.uri['api']+'/oauth/token'
self.session['api'].fetch_token(
self.session['api'].token_url,
client_secret=self.api['client_secret'],
headers={'Authorization': 'Bearer %s' % self.session['sso'].token['access_token']},
include_client_id=True)
def fetch_token(self, **kwargs):
# Fetch both tokens
self.fetch_sso_token(**kwargs)
self.fetch_api_token(**kwargs)
def refresh_token(self):
# Note: We only need to refresh the API token
self.session['api'].refresh_token(self.session['api'].token_url)
def __randstr(self, n):
return ''.join(random.choice(self.__randchars) for i in range(n))
def request(self, method, command, rdata=None):
"""Utility command to process API request
"""
try:
r = self.session['api'].request(method, self.uri['data'] + command, data=rdata)
except oauthlib.oauth2.TokenExpiredError as e:
self.refresh_token()
            return self.request(method, command, rdata)
r.raise_for_status()
return r.json()['response']
class Vehicle(dict):
"""Vehicle class, subclassed from dictionary.
There are 3 primary methods: wake_up, data_request and command.
data_request and command both require a name to specify the data
or command, respectively. These names can be found in the
Tesla JSON API."""
def __init__(self, data, connection):
"""Initialize vehicle class
Called automatically by the Connection class
"""
super(Vehicle, self).__init__(data)
self.connection = connection
def data_request(self, name):
"""Get vehicle data"""
return self.get('data_request/%s' % name)
def wake_up(self):
"""Wake the vehicle"""
return self.post('wake_up')
def command(self, name, data={}):
"""Run the command for the vehicle"""
return self.post('command/%s' % name, data)
def get(self, command):
"""Utility command to get data from API"""
return self.connection.request('GET', 'vehicles/%i/%s' % (self['id'], command))
def post(self, command, data={}):
"""Utility command to post data to API"""
return self.connection.request('POST', 'vehicles/%i/%s' % (self['id'], command), data)
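
if __name__ == '__main__':
    # Minimal usage sketch (illustration only), mirroring the example in the
    # module docstring. Credentials are read interactively rather than
    # hard-coded, and the calls below hit the real Tesla API.
    import getpass
    c = Connection(input('Tesla account email: '),
                   getpass.getpass('Password: '))
    v = c.vehicles[0]
    v.wake_up()
    print(v.data_request('charge_state'))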
| gglockner/teslajson | teslajson/teslajson.py | Python | mit | 7,625 |
# coding=utf-8
# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
from future.utils import PY3
def assertRegex(self, text, expected_regex, msg=None):
"""Call unit test assertion to ensure regex matches.
While Py3 still has assertRegexpMatches, it's deprecated. This function also exists for consistency with
our helper function assertNotRegex, which *is* required for Py2-3 compatibility.
"""
if PY3:
self.assertRegex(text, expected_regex, msg)
else:
self.assertRegexpMatches(text, expected_regex, msg)
def assertNotRegex(self, text, unexpected_regex, msg=None):
"""Call unit test assertion to ensure regex does not match.
Required for compatibility because Py3.4 does not have assertNotRegexpMatches.
"""
if PY3:
self.assertNotRegex(text, unexpected_regex, msg)
else:
self.assertNotRegexpMatches(text, unexpected_regex, msg)
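
if __name__ == '__main__':
  # Self-contained usage sketch (illustration only): the helpers take the test
  # case instance explicitly, so they are called as plain functions from inside
  # a TestCase method rather than as bound methods.
  import unittest

  class ExampleCompatTest(unittest.TestCase):
    def test_regex_helpers(self):
      assertRegex(self, 'pants 1.9.0', r'\d+\.\d+\.\d+')
      assertNotRegex(self, 'pants 1.9.0', r'ERROR')

  unittest.main()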
| twitter/pants | tests/python/pants_test/testutils/py2_compat.py | Python | apache-2.0 | 1,035 |
import numpy as np
import pandas as pd
from bokeh.document import Document
from bokeh.embed import file_html
from bokeh.layouts import column, gridplot
from bokeh.models import (Circle, ColumnDataSource, Div, Grid,
Line, LinearAxis, Plot, Range1d,)
from bokeh.resources import INLINE
from bokeh.util.browser import view
raw_columns=[
[10.0, 8.04, 10.0, 9.14, 10.0, 7.46, 8.0, 6.58],
[8.0, 6.95, 8.0, 8.14, 8.0, 6.77, 8.0, 5.76],
[13.0, 7.58, 13.0, 8.74, 13.0, 12.74, 8.0, 7.71],
[9.0, 8.81, 9.0, 8.77, 9.0, 7.11, 8.0, 8.84],
[11.0, 8.33, 11.0, 9.26, 11.0, 7.81, 8.0, 8.47],
[14.0, 9.96, 14.0, 8.10, 14.0, 8.84, 8.0, 7.04],
[6.0, 7.24, 6.0, 6.13, 6.0, 6.08, 8.0, 5.25],
[4.0, 4.26, 4.0, 3.10, 4.0, 5.39, 19.0, 12.5],
[12.0, 10.84, 12.0, 9.13, 12.0, 8.15, 8.0, 5.56],
[7.0, 4.82, 7.0, 7.26, 7.0, 6.42, 8.0, 7.91],
[5.0, 5.68, 5.0, 4.74, 5.0, 5.73, 8.0, 6.89]]
quartet = pd.DataFrame(data=raw_columns, columns=
['Ix','Iy','IIx','IIy','IIIx','IIIy','IVx','IVy'])
circles_source = ColumnDataSource(
data = dict(
xi = quartet['Ix'],
yi = quartet['Iy'],
xii = quartet['IIx'],
yii = quartet['IIy'],
xiii = quartet['IIIx'],
yiii = quartet['IIIy'],
xiv = quartet['IVx'],
yiv = quartet['IVy'],
)
)
x = np.linspace(-0.5, 20.5, 10)
y = 3 + 0.5 * x
lines_source = ColumnDataSource(data=dict(x=x, y=y))
xdr = Range1d(start=-0.5, end=20.5)
ydr = Range1d(start=-0.5, end=20.5)
def make_plot(title, xname, yname):
plot = Plot(x_range=xdr, y_range=ydr, plot_width=400, plot_height=400,
background_fill_color='#efefef')
plot.title.text = title
xaxis = LinearAxis(axis_line_color=None)
plot.add_layout(xaxis, 'below')
yaxis = LinearAxis(axis_line_color=None)
plot.add_layout(yaxis, 'left')
plot.add_layout(Grid(dimension=0, ticker=xaxis.ticker))
plot.add_layout(Grid(dimension=1, ticker=yaxis.ticker))
line = Line(x='x', y='y', line_color="#666699", line_width=2)
plot.add_glyph(lines_source, line)
circle = Circle(
x=xname, y=yname, size=12,
fill_color="#cc6633", line_color="#cc6633", fill_alpha=0.5
)
plot.add_glyph(circles_source, circle)
return plot
# Build the four panels of the quartet
I = make_plot('I', 'xi', 'yi')
II = make_plot('II', 'xii', 'yii')
III = make_plot('III', 'xiii', 'yiii')
IV = make_plot('IV', 'xiv', 'yiv')
grid = gridplot([[I, II], [III, IV]], toolbar_location=None)
div = Div(text="""
<h1>Anscombe's Quartet</h1>
<p>Anscombe's quartet is a collection of four small datasets that have nearly
identical simple descriptive statistics (mean, variance, correlation, and linear
regression lines), yet appear very different when graphed.
</p>
""")
doc = Document()
doc.add_root(column(div, grid, sizing_mode="scale_width"))
if __name__ == "__main__":
doc.validate()
filename = "anscombe.html"
with open(filename, "w") as f:
f.write(file_html(doc, INLINE, "Anscombe's Quartet"))
print("Wrote %s" % filename)
view(filename)
| ericmjl/bokeh | examples/models/file/anscombe.py | Python | bsd-3-clause | 3,276 |
# -*- coding: utf-8 -*-
"""
@author: Satoshi Hara
"""
import sys
sys.path.append('../')
from defragTrees import *
import BATree
from RForest import RForest
import numpy as np
import re
from sklearn import tree
from sklearn.grid_search import GridSearchCV
#************************
# inTree Class
#************************
class inTreeModel(RuleModel):
def __init__(self, modeltype='regression'):
super().__init__(modeltype=modeltype)
#************************
# Fit and Related Methods
#************************
def fit(self, X, y, filename, featurename=[]):
self.dim_ = X.shape[1]
self.setfeaturename(featurename)
self.setdefaultpred(y)
if self.modeltype_ == 'regression':
v1 = np.percentile(y, 17)
v2 = np.percentile(y, 50)
v3 = np.percentile(y, 83)
val = (v1, v2, v3)
mdl = self.__parsInTreesFile(filename)
for m in mdl:
if m[3] == 'X[,1]==X[,1]':
self.rule_.append([])
else:
subrule = []
ll = m[3].split(' & ')
for r in ll:
id1 = r.find(',') + 1
id2 = r.find(']')
idx = int(r[id1:id2])
if '>' in r:
v = 1
id1 = r.find('>') + 1
t = float(r[id1:])
else:
v = 0
id1 = r.find('<=') + 2
t = float(r[id1:])
subrule.append((idx, v, t))
self.rule_.append(subrule)
if self.modeltype_ == 'classification':
self.pred_.append(int(m[4]))
elif self.modeltype_ == 'regression':
if m[4] == 'L1':
self.pred_.append(val[0])
elif m[4] == 'L2':
self.pred_.append(val[1])
elif m[4] == 'L3':
self.pred_.append(val[2])
self.weight_ = np.arange(len(self.rule_))[::-1].tolist()
def __parsInTreesFile(self, filename):
f = open(filename)
line = f.readline()
mdl = []
while line:
            if '[' not in line:
line = f.readline()
continue
id1 = line.find('[') + 1
id2 = line.find(',')
idx = int(line[id1:id2])
if idx > len(mdl):
mdl.append(re.findall(r'"([^"]*)"', line))
else:
mdl[idx-1] += re.findall(r'"([^"]*)"', line)
line = f.readline()
f.close()
return mdl
#************************
# NHarvest Class
#************************
class NHarvestModel(RuleModel):
def __init__(self, modeltype='regression'):
super().__init__(modeltype=modeltype)
#************************
# Fit and Related Methods
#************************
def fit(self, X, y, filename, featurename=[]):
self.dim_ = X.shape[1]
self.setfeaturename(featurename)
rule, pred, weight = self.__parsNHarvestFile(filename)
self.setdefaultpred(pred[-1])
idx = np.argsort(weight[:-1])[::-1]
self.rule_ = [rule[i] for i in idx]
if self.modeltype_ == 'regression':
self.pred_ = [pred[i] for i in idx]
elif self.modeltype_ == 'classification':
self.pred_ = (np.array([pred[i] for i in idx]) > 0.5).astype(int).tolist()
self.weight_ = [weight[i] for i in idx]
def __parsNHarvestFile(self, filename):
f = open(filename)
line = f.readline()
rule = []
pred = []
weight = []
while line:
f.readline()
subrule = []
line = f.readline()
while (line[0] != 'a'):
s = line.split()
idx = int(s[1])
low = float(s[2])
up = float(s[3])
if not np.isinf(low):
subrule.append((idx, 1, low))
if not np.isinf(up):
subrule.append((idx, 0, up))
line = f.readline()
if (len(subrule) > 0):
rule.append(subrule)
while True:
line = f.readline()
if (line[0] == 'a'):
s = line.split('"')
if (s[1] == 'predict'):
break
line = f.readline()
s = line.split()
pred.append(float(s[1]))
f.readline()
line = f.readline()
s = line.split()
weight.append(float(s[1]))
line = f.readline()
line = f.readline()
line = f.readline()
line = f.readline()
if not line[:2] == '[[':
break
f.close()
return rule, pred, weight
#************************
# DTree Class
#************************
class DTreeModel(RuleModel):
def __init__(self, modeltype='regression', max_depth=[None, 2, 4, 6, 8], min_samples_leaf=[5, 10, 20, 30], cv=5):
super().__init__(modeltype=modeltype)
self.max_depth_ = max_depth
self.min_samples_leaf_ = min_samples_leaf
self.cv_ = cv
#************************
# Fit and Related Methods
#************************
def fit(self, X, y, featurename=[]):
self.dim_ = X.shape[1]
self.setfeaturename(featurename)
self.setdefaultpred(y)
param_grid = {"max_depth": self.max_depth_, "min_samples_leaf": self.min_samples_leaf_}
if self.modeltype_ == 'regression':
mdl = tree.DecisionTreeRegressor()
elif self.modeltype_ == 'classification':
mdl = tree.DecisionTreeClassifier()
grid_search = GridSearchCV(mdl, param_grid=param_grid, cv=self.cv_)
grid_search.fit(X, y)
mdl = grid_search.best_estimator_
self.__parseTree(mdl)
self.weight_ = np.ones(len(self.rule_))
def __parseTree(self, mdl):
t = mdl.tree_
m = len(t.value)
left = t.children_left
right = t.children_right
feature = t.feature
threshold = t.threshold
value = t.value
parent = [-1] * m
ctype = [-1] * m
for i in range(m):
if not left[i] == -1:
parent[left[i]] = i
ctype[left[i]] = 0
if not right[i] == -1:
parent[right[i]] = i
ctype[right[i]] = 1
for i in range(m):
if not left[i] == -1:
continue
subrule = []
c = ctype[i]
idx = parent[i]
while not idx == -1:
subrule.append((int(feature[idx])+1, c, threshold[idx]))
c = ctype[idx]
idx = parent[idx]
self.rule_.append(subrule)
if np.array(value[i]).size > 1:
self.pred_.append(np.argmax(np.array(value[i])))
else:
self.pred_.append(np.asscalar(value[i]))
#************************
# BTree Class
#************************
class BTreeModel(RuleModel):
def __init__(self, modeltype='regression', max_depth=[2, 3, 4, 6, 8, 10], min_samples_leaf=[10], cv=5, smear_num=100, njobs=1, seed=0):
super().__init__(modeltype=modeltype)
self.max_depth_ = max_depth
self.min_samples_leaf_ = min_samples_leaf
self.cv_ = cv
self.smear_num_ = smear_num
self.njobs_ = njobs
self.seed_ = seed
#************************
# Fit and Related Methods
#************************
def fit(self, X, y, dirname, featurename=[]):
self.dim_ = X.shape[1]
self.setfeaturename(featurename)
self.setdefaultpred(y)
mdl = RForest(modeltype=self.modeltype_)
mdl.fit(dirname)
tree = BATree.fitBATreeCV(X, y, mdl, modeltype=self.modeltype_, max_depth=self.max_depth_, min_samples_split=self.min_samples_leaf_, cv=self.cv_, seed=self.seed_, smear_num=self.smear_num_, njobs=self.njobs_)
self.__parseTree(tree)
self.weight_ = np.ones(len(self.rule_))
return tree
def __parseTree(self, tree):
m = len(tree.pred_)
left = tree.left_
right = tree.right_
feature = tree.index_
threshold = tree.threshold_
value = tree.pred_
parent = [-1] * m
ctype = [-1] * m
for i in range(m):
if not left[i] == -1:
parent[left[i]] = i
ctype[left[i]] = 0
if not right[i] == -1:
parent[right[i]] = i
ctype[right[i]] = 1
for i in range(m):
if not left[i] == -1:
continue
subrule = []
c = ctype[i]
idx = parent[i]
while not idx == -1:
subrule.append((int(feature[idx])+1, c, threshold[idx]))
c = ctype[idx]
idx = parent[idx]
self.rule_.append(subrule)
if np.array(value[i]).size > 1:
self.pred_.append(np.argmax(np.array(value[i])))
else:
self.pred_.append(np.asscalar(value[i]))
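
if __name__ == '__main__':
    # Minimal smoke-test sketch (illustration only, not part of the original
    # module): fit the plain decision tree baseline on a tiny synthetic
    # classification problem. It assumes the same (older) scikit-learn that
    # provides sklearn.grid_search, as imported above. The other baselines
    # need external inTrees/nodeHarvest output files or a forest directory,
    # so they are not exercised here.
    rng = np.random.RandomState(0)
    X = rng.rand(200, 2)
    y = (X[:, 0] + X[:, 1] > 1.0).astype(int)
    mdl = DTreeModel(modeltype='classification', max_depth=[2, 4], min_samples_leaf=[10])
    mdl.fit(X, y, featurename=['x1', 'x2'])
    print('learned {0} rules'.format(len(mdl.rule_)))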
| sato9hara/defragTrees | paper/baselines/Baselines.py | Python | mit | 9,354 |
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='cool-commits',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# https://packaging.python.org/en/latest/single_source_version.html
version='0.1.2',
description="Find the coolest git commits' hashes in your project!",
long_description=long_description,
url='https://github.com/OrDuan/cool_commits',
# Author details
author='Or Duan',
author_email='[email protected]',
# Choose your license
license='Apache Software License',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Version Control :: Git',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.5',
],
python_requires='>=3.5',
# What does your project relate to?
keywords='git cool commits hashes ',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
packages=find_packages(),
)
| OrDuan/cool_commits | setup.py | Python | apache-2.0 | 1,475 |
class C:
def __init__(self):
self.instance_attr = 42
match C():
case C(instance<caret>=True):
pass | jwren/intellij-community | python/testData/completion/existingKeywordPattern.py | Python | apache-2.0 | 124 |