| repo_name (stringlengths 5-92) | path (stringlengths 4-232) | copies (stringclasses, 19 values) | size (stringlengths 4-7) | content (stringlengths 721-1.04M) | license (stringclasses, 15 values) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51-99.9) | line_max (int64, 15-997) | alpha_frac (float64, 0.25-0.97) | autogenerated (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|
tankca/store | store/dummy_data/orders.py | 1 | 1720 |
"""Order dummy data."""
from store.orders.models import Order
# from store.orders.models import OrderConsistsOf
# Orders
o1 = Order('cid1')
o2 = Order('cid2')
o3 = Order('cid3')
o4 = Order('cid4')
o5 = Order('cid5')
o6 = Order('cid6')
o7 = Order('cid7')
o8 = Order('cid8')
o9 = Order('cid9')
o10 = Order('cid10')
o11 = Order('cid1')
o12 = Order('cid1')
o13 = Order('cid1')
o14 = Order('cid2')
o15 = Order('cid7')
o16 = Order('cid7')
o17 = Order('cid7')
o18 = Order('cid3')
# oco1 = OrderConsistsOf(o1.id, '9780439139595', 3)
# oco2 = OrderConsistsOf(o2.id, '9780345803498', 5)
# oco3 = OrderConsistsOf(o3.id, '9780345803498', 1)
# oco4 = OrderConsistsOf(o4.id, '9780345803481', 2)
# oco5 = OrderConsistsOf(o5.id, '9780545139700', 5)
# oco6 = OrderConsistsOf(o6.id, '9780439358071', 4)
# oco7 = OrderConsistsOf(o7.id, '9780439139595', 5)
# oco8 = OrderConsistsOf(o8.id, '9780439784542', 3)
# oco9 = OrderConsistsOf(o9.id, '9780439784542', 5)
# oco10 = OrderConsistsOf(o10.id, '9780439784542', 15)
sample_list = []
# Orders
sample_list.append(o1)
sample_list.append(o2)
sample_list.append(o3)
sample_list.append(o4)
sample_list.append(o5)
sample_list.append(o6)
sample_list.append(o7)
sample_list.append(o8)
sample_list.append(o9)
sample_list.append(o10)
sample_list.append(o11)
sample_list.append(o12)
sample_list.append(o13)
sample_list.append(o14)
sample_list.append(o15)
sample_list.append(o16)
sample_list.append(o17)
sample_list.append(o18)
# OrderConsistsOf
# sample_list.append(oco1)
# sample_list.append(oco2)
# sample_list.append(oco3)
# sample_list.append(oco4)
# sample_list.append(oco5)
# sample_list.append(oco6)
# sample_list.append(oco7)
# sample_list.append(oco8)
# sample_list.append(oco9)
# sample_list.append(oco10)
| bsd-3-clause | 8,301,438,605,723,252,000 | 23.585714 | 52 | 0.72093 | false |
Captain-Coder/tribler | Tribler/Test/Core/Libtorrent/test_libtorrent_mgr.py | 1 | 19035 |
import binascii
import os
import shutil
import tempfile
from libtorrent import bencode
from twisted.internet.task import deferLater
from Tribler.Test.tools import trial_timeout
from twisted.internet.defer import inlineCallbacks, Deferred
from twisted.internet import reactor
from Tribler.Core.CacheDB.Notifier import Notifier
from Tribler.Core.Libtorrent.LibtorrentDownloadImpl import LibtorrentDownloadImpl
from Tribler.Core.Libtorrent.LibtorrentMgr import LibtorrentMgr
from Tribler.Core.exceptions import TorrentFileException
from Tribler.Test.Core.base_test import MockObject
from Tribler.Test.test_as_server import AbstractServer
class TestLibtorrentMgr(AbstractServer):
FILE_DIR = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
LIBTORRENT_FILES_DIR = os.path.abspath(os.path.join(FILE_DIR, u"../data/libtorrent/"))
@inlineCallbacks
def setUp(self):
yield super(TestLibtorrentMgr, self).setUp()
self.tribler_session = MockObject()
self.tribler_session.lm = MockObject()
self.tribler_session.notifier = Notifier()
self.tribler_session.state_dir = self.session_base_dir
self.tribler_session.trustchain_keypair = MockObject()
self.tribler_session.trustchain_keypair.key_to_hash = lambda: 'a' * 20
self.tribler_session.notify_shutdown_state = lambda _: None
self.tribler_session.config = MockObject()
self.tribler_session.config.get_libtorrent_utp = lambda: True
self.tribler_session.config.get_libtorrent_proxy_settings = lambda: (0, None, None)
self.tribler_session.config.get_anon_proxy_settings = lambda: (2, ('127.0.0.1', [1338]), None)
self.tribler_session.config.get_libtorrent_port = lambda: 1337
self.tribler_session.config.get_anon_listen_port = lambda: 1338
self.tribler_session.config.get_state_dir = lambda: self.session_base_dir
self.tribler_session.config.set_listen_port_runtime = lambda: None
self.tribler_session.config.get_libtorrent_max_upload_rate = lambda: 100
self.tribler_session.config.get_libtorrent_max_download_rate = lambda: 120
self.tribler_session.config.get_libtorrent_dht_enabled = lambda: False
self.tribler_session.config.set_libtorrent_port_runtime = lambda _: None
self.ltmgr = LibtorrentMgr(self.tribler_session)
@inlineCallbacks
def tearDown(self):
self.ltmgr.shutdown(timeout=0)
self.assertTrue(os.path.exists(os.path.join(self.session_base_dir, 'lt.state')))
yield super(TestLibtorrentMgr, self).tearDown()
def test_get_session_zero_hops(self):
self.ltmgr.initialize()
ltsession = self.ltmgr.get_session(0)
self.assertTrue(ltsession)
def test_get_session_one_hop(self):
self.ltmgr.initialize()
ltsession = self.ltmgr.get_session(1)
self.assertTrue(ltsession)
def test_get_session_zero_hops_corrupt_lt_state(self):
file = open(os.path.join(self.session_base_dir, 'lt.state'), "w")
file.write("Lorem ipsum")
file.close()
self.ltmgr.initialize()
ltsession = self.ltmgr.get_session(0)
self.assertTrue(ltsession)
def test_get_session_zero_hops_working_lt_state(self):
shutil.copy(os.path.join(self.LIBTORRENT_FILES_DIR, 'lt.state'),
os.path.join(self.session_base_dir, 'lt.state'))
self.ltmgr.initialize()
ltsession = self.ltmgr.get_session(0)
self.assertTrue(ltsession)
def test_get_metainfo_not_ready(self):
"""
Testing the metainfo fetching method when the DHT is not ready
"""
self.ltmgr.initialize()
self.assertFalse(self.ltmgr.get_metainfo("a" * 20, None))
@trial_timeout(20)
def test_get_metainfo(self):
"""
Testing the metainfo fetching method
"""
test_deferred = Deferred()
def metainfo_cb(metainfo):
self.assertEqual(metainfo, {'info': {'pieces': ['a']}, 'leechers': 0,
'nodes': [], 'seeders': 0, 'initial peers': []})
test_deferred.callback(None)
infohash = "a" * 20
self.ltmgr.initialize()
torrent_info = MockObject()
torrent_info.metadata = lambda: bencode({'pieces': ['a']})
torrent_info.trackers = lambda: []
fake_handle = MockObject()
fake_handle.is_valid = lambda: True
fake_handle.has_metadata = lambda: True
fake_handle.get_peer_info = lambda: []
fake_handle.torrent_file = lambda: torrent_info
self.ltmgr.ltsession_metainfo.add_torrent = lambda *_: fake_handle
self.ltmgr.ltsession_metainfo.remove_torrent = lambda *_: None
fake_alert = type('lt.metadata_received_alert', (object,), dict(handle=fake_handle))
self.ltmgr.ltsession_metainfo.pop_alerts = lambda: [fake_alert]
self.ltmgr.is_dht_ready = lambda: True
self.ltmgr.get_metainfo(infohash.decode('hex'), metainfo_cb)
return test_deferred
@trial_timeout(20)
def test_get_metainfo_cache(self):
"""
Testing metainfo caching
"""
test_deferred = Deferred()
def metainfo_cb(metainfo):
self.assertEqual(metainfo, "test")
test_deferred.callback(None)
self.ltmgr.initialize()
self.ltmgr.is_dht_ready = lambda: True
self.ltmgr.metainfo_cache[("a" * 20).encode('hex')] = {'meta_info': 'test'}
self.ltmgr.get_metainfo("a" * 20, metainfo_cb)
return test_deferred
@trial_timeout(20)
def test_got_metainfo(self):
"""
Testing whether the callback is correctly invoked when we received metainfo
"""
test_deferred = Deferred()
self.ltmgr.initialize()
def metainfo_cb(metainfo):
self.assertDictEqual(metainfo, {'info': {'pieces': ['a']}, 'leechers': 0,
'nodes': [], 'seeders': 0, 'initial peers': []})
test_deferred.callback(None)
fake_handle = MockObject()
torrent_info = MockObject()
torrent_info.metadata = lambda: bencode({'pieces': ['a']})
torrent_info.trackers = lambda: []
fake_handle.get_peer_info = lambda: []
fake_handle.torrent_file = lambda: torrent_info
self.ltmgr.ltsession_metainfo.remove_torrent = lambda *_: None
self.ltmgr.metainfo_requests['a' * 20] = {
'handle': fake_handle,
'timeout_callbacks': [],
'callbacks': [metainfo_cb],
'notify': False
}
self.ltmgr.got_metainfo("a" * 20)
return test_deferred
@trial_timeout(20)
def test_got_metainfo_timeout(self):
"""
Testing whether the callback is correctly invoked when we received metainfo after timeout
"""
test_deferred = Deferred()
def metainfo_timeout_cb(metainfo):
self.assertEqual(metainfo, 'a' * 20)
test_deferred.callback(None)
fake_handle = MockObject()
self.ltmgr.initialize()
self.ltmgr.metainfo_requests[('a' * 20).encode('hex')] = {'handle': fake_handle,
'timeout_callbacks': [metainfo_timeout_cb],
'callbacks': [],
'notify': True}
self.ltmgr.ltsession_metainfo.remove_torrent = lambda _dummy1, _dummy2: None
self.ltmgr.got_metainfo(('a' * 20).encode('hex'), timeout=True)
return test_deferred
@trial_timeout(20)
def test_get_metainfo_with_already_added_torrent(self):
"""
Testing metainfo fetching for a torrent which is already in session.
got_metainfo() should be called with timeout=False.
"""
magnet_link = "magnet:?xt=urn:btih:f72636475a375653083e49d501601675ce3e6619&dn=ubuntu-16.04.3-server-i386.iso"
test_deferred = Deferred()
def fake_got_metainfo(_, timeout):
self.assertFalse(timeout, "Timeout should not be True")
test_deferred.callback(None)
mock_handle = MockObject()
mock_handle.info_hash = lambda: 'a' * 20
mock_handle.is_valid = lambda: True
mock_handle.has_metadata = lambda: True
mock_ltsession = MockObject()
mock_ltsession.add_torrent = lambda _: mock_handle
mock_ltsession.find_torrent = lambda _: mock_handle
mock_ltsession.get_torrents = lambda: []
mock_ltsession.start_upnp = lambda: None
mock_ltsession.stop_upnp = lambda: None
mock_ltsession.save_state = lambda: None
self.ltmgr.ltsession_metainfo = mock_ltsession
self.ltmgr.metadata_tmpdir = tempfile.mkdtemp(suffix=u'tribler_metainfo_tmpdir')
self.ltmgr.is_dht_ready = lambda: True
self.ltmgr.got_metainfo = fake_got_metainfo
self.ltmgr.get_metainfo(magnet_link, lambda _: None)
return test_deferred
@trial_timeout(20)
def test_add_torrent(self):
"""
Testing the addition of a torrent to the libtorrent manager
"""
test_deferred = Deferred()
mock_handle = MockObject()
mock_handle.info_hash = lambda: 'a' * 20
mock_handle.is_valid = lambda: False
mock_error = MockObject()
mock_error.value = lambda: None
mock_alert = type('add_torrent_alert', (object,), dict(handle=mock_handle, error=mock_error))()
mock_ltsession = MockObject()
mock_ltsession.async_add_torrent = lambda _: reactor.callLater(0.1, self.ltmgr.process_alert, mock_alert)
mock_ltsession.find_torrent = lambda _: mock_handle
mock_ltsession.get_torrents = lambda: []
mock_ltsession.stop_upnp = lambda: None
mock_ltsession.save_state = lambda: None
self.ltmgr.get_session = lambda *_: mock_ltsession
self.ltmgr.metadata_tmpdir = tempfile.mkdtemp(suffix=u'tribler_metainfo_tmpdir')
infohash = MockObject()
infohash.info_hash = lambda: 'a' * 20
mock_download = MockObject()
mock_download.deferred_added = Deferred()
def cb_torrent_added(handle):
self.assertEqual(handle, mock_handle)
test_deferred.callback(None)
self.ltmgr.add_torrent(mock_download, {'ti': infohash}).addCallback(cb_torrent_added)
return test_deferred
@trial_timeout(20)
def test_add_torrent_desync(self):
"""
Testing the addition of a torrent to the libtorrent manager, if it already exists in the session.
"""
mock_handle = MockObject()
mock_handle.info_hash = lambda: 'a' * 20
mock_handle.is_valid = lambda: True
mock_alert = type('add_torrent_alert', (object,), dict(handle=mock_handle))
mock_ltsession = MockObject()
mock_ltsession.async_add_torrent = lambda _: self.ltmgr.process_alert(mock_alert)
mock_ltsession.find_torrent = lambda _: mock_handle
mock_ltsession.get_torrents = lambda: [mock_handle]
mock_ltsession.stop_upnp = lambda: None
mock_ltsession.save_state = lambda: None
self.ltmgr.get_session = lambda *_: mock_ltsession
self.ltmgr.metadata_tmpdir = tempfile.mkdtemp(suffix=u'tribler_metainfo_tmpdir')
infohash = MockObject()
infohash.info_hash = lambda: 'a' * 20
mock_download = MockObject()
mock_download.deferred_added = Deferred()
return self.ltmgr.add_torrent(mock_download, {'ti': infohash}).addCallback(
lambda handle: self.assertEqual(handle, mock_handle)
)
def test_remove_invalid_torrent(self):
"""
Tests a successful removal status of torrents without a handle
"""
self.ltmgr.initialize()
mock_dl = MockObject()
mock_dl.handle = None
self.assertTrue(self.ltmgr.remove_torrent(mock_dl).called)
def test_remove_invalid_handle_torrent(self):
"""
Tests a successful removal status of torrents with an invalid handle
"""
self.ltmgr.initialize()
mock_handle = MockObject()
mock_handle.is_valid = lambda: False
mock_dl = MockObject()
mock_dl.handle = mock_handle
self.assertTrue(self.ltmgr.remove_torrent(mock_dl).called)
def test_remove_unregistered_torrent(self):
"""
Tests a successful removal status of torrents which aren't known
"""
self.ltmgr.initialize()
mock_handle = MockObject()
mock_handle.is_valid = lambda: False
alert = type('torrent_removed_alert', (object, ), dict(handle=mock_handle, info_hash='0'*20))
self.ltmgr.process_alert(alert())
self.assertNotIn('0'*20, self.ltmgr.torrents)
def test_start_download_corrupt(self):
"""
Testing whether starting the download of a corrupt torrent file raises an exception
"""
self.ltmgr.metadata_tmpdir = tempfile.mkdtemp(suffix=u'tribler_metainfo_tmpdir')
corrupt_file = os.path.join(self.LIBTORRENT_FILES_DIR, 'corrupt_torrent.torrent')
self.assertRaises(TorrentFileException, self.ltmgr.start_download, torrentfilename=corrupt_file)
def test_start_download_duplicate(self):
"""
Test the starting of a download when there are no new trackers
"""
mock_tdef = MockObject()
mock_tdef.get_infohash = lambda: 'a' * 20
mock_tdef.get_trackers_as_single_tuple = lambda: tuple()
mock_download = MockObject()
mock_download.get_def = lambda: mock_tdef
mock_download.get_credit_mining = lambda: False
self.tribler_session.get_download = lambda _: mock_download
self.tribler_session.start_download_from_tdef = lambda tdef, _: MockObject()
self.ltmgr.tribler_session = self.tribler_session
self.ltmgr.metadata_tmpdir = tempfile.mkdtemp(suffix=u'tribler_metainfo_tmpdir')
self.ltmgr.start_download(infohash='a' * 20, tdef=mock_tdef)
def test_set_proxy_settings(self):
"""
Test setting the proxy settings
"""
def on_proxy_set(settings):
self.assertTrue(settings)
self.assertEqual(settings.hostname, 'a')
self.assertEqual(settings.port, 1234)
self.assertEqual(settings.username, 'abc')
self.assertEqual(settings.password, 'def')
def on_set_settings(settings):
self.assertTrue(settings)
self.assertEqual(settings['proxy_hostname'], 'a')
self.assertEqual(settings['proxy_port'], 1234)
self.assertEqual(settings['proxy_username'], 'abc')
self.assertEqual(settings['proxy_password'], 'def')
self.assertEqual(settings['proxy_peer_connections'], True)
self.assertEqual(settings['proxy_hostnames'], True)
mock_lt_session = MockObject()
mock_lt_session.get_settings = lambda: {}
mock_lt_session.set_settings = on_set_settings
mock_lt_session.set_proxy = on_proxy_set # Libtorrent < 1.1.0 uses set_proxy to set proxy settings
self.ltmgr.metadata_tmpdir = tempfile.mkdtemp(suffix=u'tribler_metainfo_tmpdir')
self.ltmgr.set_proxy_settings(mock_lt_session, 0, ('a', "1234"), ('abc', 'def'))
def test_save_resume_preresolved_magnet(self):
"""
Test whether a magnet link correctly writes save-resume data before it is resolved.
This can happen when a magnet link is added when the user does not have internet.
"""
self.ltmgr.initialize()
self.ltmgr.trsession = self.tribler_session
self.ltmgr.metadata_tmpdir = tempfile.mkdtemp(suffix=u'tribler_metainfo_tmpdir')
mock_tdef = MockObject()
mock_tdef.get_infohash = lambda: 'a' * 20
self.tribler_session.get_download = lambda _: None
self.tribler_session.get_downloads_pstate_dir = lambda: self.ltmgr.metadata_tmpdir
mock_lm = MockObject()
mock_lm.ltmgr = self.ltmgr
mock_lm.tunnel_community = None
self.tribler_session.lm = mock_lm
def dl_from_tdef(tdef, _):
dl = LibtorrentDownloadImpl(self.tribler_session, tdef)
dl.setup()
dl.cancel_all_pending_tasks()
return dl
self.tribler_session.start_download_from_tdef = dl_from_tdef
download = self.ltmgr.start_download_from_magnet("magnet:?xt=urn:btih:" + ('1'*40))
basename = binascii.hexlify(download.get_def().get_infohash()) + '.state'
filename = os.path.join(download.session.get_downloads_pstate_dir(), basename)
self.assertTrue(os.path.isfile(filename))
@trial_timeout(5)
def test_callback_on_alert(self):
"""
Test whether the alert callback is called when a libtorrent alert is posted
"""
self.ltmgr.default_alert_mask = 0xffffffff
test_deferred = Deferred()
def callback(*args):
self.ltmgr.alert_callback = None
test_deferred.callback(None)
callback.called = False
self.ltmgr.alert_callback = callback
self.ltmgr.initialize()
self.ltmgr._task_process_alerts()
return test_deferred
def test_payout_on_disconnect(self):
"""
Test whether a payout is initialized when a peer disconnects
"""
class peer_disconnected_alert(object):
def __init__(self):
self.pid = MockObject()
self.pid.to_string = lambda: 'a' * 20
def mocked_do_payout(mid):
self.assertEqual(mid, 'a' * 20)
mocked_do_payout.called = True
mocked_do_payout.called = False
disconnect_alert = peer_disconnected_alert()
self.ltmgr.tribler_session.lm.payout_manager = MockObject()
self.ltmgr.tribler_session.lm.payout_manager.do_payout = mocked_do_payout
self.ltmgr.initialize()
self.ltmgr.get_session(0).pop_alerts = lambda: [disconnect_alert]
self.ltmgr._task_process_alerts()
self.assertTrue(mocked_do_payout.called)
def test_post_session_stats(self):
"""
Test whether post_session_stats actually updates the state of libtorrent readiness for clean shutdown.
"""
def check_if_session_shutdown_is_ready():
self.ltmgr._task_process_alerts()
self.assertTrue(self.ltmgr.lt_session_shutdown_ready[0])
self.ltmgr.default_alert_mask = 0xffffffff
self.ltmgr.initialize()
# Zero hop session should be initialized
self.assertFalse(self.ltmgr.lt_session_shutdown_ready[0])
# Check for status with session stats alert
self.ltmgr.post_session_stats(hops=0)
# Wait sometime to get the alert and check the status
return deferLater(reactor, 0.01, check_if_session_shutdown_is_ready)
| lgpl-3.0 | -4,260,357,818,593,068,000 | 38.166667 | 118 | 0.630155 | false |
usc-isi/nova | nova/tests/network/test_quantumv2.py | 1 | 26589 |
# Copyright 2012 OpenStack LLC.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import mox
from nova import context
from nova import exception
from nova.network import model
from nova.network import quantumv2
from nova.network.quantumv2 import api as quantumapi
from nova.openstack.common import cfg
from nova import test
from nova import utils
from quantumclient.v2_0 import client
FLAGS = cfg.CONF
#NOTE: Quantum client raises Exception which is discouraged by HACKING.
# We set this variable here and use it for assertions below to avoid
# the hacking checks until we can make quantum client throw a custom
# exception class instead.
QUANTUM_CLIENT_EXCEPTION = Exception
class MyComparator(mox.Comparator):
def __init__(self, lhs):
self.lhs = lhs
def _com_dict(self, lhs, rhs):
if len(lhs) != len(rhs):
return False
for key, value in lhs.iteritems():
if key not in rhs:
return False
rhs_value = rhs[key]
if not self._com(value, rhs_value):
return False
return True
def _com_list(self, lhs, rhs):
if len(lhs) != len(rhs):
return False
for lhs_value in lhs:
if lhs_value not in rhs:
return False
return True
def _com(self, lhs, rhs):
if lhs is None:
return rhs is None
if isinstance(lhs, dict):
if not isinstance(rhs, dict):
return False
return self._com_dict(lhs, rhs)
if isinstance(lhs, list):
if not isinstance(rhs, list):
return False
return self._com_list(lhs, rhs)
if isinstance(lhs, tuple):
if not isinstance(rhs, tuple):
return False
return self._com_list(lhs, rhs)
return lhs == rhs
def equals(self, rhs):
return self._com(self.lhs, rhs)
def __repr__(self):
return str(self.lhs)
class TestQuantumClient(test.TestCase):
def test_withtoken(self):
self.flags(quantum_url='http://anyhost/')
self.flags(quantum_url_timeout=30)
my_context = context.RequestContext('userid',
'my_tenantid',
auth_token='token')
self.mox.StubOutWithMock(client.Client, "__init__")
client.Client.__init__(
endpoint_url=FLAGS.quantum_url,
token=my_context.auth_token,
timeout=FLAGS.quantum_url_timeout).AndReturn(None)
self.mox.ReplayAll()
quantumv2.get_client(my_context)
def test_withouttoken_keystone_connection_error(self):
self.flags(quantum_auth_strategy='keystone')
self.flags(quantum_url='http://anyhost/')
my_context = context.RequestContext('userid', 'my_tenantid')
self.assertRaises(QUANTUM_CLIENT_EXCEPTION,
quantumv2.get_client,
my_context)
def test_withouttoken_keystone_not_auth(self):
# self.flags(quantum_auth_strategy=None) fail to work
old_quantum_auth_strategy = FLAGS.quantum_auth_strategy
setattr(FLAGS, 'quantum_auth_strategy', None)
self.flags(quantum_url='http://anyhost/')
self.flags(quantum_url_timeout=30)
my_context = context.RequestContext('userid', 'my_tenantid')
self.mox.StubOutWithMock(client.Client, "__init__")
client.Client.__init__(
endpoint_url=FLAGS.quantum_url,
auth_strategy=None,
timeout=FLAGS.quantum_url_timeout).AndReturn(None)
self.mox.ReplayAll()
try:
quantumv2.get_client(my_context)
finally:
setattr(FLAGS, 'quantum_auth_strategy',
old_quantum_auth_strategy)
class TestQuantumv2(test.TestCase):
def setUp(self):
super(TestQuantumv2, self).setUp()
self.mox.StubOutWithMock(quantumv2, 'get_client')
self.moxed_client = self.mox.CreateMock(client.Client)
quantumv2.get_client(mox.IgnoreArg()).MultipleTimes().AndReturn(
self.moxed_client)
self.context = context.RequestContext('userid', 'my_tenantid')
setattr(self.context,
'auth_token',
'bff4a5a6b9eb4ea2a6efec6eefb77936')
self.instance = {'project_id': '9d049e4b60b64716978ab415e6fbd5c0',
'uuid': str(utils.gen_uuid()),
'display_name': 'test_instance',
'security_groups': []}
self.nets1 = [{'id': 'my_netid1',
'name': 'my_netname1',
'tenant_id': 'my_tenantid'}]
self.nets2 = []
self.nets2.append(self.nets1[0])
self.nets2.append({'id': 'my_netid2',
'name': 'my_netname2',
'tenant_id': 'my_tenantid'})
self.nets3 = self.nets2 + [{'id': 'my_netid3',
'name': 'my_netname3',
'tenant_id': 'my_tenantid'}]
self.nets4 = [{'id': 'his_netid4',
'name': 'his_netname4',
'tenant_id': 'his_tenantid'}]
self.nets = [self.nets1, self.nets2, self.nets3, self.nets4]
self.port_address = '10.0.1.2'
self.port_data1 = [{'network_id': 'my_netid1',
'device_id': 'device_id1',
'device_owner': 'compute:nova',
'id': 'my_portid1',
'fixed_ips': [{'ip_address': self.port_address,
'subnet_id': 'my_subid1'}],
'mac_address': 'my_mac1', }]
self.dhcp_port_data1 = [{'fixed_ips': [{'ip_address': '10.0.1.9',
'subnet_id': 'my_subid1'}]}]
self.port_data2 = []
self.port_data2.append(self.port_data1[0])
self.port_data2.append({'network_id': 'my_netid2',
'device_id': 'device_id2',
'device_owner': 'compute:nova',
'id': 'my_portid2',
'fixed_ips': [{'ip_address': '10.0.2.2',
'subnet_id': 'my_subid2'}],
'mac_address': 'my_mac2', })
self.port_data3 = [{'network_id': 'my_netid1',
'device_id': 'device_id3',
'device_owner': 'compute:nova',
'id': 'my_portid3',
'fixed_ips': [], # no fixed ip
'mac_address': 'my_mac3', }]
self.subnet_data1 = [{'id': 'my_subid1',
'cidr': '10.0.1.0/24',
'network_id': 'my_netid1',
'gateway_ip': '10.0.1.1',
'dns_nameservers': ['8.8.1.1', '8.8.1.2']}]
self.subnet_data2 = []
self.subnet_data2.append({'id': 'my_subid2',
'cidr': '10.0.2.0/24',
'network_id': 'my_netid2',
'gateway_ip': '10.0.2.1',
'dns_nameservers': ['8.8.2.1', '8.8.2.2']})
def tearDown(self):
try:
self.mox.UnsetStubs()
self.mox.VerifyAll()
finally:
FLAGS.reset()
def _verify_nw_info(self, nw_inf, index=0):
id_suffix = index + 1
self.assertEquals('10.0.%s.2' % id_suffix,
nw_inf.fixed_ips()[index]['address'])
self.assertEquals('my_netname%s' % id_suffix,
nw_inf[index]['network']['label'])
self.assertEquals('my_portid%s' % id_suffix, nw_inf[index]['id'])
self.assertEquals('my_mac%s' % id_suffix, nw_inf[index]['address'])
self.assertEquals('10.0.%s.0/24' % id_suffix,
nw_inf[index]['network']['subnets'][0]['cidr'])
self.assertTrue(model.IP(address='8.8.%s.1' % id_suffix) in
nw_inf[index]['network']['subnets'][0]['dns'])
def _get_instance_nw_info(self, number):
api = quantumapi.API()
self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
api.db.instance_info_cache_update(mox.IgnoreArg(),
self.instance['uuid'],
mox.IgnoreArg())
port_data = number == 1 and self.port_data1 or self.port_data2
self.moxed_client.list_ports(
tenant_id=self.instance['project_id'],
device_id=self.instance['uuid']).AndReturn(
{'ports': port_data})
nets = number == 1 and self.nets1 or self.nets2
self.moxed_client.list_networks(
tenant_id=self.instance['project_id'],
shared=False).AndReturn({'networks': nets})
self.moxed_client.list_networks(
shared=True).AndReturn({'networks': []})
for i in xrange(1, number + 1):
subnet_data = i == 1 and self.subnet_data1 or self.subnet_data2
self.moxed_client.list_subnets(
id=mox.SameElementsAs(['my_subid%s' % i])).AndReturn(
{'subnets': subnet_data})
self.moxed_client.list_ports(
network_id=subnet_data[0]['network_id'],
device_owner='network:dhcp').AndReturn(
{'ports': []})
self.mox.ReplayAll()
nw_inf = api.get_instance_nw_info(self.context, self.instance)
for i in xrange(0, number):
self._verify_nw_info(nw_inf, i)
def test_get_instance_nw_info_1(self):
"""Test to get one port in one network and subnet."""
self._get_instance_nw_info(1)
def test_get_instance_nw_info_2(self):
"""Test to get one port in each of two networks and subnets."""
self._get_instance_nw_info(2)
def test_get_instance_nw_info_with_nets(self):
"""Test get instance_nw_info with networks passed in."""
api = quantumapi.API()
self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
api.db.instance_info_cache_update(
mox.IgnoreArg(),
self.instance['uuid'], mox.IgnoreArg())
self.moxed_client.list_ports(
tenant_id=self.instance['project_id'],
device_id=self.instance['uuid']).AndReturn(
{'ports': self.port_data1})
self.moxed_client.list_subnets(
id=mox.SameElementsAs(['my_subid1'])).AndReturn(
{'subnets': self.subnet_data1})
self.moxed_client.list_ports(
network_id='my_netid1',
device_owner='network:dhcp').AndReturn(
{'ports': self.dhcp_port_data1})
self.mox.ReplayAll()
nw_inf = api.get_instance_nw_info(self.context,
self.instance,
networks=self.nets1)
self._verify_nw_info(nw_inf, 0)
def test_get_instance_nw_info_without_subnet(self):
"""Test get instance_nw_info for a port without subnet."""
api = quantumapi.API()
self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
api.db.instance_info_cache_update(
mox.IgnoreArg(),
self.instance['uuid'], mox.IgnoreArg())
self.moxed_client.list_ports(
tenant_id=self.instance['project_id'],
device_id=self.instance['uuid']).AndReturn(
{'ports': self.port_data3})
self.moxed_client.list_networks(
shared=False,
tenant_id=self.instance['project_id']).AndReturn(
{'networks': self.nets1})
self.moxed_client.list_networks(
shared=True).AndReturn({'networks': []})
self.mox.ReplayAll()
nw_inf = api.get_instance_nw_info(self.context,
self.instance)
id_suffix = 3
self.assertEquals(0, len(nw_inf.fixed_ips()))
self.assertEquals('my_netname1', nw_inf[0]['network']['label'])
self.assertEquals('my_portid%s' % id_suffix, nw_inf[0]['id'])
self.assertEquals('my_mac%s' % id_suffix, nw_inf[0]['address'])
self.assertEquals(0, len(nw_inf[0]['network']['subnets']))
def _allocate_for_instance(self, net_idx=1, **kwargs):
api = quantumapi.API()
self.mox.StubOutWithMock(api, 'get_instance_nw_info')
# Net idx is 1-based for compatibility with existing unit tests
nets = self.nets[net_idx - 1]
api.get_instance_nw_info(mox.IgnoreArg(),
self.instance,
networks=nets).AndReturn(None)
ports = {}
fixed_ips = {}
req_net_ids = []
if 'requested_networks' in kwargs:
for id, fixed_ip, port_id in kwargs['requested_networks']:
if port_id:
self.moxed_client.show_port(port_id).AndReturn(
{'port': {'id': 'my_portid1',
'network_id': 'my_netid1'}})
req_net_ids.append('my_netid1')
ports['my_netid1'] = self.port_data1[0]
id = 'my_netid1'
else:
fixed_ips[id] = fixed_ip
req_net_ids.append(id)
search_ids = [net['id'] for net in nets if net['id'] in req_net_ids]
mox_list_network_params = dict(tenant_id=self.instance['project_id'],
shared=False)
if search_ids:
mox_list_network_params['id'] = search_ids
self.moxed_client.list_networks(
**mox_list_network_params).AndReturn({'networks': nets})
mox_list_network_params = dict(shared=True)
if search_ids:
mox_list_network_params['id'] = search_ids
self.moxed_client.list_networks(
**mox_list_network_params).AndReturn({'networks': []})
for network in nets:
port_req_body = {
'port': {
'device_id': self.instance['uuid'],
'device_owner': 'compute:nova',
},
}
port = ports.get(network['id'], None)
if port:
port_id = port['id']
self.moxed_client.update_port(port_id,
MyComparator(port_req_body)
).AndReturn(
{'port': port})
else:
fixed_ip = fixed_ips.get(network['id'])
if fixed_ip:
port_req_body['port']['fixed_ip'] = fixed_ip
port_req_body['port']['network_id'] = network['id']
port_req_body['port']['admin_state_up'] = True
port_req_body['port']['tenant_id'] = \
self.instance['project_id']
res_port = {'port': {'id': 'fake'}}
self.moxed_client.create_port(
MyComparator(port_req_body)).AndReturn(res_port)
self.mox.ReplayAll()
api.allocate_for_instance(self.context, self.instance, **kwargs)
def test_allocate_for_instance_1(self):
"""Allocate one port in one network env."""
self._allocate_for_instance(1)
def test_allocate_for_instance_2(self):
"""Allocate one port in two networks env."""
self._allocate_for_instance(2)
def test_allocate_for_instance_with_requested_networks(self):
# specify only first and last network
requested_networks = [(net['id'], None, None)
for net in (self.nets3[0], self.nets3[-1])]
self._allocate_for_instance(net_idx=3,
requested_networks=requested_networks)
def test_allocate_for_instance_with_requested_networks_with_fixedip(self):
# specify only first and last network
requested_networks = [(self.nets1[0]['id'], '10.0.1.0/24', None)]
self._allocate_for_instance(net_idx=1,
requested_networks=requested_networks)
def test_allocate_for_instance_with_requested_networks_with_port(self):
# specify only first and last network
requested_networks = [(None, None, 'myportid1')]
self._allocate_for_instance(net_idx=1,
requested_networks=requested_networks)
def test_allocate_for_instance_ex1(self):
"""verify we will delete created ports
if we fail to allocate all net resources.
Mox to raise exception when creating a second port.
In this case, the code should delete the first created port.
"""
api = quantumapi.API()
self.moxed_client.list_networks(
tenant_id=self.instance['project_id'],
shared=False).AndReturn(
{'networks': self.nets2})
self.moxed_client.list_networks(shared=True).AndReturn(
{'networks': []})
index = 0
for network in self.nets2:
port_req_body = {
'port': {
'network_id': network['id'],
'admin_state_up': True,
'device_id': self.instance['uuid'],
'device_owner': 'compute:nova',
'tenant_id': self.instance['project_id'],
},
}
port = {'id': 'portid_' + network['id']}
if index == 0:
self.moxed_client.create_port(
MyComparator(port_req_body)).AndReturn({'port': port})
else:
self.moxed_client.create_port(
MyComparator(port_req_body)).AndRaise(
Exception("fail to create port"))
index += 1
self.moxed_client.delete_port('portid_' + self.nets2[0]['id'])
self.mox.ReplayAll()
self.assertRaises(QUANTUM_CLIENT_EXCEPTION, api.allocate_for_instance,
self.context, self.instance)
def test_allocate_for_instance_ex2(self):
"""verify we have no port to delete
if we fail to allocate the first net resource.
Mox to raise exception when creating the first port.
In this case, the code should not delete any ports.
"""
api = quantumapi.API()
self.moxed_client.list_networks(
tenant_id=self.instance['project_id'],
shared=False).AndReturn(
{'networks': self.nets2})
self.moxed_client.list_networks(shared=True).AndReturn(
{'networks': []})
port_req_body = {
'port': {
'network_id': self.nets2[0]['id'],
'admin_state_up': True,
'device_id': self.instance['uuid'],
'tenant_id': self.instance['project_id'],
},
}
self.moxed_client.create_port(
MyComparator(port_req_body)).AndRaise(
Exception("fail to create port"))
self.mox.ReplayAll()
self.assertRaises(QUANTUM_CLIENT_EXCEPTION, api.allocate_for_instance,
self.context, self.instance)
def _deallocate_for_instance(self, number):
port_data = number == 1 and self.port_data1 or self.port_data2
self.moxed_client.list_ports(
device_id=self.instance['uuid']).AndReturn(
{'ports': port_data})
for port in port_data:
self.moxed_client.delete_port(port['id'])
self.mox.ReplayAll()
api = quantumapi.API()
api.deallocate_for_instance(self.context, self.instance)
def test_deallocate_for_instance_1(self):
"""Test to deallocate in one port env."""
self._deallocate_for_instance(1)
def test_deallocate_for_instance_2(self):
"""Test to deallocate in two ports env."""
self._deallocate_for_instance(2)
def test_validate_networks(self):
requested_networks = [('my_netid1', 'test', None),
('my_netid2', 'test2', None)]
self.moxed_client.list_networks(
id=mox.SameElementsAs(['my_netid1', 'my_netid2']),
tenant_id=self.context.project_id,
shared=False).AndReturn(
{'networks': self.nets2})
self.moxed_client.list_networks(
id=mox.SameElementsAs(['my_netid1', 'my_netid2']),
shared=True).AndReturn(
{'networks': []})
self.mox.ReplayAll()
api = quantumapi.API()
api.validate_networks(self.context, requested_networks)
def test_validate_networks_ex_1(self):
requested_networks = [('my_netid1', 'test', None),
('my_netid2', 'test2', None)]
self.moxed_client.list_networks(
id=mox.SameElementsAs(['my_netid1', 'my_netid2']),
tenant_id=self.context.project_id,
shared=False).AndReturn(
{'networks': self.nets1})
self.moxed_client.list_networks(
id=mox.SameElementsAs(['my_netid1', 'my_netid2']),
shared=True).AndReturn(
{'networks': []})
self.mox.ReplayAll()
api = quantumapi.API()
try:
api.validate_networks(self.context, requested_networks)
except exception.NetworkNotFound as ex:
self.assertTrue("my_netid2" in str(ex))
def test_validate_networks_ex_2(self):
requested_networks = [('my_netid1', 'test', None),
('my_netid2', 'test2', None),
('my_netid3', 'test3', None)]
self.moxed_client.list_networks(
id=mox.SameElementsAs(['my_netid1', 'my_netid2', 'my_netid3']),
tenant_id=self.context.project_id,
shared=False).AndReturn(
{'networks': self.nets1})
self.moxed_client.list_networks(
id=mox.SameElementsAs(['my_netid1', 'my_netid2', 'my_netid3']),
shared=True).AndReturn(
{'networks': []})
self.mox.ReplayAll()
api = quantumapi.API()
try:
api.validate_networks(self.context, requested_networks)
except exception.NetworkNotFound as ex:
self.assertTrue("my_netid2, my_netid3" in str(ex))
def _mock_list_ports(self, port_data=None):
if port_data is None:
port_data = self.port_data2
address = self.port_address
self.moxed_client.list_ports(
fixed_ips=MyComparator('ip_address=%s' % address)).AndReturn(
{'ports': port_data})
self.mox.ReplayAll()
return address
def test_get_instance_uuids_by_ip_filter(self):
self._mock_list_ports()
filters = {'ip': '^10\\.0\\.1\\.2$'}
api = quantumapi.API()
result = api.get_instance_uuids_by_ip_filter(self.context, filters)
self.assertEquals('device_id1', result[0]['instance_uuid'])
self.assertEquals('device_id2', result[1]['instance_uuid'])
def test_get_fixed_ip_by_address_fails_for_no_ports(self):
address = self._mock_list_ports(port_data=[])
api = quantumapi.API()
self.assertRaises(exception.FixedIpNotFoundForAddress,
api.get_fixed_ip_by_address,
self.context, address)
def test_get_fixed_ip_by_address_succeeds_for_1_port(self):
address = self._mock_list_ports(port_data=self.port_data1)
api = quantumapi.API()
result = api.get_fixed_ip_by_address(self.context, address)
self.assertEquals('device_id1', result['instance_uuid'])
def test_get_fixed_ip_by_address_fails_for_more_than_1_port(self):
address = self._mock_list_ports()
api = quantumapi.API()
self.assertRaises(exception.FixedIpAssociatedWithMultipleInstances,
api.get_fixed_ip_by_address,
self.context, address)
def _get_available_networks(self, prv_nets, pub_nets, req_ids=None):
api = quantumapi.API()
nets = prv_nets + pub_nets
mox_list_network_params = dict(tenant_id=self.instance['project_id'],
shared=False)
if req_ids:
mox_list_network_params['id'] = req_ids
self.moxed_client.list_networks(
**mox_list_network_params).AndReturn({'networks': prv_nets})
mox_list_network_params = dict(shared=True)
if req_ids:
mox_list_network_params['id'] = req_ids
self.moxed_client.list_networks(
**mox_list_network_params).AndReturn({'networks': pub_nets})
self.mox.ReplayAll()
rets = api._get_available_networks(self.context,
self.instance['project_id'],
req_ids)
self.assertEqual(rets, nets)
def test_get_available_networks_all_private(self):
self._get_available_networks(prv_nets=self.nets2, pub_nets=[])
def test_get_available_networks_all_public(self):
self._get_available_networks(prv_nets=[], pub_nets=self.nets2)
def test_get_available_networks_private_and_public(self):
self._get_available_networks(prv_nets=self.nets1, pub_nets=self.nets4)
def test_get_available_networks_with_network_ids(self):
prv_nets = [self.nets3[0]]
pub_nets = [self.nets3[-1]]
# specify only first and last network
req_ids = [net['id'] for net in (self.nets3[0], self.nets3[-1])]
self._get_available_networks(prv_nets, pub_nets, req_ids)
| apache-2.0 | 8,201,003,567,821,700,000 | 41.885484 | 78 | 0.537027 | false |
carlthome/librosa | librosa/feature/utils.py | 1 | 8078 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Feature manipulation utilities"""
from warnings import warn
import numpy as np
import scipy.signal
from .._cache import cache
from ..util.exceptions import ParameterError
__all__ = ['delta', 'stack_memory']
@cache(level=40)
def delta(data, width=9, order=1, axis=-1, mode='interp', **kwargs):
r'''Compute delta features: local estimate of the derivative
of the input data along the selected axis.
Delta features are computed using Savitzky-Golay filtering.
Parameters
----------
data : np.ndarray
the input data matrix (eg, spectrogram)
width : int, positive, odd [scalar]
Number of frames over which to compute the delta features.
Cannot exceed the length of `data` along the specified axis.
If `mode='interp'`, then `width` must not exceed `data.shape[axis]`.
order : int > 0 [scalar]
the order of the difference operator.
1 for first derivative, 2 for second, etc.
axis : int [scalar]
the axis along which to compute deltas.
Default is -1 (columns).
mode : str, {'interp', 'nearest', 'mirror', 'constant', 'wrap'}
Padding mode for estimating differences at the boundaries.
kwargs : additional keyword arguments
See `scipy.signal.savgol_filter`
Returns
-------
delta_data : np.ndarray [shape=(d, t)]
delta matrix of `data` at specified order
Notes
-----
This function caches at level 40.
See Also
--------
scipy.signal.savgol_filter
Examples
--------
Compute MFCC deltas, delta-deltas
>>> y, sr = librosa.load(librosa.util.example_audio_file())
>>> mfcc = librosa.feature.mfcc(y=y, sr=sr)
>>> mfcc_delta = librosa.feature.delta(mfcc)
>>> mfcc_delta
array([[ 1.666e+01, 1.666e+01, ..., 1.869e-15, 1.869e-15],
[ 1.784e+01, 1.784e+01, ..., 6.085e-31, 6.085e-31],
...,
[ 7.262e-01, 7.262e-01, ..., 9.259e-31, 9.259e-31],
[ 6.578e-01, 6.578e-01, ..., 7.597e-31, 7.597e-31]])
>>> mfcc_delta2 = librosa.feature.delta(mfcc, order=2)
>>> mfcc_delta2
array([[ -1.703e+01, -1.703e+01, ..., 3.834e-14, 3.834e-14],
[ -1.108e+01, -1.108e+01, ..., -1.068e-30, -1.068e-30],
...,
[ 4.075e-01, 4.075e-01, ..., -1.565e-30, -1.565e-30],
[ 1.676e-01, 1.676e-01, ..., -2.104e-30, -2.104e-30]])
>>> import matplotlib.pyplot as plt
>>> plt.subplot(3, 1, 1)
>>> librosa.display.specshow(mfcc)
>>> plt.title('MFCC')
>>> plt.colorbar()
>>> plt.subplot(3, 1, 2)
>>> librosa.display.specshow(mfcc_delta)
>>> plt.title(r'MFCC-$\Delta$')
>>> plt.colorbar()
>>> plt.subplot(3, 1, 3)
>>> librosa.display.specshow(mfcc_delta2, x_axis='time')
>>> plt.title(r'MFCC-$\Delta^2$')
>>> plt.colorbar()
>>> plt.tight_layout()
>>> plt.show()
'''
data = np.atleast_1d(data)
if mode == 'interp' and width > data.shape[axis]:
raise ParameterError("when mode='interp', width={} "
"cannot exceed data.shape[axis]={}".format(width, data.shape[axis]))
if width < 3 or np.mod(width, 2) != 1:
raise ParameterError('width must be an odd integer >= 3')
if order <= 0 or not isinstance(order, int):
raise ParameterError('order must be a positive integer')
kwargs.pop('deriv', None)
kwargs.setdefault('polyorder', order)
return scipy.signal.savgol_filter(data, width,
deriv=order,
axis=axis,
mode=mode,
**kwargs)
@cache(level=40)
def stack_memory(data, n_steps=2, delay=1, **kwargs):
"""Short-term history embedding: vertically concatenate a data
vector or matrix with delayed copies of itself.
Each column `data[:, i]` is mapped to::
data[:, i] -> [data[:, i],
data[:, i - delay],
...
data[:, i - (n_steps-1)*delay]]
For columns `i < (n_steps - 1) * delay` , the data will be padded.
By default, the data is padded with zeros, but this behavior can be
overridden by supplying additional keyword arguments which are passed
to `np.pad()`.
Parameters
----------
data : np.ndarray [shape=(t,) or (d, t)]
Input data matrix. If `data` is a vector (`data.ndim == 1`),
it will be interpreted as a row matrix and reshaped to `(1, t)`.
n_steps : int > 0 [scalar]
embedding dimension, the number of steps back in time to stack
delay : int != 0 [scalar]
the number of columns to step.
Positive values embed from the past (previous columns).
Negative values embed from the future (subsequent columns).
kwargs : additional keyword arguments
Additional arguments to pass to `np.pad`.
Returns
-------
data_history : np.ndarray [shape=(m * d, t)]
data augmented with lagged copies of itself,
where `m == n_steps - 1`.
Notes
-----
This function caches at level 40.
Examples
--------
Keep two steps (current and previous)
>>> data = np.arange(-3, 3)
>>> librosa.feature.stack_memory(data)
array([[-3, -2, -1, 0, 1, 2],
[ 0, -3, -2, -1, 0, 1]])
Or three steps
>>> librosa.feature.stack_memory(data, n_steps=3)
array([[-3, -2, -1, 0, 1, 2],
[ 0, -3, -2, -1, 0, 1],
[ 0, 0, -3, -2, -1, 0]])
Use reflection padding instead of zero-padding
>>> librosa.feature.stack_memory(data, n_steps=3, mode='reflect')
array([[-3, -2, -1, 0, 1, 2],
[-2, -3, -2, -1, 0, 1],
[-1, -2, -3, -2, -1, 0]])
Or pad with edge-values, and delay by 2
>>> librosa.feature.stack_memory(data, n_steps=3, delay=2, mode='edge')
array([[-3, -2, -1, 0, 1, 2],
[-3, -3, -3, -2, -1, 0],
[-3, -3, -3, -3, -3, -2]])
Stack time-lagged beat-synchronous chroma with edge padding
>>> y, sr = librosa.load(librosa.util.example_audio_file())
>>> chroma = librosa.feature.chroma_stft(y=y, sr=sr)
>>> tempo, beats = librosa.beat.beat_track(y=y, sr=sr, hop_length=512)
>>> beats = librosa.util.fix_frames(beats, x_min=0, x_max=chroma.shape[1])
>>> chroma_sync = librosa.util.sync(chroma, beats)
>>> chroma_lag = librosa.feature.stack_memory(chroma_sync, n_steps=3,
... mode='edge')
Plot the result
>>> import matplotlib.pyplot as plt
>>> beat_times = librosa.frames_to_time(beats, sr=sr, hop_length=512)
>>> librosa.display.specshow(chroma_lag, y_axis='chroma', x_axis='time',
... x_coords=beat_times)
>>> plt.yticks([0, 12, 24], ['Lag=0', 'Lag=1', 'Lag=2'])
>>> plt.title('Time-lagged chroma')
>>> plt.colorbar()
>>> plt.tight_layout()
>>> plt.show()
"""
if n_steps < 1:
raise ParameterError('n_steps must be a positive integer')
if delay == 0:
raise ParameterError('delay must be a non-zero integer')
data = np.atleast_2d(data)
t = data.shape[1]
kwargs.setdefault('mode', 'constant')
if kwargs['mode'] == 'constant':
kwargs.setdefault('constant_values', [0])
# Pad the end with zeros, which will roll to the front below
if delay > 0:
padding = (int((n_steps - 1) * delay), 0)
else:
padding = (0, int((n_steps - 1) * -delay))
data = np.pad(data, [(0, 0), padding], **kwargs)
history = data
# TODO: this could be more efficient
for i in range(1, n_steps):
history = np.vstack([np.roll(data, -i * delay, axis=1), history])
# Trim to original width
if delay > 0:
history = history[:, :t]
else:
history = history[:, -t:]
# Make contiguous
return np.asfortranarray(history)
| isc | -6,802,198,681,692,537,000 | 30.678431 | 97 | 0.551498 | false |
ivelum/graphql-py | tests/test_lexer.py | 1 | 4421 |
from decimal import Decimal
from unittest import TestCase
from graphql.lexer import GraphQLLexer
from graphql.exceptions import LexerError
class GraphQLLexerTest(TestCase):
lexer = GraphQLLexer()
def assert_output(self, lexer, expected):
actual = list(lexer)
len_actual = len(actual)
len_expected = len(expected)
self.assertEqual(
len_actual,
len_expected,
'Actual output length %s does not match expected length %s\n'
'Actual: %s\n'
'Expected: %s' % (len_actual, len_expected, actual, expected)
)
for i, token in enumerate(actual):
self.assertEqual(token.type, expected[i][0])
self.assertEqual(token.value, expected[i][1])
def test_punctuator(self):
self.assert_output(self.lexer.input('!'), [('BANG', '!')])
self.assert_output(self.lexer.input('$'), [('DOLLAR', '$')])
self.assert_output(self.lexer.input('('), [('PAREN_L', '(')])
self.assert_output(self.lexer.input(')'), [('PAREN_R', ')')])
self.assert_output(self.lexer.input(':'), [('COLON', ':')])
self.assert_output(self.lexer.input('='), [('EQUALS', '=')])
self.assert_output(self.lexer.input('@'), [('AT', '@')])
self.assert_output(self.lexer.input('['), [('BRACKET_L', '[')])
self.assert_output(self.lexer.input(']'), [('BRACKET_R', ']')])
self.assert_output(self.lexer.input('{'), [('BRACE_L', '{')])
self.assert_output(self.lexer.input('}'), [('BRACE_R', '}')])
self.assert_output(self.lexer.input('...'), [('SPREAD', '...')])
def test_name(self):
for name in ('a', 'myVar_42', '__LOL__', '_', '_0'):
self.assert_output(self.lexer.input(name), [('NAME', name)])
def test_reserved_words(self):
reserved = ('fragment', 'query', 'mutation', 'on')
for word in reserved:
self.assert_output(self.lexer.input(word), [(word.upper(), word)])
# A word made of reserved words should be treated as a name
for word in ('queryType', 'mutation42', 'on_fragment'):
self.assert_output(self.lexer.input(word), [('NAME', word)])
def test_true(self):
self.assert_output(self.lexer.input('true'), [('TRUE', True)])
self.assert_output(self.lexer.input('True'), [('NAME', 'True')])
def test_false(self):
self.assert_output(self.lexer.input('false'), [('FALSE', False)])
self.assert_output(self.lexer.input('False'), [('NAME', 'False')])
def test_null(self):
self.assert_output(self.lexer.input('null'), [('NULL', None)])
self.assert_output(self.lexer.input('Null'), [('NAME', 'Null')])
def test_int(self):
for val in ('0', '-0', '42', '-42'):
self.assert_output(
self.lexer.input(val),
[('INT_VALUE', int(val))],
)
def test_float(self):
for val in ('-0.5e+42', '42.0', '2E64', '2.71e-0002'):
self.assert_output(
self.lexer.input(val),
[('FLOAT_VALUE', Decimal(val))],
)
def test_string(self):
for s in ('""', u'""', '"42"', r'"\t\n\u0042 ^"'):
self.assert_output(
self.lexer.input(s),
[('STRING_VALUE', s.strip('"'))]
)
def test_comment(self):
lexer = self.lexer.input("""
42 # lol this is a number. But this -> 9000 is not.
"" # lol this is a string. But this -> "gav" is not.
# lol the whole line commented
#
""")
self.assert_output(lexer, [('INT_VALUE', 42), ('STRING_VALUE', '')])
def test_illegal_chars(self):
for s in ('"', '^'):
try:
list(self.lexer.input(s))
self.fail('Illegal char exception not raised for %s' % repr(s))
except LexerError as e:
self.assertEqual(1, e.line)
self.assertEqual(1, e.column)
self.assertTrue(
str(e).startswith('Line 1, col 1: Illegal character')
)
self.assertEqual(s, e.value)
def test_positional_info(self):
for i, t in enumerate(self.lexer.input('1\n 3\n 5\n')):
self.assertEqual(i + 1, t.lineno)
self.assertEqual(i * 2 + 1, self.lexer.find_column(t))
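# Editor's note: the block below is an illustrative usage sketch, not part of the
# original test module. It relies only on the lexer API exercised by the tests above
# (GraphQLLexer().input() yields tokens exposing .type and .value); the query string
# is an arbitrary example.
if __name__ == '__main__':
    example_lexer = GraphQLLexer()
    for tok in example_lexer.input('query { user(id: 42) { name } }'):
        print(tok.type, tok.value)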
| mit | 2,787,235,725,609,226,000 | 38.828829 | 79 | 0.52884 | false |
all-of-us/raw-data-repository | rdr_service/lib_fhir/fhirclient_1_0_6/models/familymemberhistory_tests.py | 1 | 3883 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 1.0.2.7202 on 2016-06-23.
# 2016, SMART Health IT.
import io
import json
import os
import unittest
from . import familymemberhistory
from .fhirdate import FHIRDate
class FamilyMemberHistoryTests(unittest.TestCase):
def instantiate_from(self, filename):
datadir = os.environ.get('FHIR_UNITTEST_DATADIR') or ''
with io.open(os.path.join(datadir, filename), 'r', encoding='utf-8') as handle:
js = json.load(handle)
self.assertEqual("FamilyMemberHistory", js["resourceType"])
return familymemberhistory.FamilyMemberHistory(js)
def testFamilyMemberHistory1(self):
inst = self.instantiate_from("familymemberhistory-example-mother.json")
self.assertIsNotNone(inst, "Must have instantiated a FamilyMemberHistory instance")
self.implFamilyMemberHistory1(inst)
js = inst.as_json()
self.assertEqual("FamilyMemberHistory", js["resourceType"])
inst2 = familymemberhistory.FamilyMemberHistory(js)
self.implFamilyMemberHistory1(inst2)
def implFamilyMemberHistory1(self, inst):
self.assertEqual(inst.condition[0].code.coding[0].code, "371041009")
self.assertEqual(inst.condition[0].code.coding[0].display, "Embolic Stroke")
self.assertEqual(inst.condition[0].code.coding[0].system, "http://snomed.info/sct")
self.assertEqual(inst.condition[0].code.text, "Stroke")
self.assertEqual(inst.condition[0].onsetQuantity.system, "http://unitsofmeasure.org")
self.assertEqual(inst.condition[0].onsetQuantity.unit, "a")
self.assertEqual(inst.condition[0].onsetQuantity.value, 56)
self.assertEqual(inst.id, "mother")
self.assertEqual(inst.relationship.coding[0].code, "mother")
self.assertEqual(inst.relationship.coding[0].system, "http://hl7.org/fhir/familial-relationship")
self.assertEqual(inst.status, "completed")
self.assertEqual(inst.text.div, "<div>Mother died of a stroke aged 56</div>")
self.assertEqual(inst.text.status, "generated")
def testFamilyMemberHistory2(self):
inst = self.instantiate_from("familymemberhistory-example.json")
self.assertIsNotNone(inst, "Must have instantiated a FamilyMemberHistory instance")
self.implFamilyMemberHistory2(inst)
js = inst.as_json()
self.assertEqual("FamilyMemberHistory", js["resourceType"])
inst2 = familymemberhistory.FamilyMemberHistory(js)
self.implFamilyMemberHistory2(inst2)
def implFamilyMemberHistory2(self, inst):
self.assertEqual(inst.condition[0].code.coding[0].code, "315619001")
self.assertEqual(inst.condition[0].code.coding[0].display, "Myocardial Infarction")
self.assertEqual(inst.condition[0].code.coding[0].system, "http://snomed.info/sct")
self.assertEqual(inst.condition[0].code.text, "Heart Attack")
self.assertEqual(inst.condition[0].note.text, "Was fishing at the time. At least he went doing someting he loved.")
self.assertEqual(inst.condition[0].onsetQuantity.system, "http://unitsofmeasure.org")
self.assertEqual(inst.condition[0].onsetQuantity.unit, "a")
self.assertEqual(inst.condition[0].onsetQuantity.value, 74)
self.assertEqual(inst.date.date, FHIRDate("2011-03-18").date)
self.assertEqual(inst.date.as_json(), "2011-03-18")
self.assertEqual(inst.id, "father")
self.assertEqual(inst.relationship.coding[0].code, "father")
self.assertEqual(inst.relationship.coding[0].system, "http://hl7.org/fhir/familial-relationship")
self.assertEqual(inst.status, "completed")
self.assertEqual(inst.text.div, "<div>Father died of a heart attack aged 74</div>")
self.assertEqual(inst.text.status, "generated")
| bsd-3-clause | -9,157,715,298,358,130,000 | 49.428571 | 123 | 0.695339 | false |
chewxy/cu | cmd/gencudnn/parse.py | 1 | 3705 |
from bs4 import BeautifulSoup
import requests
import re
import sys
import os
inputs ={}
outputs = {}
ios = {}
docs = {}
def get():
if os.path.isfile("cache/docs.html"):
with open("cache/docs.html", 'r') as f:
print("Using cache", file=sys.stderr)
return f.read()
r = requests.get("http://docs.nvidia.com/deeplearning/sdk/cudnn-developer-guide/index.html")
with open("cache/docs.html", 'w') as f:
f.write(r.text)
return r.text
def main():
txt = get()
soup = BeautifulSoup(txt, "html5lib")
contents = soup.find_all(id="api-introduction")
topics = contents[0].find_all(class_="topic concept nested1")
for topic in topics:
rawFnName = topic.find_all(class_='title topictitle2')[0].text
try:
fnName = re.search('cudnn.+$', rawFnName).group(0)
except AttributeError as e:
print("rawFnName: {}".format(rawFnName), file=sys.stderr)
continue
try:
paramsDL = topic.find_all(class_='dl')[0] # first definition list is params
except IndexError:
print("rawFnName: {} - topic has no dl class".format(fnName), file=sys.stderr)
continue
# check previous
if paramsDL.previous_sibling.previous_sibling.text != "Parameters":
print("rawFnName: {} has no params::: {}".format(fnName, paramsDL.previous_sibling), file=sys.stderr)
continue
params = paramsDL.find_all(class_='dt dlterm') # name
paramsDesc = paramsDL.find_all(class_='dd') # use type
paramUse = []
for d in paramsDesc:
try:
use = d.find_all(class_='ph i')[0].text
except IndexError as e:
use = "Input"
paramUse.append(use)
if len(params) != len(paramUse):
print("rawFnName: {} - differing params and use cases".format(fnName), file=sys.stderr)
continue
inputParams = [p.text.strip() for i, p in enumerate(params) if (paramUse[i].strip()=='Input') or (paramUse[i].strip()=="Inputs")]
outputParams = [p.text.strip() for i, p in enumerate(params) if (paramUse[i].strip()=='Output') or (paramUse[i].strip()=="Outputs")]
ioParams = [p.text.strip() for i, p in enumerate(params) if paramUse[i].strip()=='Input/Output']
inputs[fnName] = inputParams
outputs[fnName] = outputParams
ios[fnName] = ioParams
# extract docs
try:
docbody = topic.find_all(class_='body conbody')[0]
except IndexError:
print("fnName: {} - no body".format(fnName), file=sys.stderr)
continue
# clear is better than clever.
doc = docbody.find_all("p")[0].text
doc = doc.replace("\n", "")
doc = re.sub("\t+", " ", doc)
doc = re.sub("\s+", " ", doc)
doc = doc.replace('"', '`')
doc = doc.replace("This function", fnName)
doc = doc.replace("This routine", fnName)
doc = doc.replace("This", fnName)
doc = doc.strip()
docs[fnName] = doc
# write the go file
print("package main")
print("var inputParams = map[string][]string{")
for k, v in inputs.items():
if len(v) == 0: continue
print('"{}": {{ '.format(k), end="")
for inp in v :
split = inp.split(",")
for s in split:
print('"{}", '.format(s.strip()), end="")
print("},")
print("}")
print("var outputParams = map[string][]string{")
for k, v in outputs.items():
if len(v) == 0: continue
print('"{}": {{ '.format(k), end="")
for inp in v :
split = inp.split(",")
for s in split:
print('"{}", '.format(s.strip()), end="")
print("},")
print("}")
print("var ioParams = map[string][]string{")
for k, v in ios.items():
if len(v) == 0: continue
print('"{}": {{ '.format(k), end="")
for inp in v :
split = inp.split(",")
for s in split:
print('"{}", '.format(s.strip()), end="")
print("},")
print("}")
print("var docs = map[string]string{")
for k, v in docs.items():
print('"{}": "{}",'.format(k, v.strip()))
print("}")
main()
| mit | -7,812,344,624,569,286,000 | 28.412698 | 134 | 0.623212 | false |
MortalViews/python-notes | inheritance.py | 1 | 1192 |
import random
class Person:
def __init__(self,name,age,location):
self.name = name
self.age = age
self.location = location
def is_sick(self):
return random.randint(1,10)%2==0
class AttendenceMixin:
def swip_in(self):
pass
def swip_out(self):
pass
class Employee(Person):
def __init__(self,emp_id,joining_date,*args,**kwargs):
self.emp_id =emp_id
self.joining_date =joining_date
super().__init__(*args,**kwargs)
class Contractor(Employee):
pass
class InfraEmployee(Employee,AttendenceMixin):
def __init__(self,dept,*args,**kwargs):
self.dept = dept
super().__init__(*args,**kwargs)
class ITEmployee(Employee,AttendenceMixin):
def __init__(self,project,technologies,system_id,*args,**kwargs):
self.project =project
self.tech = technologies
self.system = system_id
super().__init__(*args,**kwargs)
def is_sick(self):
return random.randint(1,10)%2==1
class Manager(Employee):
def __init__(self,cabin_no,*args,**kwargs):
self.cabin=cabin_no
super().__init__(*args,**kwargs)
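# Editor's note: hypothetical usage sketch, not part of the original notes. It shows
# how the cooperative super().__init__() chain above consumes positional arguments:
# ITEmployee's own parameters first, then Employee's, then Person's. All values are
# made up for illustration.
if __name__ == '__main__':
    dev = ITEmployee('proj-x', ['python'], 'sys-01',   # project, technologies, system_id
                     'E123', '2017-01-01',             # emp_id, joining_date
                     'Alice', 30, 'Kathmandu')         # name, age, location
    print(dev.name, dev.emp_id, dev.project, dev.is_sick())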
| apache-2.0 | 4,436,098,363,554,478,600 | 26.090909 | 69 | 0.589765 | false |
lutris/website | games/notifier.py | 1 | 1688 |
"""Send a digest of unpublished content to moderators"""
from django.conf import settings
from accounts.models import User
from games import models
from emails.messages import send_email
DEFAULT_COUNT = 12
def get_unpublished_installers(count=DEFAULT_COUNT):
"""Return a random list of unpublished installers"""
return models.Installer.objects.filter(published=False).order_by('?')[:count]
def get_unpublished_screenshots(count=DEFAULT_COUNT):
"""Return a random list of unpublished screenshots"""
return models.Screenshot.objects.filter(published=False).order_by('?')[:count]
def get_unreviewed_game_submissions(count=DEFAULT_COUNT):
"""Return a random list of unreviewed game submissions"""
return models.GameSubmission.objects.filter(
accepted_at__isnull=True
).order_by('?')[:count]
def get_installer_issues(count=DEFAULT_COUNT):
"""Return a random list of installer issues"""
return models.InstallerIssue.objects.all().order_by('?')[:count]
def get_mod_mail_content():
"""Get the payload to be included in the digest"""
return {
'installers': get_unpublished_installers(),
'screenshots': get_unpublished_screenshots(),
'submissions': get_unreviewed_game_submissions(),
'issues': get_installer_issues()
}
def send_daily_mod_mail():
"""Send the email to moderators"""
context = get_mod_mail_content()
if settings.DEBUG:
moderators = [u[1] for u in settings.MANAGERS]
else:
moderators = [u.email for u in User.objects.filter(is_staff=True)]
subject = 'Your daily moderator mail'
return send_email('daily_mod_mail', context, subject, moderators)
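# A minimal sketch of how the digest is typically triggered; the assumption is
# that scheduling happens elsewhere (e.g. a cron entry or a Celery beat task):
#
#   from games.notifier import send_daily_mod_mail
#   send_daily_mod_mail()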
| agpl-3.0 | -6,707,878,073,285,417,000 | 32.76 | 82 | 0.702607 | false |
cloud-ark/cloudark | server/common/fm_logger.py | 1 | 2400 | import inspect
import logging
from server.common import constants
class Logging(object):
def __init__(self):
logging.basicConfig(filename=constants.LOG_FILE_NAME,
level=logging.DEBUG, filemode='a',
format='%(asctime)s %(message)s',
datefmt='%m/%d/%Y %I:%M:%S %p')
self.logger = logging.getLogger("CloudARK")
# http://stackoverflow.com/questions/10973362/python-logging-function-name-file-name-line-number-using-a-single-file
def info(self, message):
# Get the previous frame in the stack, otherwise it would
# be this function!!!
try:
func = inspect.currentframe().f_back.f_code
# Dump the message + the name of this function to the log.
self.logger.info("<%s>: %s() %s:%i" % (
message,
func.co_name,
func.co_filename,
func.co_firstlineno
))
except IOError as e:
if e.errno == 28:
print("-- Disk full -- (most likely this also won't get printed.")
def debug(self, message):
# Get the previous frame in the stack, otherwise it would
# be this function!!!
try:
func = inspect.currentframe().f_back.f_code
# Dump the message + the name of this function to the log.
self.logger.debug("<%s>: %s() %s:%i" % (
message,
func.co_name,
func.co_filename,
func.co_firstlineno
))
except IOError as e:
if e.errno == 28:
print("-- Disk full -- (most likely this also won't get printed.")
def error(self, message):
# Get the previous frame in the stack, otherwise it would
# be this function!!!
try:
func = inspect.currentframe().f_back.f_code
# Dump the message + the name of this function to the log.
self.logger.error("<%s>: %s() %s:%i" % (
message,
func.co_name,
func.co_filename,
func.co_firstlineno
))
self.logger.error(message, exc_info=1)
except IOError as e:
if e.errno == 28:
print("-- Disk full -- (most likely this also won't get printed.")
| apache-2.0 | -4,898,865,385,617,483,000 | 34.294118 | 120 | 0.515417 | false |
azvoleff/chitwanabm | chitwanabm/modelloop.py | 1 | 18907 | # Copyright 2008-2013 Alex Zvoleff
#
# This file is part of the chitwanabm agent-based model.
#
# chitwanabm is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# chitwanabm is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# chitwanabm. If not, see <http://www.gnu.org/licenses/>.
#
# See the README.rst file for author contact information.
"""
Contains the main model loop for the model run. Takes input parameters read
from runmodel.py, and passes the results of the model run back.
"""
from __future__ import division
import os
import time
import copy
import logging
import numpy as np
from pyabm.file_io import write_NBH_shapefile
from pyabm.utility import TimeSteps
from chitwanabm import rc_params
from chitwanabm import test
logger = logging.getLogger(__name__)
rcParams = rc_params.get_params()
timebounds = rcParams['model.timebounds']
timestep = rcParams['model.timestep']
model_time = TimeSteps(timebounds, timestep)
def main_loop(world, results_path):
"""This function contains the main model loop. Passed to it is a list of
regions, which contains the person, household, and neighborhood agents to
be used in the model, and the land-use parameters."""
if rcParams['run_validation_checks']:
if not test.validate_person_attributes(world):
logger.critical("Person attributes validation failed")
if not test.validate_household_attributes(world):
logger.critical("Household attributes validation failed")
if not test.validate_neighborhood_attributes(world):
logger.critical("Neighborhood attributes validation failed")
time_strings = {}
# Store the date values (as timestep number (0), float and date string)
# for time zero (T0) so that the initial values of the model (which are for
# time zero, the zeroth timestep) can be used in plotting model results.
time_strings['timestep'] = [0]
time_strings['time_float'] = [model_time.get_T0_date_float()]
time_strings['time_date'] = [model_time.get_T0_date_string()]
# Keep annual totals to print while the model is running
annual_num_marr = 0
annual_num_divo = 0
annual_num_births = 0
annual_num_deaths = 0
annual_num_out_migr_LL_indiv = 0
annual_num_ret_migr_LL_indiv = 0
annual_num_out_migr_LD_indiv = 0
annual_num_ret_migr_LD_indiv = 0
annual_num_in_migr_HH = 0
annual_num_out_migr_HH = 0
# Save the starting time of the model to use in printing elapsed time while
# it runs.
modelrun_starttime = time.time()
def write_results_CSV(world, results_path, timestep):
"""
Function to periodically save model results to CSV (if this option is
selected in the rc file).
"""
if rcParams['save_psn_data']:
world.write_persons_to_csv(timestep, results_path)
if rcParams['save_NBH_data']:
world.write_NBHs_to_csv(timestep, results_path)
if rcParams['save_LULC_shapefiles']:
NBH_shapefile = os.path.join(results_path, "NBHs_time_%s.shp"%timestep)
neighborhoods = []
regions = world.get_regions()
for region in regions:
neighborhoods.extend(region.get_agents())
            write_NBH_shapefile(neighborhoods, NBH_shapefile)
# TODO: The below is still a work in progress
# def write_results_netcdf(world, results_path, timestep):
# if rcParams['save_psn_data_netcdf']:
# world.write_persons_to_netcdf(timestep, results_path)
# Write the results for timestep 0
write_results_CSV(world, results_path, 0)
# saved_data will store event, population, and fuelwood usage data keyed by
# timestep:variable:nbh.
saved_data = {}
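    # For example (illustrative only; actual IDs depend on the input data), the
    # number of births in neighborhood 7 during timestep 12 would be read as
    # saved_data[12]['births'][7].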
# Save the initialization data for timestep 0 (note that all the event
# variables, like new_births, new_deaths, etc., need to be set to None
# in each neighborhood, for each variable, as they are unknown for timestep
# 0 (since the model has not yet begun). Need to construct an empty_events
# dictionary to initialize these events for timestep 0.
# TODO: Fix this to work for multiple regions.
region = world.get_regions()[0]
empty_events = {}
EVIs = {}
for neighborhood in region.iter_agents():
empty_events[neighborhood.get_ID()] = np.NaN
EVIs[neighborhood.get_ID()] = neighborhood._EVI
saved_data[0] = {}
saved_data[0]['EVI'] = EVIs
saved_data[0]['births'] = empty_events
saved_data[0]['deaths'] = empty_events
saved_data[0]['marr'] = empty_events
saved_data[0]['divo'] = empty_events
saved_data[0]['out_migr_LL_indiv'] = empty_events
saved_data[0]['ret_migr_LL_indiv'] = empty_events
saved_data[0]['out_migr_LD_indiv'] = empty_events
saved_data[0]['ret_migr_LD_indiv'] = empty_events
saved_data[0]['in_migr_HH'] = empty_events
saved_data[0]['out_migr_HH'] = empty_events
saved_data[0].update(region.get_neighborhood_pop_stats())
saved_data[0].update(region.get_neighborhood_fw_usage(model_time.get_T0_date_float()))
###########################################################################
# Define the result arrays - there will be three arrays stored in a
# dictionary:
# 1) timesteps stores the output
# 2) nbh stores neighborhood level output
# 3) psn stores person level output
results_new_format = {}
timesteps_dtype = [('timestep', 'i2'),
('year', 'i2'),
('month', 'i2'),
('date_float', 'f4')]
results_new_format['timesteps'] = np.zeros((model_time.get_total_num_timesteps()),
dtype=timesteps_dtype)
#TODO: Finish this
nbh_dtype = [('EVI', 'f4'),
('births', 'i2'),
('deaths', 'i2'),
('marr', 'i2'),
('divo', 'i2'),
('out_migr_LL_indiv', 'i2'),
('ret_migr_LL_indiv', 'i2'),
('out_migr_LD_indiv', 'i2'),
('ret_migr_LD_indiv', 'i2'),
('in_migr_HH', 'i2'),
('out_migr_HH', 'i2'),
('num_psn', 'i4'),
('num_hs', 'i2'),
('num_marr', 'i2')]
results_new_format['nbh'] = np.zeros((region.num_members() *
model_time.get_total_num_timesteps()), dtype=nbh_dtype)
#TODO: Finish this
psn_dtype = [('births', 'i2'),
('deaths', 'i2'),
('marr', 'i2'),
('divo', 'i2'),
('out_migr_LL_indiv', 'i2'),
('ret_migr_LL_indiv', 'i2'),
('out_migr_LD_indiv', 'i2'),
('ret_migr_LD_indiv', 'i2'),
('in_migr_HH', 'i2'),
('out_migr_HH', 'i2'),
('num_psn', 'i4'),
('num_hs', 'i2'),
('num_marr', 'i2')]
results_new_format['psn'] = np.zeros((model_time.get_total_num_timesteps()), dtype=psn_dtype)
# Make a dictionary to store empty (zero) event data for submodels if they
# are turned off by the user.
zero_events = {}
for neighborhood in region.iter_agents():
zero_events[neighborhood.get_ID()] = 0
# "Burn in" by running the model for three years in simulated mode, where
# age isn't incremented, but migrations occur. This allows starting the
# model with realistic migration histories, avoiding a huge loss of
# population to migration in the first month of the model.
logger.info('Burning in events for region %s'%region.get_ID())
for neg_timestep in xrange(-rcParams['model.burnin_timesteps'], 0):
for region in world.iter_regions():
if rcParams['submodels.migration_LL_individual']:
new_out_migr_LL_indiv, new_ret_migr_LL_indiv = region.individual_LL_migrations(model_time.get_T_minus_date_float(neg_timestep), neg_timestep, BURN_IN=True)
else: new_out_migr_LL_indiv, new_ret_migr_LL_indiv = zero_events, zero_events
if rcParams['submodels.migration_LD_individual']:
new_out_migr_LD_indiv, new_ret_migr_LD_indiv = region.individual_LD_migrations(model_time.get_T_minus_date_float(neg_timestep), neg_timestep, BURN_IN=True)
else: new_out_migr_LD_indiv, new_ret_migr_LD_indiv = zero_events, zero_events
if rcParams['submodels.fertility']:
new_births = region.births(model_time.get_cur_date_float(), model_time.get_cur_int_timestep(), simulate=True)
else: new_births = zero_events
num_new_births = sum(new_births.values())
num_new_out_migr_LL_indiv = sum(new_out_migr_LL_indiv.values())
num_new_ret_migr_LL_indiv = sum(new_ret_migr_LL_indiv.values())
num_new_out_migr_LD_indiv = sum(new_out_migr_LD_indiv.values())
num_new_ret_migr_LD_indiv = sum(new_ret_migr_LD_indiv.values())
logger.info("Burn in %3s: P: %5s NOLL: %3s NRLL: %3s NOLD: %3s NRLD: %3s NB: %3s"%(neg_timestep,
region.num_persons(), num_new_out_migr_LL_indiv,
num_new_ret_migr_LL_indiv, num_new_out_migr_LD_indiv,
num_new_ret_migr_LD_indiv, num_new_births))
while model_time.in_bounds():
timestep = model_time.get_cur_int_timestep()
results_new_format['timesteps'][timestep - 1] = (timestep,
model_time.get_cur_year(), model_time.get_cur_month(),
model_time.get_cur_date_float())
logger.debug('beginning timestep %s (%s)'%(model_time.get_cur_int_timestep(),
model_time.get_cur_date_string()))
if model_time.get_cur_month() == 1:
annual_num_births = 0
annual_num_deaths = 0
annual_num_marr = 0
annual_num_divo = 0
annual_num_out_migr_LL_indiv = 0
annual_num_ret_migr_LL_indiv = 0
annual_num_out_migr_LD_indiv = 0
annual_num_ret_migr_LD_indiv = 0
annual_num_in_migr_HH = 0
annual_num_out_migr_HH = 0
for region in world.iter_regions():
logger.debug('processing region %s'%region.get_ID())
# This could easily handle multiple regions, although currently
# there is only one, for all of Chitwan.
if rcParams['submodels.fertility']:
new_births = region.births(model_time.get_cur_date_float(), model_time.get_cur_int_timestep())
else: new_births = zero_events
if rcParams['submodels.mortality']:
new_deaths = region.deaths(model_time.get_cur_date_float(), model_time.get_cur_int_timestep())
else: new_deaths = zero_events
if rcParams['submodels.marriage']:
new_marr = region.marriages(model_time.get_cur_date_float(), model_time.get_cur_int_timestep())
else: new_marr = zero_events
if rcParams['submodels.divorce']:
new_divo = region.divorces(model_time.get_cur_date_float(), model_time.get_cur_int_timestep())
else: new_divo = zero_events
if rcParams['submodels.migration_LL_individual']:
new_out_migr_LL_indiv, new_ret_migr_LL_indiv = region.individual_LL_migrations(model_time.get_cur_date_float(), model_time.get_cur_int_timestep())
else: new_out_migr_LL_indiv, new_ret_migr_LL_indiv = zero_events, zero_events
if rcParams['submodels.migration_LD_individual']:
new_out_migr_LD_indiv, new_ret_migr_LD_indiv = region.individual_LD_migrations(model_time.get_cur_date_float(), model_time.get_cur_int_timestep())
else: new_out_migr_LD_indiv, new_ret_migr_LD_indiv = zero_events, zero_events
if rcParams['submodels.migration_household']:
new_in_migr_HH, new_out_migr_HH = region.household_migrations(model_time.get_cur_date_float(), model_time.get_cur_int_timestep())
else: new_in_migr_HH, new_out_migr_HH = zero_events, zero_events
if rcParams['submodels.schooling']:
schooling = region.education(model_time.get_cur_date_float())
else: schooling = zero_events
region.increment_age()
# Now account for changing NFOs (if desired)
if rcParams['NFOs.change.model'].lower() != 'none':
region.establish_NFOs()
# Save event, LULC, and population data in the saved_data dictionary
# for later output to CSV.
saved_data[timestep] = {}
saved_data[timestep]['EVI'] = EVIs
saved_data[timestep]['births'] = new_births
saved_data[timestep]['deaths'] = new_deaths
saved_data[timestep]['marr'] = new_marr
saved_data[timestep]['divo'] = new_divo
saved_data[timestep]['out_migr_LL_indiv'] = new_out_migr_LL_indiv
saved_data[timestep]['ret_migr_LL_indiv'] = new_ret_migr_LL_indiv
saved_data[timestep]['out_migr_LD_indiv'] = new_out_migr_LD_indiv
saved_data[timestep]['ret_migr_LD_indiv'] = new_ret_migr_LD_indiv
saved_data[timestep]['in_migr_HH'] = new_in_migr_HH
saved_data[timestep]['out_migr_HH'] = new_out_migr_HH
saved_data[timestep].update(region.get_neighborhood_pop_stats())
saved_data[timestep].update(region.get_neighborhood_fw_usage(model_time.get_cur_date_float()))
saved_data[timestep].update(region.get_neighborhood_landuse())
saved_data[timestep].update(region.get_neighborhood_nfo_context())
saved_data[timestep].update(region.get_neighborhood_forest_distance())
# Keep running totals of events for printing results:
num_new_births = sum(new_births.values())
num_new_deaths = sum(new_deaths.values())
num_new_marr = sum(new_marr.values())
num_new_divo = sum(new_divo.values())
num_new_out_migr_LL_indiv = sum(new_out_migr_LL_indiv.values())
num_new_ret_migr_LL_indiv = sum(new_ret_migr_LL_indiv.values())
num_new_out_migr_LD_indiv = sum(new_out_migr_LD_indiv.values())
num_new_ret_migr_LD_indiv = sum(new_ret_migr_LD_indiv.values())
num_new_in_migr_HH = sum(new_in_migr_HH.values())
num_new_out_migr_HH = sum(new_out_migr_HH.values())
annual_num_births += num_new_births
annual_num_deaths += num_new_deaths
annual_num_marr += num_new_marr
annual_num_divo += num_new_divo
annual_num_out_migr_LL_indiv += num_new_out_migr_LL_indiv
annual_num_ret_migr_LL_indiv += num_new_ret_migr_LL_indiv
annual_num_out_migr_LD_indiv += num_new_out_migr_LD_indiv
annual_num_ret_migr_LD_indiv += num_new_ret_migr_LD_indiv
annual_num_in_migr_HH += num_new_in_migr_HH
annual_num_out_migr_HH += num_new_out_migr_HH
# Print an information line to allow keeping tabs on the model while it
# is running.
num_persons = region.num_persons()
num_households = region.num_households()
stats_string = "%s: P: %5s TMa: %5s THH: %5s NMa: %3s NDv: %3s NB: %3s ND: %3s NOLL: %3s NRLL: %3s NOLD: %3s NRLD: %3s NOMH: %3s NIMH: %3s"%(
model_time.get_cur_date_string().ljust(7), num_persons,
region.get_num_marriages(), num_households,
num_new_marr, num_new_divo, num_new_births, num_new_deaths,
num_new_out_migr_LL_indiv, num_new_ret_migr_LL_indiv,
num_new_out_migr_LD_indiv, num_new_ret_migr_LD_indiv,
num_new_out_migr_HH, num_new_in_migr_HH)
logger.info('%s'%stats_string)
# Save timestep, year and month, and time_float values for use in
# storing results (to CSV) keyed to a particular timestep.
time_strings['timestep'].append(model_time.get_cur_int_timestep())
time_strings['time_float'].append(model_time.get_cur_date_float())
time_strings['time_date'].append(model_time.get_cur_date_string())
            if (model_time.get_cur_month() == 12 or model_time.is_last_iteration()) \
                    and model_time.get_cur_date() != model_time._starttime:
# Model this years agricultural productivity, to be used in the
# next year's model runs.
EVIs = region.agricultural_productivity()
mean_NBH_EVI = np.mean(EVIs.values())
mean_Valley_EVI = region._Valley_Mean_EVI
# The last condition in the above if statement is necessary as
# there is no total to print on the first timestep, so it wouldn't
# make sense to print it.
total_string = "%s totals: New Ma: %3s Dv: %3s B: %3s D: %3s LLOutMi: %3s LLRetMi: %3s LDOutMi: %3s LDRetMi: %3s OutMiHH: %3s InMiHH: %3s | NBHEVI: %3s ValEVI: %3s"%(
model_time.get_cur_year(), annual_num_marr,
annual_num_divo, annual_num_births,
annual_num_deaths, annual_num_out_migr_LL_indiv,
annual_num_ret_migr_LL_indiv, annual_num_out_migr_LD_indiv,
annual_num_ret_migr_LD_indiv, annual_num_out_migr_HH,
annual_num_in_migr_HH, mean_NBH_EVI, mean_Valley_EVI)
logger.info('%s'%total_string)
logger.info("Elapsed time: %11s"%elapsed_time(modelrun_starttime))
if rcParams['run_validation_checks']:
if not test.validate_person_attributes(world):
logger.critical("Person attributes validation failed")
if not test.validate_household_attributes(world):
logger.critical("Household attributes validation failed")
if not test.validate_neighborhood_attributes(world):
logger.critical("Neighborhood attributes validation failed")
if num_persons == 0:
logger.info("End of model run: population is zero")
break
if model_time.get_cur_month() == 12 or model_time.is_last_iteration():
write_results_CSV(world, results_path, model_time.get_cur_int_timestep())
model_time.increment()
return saved_data, time_strings, results_new_format
def elapsed_time(start_time):
elapsed = int(time.time() - start_time)
hours = int(elapsed / 3600)
minutes = int((elapsed - hours * 3600) / 60)
seconds = int(elapsed - hours * 3600 - minutes * 60)
return "%ih %im %is" %(hours, minutes, seconds)
| gpl-3.0 | 1,953,961,682,125,296,600 | 48.755263 | 178 | 0.613106 | false |
djfkahn/MemberHubDirectoryTools | roster_tools.py | 1 | 5813 | #!/usr/bin/env python
"""This program inputs a MemberHub directory dump, and analyzes it.
"""
import family
import roster
import os
from openpyxl import load_workbook
MIN_NUM_ROSTER_FIELDS = 5
def ReadRosterAdultsFromMostRecent(file_name=None):
""" roster_tools.ReadRosterAdultsFromMostRecent
PURPOSE:
Generates a list of adult names in the newest roster file.
INPUT:
- none
OUTPUTS:
- adults_list -- list of adult name fields in the newest roster file.
ASSUMPTIONS:
- none
"""
##
## Find the files in the "Roster" folder with ".xlsx" extension, sort them by
## date, and pick the most recently added
if not file_name:
file_path = os.path.abspath("./Roster/")
with os.scandir(file_path) as raw_files:
files = [file for file in raw_files \
if not(file.name.startswith('~')) and (file.name.endswith('.xlsx'))]
files.sort(key=lambda x: os.stat(x).st_mtime, reverse=True)
file_name = file_path + "/" +files[0].name
##
## Load the workbook, and select the active/only worksheet
wb = load_workbook(file_name)
ws = wb.active
##
## Copy all the values in column 'D' for all rows beyond the title row into
## the output list
adults_list = []
for fields in ws.iter_rows(min_row=2, max_row=ws.max_row, min_col=4, max_col=4):
adults_list.append(fields[0].value)
return adults_list
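# A minimal usage sketch; the explicit file name below is hypothetical and only
# shows that the default (the newest .xlsx file under ./Roster/) can be
# overridden:
#
#   adults = ReadRosterAdultsFromMostRecent()
#   adults = ReadRosterAdultsFromMostRecent(file_name='Roster/2019_roster.xlsx')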
def ReadRosterFromFile(file_name, hub_map, rosterC):
""" roster_tools.ReadRosterFromFile
PURPOSE:
Reads a roster file with the following fields:
<**Last Name>,<**First Name>,<**Grade>,<**Parent/Guardian Name(s)>,<***Teacher Name>
** - indicates always required field
*** - indicates field that is required when Grade field is < 6
INPUT:
- file_name -- name of the roster file
- hub_map -- dictionary that maps hub names to hub IDs
- rosterC -- the Roster object containing the errata
OUTPUTS:
- roster -- list of families extracted from the roster
ASSUMPTIONS:
1. First row of the file is the column headers...not a member of the roster.
"""
wb = load_workbook(file_name)
ws = wb.active
student_count = -1
for fields in ws.values:
## Skip the first row
if student_count < 0:
student_count = 0
continue
## Skip any row for which all fields are not populated
empty_field_found = False
for i in range(MIN_NUM_ROSTER_FIELDS):
if fields[i] == None or fields[i] == "":
empty_field_found = True
print("Found row with missing required fields:", fields)
break
if empty_field_found:
continue
## each row represents one student
student_count += 1
## treat the student as a member of a new family...for now
new_family = family.RosterFamily(adults_raw_name=fields[3])
new_family.AddToFamily(child_first = fields[1],
child_last = fields[0],
grade = fields[2],
adult_names = fields[3],
teacher_name = fields[4],
hub_map = hub_map,
rosterC = rosterC)
# if new_family is the same as a family already in the roster, then combine
# families. Otherwise, append new_family at the end of the roster.
for roster_entry in rosterC.GetRoster():
if roster_entry.IsSameFamily(new_family):
roster_entry.CombineWith(new_family)
break
else:
rosterC.append(new_family)
print("%d students processed %d families." % (student_count, len(rosterC)))
return rosterC.GetRoster()
def GetRosterFileName():
""" roster_tools.GetRosterFileName
PURPOSE:
Gives the user a list of possible roster files, and processes their selection.
INPUTS:
None
OUTPUTS:
- file_name - the selected roster file name
ASSUMPTIONS:
- Assumes the candidate roster files are stored in a subfolder called 'Roster'
"""
print ("These are the potential roster files:")
file_path = os.path.abspath("./Roster/")
with os.scandir(file_path) as raw_files:
files = [file for file in raw_files \
if not(file.name.startswith('~')) and (file.name.endswith('.xlsx'))]
files.sort(key=lambda x: os.stat(x).st_mtime, reverse=True)
max_index = 0
file_number = '1'
while int(file_number) >= max_index:
for file in files:
max_index += 1
print("%d) %s" % (max_index, file.name))
file_number = input("Enter list number of file or press <enter> to use '" + files[0].name + "':")
if not file_number:
return file_path + "/" +files[0].name
elif 0 < int(file_number) <= max_index:
return file_path + "/" + files[int(file_number)-1].name
else:
max_index = 0
print("The selection made is out of range. Please try again.")
def ReadRoster(hub_map):
""" roster_tools.ReadRoster
PURPOSE:
Prompts the user for roster file name and proceeds to read the file.
INPUT:
- hub_map -- mapping of teacher names to hub numbers
OUTPUTS:
- roster -- list of families extracted from the roster
ASSUMPTIONS:
- All the candidate rosters reside in a folder called "Roster" under the
run directory.
- All candidate rosters are Microsoft Excel files.
"""
return ReadRosterFromFile(GetRosterFileName(), hub_map, roster.Roster())
| apache-2.0 | 4,514,965,062,765,273,000 | 35.559748 | 109 | 0.590745 | false |
yusufm/mobly | mobly/controllers/android_device_lib/event_dispatcher.py | 1 | 15487 | #!/usr/bin/env python3.4
#
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from concurrent.futures import ThreadPoolExecutor
import queue
import re
import threading
import time
import traceback
class EventDispatcherError(Exception):
pass
class IllegalStateError(EventDispatcherError):
"""Raise when user tries to put event_dispatcher into an illegal state.
"""
class DuplicateError(EventDispatcherError):
"""Raise when a duplicate is being created and it shouldn't.
"""
class EventDispatcher:
"""Class managing events for an sl4a connection.
"""
DEFAULT_TIMEOUT = 60
def __init__(self, sl4a):
self._sl4a = sl4a
self.started = False
self.executor = None
self.poller = None
self.event_dict = {}
self.handlers = {}
self.lock = threading.RLock()
def poll_events(self):
"""Continuously polls all types of events from sl4a.
Events are sorted by name and store in separate queues.
If there are registered handlers, the handlers will be called with
corresponding event immediately upon event discovery, and the event
won't be stored. If exceptions occur, stop the dispatcher and return
"""
while self.started:
event_obj = None
event_name = None
try:
event_obj = self._sl4a.eventWait(50000)
except:
if self.started:
print("Exception happened during polling.")
print(traceback.format_exc())
raise
if not event_obj:
continue
elif 'name' not in event_obj:
print("Received Malformed event {}".format(event_obj))
continue
else:
event_name = event_obj['name']
# if handler registered, process event
if event_name in self.handlers:
self.handle_subscribed_event(event_obj, event_name)
if event_name == "EventDispatcherShutdown":
self._sl4a.closeSl4aSession()
break
else:
self.lock.acquire()
if event_name in self.event_dict: # otherwise, cache event
self.event_dict[event_name].put(event_obj)
else:
q = queue.Queue()
q.put(event_obj)
self.event_dict[event_name] = q
self.lock.release()
def register_handler(self, handler, event_name, args):
"""Registers an event handler.
One type of event can only have one event handler associated with it.
Args:
handler: The event handler function to be registered.
event_name: Name of the event the handler is for.
args: User arguments to be passed to the handler when it's called.
Raises:
IllegalStateError: Raised if attempts to register a handler after
the dispatcher starts running.
DuplicateError: Raised if attempts to register more than one
handler for one type of event.
"""
if self.started:
raise IllegalStateError(("Can't register service after polling is"
" started"))
self.lock.acquire()
try:
if event_name in self.handlers:
raise DuplicateError('A handler for {} already exists'.format(
event_name))
self.handlers[event_name] = (handler, args)
finally:
self.lock.release()
def start(self):
"""Starts the event dispatcher.
Initiates executor and start polling events.
Raises:
IllegalStateError: Can't start a dispatcher again when it's already
running.
"""
if not self.started:
self.started = True
self.executor = ThreadPoolExecutor(max_workers=32)
self.poller = self.executor.submit(self.poll_events)
else:
raise IllegalStateError("Dispatcher is already started.")
def clean_up(self):
"""Clean up and release resources after the event dispatcher polling
loop has been broken.
The following things happen:
1. Clear all events and flags.
2. Close the sl4a client the event_dispatcher object holds.
3. Shut down executor without waiting.
"""
if not self.started:
return
self.started = False
self.clear_all_events()
self._sl4a.close()
self.poller.set_result("Done")
# The polling thread is guaranteed to finish after a max of 60 seconds,
# so we don't wait here.
self.executor.shutdown(wait=False)
def pop_event(self, event_name, timeout=DEFAULT_TIMEOUT):
"""Pop an event from its queue.
Return and remove the oldest entry of an event.
Block until an event of specified name is available or
times out if timeout is set.
Args:
event_name: Name of the event to be popped.
timeout: Number of seconds to wait when event is not present.
Never times out if None.
Returns:
event: The oldest entry of the specified event. None if timed out.
Raises:
IllegalStateError: Raised if pop is called before the dispatcher
starts polling.
"""
if not self.started:
raise IllegalStateError(
"Dispatcher needs to be started before popping.")
e_queue = self.get_event_q(event_name)
if not e_queue:
raise TypeError("Failed to get an event queue for {}".format(
event_name))
try:
# Block for timeout
if timeout:
return e_queue.get(True, timeout)
# Non-blocking poll for event
elif timeout == 0:
return e_queue.get(False)
else:
# Block forever on event wait
return e_queue.get(True)
except queue.Empty:
raise queue.Empty('Timeout after {}s waiting for event: {}'.format(
timeout, event_name))
def wait_for_event(self,
event_name,
predicate,
timeout=DEFAULT_TIMEOUT,
*args,
**kwargs):
"""Wait for an event that satisfies a predicate to appear.
Continuously pop events of a particular name and check against the
predicate until an event that satisfies the predicate is popped or
timed out. Note this will remove all the events of the same name that
do not satisfy the predicate in the process.
Args:
event_name: Name of the event to be popped.
predicate: A function that takes an event and returns True if the
predicate is satisfied, False otherwise.
timeout: Number of seconds to wait.
*args: Optional positional args passed to predicate().
**kwargs: Optional keyword args passed to predicate().
Returns:
The event that satisfies the predicate.
Raises:
queue.Empty: Raised if no event that satisfies the predicate was
found before time out.
"""
deadline = time.time() + timeout
while True:
event = None
try:
event = self.pop_event(event_name, 1)
except queue.Empty:
pass
if event and predicate(event, *args, **kwargs):
return event
if time.time() > deadline:
raise queue.Empty(
'Timeout after {}s waiting for event: {}'.format(
timeout, event_name))
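    # A minimal usage sketch (the event name and the 'state' field inspected by
    # the predicate are assumptions, not guaranteed by this class):
    #
    #   event = dispatcher.wait_for_event(
    #       'BluetoothStateChangedEvent',
    #       lambda e: e['data']['state'] == 'ON',
    #       timeout=30)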
def pop_events(self, regex_pattern, timeout):
"""Pop events whose names match a regex pattern.
If such event(s) exist, pop one event from each event queue that
satisfies the condition. Otherwise, wait for an event that satisfies
the condition to occur, with timeout.
Results are sorted by timestamp in ascending order.
Args:
regex_pattern: The regular expression pattern that an event name
should match in order to be popped.
timeout: Number of seconds to wait for events in case no event
matching the condition exits when the function is called.
Returns:
results: Pop events whose names match a regex pattern.
Empty if none exist and the wait timed out.
Raises:
IllegalStateError: Raised if pop is called before the dispatcher
starts polling.
queue.Empty: Raised if no event was found before time out.
"""
if not self.started:
raise IllegalStateError(
"Dispatcher needs to be started before popping.")
deadline = time.time() + timeout
while True:
#TODO: fix the sleep loop
results = self._match_and_pop(regex_pattern)
if len(results) != 0 or time.time() > deadline:
break
time.sleep(1)
if len(results) == 0:
raise queue.Empty('Timeout after {}s waiting for event: {}'.format(
timeout, regex_pattern))
return sorted(results, key=lambda event: event['time'])
def _match_and_pop(self, regex_pattern):
"""Pop one event from each of the event queues whose names
match (in a sense of regular expression) regex_pattern.
"""
results = []
self.lock.acquire()
for name in self.event_dict.keys():
if re.match(regex_pattern, name):
q = self.event_dict[name]
if q:
try:
results.append(q.get(False))
except:
pass
self.lock.release()
return results
def get_event_q(self, event_name):
"""Obtain the queue storing events of the specified name.
If no event of this name has been polled, wait for one to.
Returns:
queue: A queue storing all the events of the specified name.
None if timed out.
timeout: Number of seconds to wait for the operation.
Raises:
queue.Empty: Raised if the queue does not exist and timeout has
passed.
"""
self.lock.acquire()
        if event_name not in self.event_dict or self.event_dict[
                event_name] is None:
self.event_dict[event_name] = queue.Queue()
self.lock.release()
event_queue = self.event_dict[event_name]
return event_queue
def handle_subscribed_event(self, event_obj, event_name):
"""Execute the registered handler of an event.
Retrieve the handler and its arguments, and execute the handler in a
new thread.
Args:
event_obj: Json object of the event.
event_name: Name of the event to call handler for.
"""
handler, args = self.handlers[event_name]
self.executor.submit(handler, event_obj, *args)
def _handle(self, event_handler, event_name, user_args, event_timeout,
cond, cond_timeout):
"""Pop an event of specified type and calls its handler on it. If
condition is not None, block until condition is met or timeout.
"""
if cond:
cond.wait(cond_timeout)
event = self.pop_event(event_name, event_timeout)
return event_handler(event, *user_args)
def handle_event(self,
event_handler,
event_name,
user_args,
event_timeout=None,
cond=None,
cond_timeout=None):
"""Handle events that don't have registered handlers
In a new thread, poll one event of specified type from its queue and
execute its handler. If no such event exists, the thread waits until
one appears.
Args:
event_handler: Handler for the event, which should take at least
one argument - the event json object.
event_name: Name of the event to be handled.
user_args: User arguments for the handler; to be passed in after
the event json.
event_timeout: Number of seconds to wait for the event to come.
cond: A condition to wait on before executing the handler. Should
be a threading.Event object.
cond_timeout: Number of seconds to wait before the condition times
out. Never times out if None.
Returns:
worker: A concurrent.Future object associated with the handler.
If blocking call worker.result() is triggered, the handler
needs to return something to unblock.
"""
worker = self.executor.submit(self._handle, event_handler, event_name,
user_args, event_timeout, cond,
cond_timeout)
return worker
def pop_all(self, event_name):
"""Return and remove all stored events of a specified name.
Pops all events from their queue. May miss the latest ones.
If no event is available, return immediately.
Args:
event_name: Name of the events to be popped.
Returns:
results: List of the desired events.
Raises:
IllegalStateError: Raised if pop is called before the dispatcher
starts polling.
"""
if not self.started:
raise IllegalStateError(("Dispatcher needs to be started before "
"popping."))
results = []
try:
self.lock.acquire()
while True:
e = self.event_dict[event_name].get(block=False)
results.append(e)
except (queue.Empty, KeyError):
return results
finally:
self.lock.release()
def clear_events(self, event_name):
"""Clear all events of a particular name.
Args:
event_name: Name of the events to be popped.
"""
self.lock.acquire()
try:
q = self.get_event_q(event_name)
q.queue.clear()
except queue.Empty:
return
finally:
self.lock.release()
def clear_all_events(self):
"""Clear all event queues and their cached events."""
self.lock.acquire()
self.event_dict.clear()
self.lock.release()
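# A minimal end-to-end sketch; `sl4a_client` is assumed to be an established
# sl4a session object and 'SmsReceived' an event the device actually posts:
#
#   dispatcher = EventDispatcher(sl4a_client)
#   dispatcher.start()
#   try:
#       event = dispatcher.pop_event('SmsReceived', timeout=60)
#   finally:
#       dispatcher.clean_up()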
| apache-2.0 | -2,744,429,708,353,299,500 | 34.766744 | 79 | 0.572093 | false |
cavestruz/L500analysis | plotting/profiles/T_Vcirc_evolution/Vcirc_evolution/plot_Vcirc2_nu_binned_Vc500c.py | 1 | 3175 | from L500analysis.data_io.get_cluster_data import GetClusterData
from L500analysis.utils.utils import aexp2redshift
from L500analysis.plotting.tools.figure_formatting import *
from L500analysis.plotting.profiles.tools.profiles_percentile \
import *
from L500analysis.plotting.profiles.tools.select_profiles \
import nu_cut, prune_dict
from L500analysis.utils.constants import rbins
from derived_field_functions import *
color = matplotlib.cm.afmhot_r
aexps = [1.0,0.9,0.8,0.7,0.6,0.5,0.45,0.4,0.35]
nu_threshold = [2.3,2.7]
nu_label = r"%0.1f$\leq\nu_{500c}\leq$%0.1f"%(nu_threshold[0],nu_threshold[1])
db_name = 'L500_NR_0'
db_dir = '/home/babyostrich/Documents/Repos/L500analysis/'
profiles_list = ['r_mid',
'Vcirc2_Vc500c',
'M_dark', 'M_star', 'M_gas',
'R/R500c']
halo_properties_list=['r500c','M_total_500c','nu_500c']
Vcirc2ratioVc500c=r"$\tilde{V}=V^2_{c}/V^2_{c,500c}$"
fVcz1=r"$\tilde{V}/\tilde{V}(z=1)$"
pa = PlotAxes(figname='Vcirc2_Vc500c_nu%0.1f'%nu_threshold[0],
axes=[[0.15,0.4,0.80,0.55],[0.15,0.15,0.80,0.24]],
axes_labels=[Vcirc2ratioVc500c,fVcz1],
xlabel=r"$R/R_{500c}$",
xlim=(0.2,5),
ylims=[(0.6,1.4),(0.6,1.4)])
Vcirc2={}
clkeys = ['Vcirc2_Vc500c']
plots = [Vcirc2]
linestyles = ['-']
for aexp in aexps :
cldata = GetClusterData(aexp=aexp,db_name=db_name,
db_dir=db_dir,
profiles_list=profiles_list,
halo_properties_list=halo_properties_list)
nu_cut_hids = nu_cut(nu=cldata['nu_500c'], threshold=nu_threshold)
for plot, key in zip(plots,clkeys) :
pruned_profiles = prune_dict(d=cldata[key],k=nu_cut_hids)
plot[aexp] = calculate_profiles_mean_variance(pruned_profiles)
pa.axes[Vcirc2ratioVc500c].plot( rbins, Vcirc2[aexp]['mean'],color=color(aexp),
ls='-',label="$z=%3.1f$" % aexp2redshift(aexp))
pa.axes[Vcirc2ratioVc500c].fill_between(rbins, Vcirc2[0.5]['down'], Vcirc2[0.5]['up'],
color=color(0.5), zorder=0)
for aexp in aexps :
for V,ls in zip(plots,linestyles) :
fractional_evolution = get_profiles_division_mean_variance(
mean_profile1=V[aexp]['mean'],
var_profile1=V[aexp]['var'],
mean_profile2=V[0.5]['mean'],
var_profile2=V[0.5]['var'],
)
pa.axes[fVcz1].plot( rbins, fractional_evolution['mean'],
color=color(aexp),ls=ls)
pa.axes[Vcirc2ratioVc500c].annotate(nu_label, xy=(.75, .75), xytext=(.3, 1.3))
pa.axes[Vcirc2ratioVc500c].tick_params(labelsize=12)
pa.axes[fVcz1].tick_params(labelsize=12)
pa.axes[fVcz1].set_yticks(arange(0.6,1.4,0.2))
matplotlib.rcParams['legend.handlelength'] = 0
matplotlib.rcParams['legend.numpoints'] = 1
matplotlib.rcParams['legend.fontsize'] = 12
pa.set_legend(axes_label=Vcirc2ratioVc500c,ncol=3,loc='upper right', frameon=False)
pa.color_legend_texts(axes_label=Vcirc2ratioVc500c)
pa.savefig()
| mit | 385,859,252,746,646,600 | 36.352941 | 87 | 0.610394 | false |
tobiaghiraldini/DjangoBBB | docs/source/conf.py | 1 | 8027 | # -*- coding: utf-8 -*-
#
# Django Backbone Boilerplate documentation build configuration file, created by
# sphinx-quickstart on Thu Sep 19 19:15:23 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Django Backbone Boilerplate'
copyright = u'2013, Tobia Ghiraldini'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'DjangoBackboneBoilerplatedoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'DjangoBackboneBoilerplate.tex', u'Django Backbone Boilerplate Documentation',
u'Tobia Ghiraldini', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'djangobackboneboilerplate', u'Django Backbone Boilerplate Documentation',
[u'Tobia Ghiraldini'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'DjangoBackboneBoilerplate', u'Django Backbone Boilerplate Documentation',
u'Tobia Ghiraldini', 'DjangoBackboneBoilerplate', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| mit | 6,544,720,773,913,661,000 | 32.169421 | 122 | 0.710477 | false |
soleneulmer/atmos | indicators_molec.py | 1 | 4324 | # ===================================
# CALCULATES Ioff and Ires
# Indicators described in Molecfit II
#
# Solene 20.09.2016
# ===================================
#
import numpy as np
from astropy.io import fits
import matplotlib.pyplot as plt
# from PyAstronomy import pyasl
from scipy.interpolate import interp1d
from scipy.interpolate import InterpolatedUnivariateSpline
from scipy import stats
# from sklearn.metrics import mean_squared_error
# from math import sqrt
# from numpy import linalg as LA
# MOLECFIT
#
file_molecfit = '/home/solene/atmos/For_Solene/1203nm/output/molecfit_crires_solene_tac.fits'
hdu_molecfit = fits.open(file_molecfit)
data_molecfit = hdu_molecfit[1].data
cols_molecfit = hdu_molecfit[1].columns
# cols_molecfit.info()
rawwl_molecfit = data_molecfit.field('mlambda')
wl_molecfit = rawwl_molecfit*10e2
trans_molecfit = data_molecfit.field('mtrans')
cflux_molecfit = data_molecfit.field('cflux')
# TELFIT
#
file_telfit = '/home/solene/atmos/trans_telfit.txt'
wl_telfit, trans_telfit, wl_datatelfit, flux_datatelfit = np.loadtxt(
file_telfit, unpack=True)
# Interpolation
f_molecfit = interp1d(wl_molecfit, cflux_molecfit, kind='cubic')
ftrans_molecfit = interp1d(wl_molecfit, trans_molecfit, kind='cubic')
# f_tapas = interp1d(wlcorr_tapas, trans_tapas)
# **1** BINNED DATA
# 3 delta-lambda = 0.036
# Mean and std deviation of bins on the telluric CORRECTED spectrum
fluxmean_bin_means, bin_edges, binnumber = stats.binned_statistic(
wl_datatelfit, f_molecfit(wl_datatelfit), statistic='mean',
bins=np.floor((wl_datatelfit[-1]-wl_datatelfit[0])/0.036))
fluxstd_bin_means, _, _ = stats.binned_statistic(
wl_datatelfit, f_molecfit(wl_datatelfit), statistic=np.std,
bins=np.floor((wl_datatelfit[-1]-wl_datatelfit[0])/0.036))
bin_width = (bin_edges[1] - bin_edges[0])
bin_centers = bin_edges[1:] - bin_width/2
# **2** Bins where average TRANSMISSION is > 0.99
flux_trans_mean_bin_means, _, _ = stats.binned_statistic(
wl_datatelfit, ftrans_molecfit(wl_datatelfit), statistic='mean',
bins=np.floor((wl_datatelfit[-1]-wl_datatelfit[0])/0.036))
# cont_bin_means = flux_trans_mean_bin_means[flux_trans_mean_bin_means > 0.99]
ind_cont = np.where(flux_trans_mean_bin_means > 0.99)
ind_out = np.where((flux_trans_mean_bin_means < 0.95) &
(flux_trans_mean_bin_means > 0.1))
# plt.plot(bin_centers[ind_cont], flux_trans_mean_bin_means[ind_cont], 'kx')
# **3** Interpolation of the continuum cubic
# f_cont = interp1d(bin_centers[ind_cont], flux_trans_mean_bin_means[ind_cont], kind='cubic')
# Extrapolation with constant value spline
f_cont = InterpolatedUnivariateSpline(
bin_centers[ind_cont], flux_trans_mean_bin_means[ind_cont], ext=3)
# bbox=[bin_centers[ind_cont][0], bin_centers[ind_cont][-1]],
# **5** Subtract the continuum from the mean flux,
# then divide the offset and std by the interpolated continuum value
sys_offset = (fluxmean_bin_means - f_cont(bin_centers)) / f_cont(bin_centers)
flux_std = fluxstd_bin_means / f_cont(bin_centers)
# **6** independent of WL = divide by the average absorption
absorp_molecfit = 1 - flux_trans_mean_bin_means
sys_offset_final = sys_offset / absorp_molecfit
flux_std_final = flux_std / absorp_molecfit
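# In formula form (as implemented above): with continuum C, binned mean flux
# <F>, binned flux scatter sigma_F and binned mean transmission <T>,
#   I_off = (<F> - C) / (C * (1 - <T>))  and  I_res = sigma_F / (C * (1 - <T>)).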
plt.figure(1)
plt.plot(wl_datatelfit, flux_datatelfit, 'b.-', label='Raw data')
# plt.hlines(flux_bin_means, bin_edges[:-1],
# bin_edges[1:], colors='g', lw=5, label='binned statistic of data')
plt.plot(bin_centers, fluxmean_bin_means, 'rx-', label='Mean binned data')
plt.plot(bin_centers, fluxstd_bin_means, 'kx-', label='Standard deviation binned data')
plt.legend()
plt.figure(2)
plt.plot(wl_datatelfit, flux_datatelfit, 'g.-', label='Data 2nd detector')
plt.plot(wl_molecfit, trans_molecfit, 'r-', label='Molecfit')
plt.plot(wl_datatelfit, f_molecfit(wl_datatelfit),
'b-', label='Corrected data - Molecfit')
plt.plot(wl_datatelfit, f_cont(wl_datatelfit),
'k-', label='Interpolated Continuum')
plt.plot(sys_offset_final[ind_out], flux_std_final[ind_out], 'kx')
plt.plot(flux_trans_mean_bin_means[ind_out],
sys_offset_final[ind_out], 'kx', label='Ioff vs Transmission')
plt.plot(flux_trans_mean_bin_means[ind_out],
flux_std_final[ind_out], 'r.', label='Ires vs Transmission')
plt.xlabel('Wavelength (nm)')
plt.ylabel('Transmission')
plt.legend(loc=3.)
plt.show()
| mit | -8,536,585,623,544,594,000 | 39.037037 | 94 | 0.705365 | false |
Videoclases/videoclases | quality_control/views/api.py | 1 | 4172 | import random
from django.contrib.auth.decorators import user_passes_test
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.db.models.aggregates import Count
from django.http.response import JsonResponse, HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.utils.decorators import method_decorator
from django.views.generic.base import TemplateView
from django.views.generic.detail import DetailView
from django.core import serializers
from quality_control.models.quality_control import QualityControl
from videoclases.models.groupofstudents import GroupOfStudents
from videoclases.models.homework import Homework
from videoclases.models.student_evaluations import StudentEvaluations
def in_students_group(user):
if user:
return user.groups.filter(name='Alumnos').exists()
return False
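# A minimal sketch of how the view below is typically exposed in urls.py; the
# URL pattern and route name are assumptions, not taken from this project:
#
#   url(r'^tarea/(?P<pk>\d+)/videoclase/$',
#       GetVideoClaseView.as_view(), name='get_videoclase'),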
class GetVideoClaseView(DetailView):
template_name = 'blank.html'
model = Homework
def get(self, request, *args, **kwargs):
result = dict()
homework_base = self.get_object()
homework = homework_base
groups = GroupOfStudents.objects.filter(homework=homework)
student = self.request.user.student
if homework_base.homework_to_evaluate is not None:
homework = homework_base.homework_to_evaluate
groups = GroupOfStudents.objects.filter(homework=homework)
else:
group_student = get_object_or_404(GroupOfStudents, students=student, homework=homework)
groups = groups.exclude(id=group_student.id)
groups = groups \
.exclude(videoclase__video__isnull=True) \
.exclude(videoclase__video__exact='') \
.exclude(videoclase__answers__student=student) \
.annotate(revision=Count('videoclase__answers')) \
.order_by('revision', '?')
element_response = groups[0] if groups.exists() else None
control = QualityControl.objects.filter(homework=homework)
control = control[0] if control.exists() else None
if control:
evaluated_items = control.list_items.filter(videoclase__answers__student=student)
            # only hand out more quality-control items while the student has evaluated fewer than 3
if evaluated_items.count() < 3:
items = control.list_items.all() \
.exclude(videoclase__answers__student=student)
item_to_evaluate = items[random.randint(0, items.count()-1)] if items.exists() else None
if item_to_evaluate and element_response:
value_random = random.random()
                    # TODO: this needs to be a smarter filter
element_response = item_to_evaluate if value_random > 0.55 else element_response
elif item_to_evaluate:
element_response = item_to_evaluate
if element_response:
alternativas = [element_response.videoclase.correct_alternative,
element_response.videoclase.alternative_2,
element_response.videoclase.alternative_3]
random.shuffle(alternativas)
result['video'] = element_response.videoclase.video
result['question'] = element_response.videoclase.question
result['videoclase_id'] = element_response.videoclase.pk
result['alternativas'] = alternativas
result['redirect'] = False
else:
result['redirect'] = True
return JsonResponse(result)
def get_context_data(self, **kwargs):
context = super(GetVideoClaseView, self).get_context_data(**kwargs)
return context
@method_decorator(user_passes_test(in_students_group, login_url='/'))
def dispatch(self, *args, **kwargs):
obj = self.get_object()
hw = Homework.objects.filter(id=obj.id,course__students=self.request.user.student)
if hw.count() == 0:
messages.info(self.request, 'No tienes permisos para evaluar esta tarea.')
return HttpResponseRedirect(reverse('student'))
return super(GetVideoClaseView, self).dispatch(*args, **kwargs)
| gpl-3.0 | -7,057,957,377,848,098,000 | 45.355556 | 104 | 0.661793 | false |
JGrishey/MHLSim | pylib/simulation.py | 1 | 19119 | '''
Season Simulation
2017 Jacob Grishey
For the purpose of simulating sports seasons
and determining regular season standings.
'''
# IMPORTS
import json
import statistics
import numpy
from operator import itemgetter
import copy
# Read JSON file (schedule, team list)
with open("./../data/season.json") as jsonfile:
data = json.load(jsonfile)
# Parse JSON file
teams = {team: {'w': 0, 'l': 0, 'otl': 0, 'elo': 1500} for team in data['teams']}
schedule = data['schedule']
# Results
results = [{'name': team, 'seed1': 0, 'seed2': 0, 'seed3': 0, 'seed4': 0,
'seed5': 0, 'seed6': 0, 'seed7': 0, 'seed8': 0, 'aw': 0,
'al': 0, 'aotl': 0, 'r2': 0, 'r3': 0, 'cup': 0} for team in teams]
# Divisions
brent = ["Cape Cod Bluefins", "Trevor Phillips Industries",
"Inglorious Basterds", "Crack Smoking Monkeys",
"Moose Knuckles", "Hood Rich"]
drew = ["Small Sample Size", "The Bearded Bandits",
"Row Row Fight the Powah", "Motor City Machine Guns",
"Suck Our Dekes", "Tenacious V"]
# Separate past from future games
past = [game for game in schedule if game['h-score'] != -1]
future = [game for game in schedule if game['h-score'] == -1]
# Update teams with past results
for game in past:
if game['h-score'] > game['a-score']:
if game['ot'] == 1:
teams[game['home']]['w'] += 1
teams[game['away']]['otl'] += 1
else:
teams[game['home']]['w'] += 1
teams[game['away']]['l'] += 1
else:
if game['ot'] == 1:
teams[game['away']]['w'] += 1
teams[game['home']]['otl'] += 1
else:
teams[game['away']]['w'] += 1
teams[game['home']]['l'] += 1
# Expected Score function
#
# Given elo of team A and team B, calculate expected score of team A.
def expectedScoreA(eloA, eloB):
return 1 / (1 + 10 ** ((eloB - eloA) / 400))
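# Illustrative check of the formula above (values rounded): a 1500-rated team
# facing a 1600-rated opponent has an expected score of about 0.36, and two
# equally rated teams each expect 0.50.
#
#   expectedScoreA(1500, 1600)  # ~0.36
#   expectedScoreA(1500, 1500)  # 0.50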
# New Rating Function
#
# Given both teams' Elo ratings, the actual and expected scores, and the goal
# differential, calculate the team's new Elo rating.
def newRating(eloA, eloB, scoreActual, scoreExpected, goalDifferential, win):
# K-Factor
if eloA < 2100:
K = 32
elif eloA <= 2400:
K = 24
else:
K = 16
# Calculate for goal differential and autocorrelation
marginMult = numpy.log(goalDifferential + 1) * (2.2 / (abs(eloA - eloB) * 0.01 + 2.2))
# Return new rating
return eloA + (marginMult * K) * (scoreActual - scoreExpected)
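# Illustrative check of the update above (values rounded): a 1500-rated team
# beating a 1600-rated opponent by 2 goals (actual 1.0 vs. expected ~0.36)
# rises to roughly 1515.5, since K=32 and the margin multiplier is ~0.76.
#
#   newRating(1500, 1600, 1.0, 0.36, 2, True)  # ~1515.5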
# Update elo from past games
for game in past:
# Current Elo ratings
currentEloA = teams[game['home']]['elo']
currentEloB = teams[game['away']]['elo']
# Get Expected Scores
eA = expectedScoreA(currentEloA, currentEloB)
eB = 1 - eA
# Get scores
homeGoals = game['h-score']
awayGoals = game['a-score']
goalDifferential = abs(homeGoals - awayGoals)
# Get Actual Scores
if homeGoals > awayGoals:
if game['ot'] == 1:
sA = 1.0
sB = 0.5
winA = True
winB = False
else:
sA = 1.0
sB = 0.0
            winA = True
            winB = False
else:
if game['ot'] == 1:
sB = 1.0
sA = 0.5
            winA = False
            winB = True
else:
sB = 1.0
sA = 0.0
winA = False
winB = True
# Calculate new Elo ratings
newA = newRating(currentEloA, currentEloB, sA, eA, goalDifferential, winA)
newB = newRating(currentEloB, currentEloA, sB, eB, goalDifferential, winB)
# Apply Elo ratings
teams[game['home']]['elo'] = newA
teams[game['away']]['elo'] = newB
# Simulation
def runSeason (tempTeams):
for game in future:
# Current Elo ratings
currentEloA = tempTeams[game['home']]['elo']
currentEloB = tempTeams[game['away']]['elo']
# Get Expected Scores
eA = expectedScoreA(currentEloA, currentEloB)
eB = 1 - eA
# Random number between 0 and 1 to decide who wins.
decideWin = numpy.random.random()
# Random number between 0 and 1 to decide if it goes into Overtime.
decideOT = numpy.random.random()
# Actual Predicted Scores
if decideOT <= 0.233:
if decideWin <= eA:
sA = 1.0
tempTeams[game['home']]['w'] += 1
sB = 0.5
tempTeams[game['away']]['otl'] += 1
else:
sA = 0.5
tempTeams[game['home']]['otl'] += 1
sB = 1.0
tempTeams[game['away']]['w'] += 1
else:
if decideWin <= eA:
sA = 1.0
tempTeams[game['home']]['w'] += 1
sB = 0.0
tempTeams[game['away']]['l'] += 1
else:
sA = 0.0
tempTeams[game['home']]['l'] += 1
sB = 1.0
tempTeams[game['away']]['w'] += 1
# Calculate new Elo ratings
#newA = newRating(currentEloA, sA, eA)
#newB = newRating(currentEloB, sB, eB)
# Apply new Elo ratings
#tempTeams[game['home']]['elo'] = newA
#tempTeams[game['away']]['elo'] = newB
# End of Season standings
brentStandings = []
drewStandings = []
# Collect teams, calculate points.
for team in tempTeams:
if team in brent:
brentStandings.append({"name": team, "pts": tempTeams[team]['w'] * 2 + tempTeams[team]['otl']})
next(item for item in results if item["name"] == team)['aw'] += tempTeams[team]['w']
next(item for item in results if item["name"] == team)['al'] += tempTeams[team]['l']
next(item for item in results if item["name"] == team)['aotl'] += tempTeams[team]['otl']
else:
drewStandings.append({"name": team, "pts": tempTeams[team]['w'] * 2 + tempTeams[team]['otl']})
next(item for item in results if item["name"] == team)['aw'] += tempTeams[team]['w']
next(item for item in results if item["name"] == team)['al'] += tempTeams[team]['l']
next(item for item in results if item["name"] == team)['aotl'] += tempTeams[team]['otl']
# Sort by points
brentStandings = sorted(brentStandings, key=itemgetter('pts'), reverse=True)
drewStandings = sorted(drewStandings, key=itemgetter('pts'), reverse=True)
# Cut off top 2, then concat and sort by points
overall8 = sorted(brentStandings[2:] + drewStandings[2:], key=itemgetter('pts'), reverse=True)
# Playoff Seeding
playoffs = [{"seed": seed, "name": ""} for seed in range(1,9)]
# Get playoff teams
playoffTeams = sorted(brentStandings[:2] + drewStandings[:2] + overall8[:4],
key=itemgetter('pts'), reverse=True)
# Add Results
next(item for item in results if item["name"] == playoffTeams[0]['name'])['seed1'] += 1
next(item for item in results if item["name"] == playoffTeams[1]['name'])['seed2'] += 1
next(item for item in results if item["name"] == playoffTeams[2]['name'])['seed3'] += 1
next(item for item in results if item["name"] == playoffTeams[3]['name'])['seed4'] += 1
next(item for item in results if item["name"] == playoffTeams[4]['name'])['seed5'] += 1
next(item for item in results if item["name"] == playoffTeams[5]['name'])['seed6'] += 1
next(item for item in results if item["name"] == playoffTeams[6]['name'])['seed7'] += 1
next(item for item in results if item["name"] == playoffTeams[7]['name'])['seed8'] += 1
# Insert into seeds
for (team, i) in zip(playoffTeams, range(0, 8)):
playoffs[i]['name'] = team['name']
# Schedule first round games
firstRoundGames = []
firstRoundSeries = []
for i in range(0, 4):
firstRoundSeries.append({
'home': playoffs[i]['name'],
'away': playoffs[7-i]['name'],
'h-seed': playoffs[i]['seed'],
'a-seed': playoffs[7-i]['seed'],
'h-wins': 0,
'a-wins': 0
})
for k in range(0, 4):
firstRoundGames.append({
'home': playoffs[i]['name'],
'away': playoffs[7-i]['name']
})
# Simulate first round
for game in firstRoundGames:
# Current Elo ratings of both teams
homeElo = tempTeams[game['home']]['elo']
awayElo = tempTeams[game['away']]['elo']
# Win probabilities
eA = expectedScoreA(homeElo, awayElo)
eB = 1 - eA
# Decide win and OT
decideWin = numpy.random.random()
decideOT = numpy.random.random()
# Get series data
series = next(item for item in firstRoundSeries if item['home'] == game['home'])
# For scheduling purposes
previousLow = min([series['h-wins'], series['a-wins']])
# Simulate game
if decideOT <= 0.233:
if decideWin <= eA:
series['h-wins'] += 1
if min([series['h-wins'], series['a-wins']]) > previousLow:
firstRoundGames.append({
'home': game['home'],
'away': game['away']
})
sA = 1.0
sB = 0.5
else:
series['a-wins'] += 1
if min([series['h-wins'], series['a-wins']]) > previousLow:
firstRoundGames.append({
'home': game['home'],
'away': game['away']
})
sA = 0.5
sB = 1.0
else:
if decideWin <= eA:
series['h-wins'] += 1
if min([series['h-wins'], series['a-wins']]) > previousLow:
firstRoundGames.append({
'home': game['home'],
'away': game['away']
})
sA = 1.0
sB = 0.0
else:
series['a-wins'] += 1
if min([series['h-wins'], series['a-wins']]) > previousLow:
firstRoundGames.append({
'home': game['home'],
'away': game['away']
})
sA = 0.0
sB = 1.0
# Calculate new Elo ratings
#newA = newRating(homeElo, sA, eA)
#newB = newRating(awayElo, sB, eB)
# Apply new Elo ratings
#tempTeams[game['home']]['elo'] = newA
#tempTeams[game['away']]['elo'] = newB
# Collect series winners.
secondRoundTeams = []
for series in firstRoundSeries:
if series['h-wins'] == 4:
secondRoundTeams.append({'seed': series['h-seed'], 'name': series['home']})
next(item for item in results if item['name'] == series['home'])['r2'] += 1
else:
secondRoundTeams.append({'seed': series['a-seed'], 'name': series['away']})
next(item for item in results if item['name'] == series['away'])['r2'] += 1
secondRoundTeams = sorted(secondRoundTeams, key=itemgetter('seed'))
# Schedule second round games
secondRoundGames = []
secondRoundSeries = []
for i in range(0, 2):
secondRoundSeries.append({
'home': secondRoundTeams[i]['name'],
'away': secondRoundTeams[3-i]['name'],
'h-seed': secondRoundTeams[i]['seed'],
'a-seed': secondRoundTeams[3-i]['seed'],
'h-wins': 0,
'a-wins': 0
})
for k in range(0, 4):
secondRoundGames.append({
'home': secondRoundTeams[i]['name'],
'away': secondRoundTeams[3-i]['name']
})
# Simulate second round
for game in secondRoundGames:
# Current Elo ratings of both teams
homeElo = tempTeams[game['home']]['elo']
awayElo = tempTeams[game['away']]['elo']
# Win probabilities
eA = expectedScoreA(homeElo, awayElo)
eB = 1 - eA
# Decide win and OT
decideWin = numpy.random.random()
decideOT = numpy.random.random()
# Get series data
series = next(item for item in secondRoundSeries if item['home'] == game['home'])
# For scheduling purposes
previousLow = min([series['h-wins'], series['a-wins']])
# Simulate game
if decideOT <= 0.233:
if decideWin <= eA:
series['h-wins'] += 1
if min([series['h-wins'], series['a-wins']]) > previousLow:
secondRoundGames.append({
'home': game['home'],
'away': game['away']
})
sA = 1.0
sB = 0.5
else:
series['a-wins'] += 1
if min([series['h-wins'], series['a-wins']]) > previousLow:
secondRoundGames.append({
'home': game['home'],
'away': game['away']
})
sA = 0.5
sB = 1.0
else:
if decideWin <= eA:
series['h-wins'] += 1
if min([series['h-wins'], series['a-wins']]) > previousLow:
secondRoundGames.append({
'home': game['home'],
'away': game['away']
})
sA = 1.0
sB = 0.0
else:
series['a-wins'] += 1
if min([series['h-wins'], series['a-wins']]) > previousLow:
secondRoundGames.append({
'home': game['home'],
'away': game['away']
})
sA = 0.0
sB = 1.0
# Calculate new Elo ratings
#newA = newRating(homeElo, sA, eA)
#newB = newRating(awayElo, sB, eB)
# Apply new Elo ratings
#tempTeams[game['home']]['elo'] = newA
#tempTeams[game['away']]['elo'] = newB
# Collect series winners.
thirdRoundTeams = []
for series in secondRoundSeries:
if series['h-wins'] == 4:
thirdRoundTeams.append({'seed': series['h-seed'], 'name': series['home']})
next(item for item in results if item['name'] == series['home'])['r3'] += 1
else:
thirdRoundTeams.append({'seed': series['a-seed'], 'name': series['away']})
next(item for item in results if item['name'] == series['away'])['r3'] += 1
thirdRoundTeams = sorted(thirdRoundTeams, key=itemgetter('seed'))
    # Schedule third round games
thirdRoundGames = []
thirdRoundSeries = []
for i in range(0, 1):
thirdRoundSeries.append({
'home': thirdRoundTeams[i]['name'],
'away': thirdRoundTeams[1-i]['name'],
'h-seed': thirdRoundTeams[i]['seed'],
'a-seed': thirdRoundTeams[1-i]['seed'],
'h-wins': 0,
'a-wins': 0
})
for k in range(0, 4):
thirdRoundGames.append({
'home': thirdRoundTeams[i]['name'],
'away': thirdRoundTeams[1-i]['name']
})
# Simulate third round
for game in thirdRoundGames:
# Current Elo ratings of both teams
homeElo = tempTeams[game['home']]['elo']
awayElo = tempTeams[game['away']]['elo']
# Win probabilities
eA = expectedScoreA(homeElo, awayElo)
eB = 1 - eA
# Decide win and OT
decideWin = numpy.random.random()
decideOT = numpy.random.random()
# Get series data
series = next(item for item in thirdRoundSeries if item['home'] == game['home'])
# For scheduling purposes
previousLow = min([series['h-wins'], series['a-wins']])
# Simulate game
if decideOT <= 0.233:
if decideWin <= eA:
series['h-wins'] += 1
if min([series['h-wins'], series['a-wins']]) > previousLow:
thirdRoundGames.append({
'home': game['home'],
'away': game['away']
})
sA = 1.0
sB = 0.5
else:
series['a-wins'] += 1
if min([series['h-wins'], series['a-wins']]) > previousLow:
thirdRoundGames.append({
'home': game['home'],
'away': game['away']
})
sA = 0.5
sB = 1.0
else:
if decideWin <= eA:
series['h-wins'] += 1
if min([series['h-wins'], series['a-wins']]) > previousLow:
thirdRoundGames.append({
'home': game['home'],
'away': game['away']
})
sA = 1.0
sB = 0.0
else:
series['a-wins'] += 1
if min([series['h-wins'], series['a-wins']]) > previousLow:
thirdRoundGames.append({
'home': game['home'],
'away': game['away']
})
sA = 0.0
sB = 1.0
# Calculate new Elo ratings
#newA = newRating(homeElo, sA, eA)
#newB = newRating(awayElo, sB, eB)
# Apply new Elo ratings
#tempTeams[game['home']]['elo'] = newA
#tempTeams[game['away']]['elo'] = newB
# Collect series winners.
cupWinner = []
for series in thirdRoundSeries:
if series['h-wins'] == 4:
cupWinner.append({'seed': series['h-seed'], 'name': series['home']})
next(item for item in results if item['name'] == series['home'])['cup'] += 1
else:
cupWinner.append({'seed': series['a-seed'], 'name': series['away']})
next(item for item in results if item['name'] == series['away'])['cup'] += 1
# Run simulation 100,000 times.
for i in range(0, 100000):
runSeason(copy.deepcopy(teams))
# Calculate average season.
for team in results:
team['aw'] /= 100000
team['al'] /= 100000
team['aotl'] /= 100000
# Add division info to each team.
for team in teams:
if team in brent:
teams[team]['division'] = "Brent"
else:
teams[team]['division'] = "Drew"
next(item for item in results if item["name"] == team)['w'] = teams[team]['w']
next(item for item in results if item["name"] == team)['l'] = teams[team]['l']
next(item for item in results if item["name"] == team)['otl'] = teams[team]['otl']
next(item for item in results if item["name"] == team)['elo'] = teams[team]['elo']
next(item for item in results if item["name"] == team)['division'] = teams[team]['division']
# Write results to outfile.
with open('./../data/results.json', 'w') as outfile:
json.dump(results, outfile, indent=4) | mit | -6,972,750,639,965,787,000 | 33.021352 | 107 | 0.500078 | false |
PuZheng/lejian-backend | lejian/apis/model_wrapper.py | 1 | 3211 | # -*- coding: UTF-8 -*-
import types
import inspect
import traceback
class _MyAttributeError(Exception):
pass
def convert_attribute_error(f):
def f_(*args, **kwargs):
try:
return f(*args, **kwargs)
except AttributeError, e:
print "~" * 78
traceback.print_exc()
print "~" * 78
raise _MyAttributeError(e)
return f_
class _FGet(object):
def __init__(self, attr):
self.attr = attr
def __call__(self, wrapper):
return wraps(convert_attribute_error(self.attr.fget)(wrapper))
def wraps(obj):
if isinstance(obj, types.ListType) or isinstance(obj, types.TupleType):
return obj.__class__(wraps(obj_) for obj_ in obj)
if hasattr(obj.__class__, '_sa_class_manager'):
try:
return _wrappers[obj.__class__.__name__ + "Wrapper"](obj)
except KeyError:
return obj
return obj
def unwraps(obj):
if isinstance(obj, types.ListType) or isinstance(obj, types.TupleType):
return obj.__class__(unwraps(obj_) for obj_ in obj)
if isinstance(obj, ModelWrapper):
return obj.obj
return obj
_wrappers = {}
class ModelWrapper(object):
class __metaclass__(type):
def __init__(cls, name, bases, nmspc):
type.__init__(cls, name, bases, nmspc)
# register wrappers
_wrappers[cls.__name__] = cls
# decorate wrapper's method:
#
# * convert result object(s) to wrapper(s)
# * convert attribute error, otherwise the underlying object
# will be searched, and finally make bizzare result
for name, attr in cls.__dict__.items():
if isinstance(attr, property) and name not in {'obj'}:
setattr(cls, name, property(fget=_FGet(attr),
fset=attr.fset,
fdel=attr.fdel))
                elif inspect.ismethod(attr) and name not in {'__getattr__',
'__setattr__',
'__unicode__'}:
old = convert_attribute_error(getattr(cls, name))
setattr(cls, name, lambda wrapper, *args,
**kwargs: wraps(old(wrapper, *args, **kwargs)))
def __init__(self, obj):
self.__obj = obj
@property
def obj(self):
return self.__obj
def __getattr__(self, name):
attr = getattr(self.__obj, name)
if isinstance(attr, types.ListType) or isinstance(attr,
types.TupleType):
return type(attr)(wraps(i) for i in attr)
return wraps(attr)
def __setattr__(self, key, value):
# TODO when only key is defined in wrapped object
if key != '_ModelWrapper__obj':
self.__obj.__setattr__(key, value)
else:
self.__dict__[key] = value
def __unicode__(self):
return unicode(self.__obj)
def __dir__(self):
return self.__obj.__dict__.keys()
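# Minimal usage sketch (`user` and `UserWrapper` are hypothetical names, not
# defined anywhere in this project):
#
#   class UserWrapper(ModelWrapper):
#       @property
#       def display_name(self):
#           return self.name.title()
#
#   wrapped = wraps(user)            # returns a UserWrapper when one is registered
#   wrapped.display_name             # property defined on the wrapper
#   wrapped.name                     # other attributes fall through to `user`
#   assert unwraps(wrapped) is user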
| mit | 353,604,400,444,146,900 | 29.875 | 76 | 0.507319 | false |
flyingbanana1024102/transmission-line-simulator | src/views/materialwidget.py | 1 | 2230 | #
# Transmission Line Simulator
#
# Author(s): Jiacong Xu
# Created: Jun-28-2017
#
from kivy.uix.widget import Widget
from kivy.properties import *
from kivy.clock import Clock
from kivy.graphics.texture import Texture
from kivy.graphics import *
from PIL import Image, ImageDraw, ImageFilter
class MaterialWidget(Widget):
"""
The basic UI element layout, automatically draws and updates its shadows.
raised: whether this widget has an edge and shadow.
"""
keyShadowTexture = ObjectProperty(None)
ambientShadowTexture = ObjectProperty(None)
raised = BooleanProperty(True)
clipSubviews = BooleanProperty(False)
elevation = NumericProperty(2.0)
backgroundColor = ListProperty([1, 1, 1, 1])
def __init__(self, **kwargs):
super(MaterialWidget, self).__init__(**kwargs)
def on_size(self, *args, **kwargs):
self._updateShadow()
def on_pos(self, *args, **kwargs):
self._updateShadow()
def on_elevation(self, *args, **kwargs):
self._updateShadow()
def _updateShadow(self):
# Shadow 1
offset_y = self.elevation
radius = self.elevation / 2.0
t1 = self._genShadow(self.size[0], self.size[1], radius, 0.26)
self.keyShadowTexture = t1
# Shadow 2
radius = self.elevation
t2 = self._genShadow(self.size[0], self.size[1], radius, 0.05)
self.ambientShadowTexture = t2
def _genShadow(self, ow, oh, radius, alpha):
# We need a bigger texture to correctly blur the edges
w = ow + radius * 6.0
h = oh + radius * 6.0
w = int(w)
h = int(h)
texture = Texture.create(size=(w, h), colorfmt='rgba')
im = Image.new('RGBA', (w, h), color=(1, 1, 1, 0))
draw = ImageDraw.Draw(im)
# the rectangle to be rendered needs to be centered on the texture
x0, y0 = (w - ow) / 2., (h - oh) / 2.
x1, y1 = x0 + ow - 1, y0 + oh - 1
draw.rectangle((x0, y0, x1, y1), fill=(0, 0, 0, int(255 * alpha)))
im = im.filter(ImageFilter.GaussianBlur(radius))
texture.blit_buffer(im.tobytes(), colorfmt='rgba', bufferfmt='ubyte')
return texture
| mit | -3,297,739,092,324,212,700 | 26.530864 | 77 | 0.604933 | false |
davy39/eric | Plugins/VcsPlugins/vcsMercurial/HgImportDialog.py | 1 | 3032 | # -*- coding: utf-8 -*-
# Copyright (c) 2011 - 2014 Detlev Offenbach <[email protected]>
#
"""
Module implementing a dialog to enter data for the Mercurial import command.
"""
from __future__ import unicode_literals
from PyQt5.QtCore import pyqtSlot, QDateTime
from PyQt5.QtWidgets import QDialog, QDialogButtonBox
from E5Gui import E5FileDialog
from E5Gui.E5Completers import E5FileCompleter
from .Ui_HgImportDialog import Ui_HgImportDialog
import Utilities
import UI.PixmapCache
class HgImportDialog(QDialog, Ui_HgImportDialog):
"""
Class implementing a dialog to enter data for the Mercurial import command.
"""
def __init__(self, parent=None):
"""
Constructor
@param parent reference to the parent widget (QWidget)
"""
super(HgImportDialog, self).__init__(parent)
self.setupUi(self)
self.patchFileButton.setIcon(UI.PixmapCache.getIcon("open.png"))
self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(False)
self.__patchFileCompleter = E5FileCompleter(self.patchFileEdit)
self.__initDateTime = QDateTime.currentDateTime()
self.dateEdit.setDateTime(self.__initDateTime)
def __updateOK(self):
"""
Private slot to update the OK button.
"""
enabled = True
if self.patchFileEdit.text() == "":
enabled = False
self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(enabled)
@pyqtSlot(str)
def on_patchFileEdit_textChanged(self, txt):
"""
Private slot to react on changes of the patch file edit.
@param txt contents of the line edit (string)
"""
self.__updateOK()
@pyqtSlot()
def on_patchFileButton_clicked(self):
"""
Private slot called by pressing the file selection button.
"""
fn = E5FileDialog.getOpenFileName(
self,
self.tr("Select patch file"),
self.patchFileEdit.text(),
self.tr("Patch Files (*.diff *.patch);;All Files (*)"))
if fn:
self.patchFileEdit.setText(Utilities.toNativeSeparators(fn))
def getParameters(self):
"""
Public method to retrieve the import data.
@return tuple naming the patch file, a flag indicating to not commit,
a commit message, a commit date, a commit user, a strip count and
a flag indicating to enforce the import
(string, boolean, string, string, string, integer, boolean)
"""
if self.dateEdit.dateTime() != self.__initDateTime:
date = self.dateEdit.dateTime().toString("yyyy-MM-dd hh:mm")
else:
date = ""
return (self.patchFileEdit.text(), self.noCommitCheckBox.isChecked(),
self.messageEdit.toPlainText(), date, self.userEdit.text(),
self.stripSpinBox.value(), self.forceCheckBox.isChecked())
| gpl-3.0 | -8,518,808,736,762,301,000 | 30.915789 | 79 | 0.616755 | false |
vladimir-ipatov/ganeti | test/py/cmdlib/backup_unittest.py | 1 | 7854 | #!/usr/bin/python
#
# Copyright (C) 2013 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""Tests for LUBackup*"""
from ganeti import constants
from ganeti import objects
from ganeti import opcodes
from ganeti import query
from testsupport import *
import testutils
class TestLUBackupQuery(CmdlibTestCase):
def setUp(self):
super(TestLUBackupQuery, self).setUp()
self.fields = query._BuildExportFields().keys()
def testFailingExportList(self):
self.rpc.call_export_list.return_value = \
self.RpcResultsBuilder() \
.AddFailedNode(self.master) \
.Build()
op = opcodes.OpBackupQuery(nodes=[self.master.name])
ret = self.ExecOpCode(op)
self.assertEqual({self.master.name: False}, ret)
def testQueryOneNode(self):
self.rpc.call_export_list.return_value = \
self.RpcResultsBuilder() \
.AddSuccessfulNode(self.master,
["mock_export1", "mock_export2"]) \
.Build()
op = opcodes.OpBackupQuery(nodes=[self.master.name])
ret = self.ExecOpCode(op)
self.assertEqual({self.master.name: ["mock_export1", "mock_export2"]}, ret)
def testQueryAllNodes(self):
node = self.cfg.AddNewNode()
self.rpc.call_export_list.return_value = \
self.RpcResultsBuilder() \
.AddSuccessfulNode(self.master, ["mock_export1"]) \
.AddSuccessfulNode(node, ["mock_export2"]) \
.Build()
op = opcodes.OpBackupQuery()
ret = self.ExecOpCode(op)
self.assertEqual({
self.master.name: ["mock_export1"],
node.name: ["mock_export2"]
}, ret)
class TestLUBackupPrepare(CmdlibTestCase):
@patchUtils("instance_utils")
def testPrepareLocalExport(self, utils):
utils.ReadOneLineFile.return_value = "cluster_secret"
inst = self.cfg.AddNewInstance()
op = opcodes.OpBackupPrepare(instance_name=inst.name,
mode=constants.EXPORT_MODE_LOCAL)
self.ExecOpCode(op)
@patchUtils("instance_utils")
def testPrepareRemoteExport(self, utils):
utils.ReadOneLineFile.return_value = "cluster_secret"
inst = self.cfg.AddNewInstance()
self.rpc.call_x509_cert_create.return_value = \
self.RpcResultsBuilder() \
.CreateSuccessfulNodeResult(inst.primary_node,
("key_name",
testutils.ReadTestData("cert1.pem")))
op = opcodes.OpBackupPrepare(instance_name=inst.name,
mode=constants.EXPORT_MODE_REMOTE)
self.ExecOpCode(op)
class TestLUBackupExportBase(CmdlibTestCase):
def setUp(self):
super(TestLUBackupExportBase, self).setUp()
self.rpc.call_instance_start.return_value = \
self.RpcResultsBuilder() \
.CreateSuccessfulNodeResult(self.master, True)
self.rpc.call_blockdev_assemble.return_value = \
self.RpcResultsBuilder() \
.CreateSuccessfulNodeResult(self.master, ("/dev/mock_path",
"/dev/mock_link_name"))
self.rpc.call_blockdev_shutdown.return_value = \
self.RpcResultsBuilder() \
.CreateSuccessfulNodeResult(self.master, None)
self.rpc.call_blockdev_snapshot.return_value = \
self.RpcResultsBuilder() \
.CreateSuccessfulNodeResult(self.master, ("mock_vg", "mock_id"))
self.rpc.call_blockdev_remove.return_value = \
self.RpcResultsBuilder() \
.CreateSuccessfulNodeResult(self.master, None)
self.rpc.call_export_start.return_value = \
self.RpcResultsBuilder() \
.CreateSuccessfulNodeResult(self.master, "export_daemon")
def ImpExpStatus(node_uuid, name):
return self.RpcResultsBuilder() \
.CreateSuccessfulNodeResult(node_uuid,
[objects.ImportExportStatus(
exit_status=0
)])
self.rpc.call_impexp_status.side_effect = ImpExpStatus
def ImpExpCleanup(node_uuid, name):
return self.RpcResultsBuilder() \
.CreateSuccessfulNodeResult(node_uuid)
self.rpc.call_impexp_cleanup.side_effect = ImpExpCleanup
self.rpc.call_finalize_export.return_value = \
self.RpcResultsBuilder() \
.CreateSuccessfulNodeResult(self.master, None)
def testRemoveRunningInstanceWithoutShutdown(self):
inst = self.cfg.AddNewInstance(admin_state=constants.ADMINST_UP)
op = opcodes.OpBackupExport(instance_name=inst.name,
target_node=self.master.name,
shutdown=False,
remove_instance=True)
self.ExecOpCodeExpectOpPrereqError(
op, "Can not remove instance without shutting it down before")
def testUnsupportedDiskTemplate(self):
inst = self.cfg.AddNewInstance(disk_template=constants.DT_FILE)
op = opcodes.OpBackupExport(instance_name=inst.name,
target_node=self.master.name)
self.ExecOpCodeExpectOpPrereqError(
op, "Export not supported for instances with file-based disks")
class TestLUBackupExportLocalExport(TestLUBackupExportBase):
def setUp(self):
super(TestLUBackupExportLocalExport, self).setUp()
self.inst = self.cfg.AddNewInstance()
self.target_node = self.cfg.AddNewNode()
self.op = opcodes.OpBackupExport(mode=constants.EXPORT_MODE_LOCAL,
instance_name=self.inst.name,
target_node=self.target_node.name)
self.rpc.call_import_start.return_value = \
self.RpcResultsBuilder() \
.CreateSuccessfulNodeResult(self.target_node, "import_daemon")
def testExportWithShutdown(self):
inst = self.cfg.AddNewInstance(admin_state=constants.ADMINST_UP)
op = self.CopyOpCode(self.op, instance_name=inst.name, shutdown=True)
self.ExecOpCode(op)
def testExportDeactivatedDisks(self):
self.ExecOpCode(self.op)
def testExportRemoveInstance(self):
op = self.CopyOpCode(self.op, remove_instance=True)
self.ExecOpCode(op)
class TestLUBackupExportRemoteExport(TestLUBackupExportBase):
def setUp(self):
super(TestLUBackupExportRemoteExport, self).setUp()
self.inst = self.cfg.AddNewInstance()
self.op = opcodes.OpBackupExport(mode=constants.EXPORT_MODE_REMOTE,
instance_name=self.inst.name,
target_node=[],
x509_key_name=["mock_key_name"],
destination_x509_ca="mock_dest_ca")
def testRemoteExportWithoutX509KeyName(self):
op = self.CopyOpCode(self.op, x509_key_name=self.REMOVE)
self.ExecOpCodeExpectOpPrereqError(op,
"Missing X509 key name for encryption")
def testRemoteExportWithoutX509DestCa(self):
op = self.CopyOpCode(self.op, destination_x509_ca=self.REMOVE)
self.ExecOpCodeExpectOpPrereqError(op,
"Missing destination X509 CA")
if __name__ == "__main__":
testutils.GanetiTestProgram()
| gpl-2.0 | -9,201,990,232,592,008,000 | 36.4 | 79 | 0.647441 | false |
dpmehta02/linkedin-scrapy | linkedin/spiders/linkedin_spider.py | 1 | 3589 | from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import HtmlXPathSelector
from scrapy.http import Request
from linkedin.items import LinkedinItem
class LinkedinSpider(CrawlSpider):
"""
Define the crawler's start URIs, set its follow rules, parse HTML
and assign values to an item. Processing occurs in ../pipelines.py
"""
name = "linkedin"
allowed_domains = ["linkedin.com"]
# Uncomment the following lines for full spidering
'''
centilist_one = (i for i in xrange(1,100))
centilist_two = (i for i in xrange(1,100))
centilist_three = (i for i in xrange(1,100))
start_urls = ["http://www.linkedin.com/directory/people-%s-%d-%d-%d"
% (alphanum, num_one, num_two, num_three)
for alphanum in "abcdefghijklmnopqrstuvwxyz"
for num_one in centilist_one
for num_two in centilist_two
for num_three in centilist_three
]
'''
# Temporary start_urls for testing; remove and use the above start_urls in production
start_urls = ["http://www.linkedin.com/directory/people-a-23-23-2"]
# TODO: allow /in/name urls too?
    rules = (Rule(SgmlLinkExtractor(allow=('\/pub\/.+')), callback='parse_item'),)
def parse_item(self, response):
if response:
hxs = HtmlXPathSelector(response)
item = LinkedinItem()
# TODO: is this the best way to check that we're scraping the right page?
item['full_name'] = hxs.select('//*[@id="name"]/span/span/text()').extract()
if not item['full_name']:
# recursively parse list of duplicate profiles
# NOTE: Results page only displays 25 of possibly many more names;
# LinkedIn requests authentication to see the rest. Need to resolve
# TODO: add error checking here to ensure I'm getting the right links
# and links from "next>>" pages
                multi_profile_urls = hxs.select(
                    '//*[@id="result-set"]/li/h2/strong/a/@href').extract()
for profile_url in multi_profile_urls:
yield Request(profile_url, callback=self.parse_item)
else:
                (item['first_name'],
                 item['last_name'],
                 item['full_name'],
                 item['headline_title'],
                 item['locality'],
                 item['industry'],
                 item['current_roles']) = (
                    item['full_name'][0],
                    item['full_name'][1],
                    hxs.select('//*[@id="name"]/span/span/text()').extract(),
                    hxs.select('//*[@id="member-1"]/p/text()').extract(),
                    hxs.select('//*[@id="headline"]/dd[1]/span/text()').extract(),
                    hxs.select('//*[@id="headline"]/dd[2]/text()').extract(),
                    hxs.select('//*[@id="overview"]/dd[1]/ul/li/text()').extract())
# TODO: add metadata fields
if hxs.select('//*[@id="overview"]/dt[2]/text()').extract() == [u' \n Education\n ']:
item['education_institutions'] = hxs.select('//*[@id="overview"]/dd[2]/ul/li/text()').extract()
print item
else:
print "Uh oh, no response."
return
| mit | -8,053,537,752,368,067,000 | 46.223684 | 115 | 0.528281 | false |
blaze/dask | dask/dataframe/hyperloglog.py | 3 | 2433 | """Implementation of HyperLogLog
This implements the HyperLogLog algorithm for cardinality estimation, found
in
    Philippe Flajolet, Éric Fusy, Olivier Gandouet and Frédéric Meunier.
"HyperLogLog: the analysis of a near-optimal cardinality estimation
algorithm". 2007 Conference on Analysis of Algorithms. Nice, France
(2007)
"""
import numpy as np
import pandas as pd
from pandas.util import hash_pandas_object
def compute_first_bit(a):
"Compute the position of the first nonzero bit for each int in an array."
# TODO: consider making this less memory-hungry
bits = np.bitwise_and.outer(a, 1 << np.arange(32))
bits = bits.cumsum(axis=1).astype(bool)
return 33 - bits.sum(axis=1)
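# Worked example: positions are 1-indexed from the least significant bit, and a
# value of 0 maps to 33 (none of the 32 candidate bits are set), e.g.
#   compute_first_bit(np.array([1, 4, 0], dtype=np.uint32)) -> array([ 1,  3, 33])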
def compute_hll_array(obj, b):
# b is the number of bits
if not 8 <= b <= 16:
raise ValueError("b should be between 8 and 16")
num_bits_discarded = 32 - b
m = 1 << b
# Get an array of the hashes
hashes = hash_pandas_object(obj, index=False)
if isinstance(hashes, pd.Series):
hashes = hashes._values
hashes = hashes.astype(np.uint32)
# Of the first b bits, which is the first nonzero?
j = hashes >> num_bits_discarded
first_bit = compute_first_bit(hashes)
# Pandas can do the max aggregation
df = pd.DataFrame({"j": j, "first_bit": first_bit})
series = df.groupby("j").max()["first_bit"]
# Return a dense array so we can concat them and get a result
# that is easy to deal with
return series.reindex(np.arange(m), fill_value=0).values.astype(np.uint8)
def reduce_state(Ms, b):
m = 1 << b
# We concatenated all of the states, now we need to get the max
# value for each j in both
Ms = Ms.reshape((len(Ms) // m), m)
return Ms.max(axis=0)
def estimate_count(Ms, b):
m = 1 << b
# Combine one last time
M = reduce_state(Ms, b)
# Estimate cardinality, no adjustments
alpha = 0.7213 / (1 + 1.079 / m)
E = alpha * m / (2.0 ** -(M.astype("f8"))).sum() * m
# ^^^^ starts as unsigned, need a signed type for
# negation operator to do something useful
# Apply adjustments for small / big cardinalities, if applicable
if E < 2.5 * m:
V = (M == 0).sum()
if V:
return m * np.log(m / V)
if E > 2 ** 32 / 30.0:
return -(2 ** 32) * np.log1p(-E / 2 ** 32)
return E
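# Putting the pieces together (an illustrative sketch of how these helpers are
# meant to be combined on partitioned data; `parts`, a list of pandas objects,
# is assumed here and not defined in this module):
#
#   b = 12                                                  # 2**12 = 4096 registers
#   Ms = np.concatenate([compute_hll_array(p, b) for p in parts])
#   approx_nunique = estimate_count(Ms, b)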
| bsd-3-clause | -5,806,826,853,938,484,000 | 29.375 | 77 | 0.615638 | false |
dls-controls/pymalcolm | tests/test_modules/test_ADCore/test_exposuredeadtimepart.py | 1 | 2080 | import unittest
from mock import MagicMock, call
from scanpointgenerator import CompoundGenerator, LineGenerator
from malcolm.core import PartRegistrar
from malcolm.modules.scanning.parts import ExposureDeadtimePart
def make_generator(duration):
line1 = LineGenerator("y", "mm", 0, 2, 3)
line2 = LineGenerator("x", "mm", 0, 2, 2)
compound = CompoundGenerator([line1, line2], [], [], duration=duration)
return compound
class TestExposureDeadtimePart(unittest.TestCase):
def setUp(self):
self.o = ExposureDeadtimePart(name="n", min_exposure=0.01)
def test_init(self):
registrar = MagicMock(spec=PartRegistrar)
self.o.setup(registrar)
assert registrar.add_attribute_model.mock_calls == [
call("readoutTime", self.o.readout_time, self.o.readout_time.set_value),
call(
"frequencyAccuracy",
self.o.frequency_accuracy,
self.o.frequency_accuracy.set_value,
),
call("exposure", self.o.exposure),
]
assert self.o.exposure.value == 0.0
def test_validate_exposure_too_fast(self):
tweak = self.o.on_validate(
generator=make_generator(duration=0.1), exposure=0.001
)
assert tweak.parameter == "exposure"
assert tweak.value == 0.01
def test_validate_no_duration(self):
with self.assertRaises(AssertionError) as cm:
self.o.on_validate(generator=make_generator(duration=0.0))
assert (
str(cm.exception)
== "Duration 0.0 for generator must be >0 to signify constant exposure"
)
def test_good_validate(self):
self.o.on_validate(generator=make_generator(duration=0.1))
def test_configure(self):
self.o.on_configure(exposure=0.099995)
assert self.o.exposure.value == 0.099995
def test_report_status(self):
info = self.o.on_report_status()
assert info.readout_time == 0.0
assert info.frequency_accuracy == 50
assert info.min_exposure == 0.01
| apache-2.0 | -5,400,094,156,738,968,000 | 33.098361 | 84 | 0.636538 | false |
ros/catkin | cmake/test/download_checkmd5.py | 1 | 5773 | from __future__ import print_function
import errno
import hashlib
import os
import sys
try:
from urllib.request import addinfourl, BaseHandler, build_opener, Request, URLError
except ImportError:
from urllib2 import addinfourl, BaseHandler, build_opener, Request, URLError
from argparse import ArgumentParser
NAME = 'download_checkmd5.py'
class HTTPRangeHandler(BaseHandler):
def http_error_206(self, req, fp, code, msg, hdrs):
r = addinfourl(fp, hdrs, req.get_full_url())
r.code = code
r.msg = msg
return r
def http_error_416(self, req, fp, code, msg, hdrs):
raise URLError('Requested Range Not Satisfiable')
def download_with_resume(uri, dest):
handler = HTTPRangeHandler()
opener = build_opener(handler)
offset = 0
content_length = None
accept_ranges = False
while True:
req = Request(uri)
if offset:
req.add_header('Range', 'bytes=%d-' % offset)
src_file = None
try:
src_file = opener.open(req)
headers = src_file.info()
if not offset:
# on first connection check server capabilities
if 'Content-Length' in headers:
content_length = int(headers['Content-Length'])
if 'Accept-Ranges' in headers:
accept_ranges = headers['Accept-Ranges'] != 'none'
else:
# on resume verify that server understood range header and responded accordingly
if 'Content-Range' not in headers:
raise IOError('Download aborted and server does not support resuming download')
if int(headers['Content-Range'][len('bytes '):].split('-')[0]) != offset:
raise IOError('Download aborted because server replied with different content range then requested')
sys.stdout.write(' resume from %d...' % offset)
sys.stdout.flush()
with open(dest, 'ab' if offset else 'wb') as dst_file:
progress = False
while True:
data = src_file.read(8192)
if not data:
break
progress = True
dst_file.write(data)
offset += len(data)
if not progress:
# if no bytes have been received abort download
raise IOError("No progress when trying to download '%s'" % uri)
except Exception:
if src_file:
src_file.close()
raise
# when content length is unknown it is assumed that the download is complete
if content_length is None:
break
# or when enough data has been downloaded (> is especially a valid case)
if offset >= content_length:
break
if not accept_ranges:
raise IOError('Server does not accept ranges to resume download')
def download_md5(uri, dest):
"""Download file from uri to file dest."""
# Create intermediate directories as necessary, #2970
dirname = os.path.dirname(dest)
if len(dirname):
try:
os.makedirs(dirname)
except OSError as e:
if e.errno != errno.EEXIST:
raise
sys.stdout.write('Downloading %s to %s...' % (uri, dest))
sys.stdout.flush()
try:
download_with_resume(uri, dest)
sys.stdout.write(' done.\n')
except Exception as e:
# delete partially downloaded data
if os.path.exists(dest):
os.unlink(dest)
sys.stdout.write(' failed (%s)!\n' % e)
raise
def checkmd5(dest, md5sum=None):
"""
Check file at dest against md5.
:returns (boolean, hexdigest): True if dest contents matches md5sum
"""
if not os.path.exists(dest):
return False, 'null'
with open(dest, 'rb') as f:
md5value = hashlib.md5()
while True:
buf = f.read(4096)
if not buf:
break
md5value.update(buf)
hexdigest = md5value.hexdigest()
print('Checking md5sum on %s' % (dest))
return hexdigest == md5sum, hexdigest
def main(argv=sys.argv[1:]):
"""Dowloads URI to file dest and checks md5 if given."""
parser = ArgumentParser(description='Dowloads URI to file dest. If md5sum is given, checks md5sum. If file existed and mismatch, downloads and checks again')
parser.add_argument('uri')
parser.add_argument('dest')
parser.add_argument('md5sum', nargs='?')
parser.add_argument('--ignore-error', action='store_true', help='Ignore download errors')
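    # Example invocation (the URL, destination path, and checksum below are
    # placeholders, not real project artifacts):
    #   download_checkmd5.py http://example.com/archive.tar.gz /tmp/archive.tar.gz d41d8cd98f00b204e9800998ecf8427e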
args = parser.parse_args(argv)
uri = args.uri
if '://' not in uri:
uri = 'file://' + uri
fresh = False
if not os.path.exists(args.dest):
try:
download_md5(uri, args.dest)
except Exception:
if args.ignore_error:
return 0
raise
fresh = True
if args.md5sum:
result, hexdigest = checkmd5(args.dest, args.md5sum)
if result is False and fresh is False:
print('WARNING: md5sum mismatch (%s != %s); re-downloading file %s' % (hexdigest, args.md5sum, args.dest))
os.remove(args.dest)
try:
download_md5(uri, args.dest)
except Exception:
if args.ignore_error:
return 0
raise
result, hexdigest = checkmd5(args.dest, args.md5sum)
if result is False:
return 'ERROR: md5sum mismatch (%s != %s) on %s; aborting' % (hexdigest, args.md5sum, args.dest)
return 0
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause | -8,443,107,470,869,263,000 | 32.760234 | 161 | 0.57076 | false |
pfmoore/invoke | invoke/parser/context.py | 1 | 9145 | import itertools
from ..vendor.lexicon import Lexicon
from .argument import Argument
def translate_underscores(name):
return name.lstrip('_').rstrip('_').replace('_', '-')
def to_flag(name):
name = translate_underscores(name)
if len(name) == 1:
return '-' + name
return '--' + name
def sort_candidate(arg):
names = arg.names
# TODO: is there no "split into two buckets on predicate" builtin?
shorts = set(x for x in names if len(x.strip('-')) == 1)
longs = set(x for x in names if x not in shorts)
return sorted(shorts if shorts else longs)[0]
def flag_key(x):
"""
Obtain useful key list-of-ints for sorting CLI flags.
"""
# Setup
ret = []
x = sort_candidate(x)
# Long-style flags win over short-style ones, so the first item of
# comparison is simply whether the flag is a single character long (with
# non-length-1 flags coming "first" [lower number])
ret.append(1 if len(x) == 1 else 0)
# Next item of comparison is simply the strings themselves,
# case-insensitive. They will compare alphabetically if compared at this
# stage.
ret.append(x.lower())
# Finally, if the case-insensitive test also matched, compare
# case-sensitive, but inverse (with lowercase letters coming first)
inversed = ''
for char in x:
inversed += char.lower() if char.isupper() else char.upper()
ret.append(inversed)
return ret
# Named slightly more verbose so Sphinx references can be unambiguous.
# Got real sick of fully qualified paths.
class ParserContext(object):
"""
Parsing context with knowledge of flags & their format.
Generally associated with the core program or a task.
When run through a parser, will also hold runtime values filled in by the
parser.
"""
def __init__(self, name=None, aliases=(), args=()):
"""
Create a new ``ParserContext`` named ``name``, with ``aliases``.
``name`` is optional, and should be a string if given. It's used to
tell ParserContext objects apart, and for use in a Parser when
determining what chunk of input might belong to a given ParserContext.
``aliases`` is also optional and should be an iterable containing
strings. Parsing will honor any aliases when trying to "find" a given
context in its input.
May give one or more ``args``, which is a quick alternative to calling
``for arg in args: self.add_arg(arg)`` after initialization.
"""
self.args = Lexicon()
self.positional_args = []
self.flags = Lexicon()
self.inverse_flags = {} # No need for Lexicon here
self.name = name
self.aliases = aliases
for arg in args:
self.add_arg(arg)
def __str__(self):
aliases = ""
if self.aliases:
aliases = " ({0})".format(', '.join(self.aliases))
name = (" {0!r}{1}".format(self.name, aliases)) if self.name else ""
args = (": {0!r}".format(self.args)) if self.args else ""
return "<parser/Context{0}{1}>".format(name, args)
def __repr__(self):
return str(self)
def add_arg(self, *args, **kwargs):
"""
Adds given ``Argument`` (or constructor args for one) to this context.
The Argument in question is added to the following dict attributes:
* ``args``: "normal" access, i.e. the given names are directly exposed
as keys.
* ``flags``: "flaglike" access, i.e. the given names are translated
into CLI flags, e.g. ``"foo"`` is accessible via ``flags['--foo']``.
* ``inverse_flags``: similar to ``flags`` but containing only the
"inverse" versions of boolean flags which default to True. This
allows the parser to track e.g. ``--no-myflag`` and turn it into a
False value for the ``myflag`` Argument.
"""
# Normalize
if len(args) == 1 and isinstance(args[0], Argument):
arg = args[0]
else:
arg = Argument(*args, **kwargs)
# Uniqueness constraint: no name collisions
for name in arg.names:
if name in self.args:
msg = "Tried to add an argument named {0!r} but one already exists!" # noqa
raise ValueError(msg.format(name))
# First name used as "main" name for purposes of aliasing
main = arg.names[0] # NOT arg.name
self.args[main] = arg
# Note positionals in distinct, ordered list attribute
if arg.positional:
self.positional_args.append(arg)
# Add names & nicknames to flags, args
self.flags[to_flag(main)] = arg
for name in arg.nicknames:
self.args.alias(name, to=main)
self.flags.alias(to_flag(name), to=to_flag(main))
# Add attr_name to args, but not flags
if arg.attr_name:
self.args.alias(arg.attr_name, to=main)
# Add to inverse_flags if required
if arg.kind == bool and arg.default is True:
# Invert the 'main' flag name here, which will be a dashed version
# of the primary argument name if underscore-to-dash transformation
# occurred.
inverse_name = to_flag("no-{0}".format(main))
self.inverse_flags[inverse_name] = to_flag(main)
@property
def needs_positional_arg(self):
return any(x.value is None for x in self.positional_args)
@property
def as_kwargs(self):
"""
This context's arguments' values keyed by their ``.name`` attribute.
Results in a dict suitable for use in Python contexts, where e.g. an
arg named ``foo-bar`` becomes accessible as ``foo_bar``.
"""
ret = {}
for arg in self.args.values():
ret[arg.name] = arg.value
return ret
def names_for(self, flag):
# TODO: should probably be a method on Lexicon/AliasDict
return list(set([flag] + self.flags.aliases_of(flag)))
def help_for(self, flag):
"""
Return 2-tuple of ``(flag-spec, help-string)`` for given ``flag``.
"""
# Obtain arg obj
if flag not in self.flags:
err = "{0!r} is not a valid flag for this context! Valid flags are: {1!r}" # noqa
raise ValueError(err.format(flag, self.flags.keys()))
arg = self.flags[flag]
# Determine expected value type, if any
value = {
str: 'STRING',
}.get(arg.kind)
# Format & go
full_names = []
for name in self.names_for(flag):
if value:
# Short flags are -f VAL, long are --foo=VAL
# When optional, also, -f [VAL] and --foo[=VAL]
if len(name.strip('-')) == 1:
value_ = ("[{0}]".format(value)) if arg.optional else value
valuestr = " {0}".format(value_)
else:
valuestr = "={0}".format(value)
if arg.optional:
valuestr = "[{0}]".format(valuestr)
else:
# no value => boolean
# check for inverse
if name in self.inverse_flags.values():
name = "--[no-]{0}".format(name[2:])
valuestr = ""
# Tack together
full_names.append(name + valuestr)
namestr = ", ".join(sorted(full_names, key=len))
helpstr = arg.help or ""
return namestr, helpstr
def help_tuples(self):
"""
Return sorted iterable of help tuples for all member Arguments.
Sorts like so:
* General sort is alphanumerically
* Short flags win over long flags
* Arguments with *only* long flags and *no* short flags will come
first.
* When an Argument has multiple long or short flags, it will sort using
the most favorable (lowest alphabetically) candidate.
This will result in a help list like so::
--alpha, --zeta # 'alpha' wins
--beta
-a, --query # short flag wins
-b, --argh
-c
"""
# TODO: argument/flag API must change :(
# having to call to_flag on 1st name of an Argument is just dumb.
# To pass in an Argument object to help_for may require moderate
# changes?
# Cast to list to ensure non-generator on Python 3.
return list(map(
lambda x: self.help_for(to_flag(x.name)),
sorted(self.flags.values(), key=flag_key)
))
def flag_names(self):
"""
Similar to `help_tuples` but returns flag names only, no helpstrs.
Specifically, all flag names, flattened, in rough order.
"""
# Regular flag names
flags = sorted(self.flags.values(), key=flag_key)
names = [self.names_for(to_flag(x.name)) for x in flags]
# Inverse flag names sold separately
names.append(self.inverse_flags.keys())
return tuple(itertools.chain.from_iterable(names))
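# Minimal usage sketch (the Argument keyword arguments shown are assumptions
# inferred from how add_arg and help_for use them; see .argument.Argument for
# the authoritative signature):
#
#   ctx = ParserContext(name='build', args=(
#       Argument(names=('clean', 'c'), kind=bool, default=False, help="Wipe first"),
#   ))
#   ctx.flags['--clean'] is ctx.args['clean']    # flag-style and name-style access
#   ctx.help_for('--clean')                      # ('-c, --clean', 'Wipe first')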
| bsd-2-clause | -6,370,345,778,070,750,000 | 36.633745 | 93 | 0.580208 | false |
facebook/fbthrift | thrift/lib/py/Thrift.py | 1 | 11042 | # Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import six
import sys
import threading
UEXW_MAX_LENGTH = 1024
class TType:
STOP = 0
VOID = 1
BOOL = 2
BYTE = 3
I08 = 3
DOUBLE = 4
I16 = 6
I32 = 8
I64 = 10
STRING = 11
UTF7 = 11
STRUCT = 12
MAP = 13
SET = 14
LIST = 15
UTF8 = 16
UTF16 = 17
FLOAT = 19
class TMessageType:
CALL = 1
REPLY = 2
EXCEPTION = 3
ONEWAY = 4
class TPriority:
""" apache::thrift::concurrency::PRIORITY """
HIGH_IMPORTANT = 0
HIGH = 1
IMPORTANT = 2
NORMAL = 3
BEST_EFFORT = 4
N_PRIORITIES = 5
class TRequestContext:
def __init__(self):
self._headers = None
def getHeaders(self):
return self._headers
def setHeaders(self, headers):
self._headers = headers
class TProcessorEventHandler:
"""Event handler for thrift processors"""
# TODO: implement asyncComplete for Twisted
def getHandlerContext(self, fn_name, server_context):
"""Called at the start of processing a handler method"""
return None
def preRead(self, handler_context, fn_name, args):
"""Called before the handler method's argument are read"""
pass
def postRead(self, handler_context, fn_name, args):
"""Called after the handler method's argument are read"""
pass
def preWrite(self, handler_context, fn_name, result):
"""Called before the handler method's results are written"""
pass
def postWrite(self, handler_context, fn_name, result):
"""Called after the handler method's results are written"""
pass
def handlerException(self, handler_context, fn_name, exception):
"""Called if (and only if) the handler threw an expected exception."""
pass
def handlerError(self, handler_context, fn_name, exception):
"""Called if (and only if) the handler threw an unexpected exception.
Note that this method is NOT called if the handler threw an
exception that is declared in the thrift service specification"""
logging.exception("Unexpected error in service handler " + fn_name + ":")
class TServerInterface:
def __init__(self):
self._tl_request_context = threading.local()
def setRequestContext(self, request_context):
self._tl_request_context.ctx = request_context
def getRequestContext(self):
return self._tl_request_context.ctx
class TProcessor:
"""Base class for processor, which works on two streams."""
def __init__(self):
self._event_handler = TProcessorEventHandler() # null object handler
self._handler = None
self._processMap = {}
self._priorityMap = {}
def setEventHandler(self, event_handler):
self._event_handler = event_handler
def getEventHandler(self):
return self._event_handler
def process(self, iprot, oprot, server_context=None):
pass
def onewayMethods(self):
return ()
def readMessageBegin(self, iprot):
name, _, seqid = iprot.readMessageBegin()
if six.PY3:
name = name.decode('utf8')
return name, seqid
def skipMessageStruct(self, iprot):
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
def doesKnowFunction(self, name):
return name in self._processMap
def callFunction(self, name, seqid, iprot, oprot, server_ctx):
process_fn = self._processMap[name]
return process_fn(self, seqid, iprot, oprot, server_ctx)
def readArgs(self, iprot, handler_ctx, fn_name, argtype):
args = argtype()
self._event_handler.preRead(handler_ctx, fn_name, args)
args.read(iprot)
iprot.readMessageEnd()
self._event_handler.postRead(handler_ctx, fn_name, args)
return args
def writeException(self, oprot, name, seqid, exc):
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
exc.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def get_priority(self, fname):
return self._priorityMap.get(fname, TPriority.NORMAL)
def _getReplyType(self, result):
if isinstance(result, TApplicationException):
return TMessageType.EXCEPTION
return TMessageType.REPLY
@staticmethod
def _get_exception_from_thrift_result(result):
"""Returns the wrapped exception, if pressent. None if not.
result is a generated *_result object. This object either has a
'success' field set indicating the call succeeded, or a field set
indicating the exception thrown.
"""
fields = (
result.__dict__.keys()
if hasattr(result, "__dict__") else result.__slots__
)
for field in fields:
value = getattr(result, field)
if value is None:
continue
elif field == 'success':
return None
else:
return value
return None
def writeReply(self, oprot, handler_ctx, fn_name, seqid, result, server_ctx=None):
self._event_handler.preWrite(handler_ctx, fn_name, result)
reply_type = self._getReplyType(result)
if server_ctx is not None and hasattr(server_ctx, 'context_data'):
ex = (result if reply_type == TMessageType.EXCEPTION
else self._get_exception_from_thrift_result(result))
if ex:
server_ctx.context_data.setHeaderEx(ex.__class__.__name__)
server_ctx.context_data.setHeaderExWhat(str(ex)[:UEXW_MAX_LENGTH])
try:
oprot.writeMessageBegin(fn_name, reply_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
except Exception as e:
# Handle any thrift serialization exceptions
# Transport is likely in a messed up state. Some data may already have
# been written and it may not be possible to recover. Doing nothing
# causes the client to wait until the request times out. Try to
# close the connection to trigger a quicker failure on client side
oprot.trans.close()
# Let application know that there has been an exception
self._event_handler.handlerError(handler_ctx, fn_name, e)
# We raise the exception again to avoid any further processing
raise
finally:
# Since we called preWrite, we should also call postWrite to
# allow application to properly log their requests.
self._event_handler.postWrite(handler_ctx, fn_name, result)
class TException(Exception):
"""Base class for all thrift exceptions."""
# BaseException.message is deprecated in Python v[2.6,3.0)
if (2, 6, 0) <= sys.version_info < (3, 0):
def _get_message(self):
return self._message
def _set_message(self, message):
self._message = message
message = property(_get_message, _set_message)
def __init__(self, message=None):
Exception.__init__(self, message)
self.message = message
class TApplicationException(TException):
"""Application level thrift exceptions."""
UNKNOWN = 0
UNKNOWN_METHOD = 1
INVALID_MESSAGE_TYPE = 2
WRONG_METHOD_NAME = 3
BAD_SEQUENCE_ID = 4
MISSING_RESULT = 5
INTERNAL_ERROR = 6
PROTOCOL_ERROR = 7
INVALID_TRANSFORM = 8
INVALID_PROTOCOL = 9
UNSUPPORTED_CLIENT_TYPE = 10
LOADSHEDDING = 11
TIMEOUT = 12
INJECTED_FAILURE = 13
EXTYPE_TO_STRING = {
UNKNOWN_METHOD: 'Unknown method',
INVALID_MESSAGE_TYPE: 'Invalid message type',
WRONG_METHOD_NAME: 'Wrong method name',
BAD_SEQUENCE_ID: 'Bad sequence ID',
MISSING_RESULT: 'Missing result',
INTERNAL_ERROR: 'Internal error',
PROTOCOL_ERROR: 'Protocol error',
INVALID_TRANSFORM: 'Invalid transform',
INVALID_PROTOCOL: 'Invalid protocol',
UNSUPPORTED_CLIENT_TYPE: 'Unsupported client type',
LOADSHEDDING: 'Loadshedding request',
TIMEOUT: 'Task timeout',
INJECTED_FAILURE: 'Injected Failure',
}
def __init__(self, type=UNKNOWN, message=None):
TException.__init__(self, message)
self.type = type
def __str__(self):
if self.message:
return self.message
else:
return self.EXTYPE_TO_STRING.get(
self.type,
'Default (unknown) TApplicationException')
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
message = iprot.readString()
if sys.version_info.major >= 3 and isinstance(message,
bytes):
try:
message = message.decode('utf-8')
except UnicodeDecodeError:
pass
self.message = message
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.type = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
oprot.writeStructBegin(b'TApplicationException')
if self.message is not None:
oprot.writeFieldBegin(b'message', TType.STRING, 1)
oprot.writeString(self.message.encode('utf-8')
if not isinstance(self.message, bytes)
else self.message)
oprot.writeFieldEnd()
if self.type is not None:
oprot.writeFieldBegin(b'type', TType.I32, 2)
oprot.writeI32(self.type)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
class UnimplementedTypedef:
pass
| apache-2.0 | 8,498,865,374,388,009,000 | 29.929972 | 86 | 0.602427 | false |
zubair-arbi/edx-platform | lms/djangoapps/django_comment_client/base/tests.py | 1 | 70746 | """Tests for django comment client views."""
from contextlib import contextmanager
import logging
import json
import ddt
from django.conf import settings
from django.core.cache import get_cache
from django.test.client import Client, RequestFactory
from django.contrib.auth.models import User
from django.core.management import call_command
from django.core.urlresolvers import reverse
from request_cache.middleware import RequestCache
from mock import patch, ANY, Mock
from nose.tools import assert_true, assert_equal # pylint: disable=no-name-in-module
from opaque_keys.edx.keys import CourseKey
from lms.lib.comment_client import Thread
from common.test.utils import MockSignalHandlerMixin, disable_signal
from django_comment_client.base import views
from django_comment_client.tests.group_id import CohortedTopicGroupIdTestMixin, NonCohortedTopicGroupIdTestMixin, GroupIdAssertionMixin
from django_comment_client.tests.utils import CohortedTestCase
from django_comment_client.tests.unicode import UnicodeTestMixin
from django_comment_common.models import Role
from django_comment_common.utils import seed_permissions_roles, ThreadContext
from student.tests.factories import CourseEnrollmentFactory, UserFactory, CourseAccessRoleFactory
from teams.tests.factories import CourseTeamFactory, CourseTeamMembershipFactory
from util.testing import UrlResetMixin
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import check_mongo_calls
from xmodule.modulestore.django import modulestore
from xmodule.modulestore import ModuleStoreEnum
log = logging.getLogger(__name__)
CS_PREFIX = "http://localhost:4567/api/v1"
# pylint: disable=missing-docstring
class MockRequestSetupMixin(object):
def _create_response_mock(self, data):
return Mock(text=json.dumps(data), json=Mock(return_value=data))
def _set_mock_request_data(self, mock_request, data):
mock_request.return_value = self._create_response_mock(data)
@patch('lms.lib.comment_client.utils.requests.request')
class CreateThreadGroupIdTestCase(
MockRequestSetupMixin,
CohortedTestCase,
CohortedTopicGroupIdTestMixin,
NonCohortedTopicGroupIdTestMixin
):
cs_endpoint = "/threads"
def call_view(self, mock_request, commentable_id, user, group_id, pass_group_id=True):
self._set_mock_request_data(mock_request, {})
mock_request.return_value.status_code = 200
request_data = {"body": "body", "title": "title", "thread_type": "discussion"}
if pass_group_id:
request_data["group_id"] = group_id
request = RequestFactory().post("dummy_url", request_data)
request.user = user
request.view_name = "create_thread"
return views.create_thread(
request,
course_id=unicode(self.course.id),
commentable_id=commentable_id
)
def test_group_info_in_response(self, mock_request):
response = self.call_view(
mock_request,
"cohorted_topic",
self.student,
None
)
self._assert_json_response_contains_group_info(response)
@patch('lms.lib.comment_client.utils.requests.request')
@disable_signal(views, 'thread_edited')
@disable_signal(views, 'thread_voted')
@disable_signal(views, 'thread_deleted')
class ThreadActionGroupIdTestCase(
MockRequestSetupMixin,
CohortedTestCase,
GroupIdAssertionMixin
):
def call_view(
self,
view_name,
mock_request,
user=None,
post_params=None,
view_args=None
):
self._set_mock_request_data(
mock_request,
{
"user_id": str(self.student.id),
"group_id": self.student_cohort.id,
"closed": False,
"type": "thread",
"commentable_id": "non_team_dummy_id"
}
)
mock_request.return_value.status_code = 200
request = RequestFactory().post("dummy_url", post_params or {})
request.user = user or self.student
request.view_name = view_name
return getattr(views, view_name)(
request,
course_id=unicode(self.course.id),
thread_id="dummy",
**(view_args or {})
)
def test_update(self, mock_request):
response = self.call_view(
"update_thread",
mock_request,
post_params={"body": "body", "title": "title"}
)
self._assert_json_response_contains_group_info(response)
def test_delete(self, mock_request):
response = self.call_view("delete_thread", mock_request)
self._assert_json_response_contains_group_info(response)
def test_vote(self, mock_request):
response = self.call_view(
"vote_for_thread",
mock_request,
view_args={"value": "up"}
)
self._assert_json_response_contains_group_info(response)
response = self.call_view("undo_vote_for_thread", mock_request)
self._assert_json_response_contains_group_info(response)
def test_flag(self, mock_request):
response = self.call_view("flag_abuse_for_thread", mock_request)
self._assert_json_response_contains_group_info(response)
response = self.call_view("un_flag_abuse_for_thread", mock_request)
self._assert_json_response_contains_group_info(response)
def test_pin(self, mock_request):
response = self.call_view(
"pin_thread",
mock_request,
user=self.moderator
)
self._assert_json_response_contains_group_info(response)
response = self.call_view(
"un_pin_thread",
mock_request,
user=self.moderator
)
self._assert_json_response_contains_group_info(response)
def test_openclose(self, mock_request):
response = self.call_view(
"openclose_thread",
mock_request,
user=self.moderator
)
self._assert_json_response_contains_group_info(
response,
lambda d: d['content']
)
class ViewsTestCaseMixin(object):
"""
This class is used by both ViewsQueryCountTestCase and ViewsTestCase. By
breaking out set_up_course into its own method, ViewsQueryCountTestCase
can build a course in a particular modulestore, while ViewsTestCase can
just run it in setUp for all tests.
"""
def set_up_course(self, module_count=0):
"""
Creates a course, optionally with module_count discussion modules, and
a user with appropriate permissions.
"""
# create a course
self.course = CourseFactory.create(
org='MITx', course='999',
discussion_topics={"Some Topic": {"id": "some_topic"}},
display_name='Robot Super Course',
)
self.course_id = self.course.id
# add some discussion modules
for i in range(module_count):
ItemFactory.create(
parent_location=self.course.location,
category='discussion',
discussion_id='id_module_{}'.format(i),
discussion_category='Category {}'.format(i),
discussion_target='Discussion {}'.format(i)
)
# seed the forums permissions and roles
call_command('seed_permissions_roles', unicode(self.course_id))
# Patch the comment client user save method so it does not try
# to create a new cc user when creating a django user
with patch('student.models.cc.User.save'):
uname = 'student'
email = '[email protected]'
self.password = 'test' # pylint: disable=attribute-defined-outside-init
# Create the user and make them active so we can log them in.
self.student = User.objects.create_user(uname, email, self.password) # pylint: disable=attribute-defined-outside-init
self.student.is_active = True
self.student.save()
# Add a discussion moderator
self.moderator = UserFactory.create(password=self.password) # pylint: disable=attribute-defined-outside-init
# Enroll the student in the course
CourseEnrollmentFactory(user=self.student,
course_id=self.course_id)
# Enroll the moderator and give them the appropriate roles
CourseEnrollmentFactory(user=self.moderator, course_id=self.course.id)
self.moderator.roles.add(Role.objects.get(name="Moderator", course_id=self.course.id))
self.client = Client()
assert_true(self.client.login(username='student', password=self.password))
def _setup_mock_request(self, mock_request, include_depth=False):
"""
Ensure that mock_request returns the data necessary to make views
function correctly
"""
mock_request.return_value.status_code = 200
data = {
"user_id": str(self.student.id),
"closed": False,
"commentable_id": "non_team_dummy_id"
}
if include_depth:
data["depth"] = 0
self._set_mock_request_data(mock_request, data)
def create_thread_helper(self, mock_request, extra_request_data=None, extra_response_data=None):
"""
Issues a request to create a thread and verifies the result.
"""
mock_request.return_value.status_code = 200
self._set_mock_request_data(mock_request, {
"thread_type": "discussion",
"title": "Hello",
"body": "this is a post",
"course_id": "MITx/999/Robot_Super_Course",
"anonymous": False,
"anonymous_to_peers": False,
"commentable_id": "i4x-MITx-999-course-Robot_Super_Course",
"created_at": "2013-05-10T18:53:43Z",
"updated_at": "2013-05-10T18:53:43Z",
"at_position_list": [],
"closed": False,
"id": "518d4237b023791dca00000d",
"user_id": "1",
"username": "robot",
"votes": {
"count": 0,
"up_count": 0,
"down_count": 0,
"point": 0
},
"abuse_flaggers": [],
"type": "thread",
"group_id": None,
"pinned": False,
"endorsed": False,
"unread_comments_count": 0,
"read": False,
"comments_count": 0,
})
thread = {
"thread_type": "discussion",
"body": ["this is a post"],
"anonymous_to_peers": ["false"],
"auto_subscribe": ["false"],
"anonymous": ["false"],
"title": ["Hello"],
}
if extra_request_data:
thread.update(extra_request_data)
url = reverse('create_thread', kwargs={'commentable_id': 'i4x-MITx-999-course-Robot_Super_Course',
'course_id': unicode(self.course_id)})
response = self.client.post(url, data=thread)
assert_true(mock_request.called)
expected_data = {
'thread_type': 'discussion',
'body': u'this is a post',
'context': ThreadContext.COURSE,
'anonymous_to_peers': False, 'user_id': 1,
'title': u'Hello',
'commentable_id': u'i4x-MITx-999-course-Robot_Super_Course',
'anonymous': False,
'course_id': unicode(self.course_id),
}
if extra_response_data:
expected_data.update(extra_response_data)
mock_request.assert_called_with(
'post',
'{prefix}/i4x-MITx-999-course-Robot_Super_Course/threads'.format(prefix=CS_PREFIX),
data=expected_data,
params={'request_id': ANY},
headers=ANY,
timeout=5
)
assert_equal(response.status_code, 200)
def update_thread_helper(self, mock_request):
"""
Issues a request to update a thread and verifies the result.
"""
self._setup_mock_request(mock_request)
# Mock out saving in order to test that content is correctly
# updated. Otherwise, the call to thread.save() receives the
# same mocked request data that the original call to retrieve
# the thread did, overwriting any changes.
with patch.object(Thread, 'save'):
response = self.client.post(
reverse("update_thread", kwargs={
"thread_id": "dummy",
"course_id": unicode(self.course_id)
}),
data={"body": "foo", "title": "foo", "commentable_id": "some_topic"}
)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(data['body'], 'foo')
self.assertEqual(data['title'], 'foo')
self.assertEqual(data['commentable_id'], 'some_topic')
@ddt.ddt
@patch('lms.lib.comment_client.utils.requests.request')
@disable_signal(views, 'thread_created')
@disable_signal(views, 'thread_edited')
class ViewsQueryCountTestCase(UrlResetMixin, ModuleStoreTestCase, MockRequestSetupMixin, ViewsTestCaseMixin):
@patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(ViewsQueryCountTestCase, self).setUp(create_user=False)
def clear_caches(self):
"""Clears caches so that query count numbers are accurate."""
for cache in settings.CACHES:
get_cache(cache).clear()
RequestCache.clear_request_cache()
def count_queries(func): # pylint: disable=no-self-argument
"""
Decorates test methods to count mongo and SQL calls for a
particular modulestore.
"""
def inner(self, default_store, module_count, mongo_calls, sql_queries, *args, **kwargs):
with modulestore().default_store(default_store):
self.set_up_course(module_count=module_count)
self.clear_caches()
with self.assertNumQueries(sql_queries):
with check_mongo_calls(mongo_calls):
func(self, *args, **kwargs)
return inner
@ddt.data(
(ModuleStoreEnum.Type.mongo, 3, 4, 22),
(ModuleStoreEnum.Type.mongo, 20, 4, 22),
(ModuleStoreEnum.Type.split, 3, 13, 22),
(ModuleStoreEnum.Type.split, 20, 13, 22),
)
@ddt.unpack
@count_queries
def test_create_thread(self, mock_request):
self.create_thread_helper(mock_request)
@ddt.data(
(ModuleStoreEnum.Type.mongo, 3, 3, 16),
(ModuleStoreEnum.Type.mongo, 20, 3, 16),
(ModuleStoreEnum.Type.split, 3, 10, 16),
(ModuleStoreEnum.Type.split, 20, 10, 16),
)
@ddt.unpack
@count_queries
def test_update_thread(self, mock_request):
self.update_thread_helper(mock_request)
@ddt.ddt
@patch('lms.lib.comment_client.utils.requests.request')
class ViewsTestCase(
UrlResetMixin,
ModuleStoreTestCase,
MockRequestSetupMixin,
ViewsTestCaseMixin,
MockSignalHandlerMixin
):
@patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
# Patching the ENABLE_DISCUSSION_SERVICE value affects the contents of urls.py,
# so we need to call super.setUp() which reloads urls.py (because
# of the UrlResetMixin)
super(ViewsTestCase, self).setUp(create_user=False)
self.set_up_course()
@contextmanager
def assert_discussion_signals(self, signal, user=None):
if user is None:
user = self.student
with self.assert_signal_sent(views, signal, sender=None, user=user, exclude_args=('post',)):
yield
def test_create_thread(self, mock_request):
with self.assert_discussion_signals('thread_created'):
self.create_thread_helper(mock_request)
def test_create_thread_standalone(self, mock_request):
team = CourseTeamFactory.create(
name="A Team",
course_id=self.course_id,
topic_id='topic_id',
discussion_topic_id="i4x-MITx-999-course-Robot_Super_Course"
)
# Add the student to the team so they can post to the commentable.
team.add_user(self.student)
# create_thread_helper verifies that extra data are passed through to the comments service
self.create_thread_helper(mock_request, extra_response_data={'context': ThreadContext.STANDALONE})
def test_delete_thread(self, mock_request):
self._set_mock_request_data(mock_request, {
"user_id": str(self.student.id),
"closed": False,
})
test_thread_id = "test_thread_id"
request = RequestFactory().post("dummy_url", {"id": test_thread_id})
request.user = self.student
request.view_name = "delete_thread"
with self.assert_discussion_signals('thread_deleted'):
response = views.delete_thread(
request,
course_id=unicode(self.course.id),
thread_id=test_thread_id
)
self.assertEqual(response.status_code, 200)
self.assertTrue(mock_request.called)
def test_delete_comment(self, mock_request):
self._set_mock_request_data(mock_request, {
"user_id": str(self.student.id),
"closed": False,
})
test_comment_id = "test_comment_id"
request = RequestFactory().post("dummy_url", {"id": test_comment_id})
request.user = self.student
request.view_name = "delete_comment"
with self.assert_discussion_signals('comment_deleted'):
response = views.delete_comment(
request,
course_id=unicode(self.course.id),
comment_id=test_comment_id
)
self.assertEqual(response.status_code, 200)
self.assertTrue(mock_request.called)
args = mock_request.call_args[0]
self.assertEqual(args[0], "delete")
self.assertTrue(args[1].endswith("/{}".format(test_comment_id)))
def _test_request_error(self, view_name, view_kwargs, data, mock_request):
"""
Submit a request against the given view with the given data and ensure
that the result is a 400 error and that no data was posted using
mock_request
"""
self._setup_mock_request(mock_request, include_depth=(view_name == "create_sub_comment"))
response = self.client.post(reverse(view_name, kwargs=view_kwargs), data=data)
self.assertEqual(response.status_code, 400)
for call in mock_request.call_args_list:
self.assertEqual(call[0][0].lower(), "get")
def test_create_thread_no_title(self, mock_request):
self._test_request_error(
"create_thread",
{"commentable_id": "dummy", "course_id": unicode(self.course_id)},
{"body": "foo"},
mock_request
)
def test_create_thread_empty_title(self, mock_request):
self._test_request_error(
"create_thread",
{"commentable_id": "dummy", "course_id": unicode(self.course_id)},
{"body": "foo", "title": " "},
mock_request
)
def test_create_thread_no_body(self, mock_request):
self._test_request_error(
"create_thread",
{"commentable_id": "dummy", "course_id": unicode(self.course_id)},
{"title": "foo"},
mock_request
)
def test_create_thread_empty_body(self, mock_request):
self._test_request_error(
"create_thread",
{"commentable_id": "dummy", "course_id": unicode(self.course_id)},
{"body": " ", "title": "foo"},
mock_request
)
def test_update_thread_no_title(self, mock_request):
self._test_request_error(
"update_thread",
{"thread_id": "dummy", "course_id": unicode(self.course_id)},
{"body": "foo"},
mock_request
)
def test_update_thread_empty_title(self, mock_request):
self._test_request_error(
"update_thread",
{"thread_id": "dummy", "course_id": unicode(self.course_id)},
{"body": "foo", "title": " "},
mock_request
)
def test_update_thread_no_body(self, mock_request):
self._test_request_error(
"update_thread",
{"thread_id": "dummy", "course_id": unicode(self.course_id)},
{"title": "foo"},
mock_request
)
def test_update_thread_empty_body(self, mock_request):
self._test_request_error(
"update_thread",
{"thread_id": "dummy", "course_id": unicode(self.course_id)},
{"body": " ", "title": "foo"},
mock_request
)
def test_update_thread_course_topic(self, mock_request):
with self.assert_discussion_signals('thread_edited'):
self.update_thread_helper(mock_request)
@patch('django_comment_client.utils.get_discussion_categories_ids', return_value=["test_commentable"])
def test_update_thread_wrong_commentable_id(self, mock_get_discussion_id_map, mock_request):
self._test_request_error(
"update_thread",
{"thread_id": "dummy", "course_id": unicode(self.course_id)},
{"body": "foo", "title": "foo", "commentable_id": "wrong_commentable"},
mock_request
)
def test_create_comment(self, mock_request):
self._setup_mock_request(mock_request)
with self.assert_discussion_signals('comment_created'):
response = self.client.post(
reverse(
"create_comment",
kwargs={"course_id": unicode(self.course_id), "thread_id": "dummy"}
),
data={"body": "body"}
)
self.assertEqual(response.status_code, 200)
def test_create_comment_no_body(self, mock_request):
self._test_request_error(
"create_comment",
{"thread_id": "dummy", "course_id": unicode(self.course_id)},
{},
mock_request
)
def test_create_comment_empty_body(self, mock_request):
self._test_request_error(
"create_comment",
{"thread_id": "dummy", "course_id": unicode(self.course_id)},
{"body": " "},
mock_request
)
def test_create_sub_comment_no_body(self, mock_request):
self._test_request_error(
"create_sub_comment",
{"comment_id": "dummy", "course_id": unicode(self.course_id)},
{},
mock_request
)
def test_create_sub_comment_empty_body(self, mock_request):
self._test_request_error(
"create_sub_comment",
{"comment_id": "dummy", "course_id": unicode(self.course_id)},
{"body": " "},
mock_request
)
def test_update_comment_no_body(self, mock_request):
self._test_request_error(
"update_comment",
{"comment_id": "dummy", "course_id": unicode(self.course_id)},
{},
mock_request
)
def test_update_comment_empty_body(self, mock_request):
self._test_request_error(
"update_comment",
{"comment_id": "dummy", "course_id": unicode(self.course_id)},
{"body": " "},
mock_request
)
def test_update_comment_basic(self, mock_request):
self._setup_mock_request(mock_request)
comment_id = "test_comment_id"
updated_body = "updated body"
with self.assert_discussion_signals('comment_edited'):
response = self.client.post(
reverse(
"update_comment",
kwargs={"course_id": unicode(self.course_id), "comment_id": comment_id}
),
data={"body": updated_body}
)
self.assertEqual(response.status_code, 200)
mock_request.assert_called_with(
"put",
"{prefix}/comments/{comment_id}".format(prefix=CS_PREFIX, comment_id=comment_id),
headers=ANY,
params=ANY,
timeout=ANY,
data={"body": updated_body}
)
def test_flag_thread_open(self, mock_request):
self.flag_thread(mock_request, False)
def test_flag_thread_close(self, mock_request):
self.flag_thread(mock_request, True)
def flag_thread(self, mock_request, is_closed):
mock_request.return_value.status_code = 200
self._set_mock_request_data(mock_request, {
"title": "Hello",
"body": "this is a post",
"course_id": "MITx/999/Robot_Super_Course",
"anonymous": False,
"anonymous_to_peers": False,
"commentable_id": "i4x-MITx-999-course-Robot_Super_Course",
"created_at": "2013-05-10T18:53:43Z",
"updated_at": "2013-05-10T18:53:43Z",
"at_position_list": [],
"closed": is_closed,
"id": "518d4237b023791dca00000d",
"user_id": "1", "username": "robot",
"votes": {
"count": 0,
"up_count": 0,
"down_count": 0,
"point": 0
},
"abuse_flaggers": [1],
"type": "thread",
"group_id": None,
"pinned": False,
"endorsed": False,
"unread_comments_count": 0,
"read": False,
"comments_count": 0,
})
url = reverse('flag_abuse_for_thread', kwargs={
'thread_id': '518d4237b023791dca00000d',
'course_id': unicode(self.course_id)
})
response = self.client.post(url)
assert_true(mock_request.called)
call_list = [
(
('get', '{prefix}/threads/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'mark_as_read': True, 'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('put', '{prefix}/threads/518d4237b023791dca00000d/abuse_flag'.format(prefix=CS_PREFIX)),
{
'data': {'user_id': '1'},
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('get', '{prefix}/threads/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'mark_as_read': True, 'request_id': ANY},
'headers': ANY,
'timeout': 5
}
)
]
assert_equal(call_list, mock_request.call_args_list)
assert_equal(response.status_code, 200)
def test_un_flag_thread_open(self, mock_request):
self.un_flag_thread(mock_request, False)
def test_un_flag_thread_close(self, mock_request):
self.un_flag_thread(mock_request, True)
def un_flag_thread(self, mock_request, is_closed):
mock_request.return_value.status_code = 200
self._set_mock_request_data(mock_request, {
"title": "Hello",
"body": "this is a post",
"course_id": "MITx/999/Robot_Super_Course",
"anonymous": False,
"anonymous_to_peers": False,
"commentable_id": "i4x-MITx-999-course-Robot_Super_Course",
"created_at": "2013-05-10T18:53:43Z",
"updated_at": "2013-05-10T18:53:43Z",
"at_position_list": [],
"closed": is_closed,
"id": "518d4237b023791dca00000d",
"user_id": "1",
"username": "robot",
"votes": {
"count": 0,
"up_count": 0,
"down_count": 0,
"point": 0
},
"abuse_flaggers": [],
"type": "thread",
"group_id": None,
"pinned": False,
"endorsed": False,
"unread_comments_count": 0,
"read": False,
"comments_count": 0
})
url = reverse('un_flag_abuse_for_thread', kwargs={
'thread_id': '518d4237b023791dca00000d',
'course_id': unicode(self.course_id)
})
response = self.client.post(url)
assert_true(mock_request.called)
call_list = [
(
('get', '{prefix}/threads/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'mark_as_read': True, 'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('put', '{prefix}/threads/518d4237b023791dca00000d/abuse_unflag'.format(prefix=CS_PREFIX)),
{
'data': {'user_id': '1'},
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('get', '{prefix}/threads/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'mark_as_read': True, 'request_id': ANY},
'headers': ANY,
'timeout': 5
}
)
]
assert_equal(call_list, mock_request.call_args_list)
assert_equal(response.status_code, 200)
def test_flag_comment_open(self, mock_request):
self.flag_comment(mock_request, False)
def test_flag_comment_close(self, mock_request):
self.flag_comment(mock_request, True)
def flag_comment(self, mock_request, is_closed):
mock_request.return_value.status_code = 200
self._set_mock_request_data(mock_request, {
"body": "this is a comment",
"course_id": "MITx/999/Robot_Super_Course",
"anonymous": False,
"anonymous_to_peers": False,
"commentable_id": "i4x-MITx-999-course-Robot_Super_Course",
"created_at": "2013-05-10T18:53:43Z",
"updated_at": "2013-05-10T18:53:43Z",
"at_position_list": [],
"closed": is_closed,
"id": "518d4237b023791dca00000d",
"user_id": "1",
"username": "robot",
"votes": {
"count": 0,
"up_count": 0,
"down_count": 0,
"point": 0
},
"abuse_flaggers": [1],
"type": "comment",
"endorsed": False
})
url = reverse('flag_abuse_for_comment', kwargs={
'comment_id': '518d4237b023791dca00000d',
'course_id': unicode(self.course_id)
})
response = self.client.post(url)
assert_true(mock_request.called)
call_list = [
(
('get', '{prefix}/comments/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('put', '{prefix}/comments/518d4237b023791dca00000d/abuse_flag'.format(prefix=CS_PREFIX)),
{
'data': {'user_id': '1'},
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('get', '{prefix}/comments/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
)
]
assert_equal(call_list, mock_request.call_args_list)
assert_equal(response.status_code, 200)
def test_un_flag_comment_open(self, mock_request):
self.un_flag_comment(mock_request, False)
def test_un_flag_comment_close(self, mock_request):
self.un_flag_comment(mock_request, True)
def un_flag_comment(self, mock_request, is_closed):
mock_request.return_value.status_code = 200
self._set_mock_request_data(mock_request, {
"body": "this is a comment",
"course_id": "MITx/999/Robot_Super_Course",
"anonymous": False,
"anonymous_to_peers": False,
"commentable_id": "i4x-MITx-999-course-Robot_Super_Course",
"created_at": "2013-05-10T18:53:43Z",
"updated_at": "2013-05-10T18:53:43Z",
"at_position_list": [],
"closed": is_closed,
"id": "518d4237b023791dca00000d",
"user_id": "1",
"username": "robot",
"votes": {
"count": 0,
"up_count": 0,
"down_count": 0,
"point": 0
},
"abuse_flaggers": [],
"type": "comment",
"endorsed": False
})
url = reverse('un_flag_abuse_for_comment', kwargs={
'comment_id': '518d4237b023791dca00000d',
'course_id': unicode(self.course_id)
})
response = self.client.post(url)
assert_true(mock_request.called)
call_list = [
(
('get', '{prefix}/comments/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('put', '{prefix}/comments/518d4237b023791dca00000d/abuse_unflag'.format(prefix=CS_PREFIX)),
{
'data': {'user_id': '1'},
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('get', '{prefix}/comments/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
)
]
assert_equal(call_list, mock_request.call_args_list)
assert_equal(response.status_code, 200)
@ddt.data(
('upvote_thread', 'thread_id', 'thread_voted'),
('upvote_comment', 'comment_id', 'comment_voted'),
('downvote_thread', 'thread_id', 'thread_voted'),
('downvote_comment', 'comment_id', 'comment_voted')
)
@ddt.unpack
def test_voting(self, view_name, item_id, signal, mock_request):
self._setup_mock_request(mock_request)
with self.assert_discussion_signals(signal):
response = self.client.post(
reverse(
view_name,
kwargs={item_id: 'dummy', 'course_id': unicode(self.course_id)}
)
)
self.assertEqual(response.status_code, 200)
def test_endorse_comment(self, mock_request):
self._setup_mock_request(mock_request)
self.client.login(username=self.moderator.username, password=self.password)
with self.assert_discussion_signals('comment_endorsed', user=self.moderator):
response = self.client.post(
reverse(
'endorse_comment',
kwargs={'comment_id': 'dummy', 'course_id': unicode(self.course_id)}
)
)
self.assertEqual(response.status_code, 200)
@patch("lms.lib.comment_client.utils.requests.request")
@disable_signal(views, 'comment_endorsed')
class ViewPermissionsTestCase(UrlResetMixin, ModuleStoreTestCase, MockRequestSetupMixin):
@patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(ViewPermissionsTestCase, self).setUp()
self.password = "test password"
self.course = CourseFactory.create()
seed_permissions_roles(self.course.id)
self.student = UserFactory.create(password=self.password)
self.moderator = UserFactory.create(password=self.password)
CourseEnrollmentFactory(user=self.student, course_id=self.course.id)
CourseEnrollmentFactory(user=self.moderator, course_id=self.course.id)
self.moderator.roles.add(Role.objects.get(name="Moderator", course_id=self.course.id))
def test_pin_thread_as_student(self, mock_request):
self._set_mock_request_data(mock_request, {})
self.client.login(username=self.student.username, password=self.password)
response = self.client.post(
reverse("pin_thread", kwargs={"course_id": unicode(self.course.id), "thread_id": "dummy"})
)
self.assertEqual(response.status_code, 401)
def test_pin_thread_as_moderator(self, mock_request):
self._set_mock_request_data(mock_request, {})
self.client.login(username=self.moderator.username, password=self.password)
response = self.client.post(
reverse("pin_thread", kwargs={"course_id": unicode(self.course.id), "thread_id": "dummy"})
)
self.assertEqual(response.status_code, 200)
def test_un_pin_thread_as_student(self, mock_request):
self._set_mock_request_data(mock_request, {})
self.client.login(username=self.student.username, password=self.password)
response = self.client.post(
reverse("un_pin_thread", kwargs={"course_id": unicode(self.course.id), "thread_id": "dummy"})
)
self.assertEqual(response.status_code, 401)
def test_un_pin_thread_as_moderator(self, mock_request):
self._set_mock_request_data(mock_request, {})
self.client.login(username=self.moderator.username, password=self.password)
response = self.client.post(
reverse("un_pin_thread", kwargs={"course_id": unicode(self.course.id), "thread_id": "dummy"})
)
self.assertEqual(response.status_code, 200)
def _set_mock_request_thread_and_comment(self, mock_request, thread_data, comment_data):
def handle_request(*args, **kwargs):
url = args[1]
if "/threads/" in url:
return self._create_response_mock(thread_data)
elif "/comments/" in url:
return self._create_response_mock(comment_data)
else:
                raise ValueError("Bad url to mock request")
mock_request.side_effect = handle_request
def test_endorse_response_as_staff(self, mock_request):
self._set_mock_request_thread_and_comment(
mock_request,
{"type": "thread", "thread_type": "question", "user_id": str(self.student.id)},
{"type": "comment", "thread_id": "dummy"}
)
self.client.login(username=self.moderator.username, password=self.password)
response = self.client.post(
reverse("endorse_comment", kwargs={"course_id": unicode(self.course.id), "comment_id": "dummy"})
)
self.assertEqual(response.status_code, 200)
def test_endorse_response_as_student(self, mock_request):
self._set_mock_request_thread_and_comment(
mock_request,
{"type": "thread", "thread_type": "question", "user_id": str(self.moderator.id)},
{"type": "comment", "thread_id": "dummy"}
)
self.client.login(username=self.student.username, password=self.password)
response = self.client.post(
reverse("endorse_comment", kwargs={"course_id": unicode(self.course.id), "comment_id": "dummy"})
)
self.assertEqual(response.status_code, 401)
def test_endorse_response_as_student_question_author(self, mock_request):
self._set_mock_request_thread_and_comment(
mock_request,
{"type": "thread", "thread_type": "question", "user_id": str(self.student.id)},
{"type": "comment", "thread_id": "dummy"}
)
self.client.login(username=self.student.username, password=self.password)
response = self.client.post(
reverse("endorse_comment", kwargs={"course_id": unicode(self.course.id), "comment_id": "dummy"})
)
self.assertEqual(response.status_code, 200)
class CreateThreadUnicodeTestCase(ModuleStoreTestCase, UnicodeTestMixin, MockRequestSetupMixin):
def setUp(self):
super(CreateThreadUnicodeTestCase, self).setUp()
self.course = CourseFactory.create()
seed_permissions_roles(self.course.id)
self.student = UserFactory.create()
CourseEnrollmentFactory(user=self.student, course_id=self.course.id)
@patch('lms.lib.comment_client.utils.requests.request')
    def _test_unicode_data(self, text, mock_request):
"""
Test to make sure unicode data in a thread doesn't break it.
"""
self._set_mock_request_data(mock_request, {})
request = RequestFactory().post("dummy_url", {"thread_type": "discussion", "body": text, "title": text})
request.user = self.student
request.view_name = "create_thread"
response = views.create_thread(
request, course_id=unicode(self.course.id), commentable_id="non_team_dummy_id"
)
self.assertEqual(response.status_code, 200)
self.assertTrue(mock_request.called)
self.assertEqual(mock_request.call_args[1]["data"]["body"], text)
self.assertEqual(mock_request.call_args[1]["data"]["title"], text)
@disable_signal(views, 'thread_edited')
class UpdateThreadUnicodeTestCase(ModuleStoreTestCase, UnicodeTestMixin, MockRequestSetupMixin):
def setUp(self):
super(UpdateThreadUnicodeTestCase, self).setUp()
self.course = CourseFactory.create()
seed_permissions_roles(self.course.id)
self.student = UserFactory.create()
CourseEnrollmentFactory(user=self.student, course_id=self.course.id)
@patch('django_comment_client.utils.get_discussion_categories_ids', return_value=["test_commentable"])
@patch('lms.lib.comment_client.utils.requests.request')
def _test_unicode_data(self, text, mock_request, mock_get_discussion_id_map):
self._set_mock_request_data(mock_request, {
"user_id": str(self.student.id),
"closed": False,
})
request = RequestFactory().post("dummy_url", {"body": text, "title": text, "thread_type": "question", "commentable_id": "test_commentable"})
request.user = self.student
request.view_name = "update_thread"
response = views.update_thread(request, course_id=unicode(self.course.id), thread_id="dummy_thread_id")
self.assertEqual(response.status_code, 200)
self.assertTrue(mock_request.called)
self.assertEqual(mock_request.call_args[1]["data"]["body"], text)
self.assertEqual(mock_request.call_args[1]["data"]["title"], text)
self.assertEqual(mock_request.call_args[1]["data"]["thread_type"], "question")
self.assertEqual(mock_request.call_args[1]["data"]["commentable_id"], "test_commentable")
@disable_signal(views, 'comment_created')
class CreateCommentUnicodeTestCase(ModuleStoreTestCase, UnicodeTestMixin, MockRequestSetupMixin):
def setUp(self):
super(CreateCommentUnicodeTestCase, self).setUp()
self.course = CourseFactory.create()
seed_permissions_roles(self.course.id)
self.student = UserFactory.create()
CourseEnrollmentFactory(user=self.student, course_id=self.course.id)
@patch('lms.lib.comment_client.utils.requests.request')
def _test_unicode_data(self, text, mock_request):
commentable_id = "non_team_dummy_id"
self._set_mock_request_data(mock_request, {
"closed": False,
"commentable_id": commentable_id
})
# We have to get clever here due to Thread's setters and getters.
# Patch won't work with it.
try:
Thread.commentable_id = commentable_id
request = RequestFactory().post("dummy_url", {"body": text})
request.user = self.student
request.view_name = "create_comment"
response = views.create_comment(
request, course_id=unicode(self.course.id), thread_id="dummy_thread_id"
)
self.assertEqual(response.status_code, 200)
self.assertTrue(mock_request.called)
self.assertEqual(mock_request.call_args[1]["data"]["body"], text)
finally:
del Thread.commentable_id
@disable_signal(views, 'comment_edited')
class UpdateCommentUnicodeTestCase(ModuleStoreTestCase, UnicodeTestMixin, MockRequestSetupMixin):
def setUp(self):
super(UpdateCommentUnicodeTestCase, self).setUp()
self.course = CourseFactory.create()
seed_permissions_roles(self.course.id)
self.student = UserFactory.create()
CourseEnrollmentFactory(user=self.student, course_id=self.course.id)
@patch('lms.lib.comment_client.utils.requests.request')
def _test_unicode_data(self, text, mock_request):
self._set_mock_request_data(mock_request, {
"user_id": str(self.student.id),
"closed": False,
})
request = RequestFactory().post("dummy_url", {"body": text})
request.user = self.student
request.view_name = "update_comment"
response = views.update_comment(request, course_id=unicode(self.course.id), comment_id="dummy_comment_id")
self.assertEqual(response.status_code, 200)
self.assertTrue(mock_request.called)
self.assertEqual(mock_request.call_args[1]["data"]["body"], text)
@disable_signal(views, 'comment_created')
class CreateSubCommentUnicodeTestCase(ModuleStoreTestCase, UnicodeTestMixin, MockRequestSetupMixin):
"""
Make sure comments under a response can handle unicode.
"""
def setUp(self):
super(CreateSubCommentUnicodeTestCase, self).setUp()
self.course = CourseFactory.create()
seed_permissions_roles(self.course.id)
self.student = UserFactory.create()
CourseEnrollmentFactory(user=self.student, course_id=self.course.id)
@patch('lms.lib.comment_client.utils.requests.request')
def _test_unicode_data(self, text, mock_request):
"""
Create a comment with unicode in it.
"""
self._set_mock_request_data(mock_request, {
"closed": False,
"depth": 1,
"thread_id": "test_thread",
"commentable_id": "non_team_dummy_id"
})
request = RequestFactory().post("dummy_url", {"body": text})
request.user = self.student
request.view_name = "create_sub_comment"
Thread.commentable_id = "test_commentable"
try:
response = views.create_sub_comment(
request, course_id=unicode(self.course.id), comment_id="dummy_comment_id"
)
self.assertEqual(response.status_code, 200)
self.assertTrue(mock_request.called)
self.assertEqual(mock_request.call_args[1]["data"]["body"], text)
finally:
del Thread.commentable_id
@ddt.ddt
@patch("lms.lib.comment_client.utils.requests.request")
@disable_signal(views, 'thread_voted')
@disable_signal(views, 'thread_edited')
@disable_signal(views, 'comment_created')
@disable_signal(views, 'comment_voted')
@disable_signal(views, 'comment_deleted')
class TeamsPermissionsTestCase(UrlResetMixin, ModuleStoreTestCase, MockRequestSetupMixin):
# Most of the test points use the same ddt data.
# args: user, commentable_id, status_code
ddt_permissions_args = [
# Student in team can do operations on threads/comments within the team commentable.
('student_in_team', 'team_commentable_id', 200),
# Non-team commentables can be edited by any student.
('student_in_team', 'course_commentable_id', 200),
# Student not in team cannot do operations within the team commentable.
('student_not_in_team', 'team_commentable_id', 401),
# Non-team commentables can be edited by any student.
('student_not_in_team', 'course_commentable_id', 200),
        # Moderators can always operate on threads within a team, regardless of team membership.
('moderator', 'team_commentable_id', 200)
]
@patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(TeamsPermissionsTestCase, self).setUp()
self.password = "test password"
teams_configuration = {
'topics': [{'id': "topic_id", 'name': 'Solar Power', 'description': 'Solar power is hot'}]
}
self.course = CourseFactory.create(teams_configuration=teams_configuration)
seed_permissions_roles(self.course.id)
# Create 3 users-- student in team, student not in team, discussion moderator
self.student_in_team = UserFactory.create(password=self.password)
self.student_not_in_team = UserFactory.create(password=self.password)
self.moderator = UserFactory.create(password=self.password)
CourseEnrollmentFactory(user=self.student_in_team, course_id=self.course.id)
CourseEnrollmentFactory(user=self.student_not_in_team, course_id=self.course.id)
CourseEnrollmentFactory(user=self.moderator, course_id=self.course.id)
self.moderator.roles.add(Role.objects.get(name="Moderator", course_id=self.course.id))
# Create a team.
self.team_commentable_id = "team_discussion_id"
self.team = CourseTeamFactory.create(
name=u'The Only Team',
course_id=self.course.id,
topic_id='topic_id',
discussion_topic_id=self.team_commentable_id
)
self.team.add_user(self.student_in_team)
# Dummy commentable ID not linked to a team
self.course_commentable_id = "course_level_commentable"
def _setup_mock(self, user, mock_request, data):
user = getattr(self, user)
self._set_mock_request_data(mock_request, data)
self.client.login(username=user.username, password=self.password)
@ddt.data(
# student_in_team will be able to update his own post, regardless of team membership
('student_in_team', 'student_in_team', 'team_commentable_id', 200),
('student_in_team', 'student_in_team', 'course_commentable_id', 200),
# students can only update their own posts
('student_in_team', 'moderator', 'team_commentable_id', 401),
# Even though student_not_in_team is not in the team, he can still modify posts he created while in the team.
('student_not_in_team', 'student_not_in_team', 'team_commentable_id', 200),
# Moderators can change their own posts and other people's posts.
('moderator', 'moderator', 'team_commentable_id', 200),
('moderator', 'student_in_team', 'team_commentable_id', 200),
)
@ddt.unpack
def test_update_thread(self, user, thread_author, commentable_id, status_code, mock_request):
"""
Verify that update_thread is limited to thread authors and privileged users (team membership does not matter).
"""
commentable_id = getattr(self, commentable_id)
# thread_author is who is marked as the author of the thread being updated.
thread_author = getattr(self, thread_author)
self._setup_mock(
user, mock_request, # user is the person making the request.
{
"user_id": str(thread_author.id),
"closed": False, "commentable_id": commentable_id,
"context": "standalone"
}
)
response = self.client.post(
reverse(
"update_thread",
kwargs={
"course_id": unicode(self.course.id),
"thread_id": "dummy"
}
),
data={"body": "foo", "title": "foo", "commentable_id": commentable_id}
)
self.assertEqual(response.status_code, status_code)
@ddt.data(
# Students can delete their own posts
('student_in_team', 'student_in_team', 'team_commentable_id', 200),
# Moderators can delete any post
('moderator', 'student_in_team', 'team_commentable_id', 200),
# Others cannot delete posts
('student_in_team', 'moderator', 'team_commentable_id', 401),
('student_not_in_team', 'student_in_team', 'team_commentable_id', 401)
)
@ddt.unpack
def test_delete_comment(self, user, comment_author, commentable_id, status_code, mock_request):
commentable_id = getattr(self, commentable_id)
comment_author = getattr(self, comment_author)
self._setup_mock(user, mock_request, {
"closed": False,
"commentable_id": commentable_id,
"user_id": str(comment_author.id)
})
response = self.client.post(
reverse(
"delete_comment",
kwargs={
"course_id": unicode(self.course.id),
"comment_id": "dummy"
}
),
data={"body": "foo", "title": "foo"}
)
self.assertEqual(response.status_code, status_code)
@ddt.data(*ddt_permissions_args)
@ddt.unpack
def test_create_comment(self, user, commentable_id, status_code, mock_request):
"""
Verify that create_comment is limited to members of the team or users with 'edit_content' permission.
"""
commentable_id = getattr(self, commentable_id)
self._setup_mock(user, mock_request, {"closed": False, "commentable_id": commentable_id})
response = self.client.post(
reverse(
"create_comment",
kwargs={
"course_id": unicode(self.course.id),
"thread_id": "dummy"
}
),
data={"body": "foo", "title": "foo"}
)
self.assertEqual(response.status_code, status_code)
@ddt.data(*ddt_permissions_args)
@ddt.unpack
def test_create_sub_comment(self, user, commentable_id, status_code, mock_request):
"""
Verify that create_subcomment is limited to members of the team or users with 'edit_content' permission.
"""
commentable_id = getattr(self, commentable_id)
self._setup_mock(
user, mock_request,
{"closed": False, "commentable_id": commentable_id, "thread_id": "dummy_thread"},
)
response = self.client.post(
reverse(
"create_sub_comment",
kwargs={
"course_id": unicode(self.course.id),
"comment_id": "dummy_comment"
}
),
data={"body": "foo", "title": "foo"}
)
self.assertEqual(response.status_code, status_code)
@ddt.data(*ddt_permissions_args)
@ddt.unpack
def test_comment_actions(self, user, commentable_id, status_code, mock_request):
"""
Verify that voting and flagging of comments is limited to members of the team or users with
'edit_content' permission.
"""
commentable_id = getattr(self, commentable_id)
self._setup_mock(
user, mock_request,
{"closed": False, "commentable_id": commentable_id, "thread_id": "dummy_thread"},
)
for action in ["upvote_comment", "downvote_comment", "un_flag_abuse_for_comment", "flag_abuse_for_comment"]:
response = self.client.post(
reverse(
action,
kwargs={"course_id": unicode(self.course.id), "comment_id": "dummy_comment"}
)
)
self.assertEqual(response.status_code, status_code)
@ddt.data(*ddt_permissions_args)
@ddt.unpack
def test_threads_actions(self, user, commentable_id, status_code, mock_request):
"""
Verify that voting, flagging, and following of threads is limited to members of the team or users with
'edit_content' permission.
"""
commentable_id = getattr(self, commentable_id)
self._setup_mock(
user, mock_request,
{"closed": False, "commentable_id": commentable_id},
)
for action in ["upvote_thread", "downvote_thread", "un_flag_abuse_for_thread", "flag_abuse_for_thread",
"follow_thread", "unfollow_thread"]:
response = self.client.post(
reverse(
action,
kwargs={"course_id": unicode(self.course.id), "thread_id": "dummy_thread"}
)
)
self.assertEqual(response.status_code, status_code)
@ddt.data(*ddt_permissions_args)
@ddt.unpack
def test_create_thread(self, user, commentable_id, status_code, __):
"""
Verify that creation of threads is limited to members of the team or users with 'edit_content' permission.
"""
commentable_id = getattr(self, commentable_id)
# mock_request is not used because Commentables don't exist in comment service.
self.client.login(username=getattr(self, user).username, password=self.password)
response = self.client.post(
reverse(
"create_thread",
kwargs={"course_id": unicode(self.course.id), "commentable_id": commentable_id}
),
data={"body": "foo", "title": "foo", "thread_type": "discussion"}
)
self.assertEqual(response.status_code, status_code)
@ddt.data(*ddt_permissions_args)
@ddt.unpack
def test_commentable_actions(self, user, commentable_id, status_code, __):
"""
Verify that following of commentables is limited to members of the team or users with
'edit_content' permission.
"""
commentable_id = getattr(self, commentable_id)
# mock_request is not used because Commentables don't exist in comment service.
self.client.login(username=getattr(self, user).username, password=self.password)
for action in ["follow_commentable", "unfollow_commentable"]:
response = self.client.post(
reverse(
action,
kwargs={"course_id": unicode(self.course.id), "commentable_id": commentable_id}
)
)
self.assertEqual(response.status_code, status_code)
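# Discussion topic id shared by the team-scoped forum analytics-event tests below.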
TEAM_COMMENTABLE_ID = 'test-team-discussion'
@disable_signal(views, 'comment_created')
@ddt.ddt
class ForumEventTestCase(ModuleStoreTestCase, MockRequestSetupMixin):
"""
Forum actions are expected to launch analytics events. Test these here.
"""
def setUp(self):
super(ForumEventTestCase, self).setUp()
self.course = CourseFactory.create()
seed_permissions_roles(self.course.id)
self.student = UserFactory.create()
CourseEnrollmentFactory(user=self.student, course_id=self.course.id)
self.student.roles.add(Role.objects.get(name="Student", course_id=self.course.id))
CourseAccessRoleFactory(course_id=self.course.id, user=self.student, role='Wizard')
@patch('eventtracking.tracker.emit')
@patch('lms.lib.comment_client.utils.requests.request')
def test_thread_event(self, __, mock_emit):
request = RequestFactory().post(
"dummy_url", {
"thread_type": "discussion",
"body": "Test text",
"title": "Test",
"auto_subscribe": True
}
)
request.user = self.student
request.view_name = "create_thread"
views.create_thread(request, course_id=unicode(self.course.id), commentable_id="test_commentable")
event_name, event = mock_emit.call_args[0]
self.assertEqual(event_name, 'edx.forum.thread.created')
self.assertEqual(event['body'], 'Test text')
self.assertEqual(event['title'], 'Test')
self.assertEqual(event['commentable_id'], 'test_commentable')
self.assertEqual(event['user_forums_roles'], ['Student'])
self.assertEqual(event['options']['followed'], True)
self.assertEqual(event['user_course_roles'], ['Wizard'])
self.assertEqual(event['anonymous'], False)
self.assertEqual(event['group_id'], None)
self.assertEqual(event['thread_type'], 'discussion')
        self.assertEqual(event['anonymous_to_peers'], False)
@patch('eventtracking.tracker.emit')
@patch('lms.lib.comment_client.utils.requests.request')
def test_response_event(self, mock_request, mock_emit):
"""
Check to make sure an event is fired when a user responds to a thread.
"""
mock_request.return_value.status_code = 200
self._set_mock_request_data(mock_request, {
"closed": False,
"commentable_id": 'test_commentable_id',
'thread_id': 'test_thread_id',
})
request = RequestFactory().post("dummy_url", {"body": "Test comment", 'auto_subscribe': True})
request.user = self.student
request.view_name = "create_comment"
views.create_comment(request, course_id=unicode(self.course.id), thread_id='test_thread_id')
event_name, event = mock_emit.call_args[0]
self.assertEqual(event_name, 'edx.forum.response.created')
self.assertEqual(event['body'], "Test comment")
self.assertEqual(event['commentable_id'], 'test_commentable_id')
self.assertEqual(event['user_forums_roles'], ['Student'])
self.assertEqual(event['user_course_roles'], ['Wizard'])
self.assertEqual(event['discussion']['id'], 'test_thread_id')
self.assertEqual(event['options']['followed'], True)
@patch('eventtracking.tracker.emit')
@patch('lms.lib.comment_client.utils.requests.request')
def test_comment_event(self, mock_request, mock_emit):
"""
Ensure an event is fired when someone comments on a response.
"""
self._set_mock_request_data(mock_request, {
"closed": False,
"depth": 1,
"thread_id": "test_thread_id",
"commentable_id": "test_commentable_id",
"parent_id": "test_response_id"
})
request = RequestFactory().post("dummy_url", {"body": "Another comment"})
request.user = self.student
request.view_name = "create_sub_comment"
views.create_sub_comment(request, course_id=unicode(self.course.id), comment_id="dummy_comment_id")
event_name, event = mock_emit.call_args[0]
self.assertEqual(event_name, "edx.forum.comment.created")
self.assertEqual(event['body'], 'Another comment')
self.assertEqual(event['discussion']['id'], 'test_thread_id')
self.assertEqual(event['response']['id'], 'test_response_id')
self.assertEqual(event['user_forums_roles'], ['Student'])
self.assertEqual(event['user_course_roles'], ['Wizard'])
self.assertEqual(event['options']['followed'], False)
@patch('eventtracking.tracker.emit')
@patch('lms.lib.comment_client.utils.requests.request')
@ddt.data((
'create_thread',
'edx.forum.thread.created', {
'thread_type': 'discussion',
'body': 'Test text',
'title': 'Test',
'auto_subscribe': True
},
{'commentable_id': TEAM_COMMENTABLE_ID}
), (
'create_comment',
'edx.forum.response.created',
{'body': 'Test comment', 'auto_subscribe': True},
{'thread_id': 'test_thread_id'}
), (
'create_sub_comment',
'edx.forum.comment.created',
{'body': 'Another comment'},
{'comment_id': 'dummy_comment_id'}
))
@ddt.unpack
def test_team_events(self, view_name, event_name, view_data, view_kwargs, mock_request, mock_emit):
user = self.student
team = CourseTeamFactory.create(discussion_topic_id=TEAM_COMMENTABLE_ID)
CourseTeamMembershipFactory.create(team=team, user=user)
mock_request.return_value.status_code = 200
self._set_mock_request_data(mock_request, {
'closed': False,
'commentable_id': TEAM_COMMENTABLE_ID,
'thread_id': 'test_thread_id',
})
request = RequestFactory().post('dummy_url', view_data)
request.user = user
request.view_name = view_name
getattr(views, view_name)(request, course_id=unicode(self.course.id), **view_kwargs)
name, event = mock_emit.call_args[0]
self.assertEqual(name, event_name)
self.assertEqual(event['team_id'], team.team_id)
@ddt.data(
('vote_for_thread', 'thread_id', 'thread'),
('undo_vote_for_thread', 'thread_id', 'thread'),
('vote_for_comment', 'comment_id', 'response'),
('undo_vote_for_comment', 'comment_id', 'response'),
)
@ddt.unpack
@patch('eventtracking.tracker.emit')
@patch('lms.lib.comment_client.utils.requests.request')
def test_thread_voted_event(self, view_name, obj_id_name, obj_type, mock_request, mock_emit):
undo = view_name.startswith('undo')
self._set_mock_request_data(mock_request, {
'closed': False,
'commentable_id': 'test_commentable_id',
'username': 'gumprecht',
})
request = RequestFactory().post('dummy_url', {})
request.user = self.student
request.view_name = view_name
view_function = getattr(views, view_name)
kwargs = dict(course_id=unicode(self.course.id))
kwargs[obj_id_name] = obj_id_name
if not undo:
kwargs.update(value='up')
view_function(request, **kwargs)
self.assertTrue(mock_emit.called)
event_name, event = mock_emit.call_args[0]
self.assertEqual(event_name, 'edx.forum.{}.voted'.format(obj_type))
self.assertEqual(event['target_username'], 'gumprecht')
self.assertEqual(event['undo_vote'], undo)
self.assertEqual(event['vote_value'], 'up')
class UsersEndpointTestCase(ModuleStoreTestCase, MockRequestSetupMixin):
def set_post_counts(self, mock_request, threads_count=1, comments_count=1):
"""
sets up a mock response from the comments service for getting post counts for our other_user
"""
self._set_mock_request_data(mock_request, {
"threads_count": threads_count,
"comments_count": comments_count,
})
def setUp(self):
super(UsersEndpointTestCase, self).setUp()
self.course = CourseFactory.create()
seed_permissions_roles(self.course.id)
self.student = UserFactory.create()
self.enrollment = CourseEnrollmentFactory(user=self.student, course_id=self.course.id)
self.other_user = UserFactory.create(username="other")
CourseEnrollmentFactory(user=self.other_user, course_id=self.course.id)
def make_request(self, method='get', course_id=None, **kwargs):
course_id = course_id or self.course.id
request = getattr(RequestFactory(), method)("dummy_url", kwargs)
request.user = self.student
request.view_name = "users"
return views.users(request, course_id=course_id.to_deprecated_string())
@patch('lms.lib.comment_client.utils.requests.request')
def test_finds_exact_match(self, mock_request):
self.set_post_counts(mock_request)
response = self.make_request(username="other")
self.assertEqual(response.status_code, 200)
self.assertEqual(
json.loads(response.content)["users"],
[{"id": self.other_user.id, "username": self.other_user.username}]
)
@patch('lms.lib.comment_client.utils.requests.request')
def test_finds_no_match(self, mock_request):
self.set_post_counts(mock_request)
response = self.make_request(username="othor")
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content)["users"], [])
def test_requires_GET(self):
response = self.make_request(method='post', username="other")
self.assertEqual(response.status_code, 405)
def test_requires_username_param(self):
response = self.make_request()
self.assertEqual(response.status_code, 400)
content = json.loads(response.content)
self.assertIn("errors", content)
self.assertNotIn("users", content)
def test_course_does_not_exist(self):
course_id = CourseKey.from_string("does/not/exist")
response = self.make_request(course_id=course_id, username="other")
self.assertEqual(response.status_code, 404)
content = json.loads(response.content)
self.assertIn("errors", content)
self.assertNotIn("users", content)
def test_requires_requestor_enrolled_in_course(self):
# unenroll self.student from the course.
self.enrollment.delete()
response = self.make_request(username="other")
self.assertEqual(response.status_code, 404)
content = json.loads(response.content)
self.assertIn("errors", content)
self.assertNotIn("users", content)
@patch('lms.lib.comment_client.utils.requests.request')
def test_requires_matched_user_has_forum_content(self, mock_request):
self.set_post_counts(mock_request, 0, 0)
response = self.make_request(username="other")
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content)["users"], [])
| agpl-3.0 | 7,488,359,607,607,504,000 | 39.219443 | 148 | 0.586295 | false |
yavuzovski/playground | python/Udacity/cs215/find_eulerian_tour.py | 1 | 1219 | def find_eulerian_tour(graph):
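    # Greedy walk: start at the vertex with the highest degree, then repeatedly follow
    # (and remove) any edge incident to the current vertex. This is sufficient for the
    # sample graph below; a fully general Eulerian-tour routine would use Hierholzer's
    # algorithm with backtracking.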
    # find the vertex with the biggest degree (the endpoint touching the most edges)
    biggest_degree, biggest_node = 0, None
    for node in graph:
        for e in node:
            count = 0
            for inner_node in graph:
                if e in inner_node:
                    count += 1
            if count > biggest_degree:
                biggest_degree = count
                biggest_node = e
# set the starting point
result = []
for i, node in enumerate(graph):
if biggest_node == node[0]:
result = [node[0], node[1]]
current_node = node[1]
graph.pop(i)
break
# find the eulerian tour
i = 0
while i < len(graph):
if current_node == graph[i][0] or current_node == graph[i][1]:
current_node = (graph[i][1] if current_node == graph[i][0] else graph[i][0])
result.append(current_node)
graph.pop(i)
i = 0
else:
i += 1
return result
print(find_eulerian_tour(
[
(0, 1), (1, 5), (1, 7), (4, 5),
(4, 8), (1, 6), (3, 7), (5, 9),
(2, 4), (0, 4), (2, 5), (3, 6), (8, 9)
]
))
| gpl-3.0 | 1,573,255,959,492,710,100 | 27.348837 | 88 | 0.464315 | false |
diplomacy/research | diplomacy_research/models/layers/noisy_networks.py | 1 | 4039 | # ==============================================================================
# Copyright 2019 - Philip Paquette
#
# NOTICE: Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# ==============================================================================
""" Noisy Networks
- Converts variables in a graph to their noisy equivalent
"""
from math import sqrt
import sys
assert 'tensorflow' in sys.modules, 'You need to import TF before importing this module.'
from diplomacy_research.utils.tensorflow import tf
from diplomacy_research.utils.tensorflow import graph_editor
def convert_to_noisy_variables(variables, activation=None):
""" Converts a list of variables to noisy variables
:param variables: A list of variables to make noisy
:param activation: Optional. The activation function to use on the linear noisy transformation
:return: Nothing, but modifies the graph in-place
Reference: 1706.10295 - Noisy Networks for exploration
"""
if tf.get_collection(tf.GraphKeys.TRAIN_OP):
raise RuntimeError('You must call convert_to_noisy_variables before applying an optimizer on the graph.')
graph = tf.get_default_graph()
if not isinstance(variables, list):
variables = list(variables)
# Replacing each variable
for variable in variables:
variable_read_op = _get_variable_read_op(variable, graph)
variable_outputs = _get_variable_outputs(variable_read_op, graph)
variable_scope = variable.name.split(':')[0]
variable_shape = variable.shape.as_list()
fan_in = variable_shape[0]
# Creating noisy variables
with tf.variable_scope(variable_scope + '_noisy'):
with tf.device(variable.device):
s_init = tf.constant_initializer(0.5 / sqrt(fan_in))
noisy_u = tf.identity(variable, name='mu')
noisy_s = tf.get_variable(name='sigma',
shape=variable.shape,
dtype=tf.float32,
initializer=s_init,
caching_device=variable._caching_device) # pylint: disable=protected-access
noise = tf.random.normal(shape=variable_shape)
replaced_var = noisy_u + noisy_s * noise
replaced_var = activation(replaced_var) if activation else replaced_var
# Replacing in-place
inputs_index = [var_index for var_index, var_input in enumerate(graph_editor.sgv(*variable_outputs).inputs)
if var_input.name.split(':')[0] == variable_read_op.name.split(':')[0]]
graph_editor.connect(graph_editor.sgv(replaced_var.op),
graph_editor.sgv(*variable_outputs).remap_inputs(inputs_index),
disconnect_first=True)
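# Minimal usage sketch (comment only; assumes a TF 1.x graph that is already built and
# an optimizer that has NOT been applied yet -- the names 'optimizer' and 'loss' are
# illustrative, not part of this module):
#     kernels = [var for var in tf.trainable_variables() if 'kernel' in var.name]
#     convert_to_noisy_variables(kernels)
#     train_op = optimizer.minimize(loss)   # must be created after the conversion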
def _get_variable_read_op(variable, graph):
""" Returns the /read operation for a variable """
return graph.get_operation_by_name(variable.name.split(':')[0] + '/read')
def _get_variable_outputs(variable_read_op, graph):
""" Returns the list of tensors that have the variable as input """
outputs = []
for graph_op in graph.get_operations():
for var_input in graph_op.inputs._inputs: # pylint: disable=protected-access
if var_input in variable_read_op.outputs:
outputs += [graph_op]
return outputs
| mit | 3,073,660,035,282,352,000 | 48.256098 | 122 | 0.615499 | false |
plang85/rough_surfaces | rough_surfaces/surface.py | 1 | 2746 | import numpy as np
class Surface(np.ndarray):
"""
One- or two-dimensional surface height representation.
The assumption upon which this framework is based is a uniform lattice size in both directions.
This is tightly integrated here. 'Surface' is the fundamental class that most modules build
upon. It usually represents the model or computational domain, as it may discretize either,
individual and composite surfaces, i.e., rough surfaces and aperture fields.
Standard initialization is from two-dimensional ndarray and lattice size:
>>> import numpy as np
>>> N, dxy = 100, 0.1
>>> h = np.zeros((N,N))
>>> s = Surface(h, dxy)
>>> length(s) # egde length in x-direction
10.0
>>> length(s, 1) # egde length in y-direction
10.0
Surfaces can also be one-dimensional, e.g., represent traces or cross-sections:
>>> import numpy as np
>>> N, dxy = 100, 0.1
>>> h = np.zeros((N))
>>> s = Surface(h, dxy)
>>> length(s) # length
10.0
>>> length(s, 1) # there is no second axis for one-dimensional surfaces
Traceback (most recent call last):
...
IndexError: tuple index out of range
"""
def __new__(cls, input_array, dxy):
obj = np.asarray(input_array).view(cls)
obj.dxy = float(dxy)
return obj
    def __array_finalize__(self, obj):
        if obj is None:
            return
        self.dxy = getattr(obj, 'dxy', None)
def rms(surface):
""""Returns root-mean-square roughness [L]."""
return np.sqrt(np.mean(surface**2))
def length(surface, axis=0):
""""Returns length [L] of surface in x- or y-direction, for axis=0 and 1, respectively."""
return surface.shape[axis] * surface.dxy
def nominal_area(surface):
""""Returns length() [L] for 1D, area [L^2] for 2D."""
a = 1.0
for i in range(len(surface.shape)):
a *= length(surface)
return a
def shift_to_zero_mean(surface):
""""Returns shifted surface such that <h> = 0."""
return Surface(surface - np.mean(surface), surface.dxy)
def mean_aperture(surface):
""""Composite surface assumption: mean of difference field to highest point."""
return np.mean(np.abs(np.subtract(surface, np.max(surface))))
def pore_volume(surface):
""""Composite surface assumption: mean aperture times area (2D-->[L^3]) or length (1D-->[L^2])."""
return mean_aperture(surface) * nominal_area(surface)
def scale_to_rms(surface, rms_target):
"""
    Returns a copy of the surface with heights scaled so that rms(surface) equals rms_target.
"""
rms_current = rms(surface)
return Surface(surface * (rms_target / rms_current), surface.dxy)
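# --- Illustrative usage sketch (not part of the original module) -------------
# Builds a zero-mean random surface and scales it to a target rms roughness.
# The grid size, lattice spacing and target rms below are arbitrary assumptions.
def _example_random_surface(n=128, dxy=0.1, rms_target=1.0e-3):
    """Return a zero-mean random Surface with rms roughness `rms_target` [L]."""
    h = np.random.randn(n, n)
    s = shift_to_zero_mean(Surface(h, dxy))
    return scale_to_rms(s, rms_target)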
if __name__ == '__main__':
import doctest
doctest.testmod()
| mit | 1,258,035,149,484,728,000 | 29.853933 | 102 | 0.639476 | false |
cleemesser/eeg-hdfstorage | scripts/edf2eeghdf.py | 1 | 51534 | # -*- coding: utf-8 -*-
from __future__ import division, absolute_import, print_function # py2.6 with_statement
import sys
import pprint
import h5py
import numpy as np
import os.path
# date related stuff
import datetime
import dateutil
import dateutil.tz
import dateutil.parser
import arrow
# compatibility
import future
from future.utils import iteritems
from builtins import range # range and switch xrange -> range
# from past.builtins import xrange # later, move to from builtins import
import edflib
import eeghdf
# really need to check the original data type and then save as that datatype along with the necessary conversion factors
# so can convert voltages on own
# try with float32 instead?
# LPCH often uses these labels for electrodes
LPCH_COMMON_1020_LABELS = [
'Fp1',
'Fp2',
'F3',
'F4',
'C3',
'C4',
'P3',
'P4',
'O1',
'O2',
'F7',
'F8',
'T3',
'T4',
'T5',
'T6',
'Fz',
'Cz',
'Pz',
'E',
'PG1',
'PG2',
'A1',
'A2',
'T1',
'T2',
'X1',
'X2',
'X3',
'X4',
'X5',
'X6',
'X7',
'EEG Mark1',
'EEG Mark2',
'Events/Markers']
# common 10-20 extended clinical (T1/T2 instead of FT9/FT10)
# will need to specify these as bytes I suppose (or is this ok in utf-8 given the ascii basis)
# keys should be all one case (say upper)
lpch2edf_fixed_len_labels = dict(
FP1='EEG Fp1 ',
F7='EEG F7 ',
T3='EEG T3 ',
T5='EEG T5 ',
O1='EEG O1 ',
F3='EEG F3 ',
C3='EEG C3 ',
P3='EEG P3 ',
FP2='EEG Fp2 ',
F8='EEG F8 ',
T4='EEG T4 ',
T6='EEG T6 ',
O2='EEG O2 ',
F4='EEG F4 ',
C4='EEG C4 ',
P4='EEG P4 ',
CZ='EEG Cz ',
FZ='EEG Fz ',
PZ='EEG Pz ',
T1='EEG FT9 ', # maybe I should map this to FT9/T1
T2='EEG FT10 ', # maybe I should map this to FT10/T2
A1='EEG A1 ',
A2='EEG A2 ',
# these are often (?always) EKG at LPCH, note edfspec says use ECG instead
# of EKG
X1='ECG X1 ', # is this invariant? usually referenced to A1
# this is sometimes ECG but not usually (depends on how squirmy)
X2='X2 ',
PG1='EEG Pg1 ',
PG2='EEG Pg2 ',
# now the uncommon ones
NZ='EEG Nz ',
FPZ='EEG Fpz ',
AF7='EEG AF7 ',
AF8='EEG AF8 ',
AF3='EEG AF3 ',
AFz='EEG AFz ',
AF4='EEG AF4 ',
F9='EEG F9 ',
# F7
F5='EEG F5 ',
# F3 ='EEG F3 ',
F1='EEG F1 ',
# Fz
F2='EEG F2 ',
# F4
F6='EEG F6 ',
# F8
F10='EEG F10 ',
FT9='EEG FT9 ',
FT7='EEG FT7 ',
FC5='EEG FC5 ',
FC3='EEG FC3 ',
FC1='EEG FC1 ',
FCz='EEG FCz ',
FC2='EEG FC2 ',
FC4='EEG FC4 ',
FC6='EEG FC6 ',
FT8='EEG FT8 ',
FT10='EEG FT10 ',
T9='EEG T9 ',
T7='EEG T7 ',
C5='EEG C5 ',
# C3 above
C1='EEG C1 ',
# Cz above
C2='EEG C2 ',
# C4 ='EEG C4 ',
C6='EEG C6 ',
T8='EEG T8 ',
T10='EEG T10 ',
# A2
# T3
# T4
# T5
# T6
TP9='EEG TP9 ',
TP7='EEG TP7 ',
CP5='EEG CP5 ',
CP3='EEG CP3 ',
CP1='EEG CP1 ',
CPZ='EEG CPz ',
CP2='EEG CP2 ',
CP4='EEG CP4 ',
CP6='EEG CP6 ',
TP8='EEG TP8 ',
TP10='EEG TP10 ',
P9='EEG P9 ',
P7='EEG P7 ',
P5='EEG P5 ',
# P3
P1='EEG P1 ',
# Pz
P2='EEG P2 ',
# P4
P6='EEG P6 ',
P8='EEG P8 ',
P10='EEG P10 ',
PO7='EEG PO7 ',
PO3='EEG PO3 ',
POZ='EEG POz ',
PO4='EEG PO4 ',
PO8='EEG PO8 ',
# O1
OZ='EEG Oz ',
# O2
IZ='EEG Iz ',
)
lpch2edf_fixed_len_labels
# print("lpch2edf_fixed_len_labels::\n")
# pprint.pprint(lpch2edf_fixed_len_labels)
# upper-case the keys so the label.upper() lookup below matches mixed-case entries like 'AFz'
LPCH_TO_STD_LABELS_STRIP = {k.upper(): v.strip()
                            for k, v in iteritems(lpch2edf_fixed_len_labels)}
# print('LPCH_TO_STD_LABELS_STRIP::\n')
# pprint.pprint(LPCH_TO_STD_LABELS_STRIP)
LPCH_COMMON_1020_LABELS_to_EDF_STANDARD = {
}
def normalize_lpch_signal_label(label):
uplabel = label.upper()
if uplabel in LPCH_TO_STD_LABELS_STRIP:
return LPCH_TO_STD_LABELS_STRIP[uplabel]
else:
return label
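# --- Illustrative usage sketch (not part of the original script) -------------
# How raw EDF channel labels would typically be normalized before being written
# to HDF5. The example labels below are made up for illustration.
def _example_normalize_labels():
    raw_labels = ['FP1', 'Fz', 'X1', 'EEG Mark1']
    # -> ['EEG Fp1', 'EEG Fz', 'ECG X1', 'EEG Mark1'] (unknown labels pass through)
    return [normalize_lpch_signal_label(lbl) for lbl in raw_labels]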
def edf2h5_float32(fn, outfn='', hdf_dir='', anonymous=False):
"""
    convert an edf file to hdf5 using a straightforward mapping
    convert to real-valued signals stored as float32's
    just getting started here
--- metadata ---
number_signals
sample_frequency
nsamples
age
signal_labels
Post Menstrual Age
"""
if not outfn:
base = os.path.basename(fn)
base, ext = os.path.splitext(base)
base = base + '.eeghdf5'
outfn = os.path.join(hdf_dir, base)
print('outfn:', outfn)
# outfn = fn+'.eeg.h5'
with edflib.EdfReader(fn) as ef:
nsigs = ef.signals_in_file
# again know/assume that this is uniform sampling across signals
fs = [ef.samplefrequency(ii) for ii in range(nsigs)]
fs0 = fs[0]
if any([ fs0 != xx for xx in fs]):
            print("caught multiple sampling frequencies in edf files!!!")
sys.exit(0)
nsamples0 = ef.samples_in_file(0)
print('nsigs=%s, fs0=%s, nsamples0=%s' % (nsigs, fs0, nsamples0))
# create file 'w-' -> fail if exists , w -> truncate if exists
hdf = h5py.File(outfn, 'w')
# use compression? yes! give it a try
eegdata = hdf.create_dataset('eeg', (nsigs, nsamples0), dtype='float32',
# chunks=(nsigs,fs0),
chunks=True,
fletcher32=True,
# compression='gzip',
# compression='lzf',
# maxshape=(256,None)
)
# no compression -> 50 MiB can view eegdata in vitables
# compression='gzip' -> 27 MiB slower
# compression='lzf' -> 35 MiB
# compression='lzf' maxshape=(256,None) -> 36MiB
# szip is unavailable
patient = hdf.create_group('patient')
# add meta data
hdf.attrs['number_signals'] = nsigs
hdf.attrs['sample_frequency'] = fs0
hdf.attrs['nsamples0'] = nsamples0
patient.attrs['gender_b'] = ef.gender_b
patient.attrs['patientname'] = ef.patient_name # PHI
print('birthdate: %s' % ef.birthdate_b, type(ef.birthdate_b))
# this is a string -> date (datetime)
if not ef.birthdate_b:
print("no birthday in this file")
birthdate = None
else:
birthdate = dateutil.parser.parse(ef.birthdate_b)
            print('birthdate (date object):', birthdate)
start_date_time = datetime.datetime(
ef.startdate_year,
ef.startdate_month,
ef.startdate_day,
ef.starttime_hour,
ef.starttime_minute,
ef.starttime_second) # ,tzinfo=dateutil.tz.tzlocal())
print(start_date_time)
if start_date_time and birthdate:
age = start_date_time - birthdate
print('age:', age)
else:
age = None
if age:
patient.attrs['post_natal_age_days'] = age.days
else:
patient.attrs['post_natal_age_days'] = -1
# now start storing the lists of things: labels, units...
# nsigs = len(label_list)
# variable ascii string (or b'' type)
str_dt = h5py.special_dtype(vlen=str)
label_ds = hdf.create_dataset('signal_labels', (nsigs,), dtype=str_dt)
units_ds = hdf.create_dataset('signal_units', (nsigs,), dtype=str_dt)
labels = []
units = list()
# signal_nsamples = []
for ii in range(nsigs):
labels.append(ef.signal_label(ii))
units.append(ef.physical_dimension(ii))
# self.signal_nsamples.append(self.cedf.samples_in_file(ii))
# self.samplefreqs.append(self.cedf.samplefrequency(ii))
# eegdata.signal_labels = labels
# labels are fixed length strings
labels_strip = [ss.strip() for ss in labels]
label_ds[:] = labels_strip
units_ds[:] = units
# should be more and a switch for anonymous or not
# need to change this to
nchunks = int(nsamples0 // fs0)
samples_per_chunk = int(fs0)
buf = np.zeros((nsigs, samples_per_chunk),
dtype='float64') # buffer is float64_t
print('nchunks: ', nchunks, 'samples_per_chunk:', samples_per_chunk)
bookmark = 0 # mark where were are in samples
for ii in range(nchunks):
for jj in range(nsigs):
# readsignal(self, signalnum, start, n,
# np.ndarray[np.float64_t, ndim = 1] sigbuf)
# read_phys_signal(chn, 0, nsamples[chn], v)
#read_phys_signal(self, signalnum, start, n, np.ndarray[np.float64_t, ndim=1] sigbuf)
print(ii,jj)
ef.read_phys_signal(jj, bookmark, samples_per_chunk, buf[jj]) # readsignal converts into float
# conversion from float64 to float32
eegdata[:, bookmark:bookmark + samples_per_chunk] = buf
# bookmark should be ii*fs0
bookmark += samples_per_chunk
left_over_samples = nsamples0 - nchunks * samples_per_chunk
print('left_over_samples:', left_over_samples)
if left_over_samples > 0:
for jj in range(nsigs):
ef.read_phys_signal(jj, bookmark, left_over_samples, buf[jj])
eegdata[:,
bookmark:bookmark + left_over_samples] = buf[:,
0:left_over_samples]
hdf.close()
def edf_block_iter_generator(
edf_file, nsamples, samples_per_chunk, dtype='int32'):
"""
factory to produce generators for iterating through an edf file and filling
up an array from the edf with the signal data starting at 0. You choose the
number of @samples_per_chunk, and number of samples to do in total
@nsamples as well as the dtype. 'int16' is reasonable as well 'int32' will
handle everything though
it yields -> (numpy_buffer, mark, num)
    numpy_buffer -- the filled buffer (nchan x samples_per_chunk)
    mark -- the sample offset into the file at which this buffer starts
    num -- the number of samples in the buffer (per signal) to transfer
"""
nchan = edf_file.signals_in_file
# 'int32' will work for int16 as well
buf = np.zeros((nchan, samples_per_chunk), dtype=dtype)
nchunks = nsamples // samples_per_chunk
left_over_samples = nsamples - nchunks * samples_per_chunk
mark = 0
for ii in range(nchunks):
for cc in range(nchan):
edf_file.read_digital_signal(cc, mark, samples_per_chunk, buf[cc])
yield (buf, mark, samples_per_chunk)
mark += samples_per_chunk
# print('mark:', mark)
# left overs
if left_over_samples > 0:
for cc in range(nchan):
edf_file.read_digital_signal(cc, mark, left_over_samples, buf[cc])
yield (buf[:, 0:left_over_samples], mark, left_over_samples)
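# --- Illustrative usage sketch (not part of the original script) -------------
# Streams an EDF file into a pre-created HDF5 dataset `signals` of shape
# (nchan, nsamples) using the generator above. `ef` is assumed to be an already
# open edflib.EdfReader; the chunk size of 10*fs samples is an arbitrary choice.
def _example_stream_edf_to_dataset(ef, signals, nsamples, fs):
    for buf, mark, num in edf_block_iter_generator(ef, nsamples, int(10 * fs)):
        signals[:, mark:mark + num] = buf[:, 0:num]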
def dig2phys(eeghdf, start, end, chstart, chend):
# edfhdr->edfparam[i].bitvalue = (edfhdr->edfparam[i].phys_max - edfhdr->edfparam[i].phys_min) / (edfhdr->edfparam[i].dig_max - edfhdr->edfparam[i].dig_min);
# edfhdr->edfparam[i].offset = edfhdr->edfparam[i].phys_max /
# edfhdr->edfparam[i].bitvalue - edfhdr->edfparam[i].dig_max;
dmins = eeghdf['signal_digital_mins'][:]
dmaxs = eeghdf['signal_digital_maxs'][:]
phys_maxs = eeghdf['signal_physical_maxs'][:]
phys_mins = eeghdf['signal_physical_mins'][:]
print('dmaxs:', repr(dmaxs))
print('dmins:', repr(dmins))
print('dmaxs[:] - dmins[:]', dmaxs - dmins)
print('phys_maxs', phys_maxs)
print('phys_mins', phys_mins)
bitvalues = (phys_maxs - phys_mins) / (dmaxs - dmins)
offsets = phys_maxs / bitvalues - dmaxs
print('bitvalues, offsets:', bitvalues, offsets)
print('now change their shape to column vectors')
for arr in (bitvalues, offsets):
if len(arr.shape) != 1:
            print('logical error %s shape is unexpected' % arr.shape)
raise Exception
s = arr.shape
arr.shape = (s[0], 1)
print('bitvalues, offsets:', bitvalues, offsets)
# buf[i] = phys_bitvalue * (phys_offset + (double)var.two_signed[0]);
dig_signal = eeghdf['signals'][chstart:chend, start:end]
# signal = bitvalues[chstart:chend] *(dig_signal[chstart:chend,:] + offsets[chstart:chend])
phys_signals = (dig_signal[:, start:end] + offsets) * bitvalues
# return signal, bitvalues, offsets
return phys_signals
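# Worked example for the scaling above (hypothetical but typical EEG values):
# a channel stored over the full 16-bit range with physical limits +/-3276.7 uV has
#   bitvalue = (3276.7 - (-3276.8)) / (32767 - (-32768)) = 0.1 uV per digital count
#   offset   = 3276.7 / 0.1 - 32767  = 0.0
# so a digital sample d maps to 0.1 * (d + 0.0) microvolts.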
# TODO: create edf -> hdf version 1000
# hdf -> edf for hdf version 1000
# tests to verify that round trip is lossless
# [] writing encoding of MRN
# [] and entry of mapped pt_code into database coe
def edf2hdf_oldhack(fn, outfn='', hdf_dir='', anonymous=False):
"""
    convert an edf file to hdf5 using a straightforward mapping
    just getting started here
--- metadata ---
number_signals
sample_frequency
nsamples
age
signal_labels
Post Menstrual Age
"""
if not outfn:
base = os.path.basename(fn)
base, ext = os.path.splitext(base)
base = base + '.eeg.hdf'
outfn = os.path.join(hdf_dir, base)
print('outfn:', outfn)
# outfn = fn+'.eeg.h5'
with edflib.EdfReader(fn) as ef:
# all the data point related stuff
nsigs = ef.signals_in_file
# again know/assume that this is uniform sampling across signals
fs = [ef.samplefrequency(ii) for ii in range(nsigs)]
fs0 = fs[0]
print([ fs0 != xx for xx in fs])
if any([ fs0 != xx for xx in fs]):
            print("caught multiple sampling frequencies in edf files!!!")
sys.exit(0)
nsamples0 = ef.samples_in_file(0)
print('nsigs=%s, fs0=%s, nsamples0=%s\n' % (nsigs, fs0, nsamples0))
num_samples_per_signal = ef.get_samples_per_signal() # np array
print("num_samples_per_signal::\n", repr(num_samples_per_signal), '\n')
file_duration_sec = ef.file_duration_seconds
print("file_duration_sec", repr(file_duration_sec))
signal_frequency_array = ef.get_signal_freqs()
print("signal_frequency_array::\n", repr(signal_frequency_array))
# Note that all annotations except the top row must also specify a duration.
# long long onset; /* onset time of the event, expressed in units of 100 nanoSeconds and relative to the starttime in the header */
# char duration[16]; /* duration time, this is a null-terminated ASCII text-string */
# char annotation[EDFLIB_MAX_ANNOTATION_LEN + 1]; /* description of the event in UTF-8, this is a null term string of max length 512
# start("x.y"), end, char[20]
# annotations = ef.read_annotations_as_array() # get numpy array of
# annotations
annotations = ef.read_annotations_100ns_units()
#print("annotations::\n")
#pprint.pprint(annotations) # get list of annotations
signal_text_labels = ef.get_signal_text_labels()
#print("signal_text_labels::\n")
#pprint.pprint(signal_text_labels)
#print("normalized text labels::\n")
signal_text_labels_lpch_normalized = [
normalize_lpch_signal_label(label) for label in signal_text_labels]
#pprint.pprint(signal_text_labels_lpch_normalized)
# ef.recording_additional
# print()
signal_digital_mins = np.array(
[ef.digital_min(ch) for ch in range(nsigs)])
signal_digital_total_min = min(signal_digital_mins)
print("digital mins:", repr(signal_digital_mins))
print("digital total min:", repr(signal_digital_total_min))
signal_digital_maxs = np.array(
[ef.digital_max(ch) for ch in range(nsigs)])
signal_digital_total_max = max(signal_digital_maxs)
print("digital maxs:", repr(signal_digital_maxs))
print("digital total max:", repr(signal_digital_total_max))
signal_physical_dims = [
ef.physical_dimension(ch) for ch in range(nsigs)]
print('signal_physical_dims::\n')
pprint.pprint(signal_physical_dims)
print()
signal_physical_maxs = np.array(
[ef.physical_max(ch) for ch in range(nsigs)])
print('signal_physical_maxs::\n', repr(signal_physical_maxs))
signal_physical_mins = np.array(
[ef.physical_min(ch) for ch in range(nsigs)])
print('signal_physical_mins::\n', repr(signal_physical_mins))
print('gender:', repr(ef.gender_b))
print('admincode:', repr(ef.admincode))
print('birthdate:', repr(ef.birthdate_b)) # this is a string
birthdate = dateutil.parser.parse(ef.birthdate_b)
print('birthdate as datetime:', birthdate)
print('equipment:', repr(ef.equipment))
print('patient:', repr(ef.patient))
print('patientname:', repr(ef.patient_name))
print('patientcode:', repr(ef.patientcode))
print('patient_additional:', repr(ef.patient_additional))
print('recording_additional:', repr(ef.recording_additional))
print('technician:', repr(ef.technician))
# or use arrow
start_date_time = datetime.datetime(
ef.startdate_year,
ef.startdate_month,
ef.startdate_day,
ef.starttime_hour,
ef.starttime_minute,
ef.starttime_second) # tz naive
# end_date_time = datetime.datetime(ef.enddate_year, ef.enddate_month, ef.enddate_day, ef.endtime_hour,
# ef.endtime_minute, ef.endtime_second) # tz naive
# end_date_time - start_date_time
duration = datetime.timedelta(seconds=ef.file_duration_seconds)
print('start_date_time:', start_date_time)
age = arrow.get(start_date_time) - arrow.get(birthdate)
# age = arrow.get(agedt)
print('predicted age:', age)
# total_seconds() returns a float
print('predicted age (seconds):', age.total_seconds())
print()
# this don't seem to be used much so I will put at end
signal_prefilters = [ef.prefilter(ch) for ch in range(nsigs)]
print('signal_prefilters::\n')
pprint.pprint(signal_prefilters)
print()
signal_transducer = [ef.transducer(ch) for ch in range(nsigs)]
print('signal_transducer::\n')
pprint.pprint(signal_transducer)
# now start building the hdf file
# create file 'w-' -> fail if exists , w -> truncate if exists
hdf = h5py.File(outfn, 'w')
# use compression? yes! give it a try
# integer increasing starting at 1000 semantic change at each thousand
hdf.attrs['eeghdf_version'] = 1000
hdf.attrs['signals_in_file'] = nsigs
hdf.attrs['sample_frequency0'] = fs0
hdf.attrs['nsamples0'] = nsamples0
sample_frequencies = hdf.create_dataset(
'sample_frequencies', (nsigs,), dtype='float32')
sample_frequencies[:] = signal_frequency_array
# add phys_bitvalue = .bitvalue, phys_offset = .offset
# (double) phys_value = phys_bitvalue*(phys_offset + (double) var.two_signed[0])
# edfhdr->edfparam[i].bitvalue = (edfhdr->edfparam[i].phys_max - edfhdr->edfparam[i].phys_min) / (edfhdr->edfparam[i].dig_max - edfhdr->edfparam[i].dig_min);
# edfhdr->edfparam[i].offset = edfhdr->edfparam[i].phys_max /
# edfhdr->edfparam[i].bitvalue - edfhdr->edfparam[i].dig_max;
# add meta data
#
start_date_time = datetime.datetime(ef.startdate_year, ef.startdate_month, ef.startdate_day,
ef.starttime_hour,
ef.starttime_minute,
ef.starttime_second) # ,tzinfo=dateutil.tz.tzlocal())
print(start_date_time)
patient = hdf.create_group('patient')
patient.attrs['gender'] = ef.gender_b
patient.attrs['patientname'] = "" # ef.patient_name # PHI
print('birthdate: %s' % ef.birthdate_b, type(ef.birthdate_b))
default_birthdate = datetime.datetime(year=1990, month=1, day=1)
# birthdate = dateutil.parser.parse(ef.birthdate) # this is a string
# -> date (datetime)
birthdate = default_birthdate
print('birthdate (date object):', birthdate)
private_start_date_time = birthdate + age
patient.attrs['birthdate'] = str(birthdate)
# float number age in seconds
patient.attrs['age_seconds'] = age.total_seconds()
# gestational age at birth (in seconds)
# datetime.timedelta(weeks=40).total_seconds() # default 24192000 seconds or 40 weeks, 280 days
# could also call this post-conceptional-age PCA
        patient.attrs['gestational_age_birth_seconds'] = datetime.timedelta(
weeks=40).total_seconds()
patient.attrs['born_premature'] = 'unknown' # ('unknown', True, False)
# hide actual start/end times question: should vary by year or just
# make all the same
hdf.attrs['startdatetime'] = str(private_start_date_time)
hdf.attrs['enddatetime'] = str(private_start_date_time + duration)
patient.attrs['age_days'] = age.days # post natal age in days
patient.attrs['age_seconds'] = age.total_seconds()
# now start storing the lists of things: labels, units...
# nsigs = len(label_list)
# 1. keep the text-vs-bytes distinction clear
        # 2. always use "bytes" instead of "str" when you're sure you want a byte string.
# for literals, can use "b" prefix, e.g. b'some bytes'
        # 3. for text strings use str or better yet unicode, u'Hello'
# 4. always use UTF-8 in code
# variable ascii string (or b'' type)
str_dt = h5py.special_dtype(vlen=bytes)
label_ds = hdf.create_dataset('signal_labels', (nsigs,), dtype=str_dt)
units_ds = hdf.create_dataset(
'physical_dimensions', (nsigs,), dtype=str_dt)
transducer_ds = hdf.create_dataset(
'transducer', (nsigs,), dtype=str_dt)
prefilter_ds = hdf.create_dataset('prefilter', (nsigs,), dtype=str_dt)
hdf['signal_physical_mins'] = signal_physical_mins
hdf['signal_physical_maxs'] = signal_physical_maxs
hdf['signal_digital_mins'] = signal_digital_mins
hdf['signal_digital_maxs'] = signal_digital_maxs
if all(signal_digital_maxs <= 32767) and all(
signal_digital_mins >= -32768):
number_bits = 16 # EDF
else:
number_bits = 24 # BDF 2^23 = 8388608 + 1 bit for sign
hdf.attrs['number_bits_per_sample'] = number_bits
if number_bits <= 16:
data_dtype = 'int16'
eegdata = hdf.create_dataset('signals', (nsigs, nsamples0), dtype=data_dtype,
# chunks=(nsigs,fs0), # if wanted 1
# second chunks
chunks=True,
fletcher32=True,
compression='gzip' # most universal
# compression='gzip',
# compression='lzf',
# maxshape=(256,None)
)
if number_bits <= 32 and number_bits > 16: # handles up to 32
data_dtype = 'int32'
eegdata = hdf.create_dataset('signals', (nsigs, nsamples0), dtype=data_dtype,
# chunks=(nsigs,fs0), # if wanted 1
# second chunks
chunks=True,
fletcher32=True,
compression='gzip' # most universal
# compression='gzip',
# compression='lzf',
# maxshape=(256,None)
)
# no compression -> 50 MiB can view eegdata in vitables
# compression='gzip' -> 27 MiB slower
# compression='lzf' -> 35 MiB
# compression='lzf' maxshape=(256,None) -> 36MiB
# this works but can do another way:
# labels = []
units = list()
# signal_nsamples = []
for ii in range(nsigs):
# labels.append(ef.signal_label(ii))
units.append(ef.physical_dimension(ii))
# self.signal_nsamples.append(self.cedf.samples_in_file(ii))
# self.samplefreqs.append(self.cedf.samplefrequency(ii))
# eegdata.signal_labels = labels
# labels_strip = [ss.strip() for ss in labels] # labels are fixed
# length strings
units = [cc.strip() for cc in units]
# converted to standard electrode names if possible
label_ds[:] = signal_text_labels_lpch_normalized
units_ds[:] = units
transducer_ds[:] = signal_transducer
prefilter_ds[:] = signal_prefilters
num_annot = len(annotations)
# how do I make sure this init is "long long" enough
edf_annots = hdf.create_group('edf_annotations')
starts = edf_annots.create_dataset(
'starts_100ns', (num_annot,), dtype=np.int64)
# curiously these durations seem to be stored as strings but of
# floating point values "5.00000" for 5 second duration
durations = edf_annots.create_dataset(
'durations_char16', (num_annot,), dtype='S16') # S16 !!! check py3 compatibility
texts = edf_annots.create_dataset('texts', (num_annot,), dtype=str_dt)
# start with a loop
for ii in range(num_annot):
starts[ii] = annotations[ii][0]
            # note: so far I have only seen type(annotations[ii][1]) -> <type 'str'> and they look like ascii strings
# of floating point number of seconds for a duration
# print('type(annotations[ii][1]):', type(annotations[ii][1]))
durations[ii] = annotations[ii][1]
texts[ii] = annotations[ii][2].strip()
# should be more and a switch for anonymous or not
# need to change this to
nchunks = int(nsamples0 // fs0)
samples_per_chunk = int(fs0) # 1 second of samples
buf = np.zeros((nsigs, samples_per_chunk), dtype='int32')
print(
'nchunks:%s, samples_per_chunk: %s' %
(nchunks, samples_per_chunk))
bookmark = 0 # mark where were are in samples
for ii in range(nchunks):
for jj in range(nsigs):
# read_phys_signal(self, signalnum, start, n,
# np.ndarray[np.float64_t, ndim = 1] sigbuf)
# readsignal converts into int32 as necessary
ef.read_digital_signal(
jj, bookmark, samples_per_chunk, buf[jj])
# conversion from int32 to int16 as necessary
eegdata[:, bookmark:bookmark + samples_per_chunk] = buf
# bookmark should be ii*fs0
bookmark += samples_per_chunk
left_over_samples = nsamples0 - nchunks * samples_per_chunk
print('left_over_samples:', left_over_samples)
if left_over_samples > 0:
for jj in range(nsigs):
ef.read_digital_signal(
jj, bookmark, left_over_samples, buf[jj])
eegdata[:,bookmark:bookmark + left_over_samples] = buf[:,0:left_over_samples]
hdf.close()
# from trackingdb.models.nkdb import find_lpch_birthday_from_mrn
# Plan
# v = ValidateTrackHeader(header=h)
# if v.is_valid():
# process(v.cleaned_data)
# else:
# mark_as_invalid(h)
def first(mapping):
if mapping:
return mapping[0]
else:
return mapping # say mapping = [] or None
class ValidateTrackHeaderLPCH:
# after validated place all data in cleaned_data field
def __init__(self, header):
        # TODO: validate that database_source_label is in accepted sources
self.hdr = header.copy()
self.validated = False
# self.clean = False
self.cleaned_data = {} # vs update/copy from header
def is_valid(self):
# if name contains "Test" then we should skip this file and log it
mrnobj = None
try:
if name_is_test(self.hdr['patient_name']):
raise ValidationError('test file encountered', code='test file', params=self.hdr)
# if we have a valid mrn, then we can potentially look up the patient or even the study
mrn_ok = valid_lpch_mrn(self.hdr['patientcode'])
if mrn_ok:
mrn = self.hdr['patientcode'].strip()
self.cleaned_data['patientcode'] = mrn
else:
raise ValidationError('bad MRN', code='bad mrn', params=self.hdr['patientcode'])
if valid_lpch_name(self.hdr['patient_name']):
self.cleaned_data['patient_name'] = self.hdr['patient_name'].strip()
else:
if mrn_ok: # try to look up patient in databases
# look up name, dob here based upon mrn in nk_db and/or epic_db
mrnobj = models.NkMrn.query.filter_by(mrn=mrn).first()
if mrnobj:
self.cleaned_data['patient_name'] = mrnobj.nkpatient.name
else:
raise ValidationError('invalid patient name', 'invalid name',
params=self.hdr)
eegno_ok = valid_lpch_eegno(self.hdr['admincode'])
if eegno_ok:
self.cleaned_data['admincode'] = _csu(self.hdr['admincode'])
else:
raise ValidationError('bad eegno/admincode', code='invalid admincode', params=self.hdr)
if self.hdr['birthdate_date']:
self.cleaned_data['birthdate_date'] = self.hdr['birthdate_date']
else:
# then couldn't make a date, see if can find birthday in database
if mrn_ok:
mrnobj = mrnobj if mrnobj else models.NkMrn.query.filter_by(mrn=mrn).first()
if not mrnobj:
raise ValidationError('bad birthdate_date','birthdate error', params=self.hdr)
else:
nbday = mrnobj.nkpatient.dob
self.cleaned_data['birthdate_date'] = nbday
else:
raise ValidationError('bad birthday','birthday error', params=self.hdr)
# copy over other header members
# todo: should do more validation of 'gender'
self.cleaned_data['gender'] = self.hdr['gender']
self.cleaned_data['file_name'] = self.hdr['file_name']
self.cleaned_data['filetype'] = self.hdr['filetype']
self.cleaned_data['signals_in_file'] = self.hdr['signals_in_file']
self.cleaned_data['datarecords_in_file'] = self.hdr['datarecords_in_file']
self.cleaned_data['file_duration_100ns'] = self.hdr['file_duration_100ns']
self.cleaned_data['file_duration_seconds'] = self.hdr['file_duration_seconds']
self.cleaned_data['startdate_date'] = self.hdr['startdate_date']
self.cleaned_data['start_datetime'] = self.hdr['start_datetime']
self.cleaned_data['starttime_subsecond_offset'] = self.hdr['starttime_subsecond_offset']
self.cleaned_data['patient_additional'] = self.hdr['patient_additional'].strip()
self.cleaned_data['technician'] = self.hdr['technician'].strip()
self.cleaned_data['equipment'] = self.hdr['equipment'].strip()
self.cleaned_data['recording_additional'] = self.hdr['recording_additional'].strip()
self.cleaned_data['datarecord_duration_100ns'] = self.hdr['datarecord_duration_100ns']
self.validated = True
return True
except ValidationError as ve:
self.errors = ve.message
self.error_code = ve.code
self.error_params = ve.params
debug(ve.message)
return False
class AnonymizeTrackHeaderLPCH(ValidateTrackHeaderLPCH):
LPCH_DEFAULT_BIRTH_DATETIME = datetime.datetime(year=1990, month=1, day=1)
    # database sources
LPCH_NK = 'LPCH_NK'
STANFORD_NK = 'STANFORD_NK'
def __init__(self, header, source_database_label=LPCH_NK):
super().__init__(header)
with app.app_context():
self.anonymous_header = models.register_and_create_anonymous_header(self.hdr, source_database_label=source_database_label)
# will need to track: patient, study, file
# file needs source and key NK origin
class ValidateTrackHeaderStanford:
# after validated place all data in cleaned_data field
def __init__(self, header):
        # TODO: validate that database_source_label is in accepted sources
self.hdr = header.copy()
self.validated = False
# self.clean = False
self.cleaned_data = {} # vs update/copy from header
def is_valid(self):
# if name contains "Test" then we should skip this file and log it
mrnobj = None
try:
if name_is_test(self.hdr['patient_name']):
raise ValidationError('test file encountered', code='test file', params=self.hdr)
# if we have a valid mrn, then we can potentially look up the patient or even the study
mrn_ok = valid_stanford_mrn(self.hdr['patientcode'])
if mrn_ok:
mrn = self.hdr['patientcode'].strip()
self.cleaned_data['patientcode'] = mrn
else:
raise ValidationError('bad MRN', code='bad mrn', params=self.hdr['patientcode'])
if valid_stanford_name(self.hdr['patient_name']):
self.cleaned_data['patient_name'] = self.hdr['patient_name'].strip()
else:
if mrn_ok: # try to look up patient in databases
# look up name, dob here based upon mrn in nk_db and/or epic_db
mrnobj = models.NkMrn.query.filter_by(mrn=mrn).first()
if mrnobj:
self.cleaned_data['patient_name'] = mrnobj.nkpatient.name
else:
raise ValidationError('invalid patient name', 'invalid name',
params=self.hdr)
eegno_ok = valid_stanford_eegno(self.hdr['admincode'])
if eegno_ok:
self.cleaned_data['admincode'] = _csu(self.hdr['admincode'])
else:
raise ValidationError('bad eegno/admincode', code='invalid admincode', params=self.hdr)
if self.hdr['birthdate_date']:
self.cleaned_data['birthdate_date'] = self.hdr['birthdate_date']
else:
# then couldn't make a date, see if can find birthday in database
if mrn_ok:
mrnobj = mrnobj if mrnobj else models.NkMrn.query.filter_by(mrn=mrn).first()
if not mrnobj:
raise ValidationError('bad birthdate_date','birthdate error', params=self.hdr)
else:
nbday = mrnobj.nkpatient.dob
self.cleaned_data['birthdate_date'] = nbday
else:
raise ValidationError('bad birthday','birthday error', params=self.hdr)
# copy over other header members
# todo: should do more validation of 'gender'
self.cleaned_data['gender'] = self.hdr['gender']
self.cleaned_data['file_name'] = self.hdr['file_name']
self.cleaned_data['filetype'] = self.hdr['filetype']
self.cleaned_data['signals_in_file'] = self.hdr['signals_in_file']
self.cleaned_data['datarecords_in_file'] = self.hdr['datarecords_in_file']
self.cleaned_data['file_duration_100ns'] = self.hdr['file_duration_100ns']
self.cleaned_data['file_duration_seconds'] = self.hdr['file_duration_seconds']
self.cleaned_data['startdate_date'] = self.hdr['startdate_date']
self.cleaned_data['start_datetime'] = self.hdr['start_datetime']
self.cleaned_data['starttime_subsecond_offset'] = self.hdr['starttime_subsecond_offset']
self.cleaned_data['patient_additional'] = self.hdr['patient_additional'].strip()
self.cleaned_data['technician'] = self.hdr['technician'].strip()
self.cleaned_data['equipment'] = self.hdr['equipment'].strip()
self.cleaned_data['recording_additional'] = self.hdr['recording_additional'].strip()
self.cleaned_data['datarecord_duration_100ns'] = self.hdr['datarecord_duration_100ns']
self.validated = True
return True
except ValidationError as ve:
self.errors = ve.message
self.error_code = ve.code
self.error_params = ve.params
debug(ve.message)
return False
class AnonymizeTrackHeaderStanford(ValidateTrackHeaderStanford):
STANFORD_DEFAULT_BIRTH_DATETIME = datetime.datetime(year=1910, month=1, day=1)
    # database sources
LPCH_NK = 'LPCH_NK'
STANFORD_NK = 'STANFORD_NK'
def __init__(self, header, source_database_label='STANFORD_NK'):
super().__init__(header)
with app.app_context():
self.anonymous_header = models.register_and_create_anonymous_header(self.hdr, source_database_label=source_database_label)
# will need to track: patient, study, file
# file needs source and key NK origin
def find_blocks(arr):
blocks = []
print("total arr:", arr)
dfs = np.diff(arr)
dfs_ind = np.where(dfs != 0.0)[0]
last_ind = 0
for dd in dfs_ind+1:
print("block:",arr[last_ind:dd])
blocks.append((last_ind,dd))
last_ind = dd
print("last block:", arr[last_ind:])
blocks.append( (last_ind,len(arr)))
return blocks
def find_blocks2(arr):
blocks = []
N = len(arr)
print("total arr:", arr)
last_ind = 0
last_val = arr[0]
for ii in range(1,N):
if last_val == arr[ii]:
pass
else:
blocks.append((last_ind,ii))
last_ind = ii
last_val = arr[ii]
blocks.append((last_ind,N))
return blocks
def test_find_blocks1():
s = [250.0, 250.0, 250.0, 1.0, 1.0, 1000.0, 1000.0]
blocks = find_blocks(s)
print("blocks:")
print(blocks)
def test_find_blocks2():
s = [250.0, 250.0, 250.0, 1.0, 1.0, 1000.0, 1000.0]
blocks = find_blocks2(s)
print("blocks:")
print(blocks)
def test_find_blocks2_2():
s = [100,100,100,100,100,100,100,100]
blocks = find_blocks2(s)
print("blocks:")
print(blocks)
def edf2hdf2(fn, outfn='', hdf_dir='', anonymize=False):
"""
convert an edf file to hdf5 using fairly straightforward mapping
return True if successful
    @database_source_label tells us which database it came from LPCH_NK or STANFORD_NK
this is important!
"""
if not outfn:
base = os.path.basename(fn)
base, ext = os.path.splitext(base)
base = base + '.eeghdf'
outfn = os.path.join(hdf_dir, base)
# print('outfn:', outfn)
# all the data point related stuff
with edflib.EdfReader(fn) as ef:
# read all EDF+ header information in just the way I want it
header = {
'file_name': os.path.basename(fn),
'filetype': ef.filetype,
'patient_name': ef.patient_name,
'patientcode': ef.patientcode,
'gender': ef.gender,
'signals_in_file': ef.signals_in_file,
'datarecords_in_file': ef.datarecords_in_file,
'file_duration_100ns': ef.file_duration_100ns,
'file_duration_seconds': ef.file_duration_seconds,
'startdate_date': datetime.date(ef.startdate_year, ef.startdate_month, ef.startdate_day),
'start_datetime': datetime.datetime(ef.startdate_year, ef.startdate_month, ef.startdate_day,
ef.starttime_hour, ef.starttime_minute, ef.starttime_second),
'starttime_subsecond_offset': ef.starttime_subsecond,
'birthdate_date': ef.birthdate_date,
'patient_additional': ef.patient_additional,
'admincode': ef.admincode, # usually the study eg. C13-100
'technician': ef.technician,
'equipment': ef.equipment,
'recording_additional': ef.recording_additional,
'datarecord_duration_100ns': ef.datarecord_duration_100ns,
}
pprint.pprint(header)
#### validation code #####
validator = None
# if source_database_label=='LPCH_NK':
# validator = ValidateTrackHeaderLPCH(header=header)
# elif source_database_label== 'STANFORD_NK':
# validator = ValidateTrackHeaderStanford(header=header)
# else:
# raise ValidationError
# if not validator.is_valid():
# print('problem with this file:', fn)
# print(validator.errors,validator.error_code,
# validator.error_params)
# return False, validator
# else:
# print('\nvalid header::')
# pprint.pprint(validator.cleaned_data)
# header = validator.cleaned_data
# from here on the header is valid and cleaned
# use arrow
start_datetime = header['start_datetime']
# end_date_time = datetime.datetime(ef.enddate_year, ef.enddate_month, ef.enddate_day, ef.endtime_hour,
# ef.endtime_minute, ef.endtime_second) # tz naive
# end_date_time - start_date_time
duration = datetime.timedelta(seconds=header['file_duration_seconds'])
# derived information
birthdate = header['birthdate_date']
if birthdate:
age = arrow.get(start_datetime) - arrow.get(header['birthdate_date'])
debug('predicted age: %s' % age)
# total_seconds() returns a float
debug('predicted age (seconds): %s' % age.total_seconds())
else:
age = datetime.timedelta(seconds=0)
# if anonymize:
# if source_database_label== 'LPCH_NK':
# anonymizer = AnonymizeTrackHeaderLPCH(header, source_database_label=source_database_label)
# if source_database_label == 'STANFORD_NK':
# anonymizer = AnonymizeTrackHeaderStanford(header, source_database_label=source_database_label)
# header = anonymizer.anonymous_header # replace the original header with the anonymous one
# print('anonymized header')
# pprint.pprint(header)
# anonymized version if necessary
header['end_datetime'] = header['start_datetime'] + duration
############# signal array information ##################
# signal block related stuff
nsigs = ef.signals_in_file
# again know/assume that this is uniform sampling across signals
fs0 = ef.samplefrequency(0)
signal_frequency_array = ef.get_signal_freqs()
dfs = np.diff(signal_frequency_array)
dfs_ind = np.where(dfs != 0.0)
dfs_ind = dfs_ind[0]
last_ind = 0
for dd in dfs_ind+1:
print("block:",signal_frequency_array[last_ind:dd])
last_ind = dd
print("last block:", signal_frequency_array[last_ind:])
print("where does sampling rate change?", np.where(dfs != 0.0))
print("elements:", signal_frequency_array[np.where(dfs != 0.0)])
print("signal_frequency_array::\n", repr(signal_frequency_array))
print("len(signal_frequency_array):", len(signal_frequency_array))
assert all(signal_frequency_array[:-3] == fs0)
nsamples0 = ef.samples_in_file(0) # samples per channel
print('nsigs=%s, fs0=%s, nsamples0=%s\n' % (nsigs, fs0, nsamples0))
num_samples_per_signal = ef.get_samples_per_signal() # np array
print("num_samples_per_signal::\n", repr(num_samples_per_signal), '\n')
# assert all(num_samples_per_signal == nsamples0)
file_duration_sec = ef.file_duration_seconds
#print("file_duration_sec", repr(file_duration_sec))
# Note that all annotations except the top row must also specify a duration.
# long long onset; /* onset time of the event, expressed in units of 100
# nanoSeconds and relative to the starttime in the header */
# char duration[16]; /* duration time, this is a null-terminated ASCII text-string */
# char annotation[EDFLIB_MAX_ANNOTATION_LEN + 1]; /* description of the
# event in UTF-8, this is a null term string of max length 512*/
# start("x.y"), end, char[20]
# annotations = ef.read_annotations_as_array() # get numpy array of
# annotations
annotations_b = ef.read_annotations_b_100ns_units()
# print("annotations_b::\n")
# pprint.pprint(annotations_b) # get list of annotations
signal_text_labels = ef.get_signal_text_labels()
print("signal_text_labels::\n")
pprint.pprint(signal_text_labels)
print("normalized text labels::\n")
signal_text_labels_lpch_normalized = [
normalize_lpch_signal_label(label) for label in signal_text_labels]
pprint.pprint(signal_text_labels_lpch_normalized)
# ef.recording_additional
# print()
signal_digital_mins = np.array(
[ef.digital_min(ch) for ch in range(nsigs)])
signal_digital_total_min = min(signal_digital_mins)
print("digital mins:", repr(signal_digital_mins))
print("digital total min:", repr(signal_digital_total_min))
signal_digital_maxs = np.array(
[ef.digital_max(ch) for ch in range(nsigs)])
signal_digital_total_max = max(signal_digital_maxs)
print("digital maxs:", repr(signal_digital_maxs))
#print("digital total max:", repr(signal_digital_total_max))
signal_physical_dims = [
ef.physical_dimension(ch) for ch in range(nsigs)]
# print('signal_physical_dims::\n')
# pprint.pprint(signal_physical_dims)
#print()
signal_physical_maxs = np.array(
[ef.physical_max(ch) for ch in range(nsigs)])
#print('signal_physical_maxs::\n', repr(signal_physical_maxs))
signal_physical_mins = np.array(
[ef.physical_min(ch) for ch in range(nsigs)])
#print('signal_physical_mins::\n', repr(signal_physical_mins))
# this don't seem to be used much so I will put at end
signal_prefilters = [ef.prefilter(ch).strip() for ch in range(nsigs)]
#print('signal_prefilters::\n')
# pprint.pprint(signal_prefilters)
#print()
signal_transducers = [ef.transducer(ch).strip() for ch in range(nsigs)]
#print('signal_transducers::\n')
#pprint.pprint(signal_transducers)
with eeghdf.EEGHDFWriter(outfn, 'w') as eegf:
eegf.write_patient_info(patient_name=header['patient_name'],
patientcode=header['patientcode'],
gender=header['gender'],
birthdate_isostring=header['birthdate_date'],
# gestational_age_at_birth_days
# born_premature
patient_additional=header['patient_additional'])
signal_text_labels_lpch_normalized = [
normalize_lpch_signal_label(label) for label in signal_text_labels]
rec = eegf.create_record_block(record_duration_seconds=header['file_duration_seconds'],
start_isodatetime=str(header['start_datetime']),
end_isodatetime=str(header['end_datetime']),
number_channels=header['signals_in_file'],
num_samples_per_channel=nsamples0,
sample_frequency=fs0,
signal_labels=signal_text_labels_lpch_normalized,
signal_physical_mins=signal_physical_mins,
signal_physical_maxs=signal_physical_maxs,
signal_digital_mins=signal_digital_mins,
signal_digital_maxs=signal_digital_maxs,
physical_dimensions=signal_physical_dims,
patient_age_days=age.total_seconds() / 86400.0,
signal_prefilters=signal_prefilters,
signal_transducers=signal_transducers,
technician=header['technician'])
eegf.write_annotations_b(annotations_b) # may be should be called record annotations
edfblock_itr = edf_block_iter_generator(
ef,
nsamples0,
100 * ef.samples_in_datarecord(0)*header['signals_in_file'], # samples_per_chunk roughly 100 datarecords at a time
dtype='int32')
signals = eegf.stream_dig_signal_to_record_block(rec, edfblock_itr)
return True, validator # we succeeded
def test_edf2hdf_info():
# on chris's macbook
EDF_DIR = r'/Users/clee/code/eegml/nk_database_proj/private/lpch_edfs'
fn = os.path.join(EDF_DIR, 'XA2731AX_1-1+.edf')
    edf2hdf2(fn)
if __name__ == '__main__':
import sys
if len(sys.argv) == 2:
file_name = sys.argv[1]
edf2hdf2(file_name)
| bsd-3-clause | 4,465,477,519,905,432,000 | 38.825348 | 170 | 0.561707 | false |
lmazuel/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_11_01/models/azure_reachability_report_py3.py | 1 | 1995 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class AzureReachabilityReport(Model):
"""Azure reachability report details.
All required parameters must be populated in order to send to Azure.
:param aggregation_level: Required. The aggregation level of Azure
reachability report. Can be Country, State or City.
:type aggregation_level: str
:param provider_location: Required.
:type provider_location:
~azure.mgmt.network.v2017_11_01.models.AzureReachabilityReportLocation
:param reachability_report: Required. List of Azure reachability report
items.
:type reachability_report:
list[~azure.mgmt.network.v2017_11_01.models.AzureReachabilityReportItem]
"""
_validation = {
'aggregation_level': {'required': True},
'provider_location': {'required': True},
'reachability_report': {'required': True},
}
_attribute_map = {
'aggregation_level': {'key': 'aggregationLevel', 'type': 'str'},
'provider_location': {'key': 'providerLocation', 'type': 'AzureReachabilityReportLocation'},
'reachability_report': {'key': 'reachabilityReport', 'type': '[AzureReachabilityReportItem]'},
}
def __init__(self, *, aggregation_level: str, provider_location, reachability_report, **kwargs) -> None:
super(AzureReachabilityReport, self).__init__(**kwargs)
self.aggregation_level = aggregation_level
self.provider_location = provider_location
self.reachability_report = reachability_report
| mit | 6,542,636,260,886,058,000 | 40.5625 | 108 | 0.649624 | false |
ray-project/ray | python/ray/serve/tests/test_fastapi.py | 1 | 12857 | import sys
import time
from typing import Any, List, Optional
import tempfile
import pytest
import inspect
import requests
from fastapi import (Cookie, Depends, FastAPI, Header, Query, Request,
APIRouter, BackgroundTasks, Response)
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
from pydantic import BaseModel, Field
import ray
from ray import serve
from ray.serve.http_util import make_fastapi_class_based_view
def test_fastapi_function(serve_instance):
app = FastAPI()
@app.get("/{a}")
def func(a: int):
return {"result": a}
@serve.deployment(name="f")
@serve.ingress(app)
class FastAPIApp:
pass
FastAPIApp.deploy()
resp = requests.get("http://localhost:8000/f/100")
assert resp.json() == {"result": 100}
resp = requests.get("http://localhost:8000/f/not-number")
assert resp.status_code == 422 # Unprocessable Entity
assert resp.json()["detail"][0]["type"] == "type_error.integer"
def test_ingress_prefix(serve_instance):
app = FastAPI()
@app.get("/{a}")
def func(a: int):
return {"result": a}
@serve.deployment(route_prefix="/api")
@serve.ingress(app)
class App:
pass
App.deploy()
resp = requests.get("http://localhost:8000/api/100")
assert resp.json() == {"result": 100}
def test_class_based_view(serve_instance):
app = FastAPI()
@app.get("/other")
def hello():
return "hello"
@serve.deployment(name="f")
@serve.ingress(app)
class A:
def __init__(self):
self.val = 1
@app.get("/calc/{i}")
def b(self, i: int):
return i + self.val
@app.post("/calc/{i}")
def c(self, i: int):
return i - self.val
def other(self, msg: str):
return msg
A.deploy()
# Test HTTP calls.
resp = requests.get("http://localhost:8000/f/calc/41")
assert resp.json() == 42
resp = requests.post("http://localhost:8000/f/calc/41")
assert resp.json() == 40
resp = requests.get("http://localhost:8000/f/other")
assert resp.json() == "hello"
# Test handle calls.
handle = A.get_handle()
assert ray.get(handle.b.remote(41)) == 42
assert ray.get(handle.c.remote(41)) == 40
assert ray.get(handle.other.remote("world")) == "world"
def test_make_fastapi_cbv_util():
app = FastAPI()
class A:
@app.get("/{i}")
def b(self, i: int):
pass
    # before, "self" is treated as a query param
assert app.routes[-1].endpoint == A.b
assert app.routes[-1].dependant.query_params[0].name == "self"
assert len(app.routes[-1].dependant.dependencies) == 0
make_fastapi_class_based_view(app, A)
# after, "self" is treated as a dependency instead of query params
assert app.routes[-1].endpoint == A.b
assert len(app.routes[-1].dependant.query_params) == 0
assert len(app.routes[-1].dependant.dependencies) == 1
self_dep = app.routes[-1].dependant.dependencies[0]
assert self_dep.name == "self"
assert inspect.isfunction(self_dep.call)
assert "get_current_servable" in str(self_dep.call)
def test_fastapi_features(serve_instance):
app = FastAPI(openapi_url="/my_api.json")
@app.on_event("startup")
def inject_state():
app.state.state_one = "app.state"
@app.middleware("http")
async def add_process_time_header(request: Request, call_next):
start_time = time.time()
response = await call_next(request)
process_time = time.time() - start_time
response.headers["X-Process-Time"] = str(process_time)
return response
class Nested(BaseModel):
val: int
class BodyType(BaseModel):
name: str
price: float = Field(None, gt=1.0, description="High price!")
nests: Nested
class RespModel(BaseModel):
ok: bool
vals: List[Any]
file_path: str
async def yield_db():
yield "db"
async def common_parameters(q: Optional[str] = None):
return {"q": q}
@app.exception_handler(ValueError)
async def custom_handler(_: Request, exc: ValueError):
return JSONResponse(
status_code=500,
content={
"custom_error": "true",
"message": str(exc)
})
def run_background(background_tasks: BackgroundTasks):
path = tempfile.mktemp()
def write_to_file(p):
with open(p, "w") as f:
f.write("hello")
background_tasks.add_task(write_to_file, path)
return path
app.add_middleware(CORSMiddleware, allow_origins="*")
@app.get("/{path_arg}", response_model=RespModel, status_code=201)
async def func(
path_arg: str,
query_arg: str,
body_val: BodyType,
backgrounds_tasks: BackgroundTasks,
do_error: bool = False,
query_arg_valid: Optional[str] = Query(None, min_length=3),
cookie_arg: Optional[str] = Cookie(None),
user_agent: Optional[str] = Header(None),
commons: dict = Depends(common_parameters),
db=Depends(yield_db),
):
if do_error:
raise ValueError("bad input")
path = run_background(backgrounds_tasks)
return RespModel(
ok=True,
vals=[
path_arg,
query_arg,
body_val.price,
body_val.nests.val,
do_error,
query_arg_valid,
cookie_arg,
user_agent.split("/")[0], # returns python-requests
commons,
db,
app.state.state_one,
],
file_path=path,
)
router = APIRouter(prefix="/prefix")
@router.get("/subpath")
def router_path():
return "ok"
app.include_router(router)
@serve.deployment(name="fastapi")
@serve.ingress(app)
class Worker:
pass
Worker.deploy()
url = "http://localhost:8000/fastapi"
resp = requests.get(f"{url}/")
assert resp.status_code == 404
assert "x-process-time" in resp.headers
resp = requests.get(f"{url}/my_api.json")
assert resp.status_code == 200
assert resp.json() # it returns a well-formed json.
resp = requests.get(f"{url}/docs")
assert resp.status_code == 200
assert "<!DOCTYPE html>" in resp.text
resp = requests.get(f"{url}/redoc")
assert resp.status_code == 200
assert "<!DOCTYPE html>" in resp.text
resp = requests.get(f"{url}/path_arg")
assert resp.status_code == 422 # Malformed input
resp = requests.get(
f"{url}/path_arg",
json={
"name": "serve",
"price": 12,
"nests": {
"val": 1
}
},
params={
"query_arg": "query_arg",
"query_arg_valid": "at-least-three-chars",
"q": "common_arg",
})
assert resp.status_code == 201, resp.text
assert resp.json()["ok"]
assert resp.json()["vals"] == [
"path_arg",
"query_arg",
12.0,
1,
False,
"at-least-three-chars",
None,
"python-requests",
{
"q": "common_arg"
},
"db",
"app.state",
]
assert open(resp.json()["file_path"]).read() == "hello"
resp = requests.get(
f"{url}/path_arg",
json={
"name": "serve",
"price": 12,
"nests": {
"val": 1
}
},
params={
"query_arg": "query_arg",
"query_arg_valid": "at-least-three-chars",
"q": "common_arg",
"do_error": "true"
})
assert resp.status_code == 500
assert resp.json()["custom_error"] == "true"
resp = requests.get(f"{url}/prefix/subpath")
assert resp.status_code == 200
resp = requests.get(
f"{url}/docs",
headers={
"Access-Control-Request-Method": "GET",
"Origin": "https://googlebot.com"
})
assert resp.headers["access-control-allow-origin"] == "*", resp.headers
def test_fast_api_mounted_app(serve_instance):
app = FastAPI()
subapp = FastAPI()
@subapp.get("/hi")
def hi():
return "world"
app.mount("/mounted", subapp)
@serve.deployment(route_prefix="/api")
@serve.ingress(app)
class A:
pass
A.deploy()
assert requests.get(
"http://localhost:8000/api/mounted/hi").json() == "world"
def test_fastapi_init_lifespan_should_not_shutdown(serve_instance):
app = FastAPI()
@app.on_event("shutdown")
async def shutdown():
1 / 0
@serve.deployment
@serve.ingress(app)
class A:
def f(self):
return 1
A.deploy()
# Without a proper fix, the actor won't be initialized correctly.
# Because it will crash on each startup.
assert ray.get(A.get_handle().f.remote()) == 1
def test_fastapi_duplicate_routes(serve_instance):
app = FastAPI()
@serve.deployment(route_prefix="/api/v1")
@serve.ingress(app)
class App1:
@app.get("/")
def func_v1(self):
return "first"
@serve.deployment(route_prefix="/api/v2")
@serve.ingress(app)
class App2:
@app.get("/")
def func_v2(self):
return "second"
@app.get("/ignored")
def ignored():
pass
App1.deploy()
App2.deploy()
resp = requests.get("http://localhost:8000/api/v1")
assert resp.json() == "first"
resp = requests.get("http://localhost:8000/api/v2")
assert resp.json() == "second"
for version in ["v1", "v2"]:
resp = requests.get(f"http://localhost:8000/api/{version}/ignored")
assert resp.status_code == 404
@pytest.mark.skipif(sys.platform == "win32", reason="Failing on Windows")
@pytest.mark.parametrize("route_prefix", [None, "/", "/subpath"])
def test_doc_generation(serve_instance, route_prefix):
app = FastAPI()
@serve.deployment(route_prefix=route_prefix)
@serve.ingress(app)
class App:
@app.get("/")
def func1(self, arg: str):
return "hello"
App.deploy()
if route_prefix is None:
prefix = "/App"
else:
prefix = route_prefix
if not prefix.endswith("/"):
prefix += "/"
r = requests.get(f"http://localhost:8000{prefix}openapi.json")
assert r.status_code == 200
assert len(r.json()["paths"]) == 1
assert "/" in r.json()["paths"]
assert len(r.json()["paths"]["/"]) == 1
assert "get" in r.json()["paths"]["/"]
r = requests.get(f"http://localhost:8000{prefix}docs")
assert r.status_code == 200
@serve.deployment(route_prefix=route_prefix)
@serve.ingress(app)
class App:
@app.get("/")
def func1(self, arg: str):
return "hello"
@app.post("/hello")
def func2(self, arg: int):
return "hello"
App.deploy()
r = requests.get(f"http://localhost:8000{prefix}openapi.json")
assert r.status_code == 200
assert len(r.json()["paths"]) == 2
assert "/" in r.json()["paths"]
assert len(r.json()["paths"]["/"]) == 1
assert "get" in r.json()["paths"]["/"]
assert "/hello" in r.json()["paths"]
assert len(r.json()["paths"]["/hello"]) == 1
assert "post" in r.json()["paths"]["/hello"]
r = requests.get(f"http://localhost:8000{prefix}docs")
assert r.status_code == 200
def test_fastapi_multiple_headers(serve_instance):
# https://fastapi.tiangolo.com/advanced/response-cookies/
app = FastAPI()
@app.get("/")
def func(resp: Response):
resp.set_cookie(key="a", value="b")
resp.set_cookie(key="c", value="d")
return "hello"
@serve.deployment(name="f")
@serve.ingress(app)
class FastAPIApp:
pass
FastAPIApp.deploy()
resp = requests.get("http://localhost:8000/f")
assert resp.cookies.get_dict() == {"a": "b", "c": "d"}
def test_fastapi_nested_field_in_response_model(serve_instance):
# https://github.com/ray-project/ray/issues/16757
class TestModel(BaseModel):
a: str
b: List[str]
app = FastAPI()
@app.get("/", response_model=TestModel)
def test_endpoint():
test_model = TestModel(a="a", b=["b"])
return test_model
@serve.deployment(route_prefix="/")
@serve.ingress(app)
class TestDeployment:
pass
TestDeployment.deploy()
resp = requests.get("http://localhost:8000/")
assert resp.json() == {"a": "a", "b": ["b"]}
if __name__ == "__main__":
import sys
sys.exit(pytest.main(["-v", "-s", __file__]))
| apache-2.0 | -7,992,620,959,031,255,000 | 24.973737 | 75 | 0.563662 | false |
lavish205/olympia | src/olympia/reviewers/tests/test_views.py | 1 | 210461 | # -*- coding: utf-8 -*-
import json
import os
import time
import urlparse
from collections import OrderedDict
from datetime import datetime, timedelta
from django.conf import settings
from django.core import mail
from django.core.cache import cache
from django.core.files import temp
from django.core.files.base import File as DjangoFile
from django.template import defaultfilters
from django.test.utils import override_settings
import mock
from freezegun import freeze_time
from lxml.html import HTMLParser, fromstring
from mock import Mock, patch
from pyquery import PyQuery as pq
from olympia import amo, core, ratings
from olympia.abuse.models import AbuseReport
from olympia.access.models import Group, GroupUser
from olympia.accounts.views import API_TOKEN_COOKIE
from olympia.activity.models import ActivityLog
from olympia.addons.models import (
Addon, AddonApprovalsCounter, AddonDependency, AddonReviewerFlags,
AddonUser)
from olympia.amo.templatetags.jinja_helpers import (
user_media_path, user_media_url)
from olympia.amo.tests import (
APITestClient, TestCase, addon_factory, check_links, file_factory, formset,
initial, reverse_ns, user_factory, version_factory)
from olympia.amo.urlresolvers import reverse
from olympia.files.models import File, FileValidation, WebextPermission
from olympia.ratings.models import Rating, RatingFlag
from olympia.reviewers.models import (
AutoApprovalSummary, RereviewQueueTheme, ReviewerScore,
ReviewerSubscription, Whiteboard)
from olympia.users.models import UserProfile
from olympia.versions.models import ApplicationsVersions, AppVersion
from olympia.zadmin.models import get_config
class TestRedirectsOldPaths(TestCase):
def setUp(self):
user = user_factory()
self.client.login(email=user.email)
def test_redirect_old_queue(self):
response = self.client.get('/en-US/editors/queue/new')
self.assert3xx(response, '/reviewers/queue/new', status_code=301)
def test_redirect_old_review_page(self):
response = self.client.get('/en-US/editors/review/foobar')
self.assert3xx(response, '/reviewers/review/foobar', status_code=301)
class ReviewerTest(TestCase):
fixtures = ['base/users', 'base/approvals']
def login_as_admin(self):
assert self.client.login(email='[email protected]')
def login_as_reviewer(self):
assert self.client.login(email='[email protected]')
def make_review(self, username='a'):
u = UserProfile.objects.create(username=username)
a = Addon.objects.create(name='yermom', type=amo.ADDON_EXTENSION)
return Rating.objects.create(user=u, addon=a, body='baa')
class TestRatingsModerationLog(ReviewerTest):
def setUp(self):
super(TestRatingsModerationLog, self).setUp()
user = user_factory()
self.grant_permission(user, 'Ratings:Moderate')
self.client.login(email=user.email)
self.url = reverse('reviewers.ratings_moderation_log')
core.set_user(user)
def test_log(self):
response = self.client.get(self.url)
assert response.status_code == 200
def test_start_filter(self):
response = self.client.get(self.url, {'start': '2011-01-01'})
assert response.status_code == 200
def test_enddate_filter(self):
"""
Make sure that if our end date is 1/1/2011, that we include items from
1/1/2011. To not do as such would be dishonorable.
"""
review = self.make_review(username='b')
ActivityLog.create(
amo.LOG.APPROVE_RATING, review, review.addon).update(
created=datetime(2011, 1, 1))
response = self.client.get(self.url, {'end': '2011-01-01'})
assert response.status_code == 200
assert pq(response.content)('tbody td').eq(0).text() == (
'Jan. 1, 2011, midnight')
def test_action_filter(self):
"""
Based on setup we should see only two items if we filter for deleted
reviews.
"""
review = self.make_review()
for i in xrange(2):
ActivityLog.create(amo.LOG.APPROVE_RATING, review, review.addon)
ActivityLog.create(amo.LOG.DELETE_RATING, review.id, review.addon)
response = self.client.get(self.url, {'filter': 'deleted'})
assert response.status_code == 200
assert pq(response.content)('tbody tr').length == 2
def test_no_results(self):
response = self.client.get(self.url, {'end': '2004-01-01'})
assert response.status_code == 200
assert '"no-results"' in response.content
def test_moderation_log_detail(self):
review = self.make_review()
ActivityLog.create(amo.LOG.APPROVE_RATING, review, review.addon)
id_ = ActivityLog.objects.moderation_events()[0].id
response = self.client.get(
reverse('reviewers.ratings_moderation_log.detail', args=[id_]))
assert response.status_code == 200
class TestReviewLog(ReviewerTest):
fixtures = ReviewerTest.fixtures + ['base/addon_3615']
def setUp(self):
super(TestReviewLog, self).setUp()
self.user = UserProfile.objects.get(email='[email protected]')
self.login_as_reviewer()
self.url = reverse('reviewers.reviewlog')
def get_user(self):
return UserProfile.objects.all()[0]
def make_approvals(self):
for addon in Addon.objects.all():
ActivityLog.create(
amo.LOG.REJECT_VERSION, addon, addon.current_version,
user=self.get_user(), details={'comments': 'youwin'})
def make_an_approval(self, action, comment='youwin', username=None,
addon=None):
if username:
user = UserProfile.objects.get(username=username)
else:
user = self.get_user()
if not addon:
addon = Addon.objects.all()[0]
ActivityLog.create(action, addon, addon.current_version, user=user,
details={'comments': comment})
def test_basic(self):
self.make_approvals()
response = self.client.get(self.url)
        assert response.status_code == 200
        doc = pq(response.content)
assert doc('#log-filter button'), 'No filters.'
# Should have 2 showing.
rows = doc('tbody tr')
assert rows.filter(':not(.hide)').length == 2
assert rows.filter('.hide').eq(0).text() == 'youwin'
# Should have none showing if the addons are unlisted.
for addon in Addon.objects.all():
self.make_addon_unlisted(addon)
response = self.client.get(self.url)
        assert response.status_code == 200
doc = pq(response.content)
assert not doc('tbody tr :not(.hide)')
# But they should have 2 showing for someone with the right perms.
self.grant_permission(self.user, 'Addons:ReviewUnlisted')
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
rows = doc('tbody tr')
assert rows.filter(':not(.hide)').length == 2
assert rows.filter('.hide').eq(0).text() == 'youwin'
def test_xss(self):
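        # The add-on name must be rendered escaped in the review log so a
        # crafted name can never inject a raw <script> tag.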
a = Addon.objects.all()[0]
a.name = '<script>alert("xss")</script>'
a.save()
ActivityLog.create(amo.LOG.REJECT_VERSION, a, a.current_version,
user=self.get_user(), details={'comments': 'xss!'})
response = self.client.get(self.url)
assert response.status_code == 200
inner_html = pq(response.content)('#log-listing tbody td').eq(1).html()
        assert '&lt;script&gt;' in inner_html
assert '<script>' not in inner_html
def test_end_filter(self):
"""
Let's use today as an end-day filter and make sure we see stuff if we
filter.
"""
self.make_approvals()
# Make sure we show the stuff we just made.
date = time.strftime('%Y-%m-%d')
response = self.client.get(self.url, {'end': date})
assert response.status_code == 200
doc = pq(response.content)('#log-listing tbody')
assert doc('tr:not(.hide)').length == 2
assert doc('tr.hide').eq(0).text() == 'youwin'
def test_end_filter_wrong(self):
"""
Let's use today as an end-day filter and make sure we see stuff if we
filter.
"""
self.make_approvals()
response = self.client.get(self.url, {'end': 'wrong!'})
# If this is broken, we'll get a traceback.
assert response.status_code == 200
assert pq(response.content)('#log-listing tr:not(.hide)').length == 3
def test_start_filter(self):
with freeze_time('2017-08-01 10:00'):
self.make_approvals()
# Make sure we show the stuff we just made.
response = self.client.get(self.url, {'start': '2017-07-31'})
assert response.status_code == 200
doc = pq(response.content)('#log-listing tbody')
assert doc('tr:not(.hide)').length == 2
assert doc('tr.hide').eq(0).text() == 'youwin'
def test_start_default_filter(self):
with freeze_time('2017-07-31 10:00'):
self.make_approvals()
with freeze_time('2017-08-01 10:00'):
addon = Addon.objects.first()
ActivityLog.create(
amo.LOG.REJECT_VERSION, addon, addon.current_version,
user=self.get_user(), details={'comments': 'youwin'})
# Make sure the default 'start' to the 1st of a month works properly
with freeze_time('2017-08-03 11:00'):
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)('#log-listing tbody')
assert doc('tr:not(.hide)').length == 1
assert doc('tr.hide').eq(0).text() == 'youwin'
def test_search_comment_exists(self):
"""Search by comment."""
self.make_an_approval(amo.LOG.REQUEST_ADMIN_REVIEW_CODE,
comment='hello')
response = self.client.get(self.url, {'search': 'hello'})
assert response.status_code == 200
assert pq(response.content)(
'#log-listing tbody tr.hide').eq(0).text() == 'hello'
def test_search_comment_case_exists(self):
"""Search by comment, with case."""
self.make_an_approval(amo.LOG.REQUEST_ADMIN_REVIEW_CODE,
comment='hello')
response = self.client.get(self.url, {'search': 'HeLlO'})
assert response.status_code == 200
assert pq(response.content)(
'#log-listing tbody tr.hide').eq(0).text() == 'hello'
def test_search_comment_doesnt_exist(self):
"""Search by comment, with no results."""
self.make_an_approval(amo.LOG.REQUEST_ADMIN_REVIEW_CODE,
comment='hello')
response = self.client.get(self.url, {'search': 'bye'})
assert response.status_code == 200
assert pq(response.content)('.no-results').length == 1
def test_search_author_exists(self):
"""Search by author."""
self.make_approvals()
self.make_an_approval(
amo.LOG.REQUEST_ADMIN_REVIEW_CODE, username='reviewer',
comment='hi')
response = self.client.get(self.url, {'search': 'reviewer'})
assert response.status_code == 200
rows = pq(response.content)('#log-listing tbody tr')
assert rows.filter(':not(.hide)').length == 1
assert rows.filter('.hide').eq(0).text() == 'hi'
def test_search_author_case_exists(self):
"""Search by author, with case."""
self.make_approvals()
self.make_an_approval(
amo.LOG.REQUEST_ADMIN_REVIEW_CODE, username='reviewer',
comment='hi')
response = self.client.get(self.url, {'search': 'ReviEwEr'})
assert response.status_code == 200
rows = pq(response.content)('#log-listing tbody tr')
assert rows.filter(':not(.hide)').length == 1
assert rows.filter('.hide').eq(0).text() == 'hi'
def test_search_author_doesnt_exist(self):
"""Search by author, with no results."""
self.make_approvals()
self.make_an_approval(
amo.LOG.REQUEST_ADMIN_REVIEW_CODE, username='reviewer')
response = self.client.get(self.url, {'search': 'wrong'})
assert response.status_code == 200
assert pq(response.content)('.no-results').length == 1
def test_search_addon_exists(self):
"""Search by add-on name."""
self.make_approvals()
addon = Addon.objects.all()[0]
response = self.client.get(self.url, {'search': addon.name})
assert response.status_code == 200
tr = pq(response.content)(
'#log-listing tr[data-addonid="%s"]' % addon.id)
assert tr.length == 1
assert tr.siblings('.comments').text() == 'youwin'
def test_search_addon_case_exists(self):
"""Search by add-on name, with case."""
self.make_approvals()
addon = Addon.objects.all()[0]
response = self.client.get(
self.url, {'search': str(addon.name).swapcase()})
assert response.status_code == 200
tr = pq(response.content)(
'#log-listing tr[data-addonid="%s"]' % addon.id)
assert tr.length == 1
assert tr.siblings('.comments').text() == 'youwin'
def test_search_addon_doesnt_exist(self):
"""Search by add-on name, with no results."""
self.make_approvals()
response = self.client.get(self.url, {'search': 'xxx'})
assert response.status_code == 200
assert pq(response.content)('.no-results').length == 1
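    # Patching ActivityLog.arguments means the add-on can no longer be
    # resolved from the log entry, mimicking an add-on that was deleted.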
@patch('olympia.activity.models.ActivityLog.arguments', new=Mock)
def test_addon_missing(self):
self.make_approvals()
response = self.client.get(self.url)
assert response.status_code == 200
assert pq(response.content)('#log-listing tr td').eq(1).text() == (
'Add-on has been deleted.')
def test_request_info_logs(self):
self.make_an_approval(amo.LOG.REQUEST_INFORMATION)
response = self.client.get(self.url)
assert response.status_code == 200
assert pq(response.content)('#log-listing tr td a').eq(1).text() == (
'More information requested')
def test_super_review_logs(self):
self.make_an_approval(amo.LOG.REQUEST_ADMIN_REVIEW_CODE)
response = self.client.get(self.url)
assert response.status_code == 200
assert pq(response.content)('#log-listing tr td a').eq(1).text() == (
'Admin add-on-review requested')
def test_comment_logs(self):
self.make_an_approval(amo.LOG.COMMENT_VERSION)
response = self.client.get(self.url)
assert response.status_code == 200
assert pq(response.content)('#log-listing tr td a').eq(1).text() == (
'Commented')
def test_content_approval(self):
self.make_an_approval(amo.LOG.APPROVE_CONTENT)
response = self.client.get(self.url)
assert response.status_code == 200
link = pq(response.content)('#log-listing tbody td a').eq(1)[0]
assert link.attrib['href'] == '/en-US/reviewers/review-content/a3615'
assert link.text_content().strip() == 'Content approved'
def test_content_rejection(self):
self.make_an_approval(amo.LOG.REJECT_CONTENT)
response = self.client.get(self.url)
assert response.status_code == 200
link = pq(response.content)('#log-listing tbody td a').eq(1)[0]
assert link.attrib['href'] == '/en-US/reviewers/review-content/a3615'
assert link.text_content().strip() == 'Content rejected'
@freeze_time('2017-08-03')
def test_review_url(self):
self.login_as_admin()
addon = addon_factory()
unlisted_version = version_factory(
addon=addon, channel=amo.RELEASE_CHANNEL_UNLISTED)
ActivityLog.create(
amo.LOG.APPROVE_VERSION, addon, addon.current_version,
user=self.get_user(), details={'comments': 'foo'})
response = self.client.get(self.url)
assert response.status_code == 200
url = reverse('reviewers.review', args=[addon.slug])
link = pq(response.content)(
'#log-listing tbody tr[data-addonid] a').eq(1)
assert link.attr('href') == url
entry = ActivityLog.create(
amo.LOG.APPROVE_VERSION, addon,
unlisted_version,
user=self.get_user(), details={'comments': 'foo'})
# Force the latest entry to be at the top of the list so that we can
# pick it more reliably later from the HTML
entry.update(created=datetime.now() + timedelta(days=1))
response = self.client.get(self.url)
url = reverse(
'reviewers.review',
args=['unlisted', addon.slug])
assert pq(response.content)(
'#log-listing tr td a').eq(1).attr('href') == url
class TestDashboard(TestCase):
def setUp(self):
self.url = reverse('reviewers.dashboard')
self.user = user_factory()
self.client.login(email=self.user.email)
def test_old_temporary_url_redirect(self):
response = self.client.get('/en-US/reviewers/dashboard')
self.assert3xx(
response, reverse('reviewers.dashboard'), status_code=301)
def test_not_a_reviewer(self):
response = self.client.get(self.url)
assert response.status_code == 403
def test_admin_all_permissions(self):
# Create a lot of add-ons to test the queue counts.
# Nominated and pending.
addon_factory(
status=amo.STATUS_NOMINATED,
type=amo.ADDON_STATICTHEME,
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
version_factory(
addon=addon_factory(),
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
version_factory(
addon=addon_factory(),
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
under_admin_review = addon_factory(
status=amo.STATUS_NOMINATED,
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
AddonReviewerFlags.objects.create(
addon=under_admin_review, needs_admin_code_review=True)
under_admin_review_and_pending = addon_factory()
AddonReviewerFlags.objects.create(
addon=under_admin_review_and_pending,
needs_admin_theme_review=True)
version_factory(
addon=under_admin_review_and_pending,
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
# Auto-approved and Content Review.
addon1 = addon_factory(
version_kw={'is_webextension': True})
AddonApprovalsCounter.reset_for_addon(addon=addon1)
AutoApprovalSummary.objects.create(
version=addon1.current_version, verdict=amo.AUTO_APPROVED)
under_content_review = addon_factory(
version_kw={'is_webextension': True})
AddonApprovalsCounter.reset_for_addon(addon=under_content_review)
AutoApprovalSummary.objects.create(
version=under_content_review.current_version,
verdict=amo.AUTO_APPROVED)
AddonReviewerFlags.objects.create(
addon=under_content_review, needs_admin_content_review=True)
addon2 = addon_factory(
version_kw={'is_webextension': True})
AddonApprovalsCounter.reset_for_addon(addon=addon2)
AutoApprovalSummary.objects.create(
version=addon2.current_version, verdict=amo.AUTO_APPROVED)
AddonReviewerFlags.objects.create(
addon=addon2, needs_admin_content_review=True)
under_code_review = addon_factory(
version_kw={'is_webextension': True})
AddonApprovalsCounter.reset_for_addon(addon=under_code_review)
AutoApprovalSummary.objects.create(
version=under_code_review.current_version,
verdict=amo.AUTO_APPROVED)
AddonReviewerFlags.objects.create(
addon=under_code_review, needs_admin_code_review=True)
admins_group = Group.objects.create(name='Admins', rules='*:*')
GroupUser.objects.create(user=self.user, group=admins_group)
# Addon with expired info request
expired = addon_factory(name=u'Expired')
AddonReviewerFlags.objects.create(
addon=expired,
pending_info_request=self.days_ago(42))
# Rating
rating = Rating.objects.create(
addon=addon1, version=addon1.current_version, user=self.user,
flag=True, body=u'This âdd-on sucks!!111', rating=1,
editorreview=True)
rating.ratingflag_set.create()
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert len(doc('.dashboard h3')) == 8 # All sections are present.
expected_links = [
reverse('reviewers.queue_nominated'),
reverse('reviewers.queue_pending'),
reverse('reviewers.performance'),
reverse('reviewers.reviewlog'),
'https://wiki.mozilla.org/Add-ons/Reviewers/Guide',
'https://wiki.mozilla.org/Add-ons/Reviewers/Themes/Guidelines',
reverse('reviewers.queue_auto_approved'),
reverse('reviewers.performance'),
reverse('reviewers.reviewlog'),
'https://wiki.mozilla.org/Add-ons/Reviewers/Guide',
reverse('reviewers.queue_content_review'),
reverse('reviewers.performance'),
reverse('reviewers.themes.list'),
reverse('reviewers.themes.list_rereview'),
reverse('reviewers.themes.list_flagged'),
reverse('reviewers.themes.logs'),
reverse('reviewers.themes.deleted'),
'https://wiki.mozilla.org/Add-ons/Reviewers/Themes/Guidelines',
reverse('reviewers.queue_moderated'),
reverse('reviewers.ratings_moderation_log'),
'https://wiki.mozilla.org/Add-ons/Reviewers/Guide/Moderation',
reverse('reviewers.unlisted_queue_all'),
'https://wiki.mozilla.org/Add-ons/Reviewers/Guide',
reverse('reviewers.motd'),
reverse('reviewers.queue_expired_info_requests'),
]
links = [link.attrib['href'] for link in doc('.dashboard a')]
assert links == expected_links
assert doc('.dashboard a')[0].text == 'New (2)'
assert doc('.dashboard a')[1].text == 'Updates (3)'
assert doc('.dashboard a')[6].text == 'Auto Approved Add-ons (4)'
assert doc('.dashboard a')[10].text == 'Content Review (4)'
assert (doc('.dashboard a')[18].text ==
'Ratings Awaiting Moderation (1)')
assert (doc('.dashboard a')[24].text ==
'Expired Information Requests (1)')
def test_can_see_all_through_reviewer_view_all_permission(self):
self.grant_permission(self.user, 'ReviewerTools:View')
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert len(doc('.dashboard h3')) == 8 # All sections are present.
expected_links = [
reverse('reviewers.queue_nominated'),
reverse('reviewers.queue_pending'),
reverse('reviewers.performance'),
reverse('reviewers.reviewlog'),
'https://wiki.mozilla.org/Add-ons/Reviewers/Guide',
'https://wiki.mozilla.org/Add-ons/Reviewers/Themes/Guidelines',
reverse('reviewers.queue_auto_approved'),
reverse('reviewers.performance'),
reverse('reviewers.reviewlog'),
'https://wiki.mozilla.org/Add-ons/Reviewers/Guide',
reverse('reviewers.queue_content_review'),
reverse('reviewers.performance'),
reverse('reviewers.themes.list'),
reverse('reviewers.themes.list_rereview'),
reverse('reviewers.themes.list_flagged'),
reverse('reviewers.themes.logs'),
reverse('reviewers.themes.deleted'),
'https://wiki.mozilla.org/Add-ons/Reviewers/Themes/Guidelines',
reverse('reviewers.queue_moderated'),
reverse('reviewers.ratings_moderation_log'),
'https://wiki.mozilla.org/Add-ons/Reviewers/Guide/Moderation',
reverse('reviewers.unlisted_queue_all'),
'https://wiki.mozilla.org/Add-ons/Reviewers/Guide',
reverse('reviewers.motd'),
reverse('reviewers.queue_expired_info_requests'),
]
links = [link.attrib['href'] for link in doc('.dashboard a')]
assert links == expected_links
def test_legacy_reviewer(self):
# Create some add-ons to test the queue counts.
addon_factory(
status=amo.STATUS_NOMINATED,
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
version_factory(
addon=addon_factory(),
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
version_factory(
addon=addon_factory(),
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
# These two are under admin review and will be ignored.
under_admin_review = addon_factory(
status=amo.STATUS_NOMINATED,
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
AddonReviewerFlags.objects.create(
addon=under_admin_review, needs_admin_code_review=True)
under_admin_review_and_pending = addon_factory()
AddonReviewerFlags.objects.create(
addon=under_admin_review_and_pending, needs_admin_code_review=True)
version_factory(
addon=under_admin_review_and_pending,
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
# This is a static theme so won't be shown
addon_factory(
status=amo.STATUS_NOMINATED,
type=amo.ADDON_STATICTHEME,
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
# Grant user the permission to see only the legacy add-ons section.
self.grant_permission(self.user, 'Addons:Review')
# Test.
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert len(doc('.dashboard h3')) == 1
expected_links = [
reverse('reviewers.queue_nominated'),
reverse('reviewers.queue_pending'),
reverse('reviewers.performance'),
reverse('reviewers.reviewlog'),
'https://wiki.mozilla.org/Add-ons/Reviewers/Guide',
]
links = [link.attrib['href'] for link in doc('.dashboard a')]
assert links == expected_links
assert doc('.dashboard a')[0].text == 'New (1)'
assert doc('.dashboard a')[1].text == 'Updates (2)'
def test_post_reviewer(self):
# Create an add-on to test the queue count. It's under admin content
# review but that does not have an impact.
addon = addon_factory(
version_kw={'is_webextension': True})
AddonApprovalsCounter.reset_for_addon(addon=addon)
AutoApprovalSummary.objects.create(
version=addon.current_version, verdict=amo.AUTO_APPROVED)
AddonReviewerFlags.objects.create(
addon=addon, needs_admin_content_review=True)
# This one however is under admin code review, it's ignored.
under_code_review = addon_factory(
version_kw={'is_webextension': True})
AddonApprovalsCounter.reset_for_addon(addon=under_code_review)
AutoApprovalSummary.objects.create(
version=under_code_review.current_version,
verdict=amo.AUTO_APPROVED)
AddonReviewerFlags.objects.create(
addon=under_code_review, needs_admin_code_review=True)
# Grant user the permission to see only the Auto Approved section.
self.grant_permission(self.user, 'Addons:PostReview')
# Test.
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert len(doc('.dashboard h3')) == 1
expected_links = [
reverse('reviewers.queue_auto_approved'),
reverse('reviewers.performance'),
reverse('reviewers.reviewlog'),
'https://wiki.mozilla.org/Add-ons/Reviewers/Guide',
]
links = [link.attrib['href'] for link in doc('.dashboard a')]
assert links == expected_links
assert doc('.dashboard a')[0].text == 'Auto Approved Add-ons (1)'
def test_content_reviewer(self):
# Create an add-on to test the queue count. It's under admin code
# review but that does not have an impact.
addon = addon_factory(
version_kw={'is_webextension': True})
AddonApprovalsCounter.reset_for_addon(addon=addon)
AutoApprovalSummary.objects.create(
version=addon.current_version, verdict=amo.AUTO_APPROVED)
AddonReviewerFlags.objects.create(
addon=addon, needs_admin_code_review=True)
# This one is under admin *content* review so it's ignored.
under_content_review = addon_factory(
version_kw={'is_webextension': True})
AddonApprovalsCounter.reset_for_addon(addon=under_content_review)
AutoApprovalSummary.objects.create(
version=under_content_review.current_version,
verdict=amo.AUTO_APPROVED)
AddonReviewerFlags.objects.create(
addon=under_content_review, needs_admin_content_review=True)
# Grant user the permission to see only the Content Review section.
self.grant_permission(self.user, 'Addons:ContentReview')
# Test.
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert len(doc('.dashboard h3')) == 1
expected_links = [
reverse('reviewers.queue_content_review'),
reverse('reviewers.performance'),
]
links = [link.attrib['href'] for link in doc('.dashboard a')]
assert links == expected_links
assert doc('.dashboard a')[0].text == 'Content Review (1)'
def test_themes_reviewer(self):
# Create some themes to test the queue counts.
addon_factory(type=amo.ADDON_PERSONA, status=amo.STATUS_PENDING)
addon_factory(type=amo.ADDON_PERSONA, status=amo.STATUS_PENDING)
addon = addon_factory(type=amo.ADDON_PERSONA, status=amo.STATUS_PUBLIC)
RereviewQueueTheme.objects.create(theme=addon.persona)
addon_factory(type=amo.ADDON_PERSONA, status=amo.STATUS_REVIEW_PENDING)
addon_factory(type=amo.ADDON_PERSONA, status=amo.STATUS_REVIEW_PENDING)
addon_factory(type=amo.ADDON_PERSONA, status=amo.STATUS_REVIEW_PENDING)
# Grant user the permission to see only the themes section.
self.grant_permission(self.user, 'Personas:Review')
# Test.
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert len(doc('.dashboard h3')) == 1
expected_links = [
reverse('reviewers.themes.list'),
reverse('reviewers.themes.list_rereview'),
reverse('reviewers.themes.list_flagged'),
reverse('reviewers.themes.logs'),
reverse('reviewers.themes.deleted'),
'https://wiki.mozilla.org/Add-ons/Reviewers/Themes/Guidelines',
]
links = [link.attrib['href'] for link in doc('.dashboard a')]
assert links == expected_links
assert doc('.dashboard a')[0].text == 'New Themes (2)'
assert doc('.dashboard a')[1].text == 'Themes Updates (1)'
assert doc('.dashboard a')[2].text == 'Flagged Themes (3)'
def test_ratings_moderator(self):
# Create an rating to test the queue count.
addon = addon_factory()
user = user_factory()
rating = Rating.objects.create(
addon=addon, version=addon.current_version, user=user, flag=True,
body=u'This âdd-on sucks!!111', rating=1, editorreview=True)
rating.ratingflag_set.create()
# Grant user the permission to see only the ratings to review section.
self.grant_permission(self.user, 'Ratings:Moderate')
# Test.
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert len(doc('.dashboard h3')) == 1
expected_links = [
reverse('reviewers.queue_moderated'),
reverse('reviewers.ratings_moderation_log'),
'https://wiki.mozilla.org/Add-ons/Reviewers/Guide/Moderation',
]
links = [link.attrib['href'] for link in doc('.dashboard a')]
assert links == expected_links
assert doc('.dashboard a')[0].text == 'Ratings Awaiting Moderation (1)'
def test_unlisted_reviewer(self):
# Grant user the permission to see only the unlisted add-ons section.
self.grant_permission(self.user, 'Addons:ReviewUnlisted')
# Test.
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert len(doc('.dashboard h3')) == 1
expected_links = [
reverse('reviewers.unlisted_queue_all'),
'https://wiki.mozilla.org/Add-ons/Reviewers/Guide',
]
links = [link.attrib['href'] for link in doc('.dashboard a')]
assert links == expected_links
def test_static_theme_reviewer(self):
# Create some static themes to test the queue counts.
addon_factory(
status=amo.STATUS_NOMINATED,
type=amo.ADDON_STATICTHEME,
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
version_factory(
addon=addon_factory(type=amo.ADDON_STATICTHEME),
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
version_factory(
addon=addon_factory(type=amo.ADDON_STATICTHEME,),
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
# These two are under admin review and will be ignored.
under_admin_review = addon_factory(
status=amo.STATUS_NOMINATED,
type=amo.ADDON_STATICTHEME,
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
AddonReviewerFlags.objects.create(
addon=under_admin_review, needs_admin_theme_review=True)
under_admin_review_and_pending = addon_factory(
type=amo.ADDON_STATICTHEME)
AddonReviewerFlags.objects.create(
addon=under_admin_review_and_pending,
needs_admin_theme_review=True)
version_factory(
addon=under_admin_review_and_pending,
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
# This is an extension so won't be shown
addon_factory(
status=amo.STATUS_NOMINATED,
type=amo.ADDON_EXTENSION,
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
# Grant user the permission to see only the legacy add-ons section.
self.grant_permission(self.user, 'Addons:ThemeReview')
# Test.
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert len(doc('.dashboard h3')) == 1
expected_links = [
reverse('reviewers.queue_nominated'),
reverse('reviewers.queue_pending'),
reverse('reviewers.performance'),
reverse('reviewers.reviewlog'),
'https://wiki.mozilla.org/Add-ons/Reviewers/Themes/Guidelines',
]
links = [link.attrib['href'] for link in doc('.dashboard a')]
assert links == expected_links
assert doc('.dashboard a')[0].text == 'New (1)'
assert doc('.dashboard a')[1].text == 'Updates (2)'
def test_post_reviewer_and_content_reviewer(self):
# Create add-ons to test the queue count. The first add-on has its
# content approved, so the post review queue should contain 2 add-ons,
# and the content review queue only 1.
addon = addon_factory(
version_kw={'is_webextension': True})
AutoApprovalSummary.objects.create(
version=addon.current_version, verdict=amo.AUTO_APPROVED)
AddonApprovalsCounter.approve_content_for_addon(addon=addon)
addon = addon_factory(
version_kw={'is_webextension': True})
AddonApprovalsCounter.reset_for_addon(addon=addon)
AutoApprovalSummary.objects.create(
version=addon.current_version, verdict=amo.AUTO_APPROVED)
# Grant user the permission to see both the Content Review and the
# Auto Approved Add-ons sections.
self.grant_permission(self.user, 'Addons:ContentReview')
self.grant_permission(self.user, 'Addons:PostReview')
# Test.
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert len(doc('.dashboard h3')) == 2 # 2 sections are shown.
expected_links = [
reverse('reviewers.queue_auto_approved'),
reverse('reviewers.performance'),
reverse('reviewers.reviewlog'),
'https://wiki.mozilla.org/Add-ons/Reviewers/Guide',
reverse('reviewers.queue_content_review'),
reverse('reviewers.performance'),
]
links = [link.attrib['href'] for link in doc('.dashboard a')]
assert links == expected_links
assert doc('.dashboard a')[0].text == 'Auto Approved Add-ons (2)'
assert 'target' not in doc('.dashboard a')[0].attrib
assert doc('.dashboard a')[3].text == 'Review Guide'
assert doc('.dashboard a')[3].attrib['target'] == '_blank'
assert doc('.dashboard a')[3].attrib['rel'] == 'noopener noreferrer'
assert doc('.dashboard a')[4].text == 'Content Review (1)'
def test_legacy_reviewer_and_ratings_moderator(self):
# Grant user the permission to see both the legacy add-ons and the
# ratings moderation sections.
self.grant_permission(self.user, 'Addons:Review')
self.grant_permission(self.user, 'Ratings:Moderate')
# Test.
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert len(doc('.dashboard h3')) == 2
expected_links = [
reverse('reviewers.queue_nominated'),
reverse('reviewers.queue_pending'),
reverse('reviewers.performance'),
reverse('reviewers.reviewlog'),
'https://wiki.mozilla.org/Add-ons/Reviewers/Guide',
reverse('reviewers.queue_moderated'),
reverse('reviewers.ratings_moderation_log'),
'https://wiki.mozilla.org/Add-ons/Reviewers/Guide/Moderation',
]
links = [link.attrib['href'] for link in doc('.dashboard a')]
assert links == expected_links
assert doc('.dashboard a')[0].text == 'New (0)'
assert 'target' not in doc('.dashboard a')[0].attrib
assert doc('.dashboard a')[1].text == 'Updates (0)'
assert doc('.dashboard a')[5].text == 'Ratings Awaiting Moderation (0)'
assert 'target' not in doc('.dashboard a')[6].attrib
assert doc('.dashboard a')[7].text == 'Moderation Guide'
assert doc('.dashboard a')[7].attrib['target'] == '_blank'
assert doc('.dashboard a')[7].attrib['rel'] == 'noopener noreferrer'
class QueueTest(ReviewerTest):
fixtures = ['base/users']
listed = True
def setUp(self):
super(QueueTest, self).setUp()
self.user = UserProfile.objects.get(email='[email protected]')
self.login_as_reviewer()
if self.listed is False:
# Testing unlisted views: needs Addons:ReviewUnlisted perm.
self.grant_permission(self.user, 'Addons:ReviewUnlisted')
self.url = reverse('reviewers.queue_pending')
self.addons = OrderedDict()
self.expected_addons = []
self.channel_name = 'listed' if self.listed else 'unlisted'
def generate_files(self, subset=None, files=None):
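        # Create one add-on per entry in `files` (or the defaults below) with
        # the requested add-on/file statuses in this test's channel, and keep
        # track of them in self.addons.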
if subset is None:
subset = []
files = files or OrderedDict([
('Pending One', {
'version_str': '0.1',
'addon_status': amo.STATUS_PUBLIC,
'file_status': amo.STATUS_AWAITING_REVIEW,
}),
('Pending Two', {
'version_str': '0.1',
'addon_status': amo.STATUS_PUBLIC,
'file_status': amo.STATUS_AWAITING_REVIEW,
}),
('Nominated One', {
'version_str': '0.1',
'addon_status': amo.STATUS_NOMINATED,
'file_status': amo.STATUS_AWAITING_REVIEW,
}),
('Nominated Two', {
'version_str': '0.1',
'addon_status': amo.STATUS_NOMINATED,
'file_status': amo.STATUS_AWAITING_REVIEW,
}),
('Public', {
'version_str': '0.1',
'addon_status': amo.STATUS_PUBLIC,
'file_status': amo.STATUS_PUBLIC,
}),
])
results = OrderedDict()
channel = (amo.RELEASE_CHANNEL_LISTED if self.listed else
amo.RELEASE_CHANNEL_UNLISTED)
for name, attrs in files.iteritems():
if not subset or name in subset:
version_kw = attrs.get('version_kw', {})
version_kw.update(
{'channel': channel, 'version': attrs.pop('version_str')})
attrs['version_kw'] = version_kw
file_kw = attrs.get('file_kw', {})
file_kw.update({'status': attrs.pop('file_status')})
attrs['file_kw'] = file_kw
results[name] = addon_factory(
status=attrs.pop('addon_status'), name=name, **attrs)
self.addons.update(results)
return results
def generate_file(self, name):
return self.generate_files([name])[name]
def get_review_data(self):
# Format: (Created n days ago,
# percentages of [< 5, 5-10, >10])
return ((1, (0, 0, 100)),
(8, (0, 50, 50)),
(12, (50, 0, 50)))
def get_addon_latest_version(self, addon):
if self.listed:
channel = amo.RELEASE_CHANNEL_LISTED
else:
channel = amo.RELEASE_CHANNEL_UNLISTED
return addon.find_latest_version(channel=channel)
def get_queue(self, addon):
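        # The add-on should appear exactly once in the queue model that
        # corresponds to its latest version.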
version = self.get_addon_latest_version(addon)
assert version.current_queue.objects.filter(id=addon.id).count() == 1
def get_expected_addons_by_names(self, names):
expected_addons = []
files = self.generate_files()
for name in sorted(names):
if name in files:
expected_addons.append(files[name])
# Make sure all elements have been added
assert len(expected_addons) == len(names)
return expected_addons
def _test_get_queue(self):
for addon in self.expected_addons:
self.get_queue(addon)
def _test_queue_layout(self, name, tab_position, total_addons,
total_queues, per_page=None):
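        # Check the tab navigation: the tab at `tab_position` should link to
        # self.url and advertise the expected add-on count.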
args = {'per_page': per_page} if per_page else {}
response = self.client.get(self.url, args)
assert response.status_code == 200
doc = pq(response.content)
links = doc('.tabnav li a')
link = links.eq(tab_position)
assert links.length == total_queues
assert link.text() == '%s (%s)' % (name, total_addons)
assert link.attr('href') == self.url
if per_page:
assert doc('.data-grid-top .num-results').text() == (
u'Results %s\u20131 of %s' % (per_page, total_addons))
def _test_results(self):
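        # The queue page should list exactly self.expected_addons, each row
        # linking to the add-on's review page for the right channel.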
response = self.client.get(self.url)
assert response.status_code == 200
expected = []
if not len(self.expected_addons):
raise AssertionError('self.expected_addons was an empty list')
for idx, addon in enumerate(self.expected_addons):
latest_version = self.get_addon_latest_version(addon)
assert latest_version
name = '%s %s' % (unicode(addon.name),
latest_version.version)
if self.channel_name == 'listed':
# We typically don't include the channel name if it's the
# default one, 'listed'.
channel = []
else:
channel = [self.channel_name]
url = reverse('reviewers.review', args=channel + [addon.slug])
expected.append((name, url))
doc = pq(response.content)
links = doc('#addon-queue tr.addon-row td a:not(.app-icon)')
assert len(links) == len(self.expected_addons)
check_links(expected, links, verify=False)
return doc
class TestQueueBasics(QueueTest):
def test_only_viewable_by_reviewer(self):
# Addon reviewer has access.
response = self.client.get(self.url)
assert response.status_code == 200
# Regular user doesn't have access.
self.client.logout()
assert self.client.login(email='[email protected]')
response = self.client.get(self.url)
assert response.status_code == 403
# Persona reviewer doesn't have access either.
self.client.logout()
assert self.client.login(email='[email protected]')
response = self.client.get(self.url)
assert response.status_code == 403
def test_invalid_page(self):
response = self.client.get(self.url, {'page': 999})
assert response.status_code == 200
assert response.context['page'].number == 1
def test_invalid_per_page(self):
response = self.client.get(self.url, {'per_page': '<garbage>'})
# No exceptions:
assert response.status_code == 200
@patch.multiple('olympia.reviewers.views',
REVIEWS_PER_PAGE_MAX=1,
REVIEWS_PER_PAGE=1)
def test_max_per_page(self):
self.generate_files()
response = self.client.get(self.url, {'per_page': '2'})
assert response.status_code == 200
doc = pq(response.content)
assert doc('.data-grid-top .num-results').text() == (
u'Results 1\u20131 of 2')
@patch('olympia.reviewers.views.REVIEWS_PER_PAGE', new=1)
def test_reviews_per_page(self):
self.generate_files()
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert doc('.data-grid-top .num-results').text() == (
u'Results 1\u20131 of 2')
def test_grid_headers(self):
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
expected = [
'Add-on',
'Type',
'Waiting Time',
'Flags',
]
assert [pq(th).text() for th in doc('#addon-queue tr th')[1:]] == (
expected)
def test_grid_headers_sort_after_search(self):
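        # Column header links should keep the active search parameters and
        # only swap out the sort key.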
params = dict(searching=['True'],
text_query=['abc'],
addon_type_ids=['2'],
sort=['addon_type_id'])
response = self.client.get(self.url, params)
assert response.status_code == 200
tr = pq(response.content)('#addon-queue tr')
sorts = {
# Column index => sort.
1: 'addon_name', # Add-on.
2: '-addon_type_id', # Type.
3: 'waiting_time_min', # Waiting Time.
}
for idx, sort in sorts.iteritems():
# Get column link.
a = tr('th').eq(idx).find('a')
# Update expected GET parameters with sort type.
params.update(sort=[sort])
# Parse querystring of link to make sure `sort` type is correct.
assert urlparse.parse_qs(a.attr('href').split('?')[1]) == params
def test_no_results(self):
response = self.client.get(self.url)
assert response.status_code == 200
assert pq(response.content)('.queue-outer .no-results').length == 1
def test_no_paginator_when_on_single_page(self):
response = self.client.get(self.url)
assert response.status_code == 200
assert pq(response.content)('.pagination').length == 0
def test_paginator_when_many_pages(self):
# 'Pending One' and 'Pending Two' should be the only add-ons in
# the pending queue, but we'll generate them all for good measure.
self.generate_files()
response = self.client.get(self.url, {'per_page': 1})
assert response.status_code == 200
doc = pq(response.content)
assert doc('.data-grid-top .num-results').text() == (
u'Results 1\u20131 of 2')
assert doc('.data-grid-bottom .num-results').text() == (
u'Results 1\u20131 of 2')
def test_legacy_queue_sort(self):
sorts = (
['age', 'Waiting Time'],
['name', 'Add-on'],
['type', 'Type'],
)
for key, text in sorts:
response = self.client.get(self.url, {'sort': key})
assert response.status_code == 200
assert pq(response.content)('th.ordered a').text() == text
def test_flags_jetpack(self):
addon = addon_factory(
status=amo.STATUS_NOMINATED, name='Jetpack',
version_kw={'version': '0.1'},
file_kw={'status': amo.STATUS_AWAITING_REVIEW,
'jetpack_version': 1.2})
r = self.client.get(reverse('reviewers.queue_nominated'))
rows = pq(r.content)('#addon-queue tr.addon-row')
assert rows.length == 1
assert rows.attr('data-addon') == str(addon.id)
assert rows.find('td').eq(1).text() == 'Jetpack 0.1'
assert rows.find('.ed-sprite-jetpack').length == 1
def test_flags_is_restart_required(self):
addon = addon_factory(
status=amo.STATUS_NOMINATED, name='Some Add-on',
version_kw={'version': '0.1'},
file_kw={'status': amo.STATUS_AWAITING_REVIEW,
'is_restart_required': True})
r = self.client.get(reverse('reviewers.queue_nominated'))
rows = pq(r.content)('#addon-queue tr.addon-row')
assert rows.length == 1
assert rows.attr('data-addon') == str(addon.id)
assert rows.find('td').eq(1).text() == 'Some Add-on 0.1'
assert rows.find('.ed-sprite-jetpack').length == 0
assert rows.find('.ed-sprite-is_restart_required').length == 1
def test_flags_is_restart_required_false(self):
addon = addon_factory(
status=amo.STATUS_NOMINATED, name='Restartless',
version_kw={'version': '0.1'},
file_kw={'status': amo.STATUS_AWAITING_REVIEW,
'is_restart_required': False})
r = self.client.get(reverse('reviewers.queue_nominated'))
rows = pq(r.content)('#addon-queue tr.addon-row')
assert rows.length == 1
assert rows.attr('data-addon') == str(addon.id)
assert rows.find('td').eq(1).text() == 'Restartless 0.1'
assert rows.find('.ed-sprite-jetpack').length == 0
assert rows.find('.ed-sprite-is_restart_required').length == 0
def test_tabnav_permissions(self):
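        # Each extra permission granted below should expose the matching
        # queue tab, appended in this order.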
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
links = doc('.tabnav li a').map(lambda i, e: e.attrib['href'])
expected = [
reverse('reviewers.queue_nominated'),
reverse('reviewers.queue_pending'),
]
assert links == expected
self.grant_permission(self.user, 'Ratings:Moderate')
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
links = doc('.tabnav li a').map(lambda i, e: e.attrib['href'])
expected.append(reverse('reviewers.queue_moderated'))
assert links == expected
self.grant_permission(self.user, 'Addons:PostReview')
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
links = doc('.tabnav li a').map(lambda i, e: e.attrib['href'])
expected.append(reverse('reviewers.queue_auto_approved'))
assert links == expected
self.grant_permission(self.user, 'Addons:ContentReview')
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
links = doc('.tabnav li a').map(lambda i, e: e.attrib['href'])
expected.append(reverse('reviewers.queue_content_review'))
assert links == expected
self.grant_permission(self.user, 'Reviews:Admin')
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
links = doc('.tabnav li a').map(lambda i, e: e.attrib['href'])
expected.append(reverse('reviewers.queue_expired_info_requests'))
assert links == expected
class TestPendingQueue(QueueTest):
def setUp(self):
super(TestPendingQueue, self).setUp()
# These should be the only ones present.
self.expected_addons = self.get_expected_addons_by_names(
['Pending One', 'Pending Two'])
self.url = reverse('reviewers.queue_pending')
def test_results(self):
self._test_results()
def test_queue_layout(self):
self._test_queue_layout('Updates',
tab_position=1, total_addons=2, total_queues=2)
def test_get_queue(self):
self._test_get_queue()
def test_webextensions_filtered_out_because_of_post_review(self):
version = self.addons['Pending Two'].find_latest_version(
channel=amo.RELEASE_CHANNEL_LISTED)
version.files.update(is_webextension=True)
# Webextensions are filtered out from the queue since auto_approve is
# taking care of them.
self.expected_addons = [self.addons['Pending One']]
self._test_results()
def test_webextension_with_auto_approval_disabled_false_filtered_out(self):
version = self.addons['Pending Two'].find_latest_version(
channel=amo.RELEASE_CHANNEL_LISTED)
version.files.update(is_webextension=True)
AddonReviewerFlags.objects.create(
addon=self.addons['Pending Two'], auto_approval_disabled=False)
self.expected_addons = [self.addons['Pending One']]
self._test_results()
def test_webextension_with_auto_approval_disabled_does_show_up(self):
version = self.addons['Pending Two'].find_latest_version(
channel=amo.RELEASE_CHANNEL_LISTED)
version.files.update(is_webextension=True)
        version = self.addons['Pending One'].find_latest_version(
channel=amo.RELEASE_CHANNEL_LISTED)
version.files.update(is_webextension=True)
AddonReviewerFlags.objects.create(
addon=self.addons['Pending One'], auto_approval_disabled=True)
self.expected_addons = [self.addons['Pending One']]
self._test_results()
def test_static_theme_filtered_out(self):
self.addons['Pending Two'].update(type=amo.ADDON_STATICTHEME)
# Static Theme shouldn't be shown
self.expected_addons = [self.addons['Pending One']]
self._test_results()
# Unless you have that permission also
self.grant_permission(self.user, 'Addons:ThemeReview')
self.expected_addons = [
self.addons['Pending One'], self.addons['Pending Two']]
self._test_results()
class TestStaticThemePendingQueue(QueueTest):
def setUp(self):
super(TestStaticThemePendingQueue, self).setUp()
# These should be the only ones present.
self.expected_addons = self.get_expected_addons_by_names(
['Pending One', 'Pending Two'])
Addon.objects.all().update(type=amo.ADDON_STATICTHEME)
self.url = reverse('reviewers.queue_pending')
GroupUser.objects.filter(user=self.user).delete()
self.grant_permission(self.user, 'Addons:ThemeReview')
def test_results(self):
self._test_results()
def test_queue_layout(self):
self._test_queue_layout('Updates',
tab_position=1, total_addons=2, total_queues=2)
def test_get_queue(self):
self._test_get_queue()
def test_extensions_filtered_out(self):
self.addons['Pending Two'].update(type=amo.ADDON_EXTENSION)
# Extensions shouldn't be shown
self.expected_addons = [self.addons['Pending One']]
self._test_results()
# Unless you have that permission also
self.grant_permission(self.user, 'Addons:Review')
self.expected_addons = [
self.addons['Pending One'], self.addons['Pending Two']]
self._test_results()
class TestNominatedQueue(QueueTest):
def setUp(self):
super(TestNominatedQueue, self).setUp()
# These should be the only ones present.
self.expected_addons = self.get_expected_addons_by_names(
['Nominated One', 'Nominated Two'])
self.url = reverse('reviewers.queue_nominated')
def test_results(self):
self._test_results()
def test_results_two_versions(self):
version1 = self.addons['Nominated One'].versions.all()[0]
version2 = self.addons['Nominated Two'].versions.all()[0]
file_ = version2.files.get()
# Versions are ordered by creation date, so make sure they're set.
past = self.days_ago(1)
version2.update(created=past, nomination=past)
# Create another version, v0.2, by "cloning" v0.1.
version2.pk = None
version2.version = '0.2'
version2.save()
# Reset creation date once it has been saved.
future = datetime.now() - timedelta(seconds=1)
version2.update(created=future, nomination=future)
        # Associate v0.2 with a file.
file_.pk = None
file_.version = version2
file_.save()
# disable old files like Version.from_upload() would.
version2.disable_old_files()
response = self.client.get(self.url)
assert response.status_code == 200
expected = [
('Nominated One 0.1', reverse('reviewers.review',
args=[version1.addon.slug])),
('Nominated Two 0.2', reverse('reviewers.review',
args=[version2.addon.slug])),
]
doc = pq(response.content)
check_links(
expected,
doc('#addon-queue tr.addon-row td a:not(.app-icon)'),
verify=False)
def test_queue_layout(self):
self._test_queue_layout('New',
tab_position=0, total_addons=2, total_queues=2)
def test_get_queue(self):
self._test_get_queue()
def test_webextensions_filtered_out_because_of_post_review(self):
version = self.addons['Nominated Two'].find_latest_version(
channel=amo.RELEASE_CHANNEL_LISTED)
version.files.update(is_webextension=True)
# Webextensions are filtered out from the queue since auto_approve is
# taking care of them.
self.expected_addons = [self.addons['Nominated One']]
self._test_results()
def test_webextension_with_auto_approval_disabled_false_filtered_out(self):
version = self.addons['Nominated Two'].find_latest_version(
channel=amo.RELEASE_CHANNEL_LISTED)
version.files.update(is_webextension=True)
AddonReviewerFlags.objects.create(
addon=self.addons['Nominated Two'], auto_approval_disabled=False)
self.expected_addons = [self.addons['Nominated One']]
self._test_results()
def test_webextension_with_auto_approval_disabled_does_show_up(self):
version = self.addons['Nominated Two'].find_latest_version(
channel=amo.RELEASE_CHANNEL_LISTED)
version.files.update(is_webextension=True)
version = self.addons['Nominated One'].find_latest_version(
channel=amo.RELEASE_CHANNEL_LISTED)
version.files.update(is_webextension=True)
AddonReviewerFlags.objects.create(
addon=self.addons['Nominated One'], auto_approval_disabled=True)
self.expected_addons = [self.addons['Nominated One']]
self._test_results()
def test_static_theme_filtered_out(self):
self.addons['Nominated Two'].update(type=amo.ADDON_STATICTHEME)
# Static Theme shouldn't be shown
self.expected_addons = [self.addons['Nominated One']]
self._test_results()
# Unless you have that permission also
self.grant_permission(self.user, 'Addons:ThemeReview')
self.expected_addons = [
self.addons['Nominated One'], self.addons['Nominated Two']]
self._test_results()
class TestStaticThemeNominatedQueue(QueueTest):
def setUp(self):
super(TestStaticThemeNominatedQueue, self).setUp()
# These should be the only ones present.
self.expected_addons = self.get_expected_addons_by_names(
['Nominated One', 'Nominated Two'])
self.url = reverse('reviewers.queue_nominated')
Addon.objects.all().update(type=amo.ADDON_STATICTHEME)
GroupUser.objects.filter(user=self.user).delete()
self.grant_permission(self.user, 'Addons:ThemeReview')
def test_results(self):
self._test_results()
def test_results_two_versions(self):
version1 = self.addons['Nominated One'].versions.all()[0]
version2 = self.addons['Nominated Two'].versions.all()[0]
file_ = version2.files.get()
# Versions are ordered by creation date, so make sure they're set.
past = self.days_ago(1)
version2.update(created=past, nomination=past)
# Create another version, v0.2, by "cloning" v0.1.
version2.pk = None
version2.version = '0.2'
version2.save()
# Reset creation date once it has been saved.
future = datetime.now() - timedelta(seconds=1)
version2.update(created=future, nomination=future)
        # Associate v0.2 with a file.
file_.pk = None
file_.version = version2
file_.save()
# disable old files like Version.from_upload() would.
version2.disable_old_files()
response = self.client.get(self.url)
assert response.status_code == 200
expected = [
('Nominated One 0.1', reverse('reviewers.review',
args=[version1.addon.slug])),
('Nominated Two 0.2', reverse('reviewers.review',
args=[version2.addon.slug])),
]
doc = pq(response.content)
check_links(
expected,
doc('#addon-queue tr.addon-row td a:not(.app-icon)'),
verify=False)
def test_queue_layout(self):
self._test_queue_layout('New',
tab_position=0, total_addons=2, total_queues=2)
def test_get_queue(self):
self._test_get_queue()
    def test_extensions_filtered_out(self):
        self.addons['Nominated Two'].update(type=amo.ADDON_EXTENSION)
        # Extensions shouldn't be shown in the static theme queue.
self.expected_addons = [self.addons['Nominated One']]
self._test_results()
# Unless you have that permission also
self.grant_permission(self.user, 'Addons:Review')
self.expected_addons = [
self.addons['Nominated One'], self.addons['Nominated Two']]
self._test_results()
class TestModeratedQueue(QueueTest):
fixtures = ['base/users', 'ratings/dev-reply']
def setUp(self):
super(TestModeratedQueue, self).setUp()
self.url = reverse('reviewers.queue_moderated')
url_flag = reverse('addons.ratings.flag', args=['a1865', 218468])
response = self.client.post(url_flag, {'flag': RatingFlag.SPAM})
assert response.status_code == 200
assert RatingFlag.objects.filter(flag=RatingFlag.SPAM).count() == 1
assert Rating.objects.filter(editorreview=True).count() == 1
self.grant_permission(self.user, 'Ratings:Moderate')
def test_results(self):
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)('#reviews-flagged')
rows = doc('.review-flagged:not(.review-saved)')
assert rows.length == 1
assert rows.find('h3').text() == ''
# Default is "Skip."
assert doc('#id_form-0-action_1:checked').length == 1
flagged = doc('.reviews-flagged-reasons span.light').text()
reviewer = RatingFlag.objects.all()[0].user.name
assert flagged.startswith('Flagged by %s' % reviewer), (
'Unexpected text: %s' % flagged)
addon = Addon.objects.get(id=1865)
addon.name = u'náme'
addon.save()
response = self.client.get(self.url)
doc = pq(response.content)('#reviews-flagged')
rows = doc('.review-flagged:not(.review-saved)')
assert rows.length == 1
assert rows.find('h3').text() == u'náme'
def setup_actions(self, action):
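        # Post the moderation formset with `action` applied to the first
        # flagged review.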
response = self.client.get(self.url)
assert response.status_code == 200
form_0_data = initial(response.context['reviews_formset'].forms[0])
assert Rating.objects.filter(addon=1865).count() == 2
formset_data = formset(form_0_data)
formset_data['form-0-action'] = action
response = self.client.post(self.url, formset_data)
self.assert3xx(response, self.url)
def test_skip(self):
self.setup_actions(ratings.REVIEW_MODERATE_SKIP)
# Make sure it's still there.
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
rows = doc('#reviews-flagged .review-flagged:not(.review-saved)')
assert rows.length == 1
def test_skip_score(self):
self.setup_actions(ratings.REVIEW_MODERATE_SKIP)
assert ReviewerScore.objects.filter(
note_key=amo.REVIEWED_ADDON_REVIEW).count() == 0
def get_logs(self, action):
return ActivityLog.objects.filter(action=action.id)
def test_remove(self):
"""Make sure the reviewer tools can delete a review."""
self.setup_actions(ratings.REVIEW_MODERATE_DELETE)
logs = self.get_logs(amo.LOG.DELETE_RATING)
assert logs.count() == 1
# Make sure it's removed from the queue.
response = self.client.get(self.url)
assert response.status_code == 200
assert pq(response.content)('#reviews-flagged .no-results').length == 1
response = self.client.get(reverse('reviewers.ratings_moderation_log'))
assert pq(response.content)('table .more-details').attr('href') == (
reverse('reviewers.ratings_moderation_log.detail',
args=[logs[0].id]))
# Make sure it was actually deleted.
assert Rating.objects.filter(addon=1865).count() == 1
# But make sure it wasn't *actually* deleted.
assert Rating.unfiltered.filter(addon=1865).count() == 2
def test_remove_fails_for_own_addon(self):
"""
Make sure the reviewer tools can't delete a review for an
add-on owned by the user.
"""
addon = Addon.objects.get(pk=1865)
user = UserProfile.objects.get(email='[email protected]')
AddonUser(addon=addon, user=user).save()
# Make sure the initial count is as expected
assert Rating.objects.filter(addon=1865).count() == 2
self.setup_actions(ratings.REVIEW_MODERATE_DELETE)
logs = self.get_logs(amo.LOG.DELETE_RATING)
assert logs.count() == 0
# Make sure it's not removed from the queue.
response = self.client.get(self.url)
assert response.status_code == 200
assert pq(response.content)('#reviews-flagged .no-results').length == 0
# Make sure it was not actually deleted.
assert Rating.objects.filter(addon=1865).count() == 2
def test_remove_score(self):
self.setup_actions(ratings.REVIEW_MODERATE_DELETE)
assert ReviewerScore.objects.filter(
note_key=amo.REVIEWED_ADDON_REVIEW).count() == 1
def test_keep(self):
"""Make sure the reviewer tools can remove flags and keep a review."""
self.setup_actions(ratings.REVIEW_MODERATE_KEEP)
logs = self.get_logs(amo.LOG.APPROVE_RATING)
assert logs.count() == 1
# Make sure it's removed from the queue.
response = self.client.get(self.url)
assert response.status_code == 200
assert pq(response.content)('#reviews-flagged .no-results').length == 1
rating = Rating.objects.filter(addon=1865)
# Make sure it's NOT deleted...
assert rating.count() == 2
# ...but it's no longer flagged.
assert rating.filter(editorreview=1).count() == 0
def test_keep_score(self):
self.setup_actions(ratings.REVIEW_MODERATE_KEEP)
assert ReviewerScore.objects.filter(
note_key=amo.REVIEWED_ADDON_REVIEW).count() == 1
def test_queue_layout(self):
# From the fixtures we already have 2 reviews, one is flagged. We add
# a bunch of reviews from different scenarios and make sure they don't
# count towards the total.
        # Add a review associated with a normal addon
rating = Rating.objects.create(
addon=addon_factory(), user=user_factory(),
body='show me', editorreview=True)
RatingFlag.objects.create(rating=rating)
# Add a review associated with an incomplete addon
rating = Rating.objects.create(
addon=addon_factory(status=amo.STATUS_NULL), user=user_factory(),
body='dont show me', editorreview=True)
RatingFlag.objects.create(rating=rating)
        # Add a review associated with an unlisted version
addon = addon_factory()
version = version_factory(
addon=addon, channel=amo.RELEASE_CHANNEL_UNLISTED)
rating = Rating.objects.create(
            addon=addon, version=version, user=user_factory(),
body='dont show me either', editorreview=True)
RatingFlag.objects.create(rating=rating)
self._test_queue_layout('Rating Reviews',
tab_position=2, total_addons=2, total_queues=3)
def test_no_reviews(self):
Rating.objects.all().delete()
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)('#reviews-flagged')
assert doc('.no-results').length == 1
assert doc('.review-saved button').length == 1 # Show only one button.
def test_do_not_show_reviews_for_non_public_addons(self):
Addon.objects.all().update(status=amo.STATUS_NULL)
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)('#reviews-flagged')
# There should be no results since all add-ons are not public.
assert doc('.no-results').length == 1
def test_do_not_show_reviews_for_unlisted_addons(self):
for addon in Addon.objects.all():
self.make_addon_unlisted(addon)
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)('#reviews-flagged')
# There should be no results since all add-ons are unlisted.
assert doc('.no-results').length == 1
class TestUnlistedAllList(QueueTest):
listed = False
def setUp(self):
super(TestUnlistedAllList, self).setUp()
self.url = reverse('reviewers.unlisted_queue_all')
# We should have all add-ons.
self.expected_addons = self.get_expected_addons_by_names(
['Pending One', 'Pending Two', 'Nominated One', 'Nominated Two',
'Public'])
        # Need to set unique nomination times or we get a pseudo-random order.
for idx, addon in enumerate(self.expected_addons):
latest_version = addon.find_latest_version(
channel=amo.RELEASE_CHANNEL_UNLISTED)
latest_version.update(
nomination=(datetime.now() - timedelta(minutes=idx)))
def test_results(self):
self._test_results()
def test_review_notes_json(self):
latest_version = self.expected_addons[0].find_latest_version(
channel=amo.RELEASE_CHANNEL_UNLISTED)
log = ActivityLog.create(amo.LOG.APPROVE_VERSION,
latest_version,
self.expected_addons[0],
user=UserProfile.objects.get(pk=999),
details={'comments': 'stish goin` down son'})
url = reverse('reviewers.queue_review_text') + str(log.id)
response = self.client.get(url)
assert response.status_code == 200
assert (json.loads(response.content) ==
{'reviewtext': 'stish goin` down son'})
class TestAutoApprovedQueue(QueueTest):
def setUp(self):
super(TestAutoApprovedQueue, self).setUp()
self.url = reverse('reviewers.queue_auto_approved')
def login_with_permission(self):
user = UserProfile.objects.get(email='[email protected]')
self.grant_permission(user, 'Addons:PostReview')
self.client.login(email=user.email)
def get_addon_latest_version(self, addon):
"""Method used by _test_results() to fetch the version that the queue
is supposed to display. Overridden here because in our case, it's not
necessarily the latest available version - we display the current
public version instead (which is not guaranteed to be the latest
auto-approved one, but good enough) for this page."""
return addon.current_version
def generate_files(self):
"""Generate add-ons needed for these tests."""
# Has not been auto-approved.
extra_addon = addon_factory(name=u'Extra Addôn 1')
AutoApprovalSummary.objects.create(
version=extra_addon.current_version, verdict=amo.NOT_AUTO_APPROVED)
# Has not been auto-approved either, only dry run.
extra_addon2 = addon_factory(name=u'Extra Addôn 2')
AutoApprovalSummary.objects.create(
version=extra_addon2.current_version,
verdict=amo.WOULD_HAVE_BEEN_AUTO_APPROVED)
# Has been auto-approved, but that auto-approval has been confirmed by
# a human already.
extra_addon3 = addon_factory(name=u'Extra Addôn 3')
extra_summary3 = AutoApprovalSummary.objects.create(
version=extra_addon3.current_version,
verdict=amo.AUTO_APPROVED, confirmed=True)
AddonApprovalsCounter.objects.create(
addon=extra_addon3, counter=1,
last_human_review=extra_summary3.created)
# Has been auto-approved and reviewed by a human before.
addon1 = addon_factory(name=u'Addôn 1')
AutoApprovalSummary.objects.create(
version=addon1.current_version, verdict=amo.AUTO_APPROVED)
AddonApprovalsCounter.objects.create(
addon=addon1, counter=1, last_human_review=self.days_ago(42))
# Has been auto-approved twice, last_human_review is somehow None,
# the 'created' date will be used to order it (older is higher).
addon2 = addon_factory(name=u'Addôn 2')
addon2.update(created=self.days_ago(10))
AutoApprovalSummary.objects.create(
version=addon2.current_version, verdict=amo.AUTO_APPROVED)
AddonApprovalsCounter.objects.create(
addon=addon2, counter=1, last_human_review=None)
addon2_version2 = version_factory(addon=addon2)
AutoApprovalSummary.objects.create(
version=addon2_version2, verdict=amo.AUTO_APPROVED)
# Has been auto-approved and never been seen by a human,
# the 'created' date will be used to order it (newer is lower).
addon3 = addon_factory(name=u'Addôn 3')
addon3.update(created=self.days_ago(2))
AutoApprovalSummary.objects.create(
version=addon3.current_version, verdict=amo.AUTO_APPROVED)
AddonApprovalsCounter.objects.create(
addon=addon3, counter=1, last_human_review=None)
# Has been auto-approved, should be first because of its weight.
addon4 = addon_factory(name=u'Addôn 4')
addon4.update(created=self.days_ago(14))
AutoApprovalSummary.objects.create(
version=addon4.current_version, verdict=amo.AUTO_APPROVED,
weight=500)
AddonApprovalsCounter.objects.create(
addon=addon4, counter=0, last_human_review=self.days_ago(1))
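        # Expected order: addon4 first because of its weight, then the add-ons
        # without a recorded human review (addon2 and addon3, ordered by
        # creation date, older first), and finally addon1.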
self.expected_addons = [addon4, addon2, addon3, addon1]
def test_only_viewable_with_specific_permission(self):
# Regular addon reviewer does not have access.
response = self.client.get(self.url)
assert response.status_code == 403
# Regular user doesn't have access.
self.client.logout()
assert self.client.login(email='[email protected]')
response = self.client.get(self.url)
assert response.status_code == 403
def test_results(self):
self.login_with_permission()
self.generate_files()
self._test_results()
def test_results_weights(self):
addon1 = addon_factory(name=u'Addôn 1')
AutoApprovalSummary.objects.create(
version=addon1.current_version, verdict=amo.AUTO_APPROVED,
weight=amo.POST_REVIEW_WEIGHT_HIGHEST_RISK + 1)
AddonApprovalsCounter.reset_for_addon(addon1)
addon2 = addon_factory(name=u'Addôn 2')
AutoApprovalSummary.objects.create(
version=addon2.current_version, verdict=amo.AUTO_APPROVED,
weight=amo.POST_REVIEW_WEIGHT_HIGH_RISK + 1)
AddonApprovalsCounter.reset_for_addon(addon2)
addon3 = addon_factory(name=u'Addôn 3')
AutoApprovalSummary.objects.create(
version=addon3.current_version, verdict=amo.AUTO_APPROVED,
weight=amo.POST_REVIEW_WEIGHT_MEDIUM_RISK + 1)
AddonApprovalsCounter.reset_for_addon(addon3)
addon4 = addon_factory(name=u'Addôn 4')
AutoApprovalSummary.objects.create(
version=addon4.current_version, verdict=amo.AUTO_APPROVED,
weight=1)
AddonApprovalsCounter.reset_for_addon(addon4)
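        # Ordered from highest to lowest weight, matching the risk classes
        # asserted below.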
self.expected_addons = [addon1, addon2, addon3, addon4]
self.login_with_permission()
doc = self._test_results()
expected = ['risk-highest', 'risk-high', 'risk-medium', 'risk-low']
classnames = [
item.attrib['class'] for item in doc('.addon-row td:eq(4) span')]
assert expected == classnames
def test_queue_layout(self):
self.login_with_permission()
self.generate_files()
self._test_queue_layout("Auto Approved",
tab_position=2, total_addons=4, total_queues=3,
per_page=1)
class TestExpiredInfoRequestsQueue(QueueTest):
def setUp(self):
super(TestExpiredInfoRequestsQueue, self).setUp()
self.url = reverse('reviewers.queue_expired_info_requests')
def generate_files(self):
# Extra add-on with no pending info request.
addon_factory(name=u'Extra Addôn 1')
# Extra add-on with a non-expired pending info request.
extra_addon = addon_factory(name=u'Extra Addôn 2')
AddonReviewerFlags.objects.create(
addon=extra_addon,
pending_info_request=datetime.now() + timedelta(days=1))
# Pending addon with expired info request.
addon1 = addon_factory(name=u'Pending Addön 1',
status=amo.STATUS_NOMINATED)
AddonReviewerFlags.objects.create(
addon=addon1,
pending_info_request=self.days_ago(2))
# Public addon with expired info request.
addon2 = addon_factory(name=u'Public Addön 2',
status=amo.STATUS_PUBLIC)
AddonReviewerFlags.objects.create(
addon=addon2,
pending_info_request=self.days_ago(42))
# Deleted addon with expired info request.
addon3 = addon_factory(name=u'Deleted Addön 3',
status=amo.STATUS_DELETED)
AddonReviewerFlags.objects.create(
addon=addon3,
pending_info_request=self.days_ago(42))
# Mozilla-disabled addon with expired info request.
addon4 = addon_factory(name=u'Disabled Addön 4',
status=amo.STATUS_DISABLED)
AddonReviewerFlags.objects.create(
addon=addon4,
pending_info_request=self.days_ago(42))
# Incomplete addon with expired info request.
addon5 = addon_factory(name=u'Incomplete Addön 5',
status=amo.STATUS_NULL)
AddonReviewerFlags.objects.create(
addon=addon5,
pending_info_request=self.days_ago(42))
# Invisible (user-disabled) addon with expired info request.
        addon6 = addon_factory(name=u'Invisible Addön 6',
status=amo.STATUS_PUBLIC,
disabled_by_user=True)
AddonReviewerFlags.objects.create(
addon=addon6,
pending_info_request=self.days_ago(42))
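        # Only the public and nominated add-ons with an expired info request
        # should show up in the queue.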
self.expected_addons = [addon2, addon1]
def test_results_no_permission(self):
# Addon reviewer doesn't have access.
response = self.client.get(self.url)
assert response.status_code == 403
# Regular user doesn't have access.
self.client.logout()
assert self.client.login(email='[email protected]')
response = self.client.get(self.url)
assert response.status_code == 403
def test_results(self):
self.grant_permission(self.user, 'Reviews:Admin')
self.generate_files()
self._test_results()
class TestContentReviewQueue(QueueTest):
def setUp(self):
super(TestContentReviewQueue, self).setUp()
self.url = reverse('reviewers.queue_content_review')
self.channel_name = 'content'
def login_with_permission(self):
user = UserProfile.objects.get(email='[email protected]')
self.grant_permission(user, 'Addons:ContentReview')
self.client.login(email=user.email)
return user
def get_addon_latest_version(self, addon):
"""Method used by _test_results() to fetch the version that the queue
is supposed to display. Overridden here because in our case, it's not
necessarily the latest available version - we display the current
public version instead (which is not guaranteed to be the latest
auto-approved one, but good enough) for this page."""
return addon.current_version
def generate_files(self):
"""Generate add-ons needed for these tests."""
# Has not been auto-approved.
extra_addon = addon_factory(name=u'Extra Addôn 1')
AutoApprovalSummary.objects.create(
version=extra_addon.current_version, verdict=amo.NOT_AUTO_APPROVED,
)
# Has not been auto-approved either, only dry run.
extra_addon2 = addon_factory(name=u'Extra Addôn 2')
AutoApprovalSummary.objects.create(
version=extra_addon2.current_version,
verdict=amo.WOULD_HAVE_BEEN_AUTO_APPROVED,
)
# Has been auto-approved, but that content has been approved by
# a human already.
extra_addon3 = addon_factory(name=u'Extra Addôn 3')
AutoApprovalSummary.objects.create(
version=extra_addon3.current_version,
verdict=amo.AUTO_APPROVED, confirmed=True)
AddonApprovalsCounter.objects.create(
addon=extra_addon3, last_content_review=self.days_ago(1))
# This one has never been content-reviewed, but it has the
# needs_admin_content_review flag, and we're not an admin.
extra_addon4 = addon_factory(name=u'Extra Addön 4')
extra_addon4.update(created=self.days_ago(2))
AutoApprovalSummary.objects.create(
version=extra_addon4.current_version,
verdict=amo.AUTO_APPROVED, confirmed=True)
AddonApprovalsCounter.objects.create(
addon=extra_addon4, last_content_review=None)
AddonReviewerFlags.objects.create(
addon=extra_addon4, needs_admin_content_review=True)
# This first add-on has been content reviewed so long ago that we
# should do it again.
addon1 = addon_factory(name=u'Addön 1')
AutoApprovalSummary.objects.create(
version=addon1.current_version,
verdict=amo.AUTO_APPROVED, confirmed=True)
AddonApprovalsCounter.objects.create(
addon=addon1, last_content_review=self.days_ago(370))
# This one is quite similar, except its last content review is even
        # older.
addon2 = addon_factory(name=u'Addön 1')
AutoApprovalSummary.objects.create(
version=addon2.current_version,
verdict=amo.AUTO_APPROVED, confirmed=True)
AddonApprovalsCounter.objects.create(
addon=addon2, last_content_review=self.days_ago(842))
# This one has never been content-reviewed. It has an
# needs_admin_code_review flag, but that should not have any impact.
addon3 = addon_factory(name=u'Addön 2')
addon3.update(created=self.days_ago(2))
AutoApprovalSummary.objects.create(
version=addon3.current_version,
verdict=amo.AUTO_APPROVED, confirmed=True)
AddonApprovalsCounter.objects.create(
addon=addon3, last_content_review=None)
AddonReviewerFlags.objects.create(
addon=addon3, needs_admin_code_review=True)
# This one has never been content reviewed either, and it does not even
# have an AddonApprovalsCounter.
addon4 = addon_factory(name=u'Addön 3')
addon4.update(created=self.days_ago(1))
AutoApprovalSummary.objects.create(
version=addon4.current_version,
verdict=amo.AUTO_APPROVED, confirmed=True)
assert not AddonApprovalsCounter.objects.filter(addon=addon4).exists()
# Addons with no last_content_review date should be first, ordered by
# their creation date, older first.
self.expected_addons = [addon3, addon4, addon2, addon1]
def test_only_viewable_with_specific_permission(self):
# Regular addon reviewer does not have access.
response = self.client.get(self.url)
assert response.status_code == 403
# Regular user doesn't have access.
self.client.logout()
assert self.client.login(email='[email protected]')
response = self.client.get(self.url)
assert response.status_code == 403
def test_results(self):
self.login_with_permission()
self.generate_files()
self._test_results()
def test_queue_layout(self):
self.login_with_permission()
self.generate_files()
self._test_queue_layout('Content Review',
tab_position=2, total_addons=4, total_queues=3,
per_page=1)
def test_queue_layout_admin(self):
# Admins should see the extra add-on that needs admin content review.
user = self.login_with_permission()
self.grant_permission(user, 'Reviews:Admin')
self.generate_files()
self._test_queue_layout('Content Review',
tab_position=2, total_addons=5, total_queues=4)
class TestPerformance(QueueTest):
    """Test the page at /reviewers/performance."""
    fixtures = ['base/users', 'base/addon_3615']
def setUpReviewer(self):
self.login_as_reviewer()
core.set_user(UserProfile.objects.get(username='reviewer'))
self.create_logs()
def setUpAdmin(self):
self.login_as_admin()
core.set_user(UserProfile.objects.get(username='admin'))
self.create_logs()
def get_url(self, args=None):
if args is None:
args = []
return reverse('reviewers.performance', args=args)
def create_logs(self):
addon = Addon.objects.all()[0]
version = addon.versions.all()[0]
for i in amo.LOG_REVIEWER_REVIEW_ACTION:
ActivityLog.create(amo.LOG_BY_ID[i], addon, version)
# Throw in an automatic approval - should be ignored.
ActivityLog.create(
amo.LOG.APPROVE_VERSION, addon, version,
user=UserProfile.objects.get(id=settings.TASK_USER_ID))
def _test_chart(self):
r = self.client.get(self.get_url())
assert r.status_code == 200
doc = pq(r.content)
num = len(amo.LOG_REVIEWER_REVIEW_ACTION)
label = datetime.now().strftime('%Y-%m')
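        # The monthly chart JSON is keyed by '%Y-%m'; each reviewer review
        # action logged above counts once for the team and once for the user.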
data = {label: {u'teamcount': num, u'teamavg': u'%s.0' % num,
u'usercount': num, u'teamamt': 1,
u'label': datetime.now().strftime('%b %Y')}}
assert json.loads(doc('#monthly').attr('data-chart')) == data
def test_performance_chart_reviewer(self):
self.setUpReviewer()
self._test_chart()
def test_performance_chart_as_admin(self):
self.setUpAdmin()
self._test_chart()
def test_usercount_with_more_than_one_reviewer(self):
self.client.login(email='[email protected]')
core.set_user(UserProfile.objects.get(username='clouserw'))
self.create_logs()
self.setUpReviewer()
r = self.client.get(self.get_url())
assert r.status_code == 200
doc = pq(r.content)
data = json.loads(doc('#monthly').attr('data-chart'))
label = datetime.now().strftime('%Y-%m')
assert data[label]['usercount'] == len(amo.LOG_REVIEWER_REVIEW_ACTION)
def _test_performance_other_user_as_admin(self):
userid = core.get_user().pk
r = self.client.get(self.get_url([10482]))
doc = pq(r.content)
assert doc('#select_user').length == 1 # Let them choose reviewers.
options = doc('#select_user option')
assert options.length == 3
assert options.eq(2).val() == str(userid)
assert 'clouserw' in doc('#reviews_user').text()
def test_performance_other_user_as_admin(self):
self.setUpAdmin()
self._test_performance_other_user_as_admin()
def test_performance_other_user_not_admin(self):
self.setUpReviewer()
r = self.client.get(self.get_url([10482]))
doc = pq(r.content)
assert doc('#select_user').length == 0 # Don't let them choose.
assert doc('#reviews_user').text() == 'Your Reviews'
class SearchTest(ReviewerTest):
listed = True
def setUp(self):
super(SearchTest, self).setUp()
self.user = UserProfile.objects.get(email='[email protected]')
self.login_as_reviewer()
if self.listed is False:
# Testing unlisted views: needs Addons:ReviewUnlisted perm.
self.grant_permission(self.user, 'Addons:ReviewUnlisted')
def named_addons(self, request):
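        """Return the add-on names from the queue page in the given context."""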
return [
r.record.addon_name for r in request.context['page'].object_list]
def search(self, *args, **kw):
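        """GET the queue with the given filters; the search form must validate."""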
response = self.client.get(self.url, kw)
assert response.status_code == 200
assert response.context['search_form'].errors.as_text() == ''
return response
class BaseTestQueueSearch(SearchTest):
fixtures = ['base/users', 'base/appversion']
__test__ = False # this is an abstract test case
def generate_files(self, subset=None):
if subset is None:
subset = []
files = OrderedDict([
('Not Needing Admin Review', {
'version_str': '0.1',
'addon_status': amo.STATUS_NOMINATED,
'file_status': amo.STATUS_AWAITING_REVIEW,
}),
('Another Not Needing Admin Review', {
'version_str': '0.1',
'addon_status': amo.STATUS_NOMINATED,
'file_status': amo.STATUS_AWAITING_REVIEW,
}),
('Needs Admin Review', {
'version_str': '0.1',
'addon_status': amo.STATUS_NOMINATED,
'file_status': amo.STATUS_AWAITING_REVIEW,
'needs_admin_code_review': True,
}),
('Justin Bieber Theme', {
'version_str': '0.1',
'addon_status': amo.STATUS_NOMINATED,
'file_status': amo.STATUS_AWAITING_REVIEW,
'type': amo.ADDON_THEME,
}),
('Justin Bieber Search Bar', {
'version_str': '0.1',
'addon_status': amo.STATUS_NOMINATED,
'file_status': amo.STATUS_AWAITING_REVIEW,
'type': amo.ADDON_SEARCH,
}),
('Bieber For Mobile', {
'version_str': '0.1',
'addon_status': amo.STATUS_NOMINATED,
'file_status': amo.STATUS_AWAITING_REVIEW,
'version_kw': {'application': amo.ANDROID.id},
}),
('Linux Widget', {
'version_str': '0.1',
'addon_status': amo.STATUS_NOMINATED,
'file_status': amo.STATUS_AWAITING_REVIEW,
}),
('Mac Widget', {
'version_str': '0.1',
'addon_status': amo.STATUS_NOMINATED,
'file_status': amo.STATUS_AWAITING_REVIEW,
}),
('Deleted', {
'version_str': '0.1',
'addon_status': amo.STATUS_DELETED,
'file_status': amo.STATUS_AWAITING_REVIEW,
}),
])
results = {}
channel = (amo.RELEASE_CHANNEL_LISTED if self.listed else
amo.RELEASE_CHANNEL_UNLISTED)
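        # Build each requested add-on in the right channel, splitting the spec
        # into version/file kwargs and the optional admin-review flag.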
for name, attrs in files.iteritems():
if not subset or name in subset:
version_kw = attrs.get('version_kw', {})
version_kw.update(
{'channel': channel, 'version': attrs.pop('version_str')})
attrs['version_kw'] = version_kw
file_kw = attrs.get('file_kw', {})
file_kw.update({'status': attrs.pop('file_status')})
attrs['file_kw'] = file_kw
attrs.update({'version_kw': version_kw, 'file_kw': file_kw})
needs_admin_code_review = attrs.pop(
'needs_admin_code_review', None)
results[name] = addon_factory(
status=attrs.pop('addon_status'), name=name, **attrs)
if needs_admin_code_review:
AddonReviewerFlags.objects.create(
addon=results[name], needs_admin_code_review=True)
return results
def generate_file(self, name):
return self.generate_files([name])[name]
def test_search_by_needs_admin_code_review_admin(self):
self.login_as_admin()
self.generate_files(['Not Needing Admin Review', 'Needs Admin Review'])
response = self.search(needs_admin_code_review=1)
assert response.status_code == 200
assert self.named_addons(response) == ['Needs Admin Review']
def test_queue_counts_admin(self):
self.login_as_admin()
self.generate_files(['Not Needing Admin Review', 'Needs Admin Review'])
response = self.search(text_query='admin', per_page=1)
assert response.status_code == 200
doc = pq(response.content)
assert doc('.data-grid-top .num-results').text() == (
u'Results 1\u20131 of 2')
def test_search_by_addon_name_admin(self):
self.login_as_admin()
self.generate_files(['Not Needing Admin Review', 'Needs Admin Review',
'Justin Bieber Theme'])
response = self.search(text_query='admin')
assert response.status_code == 200
assert sorted(self.named_addons(response)) == [
'Needs Admin Review', 'Not Needing Admin Review']
def test_not_searching(self, **kwargs):
self.generate_files(['Not Needing Admin Review', 'Needs Admin Review'])
response = self.search(**kwargs)
assert response.status_code == 200
assert sorted(self.named_addons(response)) == [
'Not Needing Admin Review']
        # We were just displaying the queue, not searching, but the hidden
        # 'searching' input in the form should always be set to True
        # regardless; it will be used once the user submits the form.
doc = pq(response.content)
assert doc('#id_searching').attr('value') == 'True'
def test_not_searching_with_param(self):
self.test_not_searching(some_param=1)
def test_search_by_nothing(self):
self.generate_files(['Not Needing Admin Review', 'Needs Admin Review'])
response = self.search(searching='True')
assert response.status_code == 200
assert sorted(self.named_addons(response)) == (
['Needs Admin Review', 'Not Needing Admin Review'])
def test_search_by_needs_admin_code_review(self):
self.generate_files(['Not Needing Admin Review', 'Needs Admin Review'])
response = self.search(needs_admin_code_review=1, searching='True')
assert response.status_code == 200
assert self.named_addons(response) == ['Needs Admin Review']
def test_queue_counts(self):
self.generate_files(['Not Needing Admin Review',
'Another Not Needing Admin Review',
'Needs Admin Review'])
response = self.search(
text_query='admin', per_page=1, searching='True')
assert response.status_code == 200
doc = pq(response.content)
assert doc('.data-grid-top .num-results').text() == (
u'Results 1\u20131 of 3')
def test_search_by_addon_name(self):
self.generate_files(['Not Needing Admin Review', 'Needs Admin Review',
'Justin Bieber Theme'])
response = self.search(text_query='admin', searching='True')
assert response.status_code == 200
assert sorted(self.named_addons(response)) == (
['Needs Admin Review', 'Not Needing Admin Review'])
def test_search_by_addon_in_locale(self):
name = 'Not Needing Admin Review'
generated = self.generate_file(name)
uni = 'フォクすけといっしょ'.decode('utf8')
addon = Addon.objects.get(pk=generated.id)
addon.name = {'ja': uni}
addon.save()
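        # Switch to the Japanese locale so the localized name can be searched.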
self.url = self.url.replace('/en-US/', '/ja/')
response = self.client.get(self.url, {'text_query': uni}, follow=True)
assert response.status_code == 200
assert self.named_addons(response) == [name]
def test_search_by_addon_author(self):
name = 'Not Needing Admin Review'
generated = self.generate_file(name)
user = UserProfile.objects.all()[0]
email = user.email.swapcase()
author = AddonUser.objects.create(user=user, addon=generated)
for role in [amo.AUTHOR_ROLE_OWNER, amo.AUTHOR_ROLE_DEV]:
author.role = role
author.save()
response = self.search(text_query=email)
assert response.status_code == 200
assert self.named_addons(response) == [name]
def test_search_by_supported_email_in_locale(self):
name = 'Not Needing Admin Review'
generated = self.generate_file(name)
uni = 'フォクすけといっしょ@site.co.jp'.decode('utf8')
addon = Addon.objects.get(pk=generated.id)
addon.support_email = {'ja': uni}
addon.save()
self.url = self.url.replace('/en-US/', '/ja/')
response = self.client.get(self.url, {'text_query': uni}, follow=True)
assert response.status_code == 200
assert self.named_addons(response) == [name]
def test_clear_search_visible(self):
response = self.search(text_query='admin', searching=True)
assert response.status_code == 200
assert pq(response.content)(
'.clear-queue-search').text() == 'clear search'
def test_clear_search_hidden(self):
response = self.search(text_query='admin')
assert response.status_code == 200
assert not pq(response.content)('.clear-queue-search').text()
class TestQueueSearch(BaseTestQueueSearch):
__test__ = True
def setUp(self):
super(TestQueueSearch, self).setUp()
self.url = reverse('reviewers.queue_nominated')
def test_search_by_addon_type(self):
self.generate_files(['Not Needing Admin Review', 'Justin Bieber Theme',
'Justin Bieber Search Bar'])
response = self.search(addon_type_ids=[amo.ADDON_THEME])
assert response.status_code == 200
assert self.named_addons(response) == ['Justin Bieber Theme']
def test_search_by_addon_type_any(self):
self.generate_file('Not Needing Admin Review')
response = self.search(addon_type_ids=[amo.ADDON_ANY])
assert response.status_code == 200
assert self.named_addons(response), 'Expected some add-ons'
def test_search_by_many_addon_types(self):
self.generate_files(['Not Needing Admin Review', 'Justin Bieber Theme',
'Justin Bieber Search Bar'])
response = self.search(addon_type_ids=[amo.ADDON_THEME,
amo.ADDON_SEARCH])
assert response.status_code == 200
assert sorted(self.named_addons(response)) == (
['Justin Bieber Search Bar', 'Justin Bieber Theme'])
def test_search_by_app(self):
self.generate_files(['Bieber For Mobile', 'Linux Widget'])
response = self.search(application_id=[amo.ANDROID.id])
assert response.status_code == 200
assert self.named_addons(response) == ['Bieber For Mobile']
def test_preserve_multi_apps(self):
self.generate_files(['Bieber For Mobile', 'Linux Widget'])
channel = (amo.RELEASE_CHANNEL_LISTED if self.listed else
amo.RELEASE_CHANNEL_UNLISTED)
multi = addon_factory(
status=amo.STATUS_NOMINATED, name='Multi Application',
version_kw={'channel': channel, 'application': amo.FIREFOX.id},
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
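        # Give the Firefox-only add-on an Android compatibility range too, so
        # it becomes a multi-application add-on.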
av_min, _ = AppVersion.objects.get_or_create(
application=amo.ANDROID.id, version='4.0.99')
av_max, _ = AppVersion.objects.get_or_create(
application=amo.ANDROID.id, version='5.0.0')
ApplicationsVersions.objects.get_or_create(
application=amo.ANDROID.id, version=multi.versions.latest(),
min=av_min, max=av_max)
response = self.search(application_id=[amo.ANDROID.id])
assert response.status_code == 200
assert self.named_addons(response) == [
'Bieber For Mobile', 'Multi Application']
def test_clear_search_uses_correct_queue(self):
# The "clear search" link points to the right listed or unlisted queue.
# Listed queue.
url = reverse('reviewers.queue_nominated')
response = self.client.get(
url, {'text_query': 'admin', 'searching': True})
assert response.status_code == 200
doc = pq(response.content)
assert doc('.clear-queue-search').attr('href') == url
class TestQueueSearchUnlistedAllList(BaseTestQueueSearch):
listed = False
__test__ = True
def setUp(self):
super(TestQueueSearchUnlistedAllList, self).setUp()
self.url = reverse('reviewers.unlisted_queue_all')
def test_search_deleted(self):
self.generate_files(['Not Needing Admin Review', 'Deleted'])
r = self.search(deleted=1)
assert self.named_addons(r) == ['Deleted']
def test_search_not_deleted(self):
self.generate_files(['Not Needing Admin Review', 'Deleted'])
response = self.search(deleted=0)
assert response.status_code == 200
assert self.named_addons(response) == ['Not Needing Admin Review']
def test_search_by_guid(self):
name = 'Not Needing Admin Review'
addon = self.generate_file(name)
addon.update(guid='@guidymcguid')
response = self.search(text_query='mcguid')
assert response.status_code == 200
assert self.named_addons(response) == ['Not Needing Admin Review']
class ReviewBase(QueueTest):
def setUp(self):
super(QueueTest, self).setUp()
self.login_as_reviewer()
self.addons = {}
self.addon = self.generate_file('Public')
self.version = self.addon.current_version
self.file = self.version.files.get()
self.reviewer = UserProfile.objects.get(username='reviewer')
self.reviewer.update(display_name=u'A Reviêwer')
self.url = reverse('reviewers.review', args=[self.addon.slug])
AddonUser.objects.create(addon=self.addon, user_id=999)
def get_addon(self):
return Addon.objects.get(pk=self.addon.pk)
def get_dict(self, **kw):
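        """Return minimal valid POST data for a review action."""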
data = {'operating_systems': 'win', 'applications': 'something',
'comments': 'something'}
data.update(kw)
return data
class TestReview(ReviewBase):
def test_reviewer_required(self):
assert self.client.head(self.url).status_code == 200
def test_not_anonymous(self):
self.client.logout()
self.assertLoginRedirects(self.client.head(self.url), to=self.url)
@patch.object(settings, 'ALLOW_SELF_REVIEWS', False)
def test_not_author(self):
AddonUser.objects.create(addon=self.addon, user=self.reviewer)
assert self.client.head(self.url).status_code == 302
def test_review_unlisted_while_a_listed_version_is_awaiting_review(self):
self.make_addon_unlisted(self.addon)
version_factory(
addon=self.addon, channel=amo.RELEASE_CHANNEL_LISTED,
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
self.addon.update(status=amo.STATUS_NOMINATED, slug='awaiting')
self.url = reverse(
'reviewers.review', args=('unlisted', self.addon.slug))
self.grant_permission(self.reviewer, 'Addons:ReviewUnlisted')
assert self.client.get(self.url).status_code == 200
def test_needs_unlisted_reviewer_for_only_unlisted(self):
self.addon.versions.update(channel=amo.RELEASE_CHANNEL_UNLISTED)
assert self.client.head(self.url).status_code == 404
self.grant_permission(self.reviewer, 'Addons:ReviewUnlisted')
assert self.client.head(self.url).status_code == 200
def test_dont_need_unlisted_reviewer_for_mixed_channels(self):
version_factory(
addon=self.addon, channel=amo.RELEASE_CHANNEL_UNLISTED,
version='9.9')
assert self.addon.find_latest_version(
channel=amo.RELEASE_CHANNEL_UNLISTED)
assert self.addon.current_version.channel == amo.RELEASE_CHANNEL_LISTED
assert self.client.head(self.url).status_code == 200
self.grant_permission(self.reviewer, 'Addons:ReviewUnlisted')
assert self.client.head(self.url).status_code == 200
def test_not_flags(self):
self.addon.current_version.files.update(is_restart_required=False)
response = self.client.get(self.url)
assert response.status_code == 200
assert len(response.context['flags']) == 0
def test_flag_needs_admin_code_review(self):
self.addon.current_version.files.update(is_restart_required=False)
AddonReviewerFlags.objects.create(
addon=self.addon, needs_admin_code_review=True)
response = self.client.get(self.url)
assert response.status_code == 200
assert len(response.context['flags']) == 1
def test_info_comments_requested(self):
response = self.client.post(self.url, {'action': 'reply'})
assert response.context['form'].errors['comments'][0] == (
'This field is required.')
def test_whiteboard_url(self):
# Listed review.
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert (
doc('#whiteboard_form').attr('action') ==
'/en-US/reviewers/whiteboard/listed/public')
# Content review.
self.grant_permission(self.reviewer, 'Addons:ContentReview')
AutoApprovalSummary.objects.create(
version=self.addon.current_version, verdict=amo.AUTO_APPROVED)
self.url = reverse(
'reviewers.review', args=['content', self.addon.slug])
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert (
doc('#whiteboard_form').attr('action') ==
'/en-US/reviewers/whiteboard/content/public')
# Unlisted review.
self.grant_permission(self.reviewer, 'Addons:ReviewUnlisted')
version_factory(addon=self.addon, channel=amo.RELEASE_CHANNEL_UNLISTED)
self.url = reverse(
'reviewers.review', args=['unlisted', self.addon.slug])
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert (
doc('#whiteboard_form').attr('action') ==
'/en-US/reviewers/whiteboard/unlisted/public')
# Listed review, but deleted.
self.addon.delete()
self.url = reverse(
'reviewers.review', args=['listed', self.addon.pk])
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert (
doc('#whiteboard_form').attr('action') ==
'/en-US/reviewers/whiteboard/listed/%d' % self.addon.pk)
def test_no_whiteboards_for_static_themes(self):
self.grant_permission(self.reviewer, 'Addons:ThemeReview')
self.addon.update(type=amo.ADDON_STATICTHEME)
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert not doc('#whiteboard_form')
def test_comment(self):
response = self.client.post(self.url, {'action': 'comment',
'comments': 'hello sailor'})
assert response.status_code == 302
assert len(mail.outbox) == 0
comment_version = amo.LOG.COMMENT_VERSION
assert ActivityLog.objects.filter(
action=comment_version.id).count() == 1
def test_info_requested(self):
response = self.client.post(self.url, {'action': 'reply',
'comments': 'hello sailor'})
assert response.status_code == 302
assert len(mail.outbox) == 1
self.assertTemplateUsed(response, 'activity/emails/from_reviewer.txt')
def test_super_review_requested(self):
response = self.client.post(self.url, {'action': 'super',
'comments': 'hello sailor'})
assert response.status_code == 302
def test_info_requested_canned_response(self):
response = self.client.post(self.url, {'action': 'reply',
'comments': 'hello sailor',
'canned_response': 'foo'})
assert response.status_code == 302
assert len(mail.outbox) == 1
self.assertTemplateUsed(response, 'activity/emails/from_reviewer.txt')
def test_page_title(self):
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert doc('title').text() == (
'%s :: Reviewer Tools :: Add-ons for Firefox' % self.addon.name)
def test_files_shown(self):
response = self.client.get(self.url)
assert response.status_code == 200
items = pq(response.content)('#review-files .files .file-info')
assert items.length == 1
f = self.version.all_files[0]
expected = [
('All Platforms', f.get_url_path('reviewer')),
('Validation',
reverse('devhub.file_validation', args=[self.addon.slug, f.id])),
('Contents', None),
]
check_links(expected, items.find('a'), verify=False)
def test_item_history(self, channel=amo.RELEASE_CHANNEL_LISTED):
self.addons['something'] = addon_factory(
status=amo.STATUS_PUBLIC, name=u'something',
version_kw={'version': u'0.2',
'channel': channel},
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
assert self.addon.versions.filter(channel=channel).count() == 1
self.review_version(self.version, self.url)
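        # Re-parent the generated version onto the add-on under review and
        # bump its creation date so it shows up as the newer version.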
v2 = self.addons['something'].versions.all()[0]
v2.addon = self.addon
v2.created = v2.created + timedelta(days=1)
v2.save()
assert self.addon.versions.filter(channel=channel).count() == 2
action = self.review_version(v2, self.url)
response = self.client.get(self.url)
assert response.status_code == 200
        # The 2 following lines replace pq(response.content); it's a
        # workaround for https://github.com/gawel/pyquery/issues/31
UTF8_PARSER = HTMLParser(encoding='utf-8')
doc = pq(fromstring(response.content, parser=UTF8_PARSER))
table = doc('#review-files')
# Check the history for both versions.
ths = table.children('tr > th')
assert ths.length == 2
assert '0.1' in ths.eq(0).text()
assert '0.2' in ths.eq(1).text()
rows = table('td.files')
assert rows.length == 2
comments = rows.siblings('td')
assert comments.length == 2
for idx in xrange(comments.length):
td = comments.eq(idx)
assert td.find('.history-comment').text() == 'something'
assert td.find('th').text() == {
'public': 'Approved',
'reply': 'Reviewer Reply'}[action]
reviewer_name = td.find('td a').text()
assert ((reviewer_name == self.reviewer.display_name) or
(reviewer_name == self.other_reviewer.display_name))
def test_item_history_with_unlisted_versions_too(self):
# Throw in an unlisted version to be ignored.
version_factory(
version=u'0.2', addon=self.addon,
channel=amo.RELEASE_CHANNEL_UNLISTED,
file_kw={'status': amo.STATUS_PUBLIC})
self.test_item_history()
def test_item_history_with_unlisted_review_page(self):
self.addon.versions.update(channel=amo.RELEASE_CHANNEL_UNLISTED)
self.version.reload()
        # Throw in a listed version to be ignored.
version_factory(
version=u'0.2', addon=self.addon,
channel=amo.RELEASE_CHANNEL_LISTED,
file_kw={'status': amo.STATUS_PUBLIC})
self.url = reverse('reviewers.review', args=[
'unlisted', self.addon.slug])
self.grant_permission(self.reviewer, 'Addons:ReviewUnlisted')
self.test_item_history(channel=amo.RELEASE_CHANNEL_UNLISTED)
def generate_deleted_versions(self):
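        """Create versions that get reviewed or commented on and are then
        hard-deleted, so only the activity log keeps their history."""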
self.addon = addon_factory(version_kw={
'version': '1.0', 'created': self.days_ago(1)})
self.url = reverse('reviewers.review', args=[self.addon.slug])
versions = ({'version': '0.1', 'action': 'comment',
'comments': 'millenium hand and shrimp'},
{'version': '0.1', 'action': 'public',
'comments': 'buggrit'},
{'version': '0.2', 'action': 'comment',
'comments': 'I told em'},
{'version': '0.3'})
for i, version_data in enumerate(versions):
version = version_factory(
addon=self.addon, version=version_data['version'],
created=self.days_ago(-i),
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
if 'action' in version_data:
data = {'action': version_data['action'],
'operating_systems': 'win',
'applications': 'something',
'comments': version_data['comments']}
self.client.post(self.url, data)
version.delete(hard=True)
self.addon.current_version.delete(hard=True)
@patch('olympia.reviewers.utils.sign_file')
def test_item_history_deleted(self, mock_sign):
self.generate_deleted_versions()
response = self.client.get(self.url)
assert response.status_code == 200
table = pq(response.content)('#review-files')
# Check the history for all versions.
ths = table.children('tr > th')
assert ths.length == 3 # The 2 with the same number will be coalesced.
assert '0.1' in ths.eq(0).text()
assert '0.2' in ths.eq(1).text()
assert '0.3' in ths.eq(2).text()
for idx in xrange(2):
assert 'Deleted' in ths.eq(idx).text()
bodies = table.children('.listing-body')
assert 'millenium hand and shrimp' in bodies.eq(0).text()
assert 'buggrit' in bodies.eq(0).text()
assert 'I told em' in bodies.eq(1).text()
assert mock_sign.called
def test_item_history_compat_ordered(self):
""" Make sure that apps in compatibility are ordered. """
av = AppVersion.objects.all()[0]
v = self.addon.versions.all()[0]
ApplicationsVersions.objects.create(
version=v, application=amo.THUNDERBIRD.id, min=av, max=av)
ApplicationsVersions.objects.create(
version=v, application=amo.SEAMONKEY.id, min=av, max=av)
assert self.addon.versions.count() == 1
url = reverse('reviewers.review', args=[self.addon.slug])
response = self.client.get(url)
assert response.status_code == 200
doc = pq(response.content)
icons = doc('.listing-body .app-icon')
assert icons.eq(0).attr('title') == "Firefox"
assert icons.eq(1).attr('title') == "SeaMonkey"
assert icons.eq(2).attr('title') == "Thunderbird"
def test_item_history_weight(self):
""" Make sure the weight is shown on the review page"""
AutoApprovalSummary.objects.create(
version=self.version, verdict=amo.AUTO_APPROVED,
weight=284)
self.grant_permission(self.reviewer, 'Addons:PostReview')
url = reverse('reviewers.review', args=[self.addon.slug])
response = self.client.get(url)
assert response.status_code == 200
doc = pq(response.content)
risk = doc('.listing-body .file-weight')
assert risk.text() == "Weight: 284"
def test_item_history_notes(self):
version = self.addon.versions.all()[0]
version.releasenotes = 'hi'
version.approvalnotes = 'secret hi'
version.save()
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)('#review-files')
version = doc('.activity_version')
assert version.length == 1
assert version.text() == 'hi'
approval = doc('.activity_approval')
assert approval.length == 1
assert approval.text() == 'secret hi'
def test_item_history_header(self):
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert ('Approved' in
doc('#review-files .listing-header .light').text())
def test_item_history_comment(self):
# Add Comment.
self.client.post(self.url, {'action': 'comment',
'comments': 'hello sailor'})
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)('#review-files')
assert doc('th').eq(1).text() == 'Commented'
assert doc('.history-comment').text() == 'hello sailor'
def test_files_in_item_history(self):
data = {'action': 'public', 'operating_systems': 'win',
'applications': 'something', 'comments': 'something'}
self.client.post(self.url, data)
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
items = doc('#review-files .files .file-info')
assert items.length == 1
assert items.find('a.reviewers-install').text() == 'All Platforms'
def test_no_items(self):
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert doc('#review-files .no-activity').length == 1
def test_action_links(self):
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
expected = [
('View Listing', self.addon.get_url_path()),
]
check_links(expected, doc('#actions-addon a'), verify=False)
def test_action_links_as_admin(self):
self.login_as_admin()
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
expected = [
('View Listing', self.addon.get_url_path()),
('Edit', self.addon.get_dev_url()),
('Admin Page',
reverse('zadmin.addon_manage', args=[self.addon.id])),
]
check_links(expected, doc('#actions-addon a'), verify=False)
def test_unlisted_addon_action_links_as_admin(self):
"""No "View Listing" link for unlisted addons, "edit"/"manage" links
for the admins."""
self.make_addon_unlisted(self.addon)
self.login_as_admin()
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
expected = [
('Unlisted Review Page', reverse(
'reviewers.review', args=('unlisted', self.addon.slug))),
('Edit', self.addon.get_dev_url()),
('Admin Page', reverse(
'zadmin.addon_manage', args=[self.addon.id])),
]
check_links(expected, doc('#actions-addon a'), verify=False)
def test_mixed_channels_action_links_as_admin(self):
self.make_addon_unlisted(self.addon)
version_factory(
addon=self.addon, channel=amo.RELEASE_CHANNEL_LISTED,
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
self.addon.update(status=amo.STATUS_NOMINATED)
self.login_as_admin()
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
expected = [
('View Listing', self.addon.get_url_path()),
('Unlisted Review Page', reverse(
'reviewers.review', args=('unlisted', self.addon.slug))),
('Edit', self.addon.get_dev_url()),
('Admin Page', reverse(
'zadmin.addon_manage', args=[self.addon.id])),
]
check_links(expected, doc('#actions-addon a'), verify=False)
def test_mixed_channels_action_links_as_admin_on_unlisted_review(self):
self.make_addon_unlisted(self.addon)
version_factory(
addon=self.addon, channel=amo.RELEASE_CHANNEL_LISTED,
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
self.addon.update(status=amo.STATUS_NOMINATED)
self.login_as_admin()
self.url = reverse(
'reviewers.review', args=('unlisted', self.addon.slug))
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
expected = [
('View Listing', self.addon.get_url_path()),
('Listed Review Page',
reverse('reviewers.review', args=(self.addon.slug,))),
('Edit', self.addon.get_dev_url()),
('Admin Page',
reverse('zadmin.addon_manage', args=[self.addon.id])),
]
check_links(expected, doc('#actions-addon a'), verify=False)
def test_mixed_channels_action_links_as_regular_reviewer(self):
self.make_addon_unlisted(self.addon)
version_factory(
addon=self.addon, channel=amo.RELEASE_CHANNEL_LISTED,
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
self.addon.update(status=amo.STATUS_NOMINATED)
self.login_as_reviewer()
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
expected = [
('View Listing', self.addon.get_url_path()),
]
check_links(expected, doc('#actions-addon a'), verify=False)
def test_admin_links_as_non_admin(self):
self.login_as_reviewer()
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
admin = doc('#actions-addon li')
assert admin.length == 1
def test_extra_actions_subscribe_checked_state(self):
self.login_as_reviewer()
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
subscribe_input = doc('#notify_new_listed_versions')[0]
assert 'checked' not in subscribe_input.attrib
ReviewerSubscription.objects.create(
addon=self.addon, user=self.reviewer)
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
subscribe_input = doc('#notify_new_listed_versions')[0]
assert subscribe_input.attrib['checked'] == 'checked'
def test_extra_actions_token(self):
self.login_as_reviewer()
self.client.cookies[API_TOKEN_COOKIE] = 'youdidntsaythemagicword'
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
token = doc('#extra-review-actions').attr('data-api-token')
assert token == 'youdidntsaythemagicword'
def test_extra_actions_not_for_reviewers(self):
self.login_as_reviewer()
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert not doc('#force_disable_addon')
assert not doc('#force_enable_addon')
assert not doc('#clear_admin_code_review')
assert not doc('#clear_admin_content_review')
assert not doc('#clear_admin_theme_review')
assert not doc('#disable_auto_approval')
assert not doc('#enable_auto_approval')
assert not doc('#clear_pending_info_request')
def test_extra_actions_admin_disable_enable(self):
self.login_as_admin()
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert doc('#force_disable_addon')
elem = doc('#force_disable_addon')[0]
assert 'hidden' not in elem.getparent().attrib.get('class', '')
assert doc('#force_enable_addon')
elem = doc('#force_enable_addon')[0]
assert 'hidden' in elem.getparent().attrib.get('class', '')
def test_unflag_option_forflagged_as_admin(self):
self.login_as_admin()
AddonReviewerFlags.objects.create(
addon=self.addon, needs_admin_code_review=True)
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert doc('#clear_admin_code_review').length == 1
assert doc('#clear_admin_content_review').length == 0
        assert doc('#clear_admin_theme_review').length == 0
def test_unflag_content_option_forflagged_as_admin(self):
self.login_as_admin()
AddonReviewerFlags.objects.create(
addon=self.addon,
needs_admin_code_review=False,
needs_admin_content_review=True)
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert doc('#clear_admin_code_review').length == 0
assert doc('#clear_admin_content_review').length == 1
assert doc('#clear_admin_theme_review').length == 0
def test_unflag_theme_option_forflagged_as_admin(self):
self.login_as_admin()
AddonReviewerFlags.objects.create(
addon=self.addon,
needs_admin_code_review=False,
needs_admin_content_review=False,
needs_admin_theme_review=True)
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert doc('#clear_admin_code_review').length == 0
assert doc('#clear_admin_content_review').length == 0
assert doc('#clear_admin_theme_review').length == 1
def test_disable_auto_approvals_as_admin(self):
self.login_as_admin()
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert doc('#disable_auto_approval')
elem = doc('#disable_auto_approval')[0]
assert 'hidden' not in elem.getparent().attrib.get('class', '')
assert doc('#enable_auto_approval')
elem = doc('#enable_auto_approval')[0]
assert 'hidden' in elem.getparent().attrib.get('class', '')
# Both of them should be absent on static themes, which are not
# auto-approved.
self.addon.update(type=amo.ADDON_STATICTHEME)
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert not doc('#disable_auto_approval')
assert not doc('#enable_auto_approval')
def test_enable_auto_approvals_as_admin_auto_approvals_disabled(self):
self.login_as_admin()
AddonReviewerFlags.objects.create(
addon=self.addon, auto_approval_disabled=True)
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert doc('#disable_auto_approval')
elem = doc('#disable_auto_approval')[0]
assert 'hidden' in elem.getparent().attrib.get('class', '')
assert doc('#enable_auto_approval')
elem = doc('#enable_auto_approval')[0]
assert 'hidden' not in elem.getparent().attrib.get('class', '')
# Both of them should be absent on static themes, which are not
# auto-approved.
self.addon.update(type=amo.ADDON_STATICTHEME)
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert not doc('#disable_auto_approval')
assert not doc('#enable_auto_approval')
def test_clear_pending_info_request_as_admin(self):
self.login_as_admin()
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert not doc('#clear_pending_info_request')
AddonReviewerFlags.objects.create(
addon=self.addon, pending_info_request=self.days_ago(1))
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert doc('#clear_pending_info_request')
def test_info_request_checkbox(self):
self.login_as_reviewer()
assert not self.addon.pending_info_request
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert 'checked' not in doc('#id_info_request')[0].attrib
elm = doc('#id_info_request_deadline')[0]
assert elm.attrib['readonly'] == 'readonly'
assert elm.attrib['min'] == '7'
assert elm.attrib['max'] == '7'
assert elm.attrib['value'] == '7'
AddonReviewerFlags.objects.create(
addon=self.addon,
pending_info_request=datetime.now() + timedelta(days=7))
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert doc('#id_info_request')[0].attrib['checked'] == 'checked'
def test_info_request_checkbox_admin(self):
self.login_as_admin()
assert not self.addon.pending_info_request
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert 'checked' not in doc('#id_info_request')[0].attrib
elm = doc('#id_info_request_deadline')[0]
assert 'readonly' not in elm.attrib
assert elm.attrib['min'] == '1'
assert elm.attrib['max'] == '99'
assert elm.attrib['value'] == '7'
def test_no_public(self):
has_public = self.version.files.filter(
status=amo.STATUS_PUBLIC).exists()
assert has_public
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
validation = doc.find('.files')
assert validation.find('a').eq(1).text() == "Validation"
assert validation.find('a').eq(2).text() == "Contents"
assert validation.find('a').length == 3
def test_public_search(self):
self.version.files.update(status=amo.STATUS_PUBLIC)
self.addon.update(type=amo.ADDON_SEARCH)
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert doc('#review-files .files ul .file-info').length == 1
def test_version_deletion(self):
"""
Make sure that we still show review history for deleted versions.
"""
# Add a new version to the add-on.
addon = addon_factory(
status=amo.STATUS_NOMINATED, name='something',
version_kw={'version': '0.2'},
file_kw={'status': amo.STATUS_AWAITING_REVIEW})
assert self.addon.versions.count() == 1
self.review_version(self.version, self.url)
v2 = addon.versions.all()[0]
v2.addon = self.addon
v2.created = v2.created + timedelta(days=1)
v2.save()
self.review_version(v2, self.url)
assert self.addon.versions.count() == 2
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
        # View the history and verify two versions:
ths = doc('table#review-files > tr > th:first-child')
assert '0.1' in ths.eq(0).text()
assert '0.2' in ths.eq(1).text()
# Delete a version:
v2.delete()
# Verify two versions, one deleted:
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
ths = doc('table#review-files > tr > th:first-child')
assert ths.length == 2
assert '0.1' in ths.text()
def test_no_versions(self):
"""The review page should still load if there are no versions. But not
unless you have unlisted permissions."""
assert self.client.get(self.url).status_code == 200
response = self.client.post(self.url, {'action': 'comment',
'comments': 'hello sailor'})
assert response.status_code == 302
self.assert3xx(response, reverse('reviewers.queue_pending'),
status_code=302)
self.version.delete()
# Regular reviewer has no permission, gets a 404.
assert self.client.get(self.url).status_code == 404
# Reviewer with more powers can look.
self.grant_permission(self.reviewer, 'Addons:ReviewUnlisted')
assert self.client.get(self.url).status_code == 200
response = self.client.post(self.url, {'action': 'comment',
'comments': 'hello sailor'})
assert response.status_code == 302
self.assert3xx(response, reverse('reviewers.queue_pending'),
status_code=302)
def test_addon_deleted(self):
"""The review page should still load for deleted addons."""
self.addon.delete()
self.url = reverse('reviewers.review', args=[self.addon.pk])
assert self.client.get(self.url).status_code == 200
response = self.client.post(self.url, {'action': 'comment',
'comments': 'hello sailor'})
assert response.status_code == 302
self.assert3xx(response, reverse('reviewers.queue_pending'),
status_code=302)
@patch('olympia.reviewers.utils.sign_file')
def review_version(self, version, url, mock_sign):
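        """Post a review for `version` at `url`: approve listed versions,
        reply to unlisted ones. Returns the action used."""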
if version.channel == amo.RELEASE_CHANNEL_LISTED:
version.files.all()[0].update(status=amo.STATUS_AWAITING_REVIEW)
action = 'public'
else:
action = 'reply'
data = {
'action': action,
'operating_systems': 'win',
'applications': 'something',
'comments': 'something',
}
self.client.post(url, data)
if version.channel == amo.RELEASE_CHANNEL_LISTED:
assert mock_sign.called
return action
def test_dependencies_listed(self):
AddonDependency.objects.create(addon=self.addon,
dependent_addon=self.addon)
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
deps = doc('.addon-info .addon-dependencies')
assert deps.length == 1
assert deps.find('li').length == 1
assert deps.find('a').attr('href') == self.addon.get_url_path()
def test_eula_displayed(self):
assert not bool(self.addon.eula)
response = self.client.get(self.url)
assert response.status_code == 200
self.assertNotContains(response, 'View End-User License Agreement')
self.addon.eula = 'Test!'
self.addon.save()
assert bool(self.addon.eula)
response = self.client.get(self.url)
assert response.status_code == 200
self.assertContains(response, 'View End-User License Agreement')
def test_privacy_policy_displayed(self):
assert self.addon.privacy_policy is None
response = self.client.get(self.url)
assert response.status_code == 200
self.assertNotContains(response, 'View Privacy Policy')
self.addon.privacy_policy = 'Test!'
self.addon.save()
response = self.client.get(self.url)
assert response.status_code == 200
self.assertContains(response, 'View Privacy Policy')
def test_requires_payment_indicator(self):
assert not self.addon.requires_payment
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert 'No' in doc('tr.requires-payment td').text()
self.addon.update(requires_payment=True)
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert 'Yes' in doc('tr.requires-payment td').text()
def test_viewing(self):
url = reverse('reviewers.review_viewing')
response = self.client.post(url, {'addon_id': self.addon.id})
data = json.loads(response.content)
assert data['current'] == self.reviewer.id
assert data['current_name'] == self.reviewer.name
assert data['is_user'] == 1
# Now, login as someone else and test.
self.login_as_admin()
response = self.client.post(url, {'addon_id': self.addon.id})
data = json.loads(response.content)
assert data['current'] == self.reviewer.id
assert data['current_name'] == self.reviewer.name
assert data['is_user'] == 0
    # Let's just override this to make the test a bit shorter.
@mock.patch.object(amo, 'REVIEWER_REVIEW_LOCK_LIMIT', 1)
def test_viewing_lock_limit(self):
url = reverse('reviewers.review_viewing')
response = self.client.post(url, {'addon_id': 1234})
data = json.loads(response.content)
assert data['current'] == self.reviewer.id
assert data['current_name'] == self.reviewer.name
assert data['is_user'] == 1
# Second review page is over the limit.
response = self.client.post(url, {'addon_id': 5678})
data = json.loads(response.content)
assert data['current'] == settings.TASK_USER_ID # Mozilla's task ID.
assert data['current_name'] == 'Review lock limit reached'
assert data['is_user'] == 2
# Now, login as someone else and test. First page is blocked.
self.login_as_admin()
response = self.client.post(url, {'addon_id': 1234})
data = json.loads(response.content)
assert data['current'] == self.reviewer.id
assert data['current_name'] == self.reviewer.name
assert data['is_user'] == 0
# Second page is available.
response = self.client.post(url, {'addon_id': 5678})
data = json.loads(response.content)
admin = UserProfile.objects.get(username='admin')
assert data['current'] == admin.id
assert data['current_name'] == admin.name
assert data['is_user'] == 1
    # Let's just override this to make the test a bit shorter.
@mock.patch.object(amo, 'REVIEWER_REVIEW_LOCK_LIMIT', 1)
def test_viewing_lock_admin(self):
self.login_as_admin()
url = reverse('reviewers.review_viewing')
admin = UserProfile.objects.get(username='admin')
response = self.client.post(url, {'addon_id': 101})
data = json.loads(response.content)
assert data['current'] == admin.id
assert data['current_name'] == admin.name
assert data['is_user'] == 1
# Admin don't have time for no limits.
response = self.client.post(url, {'addon_id': 202})
data = json.loads(response.content)
assert data['current'] == admin.id
assert data['current_name'] == admin.name
assert data['is_user'] == 1
def test_viewing_review_unlocks(self):
reviewing_url = reverse('reviewers.review_viewing')
self.client.post(reviewing_url, {'addon_id': self.addon.id})
key = '%s:review_viewing:%s' % (settings.CACHE_PREFIX, self.addon.id)
assert cache.get(key) == self.reviewer.id
self.client.post(self.url, {'action': 'comment',
'comments': 'hello sailor'})
# Processing a review should instantly clear the review lock on it.
assert cache.get(key) is None
def test_viewing_queue(self):
response = self.client.post(reverse('reviewers.review_viewing'),
{'addon_id': self.addon.id})
data = json.loads(response.content)
assert data['current'] == self.reviewer.id
assert data['current_name'] == self.reviewer.name
assert data['is_user'] == 1
# Now, login as someone else and test.
self.login_as_admin()
r = self.client.post(reverse('reviewers.queue_viewing'),
{'addon_ids': self.addon.id})
data = json.loads(r.content)
assert data[str(self.addon.id)] == self.reviewer.display_name
def test_display_same_files_only_once(self):
"""
Test whether identical files for different platforms
show up as one link with the appropriate text.
"""
version = version_factory(
addon=self.addon, version='0.2', file_kw=False)
file_mac = file_factory(version=version, platform=amo.PLATFORM_MAC.id)
file_android = file_factory(
version=version, platform=amo.PLATFORM_ANDROID.id)
# Signing causes the same uploaded file to be different
file_mac.update(hash='xyz789', original_hash='123abc')
file_android.update(hash='zyx987', original_hash='123abc')
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
text = doc('.reviewers-install').eq(1).text()
assert text == "Mac OS X / Android"
def test_compare_no_link(self):
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
info = doc('#review-files .file-info')
assert info.length == 1
assert info.find('a.compare').length == 0
def test_file_info_for_static_themes(self):
self.grant_permission(self.reviewer, 'Addons:ThemeReview')
self.addon.update(type=amo.ADDON_STATICTHEME)
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
info = doc('#review-files .file-info')
assert info.length == 1
# Only the download/install link
assert info.find('a').length == 1
assert info.find('a')[0].text == u'Download'
assert 'Compatibility' not in response.content
def test_compare_link(self):
first_file = self.addon.current_version.files.all()[0]
first_file.update(status=amo.STATUS_PUBLIC)
self.addon.current_version.update(created=self.days_ago(2))
new_version = version_factory(addon=self.addon, version='0.2')
new_file = new_version.files.all()[0]
self.addon.update(_current_version=new_version)
assert self.addon.current_version == new_version
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert response.context['show_diff']
links = doc('#review-files .file-info .compare')
expected = [
reverse('files.compare', args=[new_file.pk, first_file.pk]),
]
check_links(expected, links, verify=False)
def test_compare_link_auto_approved_ignored(self):
first_file = self.addon.current_version.files.all()[0]
first_file.update(status=amo.STATUS_PUBLIC)
self.addon.current_version.update(created=self.days_ago(3))
interim_version = version_factory(addon=self.addon, version='0.2')
interim_version.update(created=self.days_ago(2))
AutoApprovalSummary.objects.create(
version=interim_version, verdict=amo.AUTO_APPROVED)
new_version = version_factory(addon=self.addon, version='0.3')
new_file = new_version.files.all()[0]
self.addon.update(_current_version=new_version)
assert self.addon.current_version == new_version
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert response.context['show_diff']
links = doc('#review-files .file-info .compare')
# Comparison should be between the last version and the first,
# ignoring the interim version because it was auto-approved and not
# manually confirmed by a human.
expected = [
reverse('files.compare', args=[new_file.pk, first_file.pk]),
]
check_links(expected, links, verify=False)
def test_compare_link_auto_approved_but_confirmed_not_ignored(self):
first_file = self.addon.current_version.files.all()[0]
first_file.update(status=amo.STATUS_PUBLIC)
self.addon.current_version.update(created=self.days_ago(3))
confirmed_version = version_factory(addon=self.addon, version='0.2')
confirmed_version.update(created=self.days_ago(2))
confirmed_file = confirmed_version.files.all()[0]
AutoApprovalSummary.objects.create(
verdict=amo.AUTO_APPROVED, version=confirmed_version,
confirmed=True)
interim_version = version_factory(addon=self.addon, version='0.3')
interim_version.update(created=self.days_ago(1))
AutoApprovalSummary.objects.create(
version=interim_version, verdict=amo.AUTO_APPROVED)
new_version = version_factory(addon=self.addon, version='0.4')
new_file = new_version.files.all()[0]
self.addon.update(_current_version=new_version)
assert self.addon.current_version == new_version
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert response.context['show_diff']
links = doc('#review-files .file-info .compare')
# Comparison should be between the last version and the second,
# ignoring the third version because it was auto-approved and not
# manually confirmed by a human (the second was auto-approved but
# was manually confirmed).
expected = [
reverse('files.compare', args=[new_file.pk, confirmed_file.pk]),
]
check_links(expected, links, verify=False)
def test_compare_link_not_auto_approved_but_confirmed(self):
first_file = self.addon.current_version.files.all()[0]
first_file.update(status=amo.STATUS_PUBLIC)
self.addon.current_version.update(created=self.days_ago(3))
confirmed_version = version_factory(addon=self.addon, version='0.2')
confirmed_version.update(created=self.days_ago(2))
confirmed_file = confirmed_version.files.all()[0]
AutoApprovalSummary.objects.create(
verdict=amo.NOT_AUTO_APPROVED, version=confirmed_version
)
new_version = version_factory(addon=self.addon, version='0.3')
new_file = new_version.files.all()[0]
self.addon.update(_current_version=new_version)
assert self.addon.current_version == new_version
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert response.context['show_diff']
links = doc('#review-files .file-info .compare')
# Comparison should be between the last version and the second,
# because second was approved by human before auto-approval ran on it
expected = [
reverse('files.compare', args=[new_file.pk, confirmed_file.pk]),
]
check_links(expected, links, verify=False)
def test_download_sources_link(self):
version = self.addon.current_version
tdir = temp.gettempdir()
source_file = temp.NamedTemporaryFile(suffix='.zip', dir=tdir)
source_file.write('a' * (2 ** 21))
source_file.seek(0)
version.source = DjangoFile(source_file)
version.save()
url = reverse('reviewers.review', args=[self.addon.pk])
# Admin reviewer: able to download sources.
user = UserProfile.objects.get(email='[email protected]')
self.client.login(email=user.email)
response = self.client.get(url, follow=True)
assert response.status_code == 200
assert 'Download files' in response.content
# Standard reviewer: should know that sources were provided.
user = UserProfile.objects.get(email='[email protected]')
self.client.login(email=user.email)
response = self.client.get(url, follow=True)
assert response.status_code == 200
assert 'The developer has provided source code.' in response.content
@patch('olympia.reviewers.utils.sign_file')
def test_admin_flagged_addon_actions_as_admin(self, mock_sign_file):
self.version.files.update(status=amo.STATUS_AWAITING_REVIEW)
self.addon.update(status=amo.STATUS_NOMINATED)
AddonReviewerFlags.objects.create(
addon=self.addon, needs_admin_code_review=True)
self.login_as_admin()
response = self.client.post(self.url, self.get_dict(action='public'),
follow=True)
assert response.status_code == 200
addon = self.get_addon()
assert self.version == addon.current_version
assert addon.status == amo.STATUS_PUBLIC
assert addon.current_version.files.all()[0].status == amo.STATUS_PUBLIC
assert mock_sign_file.called
def test_admin_flagged_addon_actions_as_reviewer(self):
self.version.files.update(status=amo.STATUS_AWAITING_REVIEW)
self.addon.update(status=amo.STATUS_NOMINATED)
AddonReviewerFlags.objects.create(
addon=self.addon, needs_admin_code_review=True)
self.login_as_reviewer()
response = self.client.post(self.url, self.get_dict(action='public'))
assert response.status_code == 200 # Form error.
# The add-on status must not change as non-admin reviewers are not
# allowed to review admin-flagged add-ons.
addon = self.get_addon()
assert addon.status == amo.STATUS_NOMINATED
assert self.version == addon.current_version
assert addon.current_version.files.all()[0].status == (
amo.STATUS_AWAITING_REVIEW)
assert response.context['form'].errors['action'] == (
[u'Select a valid choice. public is not one of the available '
u'choices.'])
def test_admin_flagged_addon_actions_as_content_reviewer(self):
self.version.files.update(status=amo.STATUS_AWAITING_REVIEW)
self.addon.update(status=amo.STATUS_NOMINATED)
AddonReviewerFlags.objects.create(
addon=self.addon, needs_admin_code_review=True)
GroupUser.objects.filter(user=self.reviewer).all().delete()
self.grant_permission(self.reviewer, 'Addons:ContentReview')
self.url = reverse(
'reviewers.review', args=['content', self.addon.slug])
for action in ['confirm_auto_approved', 'reject_multiple_versions']:
response = self.client.post(self.url, self.get_dict(action=action))
assert response.status_code == 200 # Form error.
# The add-on status must not change as non-admin reviewers are not
# allowed to review admin-flagged add-ons.
addon = self.get_addon()
assert addon.status == amo.STATUS_NOMINATED
assert self.version == addon.current_version
assert addon.current_version.files.all()[0].status == (
amo.STATUS_AWAITING_REVIEW)
assert response.context['form'].errors['action'] == (
[u'Select a valid choice. %s is not one of the available '
u'choices.' % action])
assert ActivityLog.objects.filter(
action=amo.LOG.APPROVE_CONTENT.id).count() == 0
assert ActivityLog.objects.filter(
action=amo.LOG.REJECT_CONTENT.id).count() == 0
def test_confirm_auto_approval_no_permission(self):
AutoApprovalSummary.objects.create(
version=self.addon.current_version, verdict=amo.AUTO_APPROVED)
self.login_as_reviewer() # Legacy reviewer, not post-review.
response = self.client.post(
self.url, {'action': 'confirm_auto_approved'})
assert response.status_code == 403
# Nothing happened: the user did not have the permission to do that.
assert ActivityLog.objects.filter(
action=amo.LOG.CONFIRM_AUTO_APPROVED.id).count() == 0
def test_attempt_to_use_content_review_permission_for_post_review_actions(
self):
# Try to use confirm_auto_approved outside of content review, while
# only having Addons:ContentReview permission.
self.grant_permission(self.reviewer, 'Addons:ContentReview')
AutoApprovalSummary.objects.create(
version=self.addon.current_version, verdict=amo.AUTO_APPROVED)
self.login_as_reviewer()
response = self.client.post(
self.url, {'action': 'confirm_auto_approved'})
assert response.status_code == 403
# Nothing happened: the user did not have the permission to do that.
assert ActivityLog.objects.filter(
action=amo.LOG.CONFIRM_AUTO_APPROVED.id).count() == 0
def test_confirm_auto_approval_content_review(self):
GroupUser.objects.filter(user=self.reviewer).all().delete()
self.url = reverse(
'reviewers.review', args=['content', self.addon.slug])
summary = AutoApprovalSummary.objects.create(
version=self.addon.current_version, verdict=amo.AUTO_APPROVED)
self.grant_permission(self.reviewer, 'Addons:ContentReview')
response = self.client.post(self.url, {
'action': 'confirm_auto_approved',
'comments': 'ignore me this action does not support comments'
})
assert response.status_code == 302
summary.reload()
assert summary.confirmed is None # We're only doing a content review.
assert ActivityLog.objects.filter(
action=amo.LOG.CONFIRM_AUTO_APPROVED.id).count() == 0
assert ActivityLog.objects.filter(
action=amo.LOG.APPROVE_CONTENT.id).count() == 1
a_log = ActivityLog.objects.filter(
action=amo.LOG.APPROVE_CONTENT.id).get()
assert a_log.details['version'] == self.addon.current_version.version
assert a_log.details['comments'] == ''
self.assert3xx(response, reverse('reviewers.queue_content_review'))
def test_cant_contentreview_if_admin_content_review_flag_is_set(self):
GroupUser.objects.filter(user=self.reviewer).all().delete()
self.url = reverse(
'reviewers.review', args=['content', self.addon.slug])
AutoApprovalSummary.objects.create(
version=self.addon.current_version, verdict=amo.AUTO_APPROVED)
AddonReviewerFlags.objects.create(
addon=self.addon, needs_admin_content_review=True)
self.grant_permission(self.reviewer, 'Addons:ContentReview')
response = self.client.post(self.url, {
'action': 'confirm_auto_approved',
'comments': 'ignore me this action does not support comments'
})
assert response.status_code == 200 # Form error
assert ActivityLog.objects.filter(
action=amo.LOG.APPROVE_CONTENT.id).count() == 0
def test_can_contentreview_if_addon_has_sources_attached(self):
GroupUser.objects.filter(user=self.reviewer).all().delete()
self.url = reverse(
'reviewers.review', args=['content', self.addon.slug])
summary = AutoApprovalSummary.objects.create(
version=self.addon.current_version, verdict=amo.AUTO_APPROVED)
self.addon.current_version.update(source='/path/to/fake/file.zip')
AddonReviewerFlags.objects.create(
addon=self.addon, needs_admin_code_review=True)
self.grant_permission(self.reviewer, 'Addons:ContentReview')
response = self.client.post(self.url, {
'action': 'confirm_auto_approved',
'comments': 'ignore me this action does not support comments'
})
assert response.status_code == 302
summary.reload()
assert summary.confirmed is None # We're only doing a content review.
assert ActivityLog.objects.filter(
action=amo.LOG.CONFIRM_AUTO_APPROVED.id).count() == 0
assert ActivityLog.objects.filter(
action=amo.LOG.APPROVE_CONTENT.id).count() == 1
a_log = ActivityLog.objects.filter(
action=amo.LOG.APPROVE_CONTENT.id).get()
assert a_log.details['version'] == self.addon.current_version.version
assert a_log.details['comments'] == ''
self.assert3xx(response, reverse('reviewers.queue_content_review'))
def test_cant_contentreview_if_addon_has_admin_flag_but_no_sources(self):
GroupUser.objects.filter(user=self.reviewer).all().delete()
self.url = reverse(
'reviewers.review', args=['content', self.addon.slug])
AutoApprovalSummary.objects.create(
version=self.addon.current_version, verdict=amo.AUTO_APPROVED)
AddonReviewerFlags.objects.create(
addon=self.addon, needs_admin_code_review=True)
self.grant_permission(self.reviewer, 'Addons:ContentReview')
response = self.client.post(self.url, {
'action': 'confirm_auto_approved',
'comments': 'ignore me this action does not support comments'
})
assert response.status_code == 200 # Form error
assert ActivityLog.objects.filter(
action=amo.LOG.APPROVE_CONTENT.id).count() == 0
def test_cant_addonreview_if_admin_content_review_flag_is_set(self):
AutoApprovalSummary.objects.create(
version=self.addon.current_version, verdict=amo.AUTO_APPROVED)
AddonReviewerFlags.objects.create(
addon=self.addon, needs_admin_content_review=True)
self.grant_permission(self.reviewer, 'Addons:PostReview')
for action in ['confirm_auto_approved', 'public', 'reject',
'reject_multiple_versions']:
response = self.client.post(self.url, self.get_dict(action=action))
assert response.status_code == 200 # Form error.
# The add-on status must not change as non-admin reviewers are not
# allowed to review admin-flagged add-ons.
addon = self.get_addon()
assert addon.status == amo.STATUS_PUBLIC
assert self.version == addon.current_version
assert addon.current_version.files.all()[0].status == (
amo.STATUS_PUBLIC)
assert response.context['form'].errors['action'] == (
[u'Select a valid choice. %s is not one of the available '
u'choices.' % action])
assert ActivityLog.objects.filter(
action=amo.LOG.CONFIRM_AUTO_APPROVED.id).count() == 0
assert ActivityLog.objects.filter(
action=amo.LOG.REJECT_VERSION.id).count() == 0
assert ActivityLog.objects.filter(
action=amo.LOG.APPROVE_VERSION.id).count() == 0
def test_cant_review_static_theme_if_admin_theme_review_flag_is_set(self):
self.version.files.update(status=amo.STATUS_AWAITING_REVIEW)
self.addon.update(
type=amo.ADDON_STATICTHEME, status=amo.STATUS_NOMINATED)
AddonReviewerFlags.objects.create(
addon=self.addon, needs_admin_theme_review=True)
self.grant_permission(self.reviewer, 'Addons:ThemeReview')
for action in ['public', 'reject']:
response = self.client.post(self.url, self.get_dict(action=action))
assert response.status_code == 200 # Form error.
# The add-on status must not change as non-admin reviewers are not
# allowed to review admin-flagged add-ons.
addon = self.get_addon()
assert addon.status == amo.STATUS_NOMINATED
assert self.version == addon.current_version
assert addon.current_version.files.all()[0].status == (
amo.STATUS_AWAITING_REVIEW)
assert response.context['form'].errors['action'] == (
[u'Select a valid choice. %s is not one of the available '
u'choices.' % action])
assert ActivityLog.objects.filter(
action=amo.LOG.REJECT_VERSION.id).count() == 0
assert ActivityLog.objects.filter(
action=amo.LOG.APPROVE_VERSION.id).count() == 0
@patch('olympia.reviewers.utils.sign_file')
def test_admin_can_review_statictheme_if_admin_theme_review_flag_set(
self, mock_sign_file):
self.version.files.update(status=amo.STATUS_AWAITING_REVIEW)
self.addon.update(
type=amo.ADDON_STATICTHEME, status=amo.STATUS_NOMINATED)
AddonReviewerFlags.objects.create(
addon=self.addon, needs_admin_theme_review=True)
self.grant_permission(self.reviewer, 'Addons:ThemeReview')
self.grant_permission(self.reviewer, 'Reviews:Admin')
response = self.client.post(self.url, {
'action': 'public',
            'comments': "it's good"
})
assert response.status_code == 302
assert self.get_addon().status == amo.STATUS_PUBLIC
assert mock_sign_file.called
def test_admin_can_contentreview_if_admin_content_review_flag_is_set(self):
GroupUser.objects.filter(user=self.reviewer).all().delete()
self.url = reverse(
'reviewers.review', args=['content', self.addon.slug])
summary = AutoApprovalSummary.objects.create(
version=self.addon.current_version, verdict=amo.AUTO_APPROVED)
AddonReviewerFlags.objects.create(
addon=self.addon, needs_admin_content_review=True)
self.grant_permission(self.reviewer, 'Addons:ContentReview')
self.grant_permission(self.reviewer, 'Reviews:Admin')
response = self.client.post(self.url, {
'action': 'confirm_auto_approved',
'comments': 'ignore me this action does not support comments'
})
assert response.status_code == 302
summary.reload()
assert summary.confirmed is None # We're only doing a content review.
assert ActivityLog.objects.filter(
action=amo.LOG.CONFIRM_AUTO_APPROVED.id).count() == 0
assert ActivityLog.objects.filter(
action=amo.LOG.APPROVE_CONTENT.id).count() == 1
a_log = ActivityLog.objects.filter(
action=amo.LOG.APPROVE_CONTENT.id).get()
assert a_log.details['version'] == self.addon.current_version.version
assert a_log.details['comments'] == ''
self.assert3xx(response, reverse('reviewers.queue_content_review'))
def test_confirm_auto_approval_with_permission(self):
summary = AutoApprovalSummary.objects.create(
version=self.addon.current_version, verdict=amo.AUTO_APPROVED)
GroupUser.objects.filter(user=self.reviewer).all().delete()
self.grant_permission(self.reviewer, 'Addons:PostReview')
response = self.client.post(self.url, {
'action': 'confirm_auto_approved',
'comments': 'ignore me this action does not support comments'
})
summary.reload()
assert response.status_code == 302
assert summary.confirmed is True
assert ActivityLog.objects.filter(
action=amo.LOG.CONFIRM_AUTO_APPROVED.id).count() == 1
a_log = ActivityLog.objects.filter(
action=amo.LOG.CONFIRM_AUTO_APPROVED.id).get()
assert a_log.details['version'] == self.addon.current_version.version
assert a_log.details['comments'] == ''
self.assert3xx(response, reverse('reviewers.queue_auto_approved'))
def test_user_changes_log(self):
# Activity logs related to user changes should be displayed.
        # Create an activity log for each of the following: user addition, role
# change and deletion.
author = self.addon.addonuser_set.get()
core.set_user(author.user)
ActivityLog.create(amo.LOG.ADD_USER_WITH_ROLE,
author.user, author.get_role_display(), self.addon)
ActivityLog.create(amo.LOG.CHANGE_USER_WITH_ROLE,
author.user, author.get_role_display(), self.addon)
ActivityLog.create(amo.LOG.REMOVE_USER_WITH_ROLE,
author.user, author.get_role_display(), self.addon)
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert 'user_changes' in response.context
user_changes_log = response.context['user_changes']
actions = [log.activity_log.action for log in user_changes_log]
assert actions == [
amo.LOG.ADD_USER_WITH_ROLE.id,
amo.LOG.CHANGE_USER_WITH_ROLE.id,
amo.LOG.REMOVE_USER_WITH_ROLE.id]
# Make sure the logs are displayed in the page.
user_changes = doc('#user-changes li')
assert len(user_changes) == 3
assert '(Owner) added to ' in user_changes[0].text
assert 'role changed to Owner for ' in user_changes[1].text
assert '(Owner) removed from ' in user_changes[2].text
@override_settings(CELERY_ALWAYS_EAGER=True)
@mock.patch('olympia.devhub.tasks.validate')
def test_validation_not_run_eagerly(self, validate):
"""Tests that validation is not run in eager mode."""
assert not self.file.has_been_validated
response = self.client.get(self.url)
assert response.status_code == 200
assert not validate.called
@override_settings(CELERY_ALWAYS_EAGER=False)
@mock.patch('olympia.devhub.tasks.validate')
def test_validation_run(self, validate):
"""Tests that validation is run if necessary."""
assert not self.file.has_been_validated
response = self.client.get(self.url)
assert response.status_code == 200
validate.assert_called_once_with(self.file)
@override_settings(CELERY_ALWAYS_EAGER=False)
@mock.patch('olympia.devhub.tasks.validate')
def test_validation_not_run_again(self, validate):
"""Tests that validation is not run for files which have cached
results."""
FileValidation.objects.create(file=self.file, validation=json.dumps(
amo.VALIDATOR_SKELETON_RESULTS))
response = self.client.get(self.url)
assert response.status_code == 200
assert not validate.called
def test_review_is_review_listed(self):
review_page = self.client.get(
reverse('reviewers.review', args=[self.addon.slug]))
listed_review_page = self.client.get(
reverse('reviewers.review', args=['listed', self.addon.slug]))
assert (pq(review_page.content)('#review-files').text() ==
pq(listed_review_page.content)('#review-files').text())
def test_approvals_info(self):
approval_info = AddonApprovalsCounter.objects.create(
addon=self.addon, last_human_review=datetime.now(), counter=42)
self.file.update(is_webextension=True)
AutoApprovalSummary.objects.create(
version=self.version, verdict=amo.AUTO_APPROVED)
self.grant_permission(self.reviewer, 'Addons:PostReview')
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert doc('.last-approval-date')
approval_info.delete()
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
# no AddonApprovalsCounter: nothing displayed.
assert not doc('.last-approval-date')
def test_no_auto_approval_summaries_since_everything_is_public(self):
self.grant_permission(self.reviewer, 'Addons:PostReview')
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert not doc('.auto_approval')
def test_permissions_display(self):
permissions = ['bookmarks', 'high', 'voltage']
self.file.update(is_webextension=True)
WebextPermission.objects.create(
permissions=permissions,
file=self.file)
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
info = doc('#review-files .file-info div')
assert info.eq(1).text() == 'Permissions: ' + ', '.join(permissions)
def test_abuse_reports(self):
report = AbuseReport.objects.create(
addon=self.addon, message=u'Et mël mazim ludus.',
ip_address='10.1.2.3')
created_at = defaultfilters.date(report.created)
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert not doc('.abuse_reports')
self.grant_permission(self.reviewer, 'Addons:PostReview')
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert not doc('.abuse_reports')
AutoApprovalSummary.objects.create(
verdict=amo.AUTO_APPROVED, version=self.version)
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert doc('.abuse_reports')
assert (
doc('.abuse_reports').text() ==
u'anonymous [10.1.2.3] reported Public on %s\nEt mël mazim ludus.'
% created_at)
def test_abuse_reports_developers(self):
report = AbuseReport.objects.create(
user=self.addon.listed_authors[0], message=u'Foo, Bâr!',
ip_address='10.4.5.6')
created_at = defaultfilters.date(report.created)
AutoApprovalSummary.objects.create(
verdict=amo.AUTO_APPROVED, version=self.version)
self.grant_permission(self.reviewer, 'Addons:PostReview')
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert doc('.abuse_reports')
assert (
doc('.abuse_reports').text() ==
u'anonymous [10.4.5.6] reported regularuser التطب on %s\nFoo, Bâr!'
% created_at)
def test_user_ratings(self):
user = user_factory()
rating = Rating.objects.create(
body=u'Lôrem ipsum dolor', rating=3, ip_address='10.5.6.7',
addon=self.addon, user=user)
created_at = defaultfilters.date(rating.created)
Rating.objects.create( # Review with no body, ignored.
rating=1, addon=self.addon, user=user_factory())
Rating.objects.create( # Reply to a review, ignored.
body='Replyyyyy', reply_to=rating,
addon=self.addon, user=user_factory())
        Rating.objects.create(  # Review with high rating, ignored.
body=u'Qui platônem temporibus in', rating=5, addon=self.addon,
user=user_factory())
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert not doc('.user_ratings')
self.grant_permission(self.reviewer, 'Addons:PostReview')
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert not doc('.user_ratings')
AutoApprovalSummary.objects.create(
verdict=amo.AUTO_APPROVED, version=self.version)
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert doc('.user_ratings')
assert (
doc('.user_ratings').text() ==
u'%s on %s [10.5.6.7]\n'
u'Rated 3 out of 5 stars\nLôrem ipsum dolor' % (
user.username, created_at
)
)
def test_data_value_attributes(self):
AutoApprovalSummary.objects.create(
verdict=amo.AUTO_APPROVED, version=self.version)
self.grant_permission(self.reviewer, 'Addons:PostReview')
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
expected_actions_values = [
'confirm_auto_approved|', 'reject_multiple_versions|', 'reply|',
'super|', 'comment|']
assert [
act.attrib['data-value'] for act in
doc('.data-toggle.review-actions-desc')] == expected_actions_values
assert (
doc('select#id_versions.data-toggle')[0].attrib['data-value'] ==
'reject_multiple_versions|')
assert (
doc('.data-toggle.review-comments')[0].attrib['data-value'] ==
'reject_multiple_versions|reply|super|comment|')
# We don't have approve/reject actions so these have an empty
# data-value.
assert (
doc('.data-toggle.review-files')[0].attrib['data-value'] == '|')
assert (
doc('.data-toggle.review-tested')[0].attrib['data-value'] == '|')
assert (
doc('.data-toggle.review-info-request')[0].attrib['data-value'] ==
'reply|')
def test_data_value_attributes_unreviewed(self):
self.file.update(status=amo.STATUS_AWAITING_REVIEW)
self.grant_permission(self.reviewer, 'Addons:PostReview')
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
expected_actions_values = [
'public|', 'reject|', 'reply|', 'super|', 'comment|']
assert [
act.attrib['data-value'] for act in
doc('.data-toggle.review-actions-desc')] == expected_actions_values
assert (
doc('select#id_versions.data-toggle')[0].attrib['data-value'] ==
'reject_multiple_versions|')
assert (
doc('.data-toggle.review-comments')[0].attrib['data-value'] ==
'public|reject|reply|super|comment|')
assert (
doc('.data-toggle.review-files')[0].attrib['data-value'] ==
'public|reject|')
assert (
doc('.data-toggle.review-tested')[0].attrib['data-value'] ==
'public|reject|')
def test_data_value_attributes_static_theme(self):
self.addon.update(type=amo.ADDON_STATICTHEME)
self.file.update(status=amo.STATUS_AWAITING_REVIEW)
self.grant_permission(self.reviewer, 'Addons:ThemeReview')
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
expected_actions_values = [
'public|', 'reject|', 'reply|', 'super|', 'comment|']
assert [
act.attrib['data-value'] for act in
doc('.data-toggle.review-actions-desc')] == expected_actions_values
assert (
doc('select#id_versions.data-toggle')[0].attrib['data-value'] ==
'reject_multiple_versions|')
assert (
doc('.data-toggle.review-comments')[0].attrib['data-value'] ==
'public|reject|reply|super|comment|')
        # We don't show the "files" and "tested with" rows for any static theme actions.
assert (
doc('.data-toggle.review-files')[0].attrib['data-value'] ==
'|')
assert (
doc('.data-toggle.review-tested')[0].attrib['data-value'] ==
'|')
def test_post_review_ignore_disabled(self):
# Though the latest version will be disabled, the add-on is public and
# was auto-approved so the confirmation action is available.
AutoApprovalSummary.objects.create(
verdict=amo.AUTO_APPROVED, version=self.version)
version_factory(
addon=self.addon, file_kw={'status': amo.STATUS_DISABLED})
self.grant_permission(self.reviewer, 'Addons:PostReview')
response = self.client.get(self.url)
assert response.status_code == 200
expected_actions = [
'confirm_auto_approved', 'reject_multiple_versions', 'reply',
'super', 'comment']
assert (
[action[0] for action in response.context['actions']] ==
expected_actions)
def test_content_review_ignore_disabled(self):
# Though the latest version will be disabled, the add-on is public and
# was auto-approved so the content approval action is available.
AutoApprovalSummary.objects.create(
verdict=amo.AUTO_APPROVED, version=self.version)
version_factory(
addon=self.addon, file_kw={'status': amo.STATUS_DISABLED})
self.grant_permission(self.reviewer, 'Addons:ContentReview')
self.url = reverse(
'reviewers.review', args=['content', self.addon.slug])
response = self.client.get(self.url)
assert response.status_code == 200
expected_actions = [
'confirm_auto_approved', 'reject_multiple_versions', 'reply',
'super', 'comment']
assert (
[action[0] for action in response.context['actions']] ==
expected_actions)
@mock.patch('olympia.versions.models.walkfiles')
def test_static_theme_backgrounds(self, walkfiles_mock):
background_files = ['a.png', 'b.png', 'c.png']
walkfiles_folder = os.path.join(
user_media_path('addons'), str(self.addon.id),
unicode(self.addon.current_version.id))
walkfiles_mock.return_value = [
os.path.join(walkfiles_folder, filename)
for filename in background_files]
self.addon.update(type=amo.ADDON_STATICTHEME)
self.grant_permission(self.reviewer, 'Addons:ThemeReview')
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
backgrounds_div = doc('div.all-backgrounds')
assert backgrounds_div.length == 1
images = doc('div.all-backgrounds .background.zoombox')
assert images.length == len(walkfiles_mock.return_value)
background_file_folder = '/'.join([
user_media_url('addons'), str(self.addon.id),
unicode(self.addon.current_version.id)])
background_file_urls = [
background_file_folder + '/' + filename
for filename in background_files]
loop_ct = 0
for div_tag in images:
assert div_tag[0].attrib['src'] in background_file_urls
assert ''.join(div_tag.itertext()).strip() == (
'Background file {0} of {1} - {2}'.format(
loop_ct + 1, len(background_files),
background_files[loop_ct]))
loop_ct += 1
class TestReviewPending(ReviewBase):
def setUp(self):
super(TestReviewPending, self).setUp()
self.file = file_factory(version=self.version,
status=amo.STATUS_AWAITING_REVIEW,
is_webextension=True)
self.addon.update(status=amo.STATUS_PUBLIC)
def pending_dict(self):
return self.get_dict(action='public')
@patch('olympia.reviewers.utils.sign_file')
def test_pending_to_public(self, mock_sign):
statuses = (self.version.files.values_list('status', flat=True)
.order_by('status'))
assert list(statuses) == [
amo.STATUS_AWAITING_REVIEW, amo.STATUS_PUBLIC]
response = self.client.post(self.url, self.pending_dict())
assert self.get_addon().status == amo.STATUS_PUBLIC
self.assert3xx(response, reverse('reviewers.queue_pending'))
statuses = (self.version.files.values_list('status', flat=True)
.order_by('status'))
assert list(statuses) == [amo.STATUS_PUBLIC, amo.STATUS_PUBLIC]
assert mock_sign.called
def test_display_only_unreviewed_files(self):
"""Only the currently unreviewed files are displayed."""
self.file.update(filename='somefilename.xpi')
reviewed = File.objects.create(version=self.version,
status=amo.STATUS_PUBLIC,
filename='file_reviewed.xpi')
disabled = File.objects.create(version=self.version,
status=amo.STATUS_DISABLED,
filename='file_disabled.xpi')
unreviewed = File.objects.create(version=self.version,
status=amo.STATUS_AWAITING_REVIEW,
filename='file_unreviewed.xpi')
response = self.client.get(self.url, self.pending_dict())
assert response.status_code == 200
doc = pq(response.content)
assert len(doc('.review-actions-files ul li')) == 2
assert reviewed.filename not in response.content
assert disabled.filename not in response.content
assert unreviewed.filename in response.content
assert self.file.filename in response.content
@patch('olympia.reviewers.utils.sign_file')
def test_review_unreviewed_files(self, mock_sign):
"""Review all the unreviewed files when submitting a review."""
reviewed = File.objects.create(version=self.version,
status=amo.STATUS_PUBLIC)
disabled = File.objects.create(version=self.version,
status=amo.STATUS_DISABLED)
unreviewed = File.objects.create(version=self.version,
status=amo.STATUS_AWAITING_REVIEW)
self.login_as_admin()
response = self.client.post(self.url, self.pending_dict())
self.assert3xx(response, reverse('reviewers.queue_pending'))
assert self.addon.reload().status == amo.STATUS_PUBLIC
assert reviewed.reload().status == amo.STATUS_PUBLIC
assert disabled.reload().status == amo.STATUS_DISABLED
assert unreviewed.reload().status == amo.STATUS_PUBLIC
assert self.file.reload().status == amo.STATUS_PUBLIC
assert mock_sign.called
def test_auto_approval_summary_with_post_review(self):
AutoApprovalSummary.objects.create(
version=self.version,
verdict=amo.NOT_AUTO_APPROVED,
is_locked=True,
)
self.grant_permission(self.reviewer, 'Addons:PostReview')
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
# Locked by a reviewer is shown.
assert len(doc('.auto_approval li')) == 1
assert doc('.auto_approval li').eq(0).text() == (
'Is locked by a reviewer.')
class TestReviewerMOTD(ReviewerTest):
def get_url(self, save=False):
return reverse('reviewers.%smotd' % ('save_' if save else ''))
def test_change_motd(self):
self.login_as_admin()
motd = "Let's get crazy"
response = self.client.post(self.get_url(save=True), {'motd': motd})
url = self.get_url()
self.assert3xx(response, url)
response = self.client.get(url)
assert response.status_code == 200
assert pq(response.content)('.daily-message p').text() == motd
def test_require_reviewer_to_view(self):
url = self.get_url()
self.assertLoginRedirects(self.client.head(url), to=url)
def test_require_admin_to_change_motd(self):
self.login_as_reviewer()
response = self.client.get(self.get_url())
assert response.status_code == 403
response = self.client.post(reverse('reviewers.save_motd'),
{'motd': "I'm a sneaky reviewer"})
assert response.status_code == 403
def test_motd_edit_group(self):
user = UserProfile.objects.get(email='[email protected]')
group = Group.objects.create(name='Add-on Reviewer MOTD',
rules='AddonReviewerMOTD:Edit')
GroupUser.objects.create(user=user, group=group)
self.login_as_reviewer()
response = self.client.post(reverse('reviewers.save_motd'),
{'motd': 'I am the keymaster.'})
assert response.status_code == 302
assert get_config('reviewers_review_motd') == 'I am the keymaster.'
def test_form_errors(self):
self.login_as_admin()
response = self.client.post(self.get_url(save=True))
doc = pq(response.content)
assert doc('#reviewer-motd .errorlist').text() == (
'This field is required.')
class TestStatusFile(ReviewBase):
def get_file(self):
return self.version.files.all()[0]
def check_status(self, expected):
response = self.client.get(self.url)
assert response.status_code == 200
doc = pq(response.content)
assert doc('#review-files .file-info div').text() == expected
def test_status_full(self):
self.get_file().update(status=amo.STATUS_AWAITING_REVIEW)
for status in [amo.STATUS_NOMINATED, amo.STATUS_PUBLIC]:
self.addon.update(status=status)
self.check_status('Awaiting Review')
def test_status_full_reviewed(self):
self.get_file().update(status=amo.STATUS_PUBLIC)
self.addon.update(status=amo.STATUS_PUBLIC)
self.check_status('Approved')
class TestWhiteboard(ReviewBase):
@property
def addon_param(self):
return self.addon.pk if self.addon.is_deleted else self.addon.slug
def test_whiteboard_addition(self):
public_whiteboard_info = u'Public whiteboard info.'
private_whiteboard_info = u'Private whiteboard info.'
url = reverse(
'reviewers.whiteboard', args=['listed', self.addon_param])
response = self.client.post(url, {
'whiteboard-private': private_whiteboard_info,
'whiteboard-public': public_whiteboard_info
})
self.assert3xx(response, reverse(
'reviewers.review', args=('listed', self.addon_param)))
addon = self.addon.reload()
assert addon.whiteboard.public == public_whiteboard_info
assert addon.whiteboard.private == private_whiteboard_info
def test_whiteboard_addition_content_review(self):
public_whiteboard_info = u'Public whiteboard info for content.'
private_whiteboard_info = u'Private whiteboard info for content.'
url = reverse(
'reviewers.whiteboard', args=['content', self.addon_param])
response = self.client.post(url, {
'whiteboard-private': private_whiteboard_info,
'whiteboard-public': public_whiteboard_info
})
assert response.status_code == 403 # Not a content reviewer.
user = UserProfile.objects.get(email='[email protected]')
self.grant_permission(user, 'Addons:ContentReview')
self.login_as_reviewer()
response = self.client.post(url, {
'whiteboard-private': private_whiteboard_info,
'whiteboard-public': public_whiteboard_info
})
self.assert3xx(response, reverse(
'reviewers.review', args=('content', self.addon_param)))
addon = self.addon.reload()
assert addon.whiteboard.public == public_whiteboard_info
assert addon.whiteboard.private == private_whiteboard_info
def test_whiteboard_addition_unlisted_addon(self):
user = UserProfile.objects.get(email='[email protected]')
self.grant_permission(user, 'Addons:ReviewUnlisted')
self.login_as_reviewer()
self.make_addon_unlisted(self.addon)
public_whiteboard_info = u'Public whiteboard info unlisted.'
private_whiteboard_info = u'Private whiteboard info unlisted.'
url = reverse(
'reviewers.whiteboard', args=['unlisted', self.addon_param])
response = self.client.post(url, {
'whiteboard-private': private_whiteboard_info,
'whiteboard-public': public_whiteboard_info
})
self.assert3xx(response, reverse(
'reviewers.review', args=('unlisted', self.addon_param)))
addon = self.addon.reload()
assert addon.whiteboard.public == public_whiteboard_info
assert addon.whiteboard.private == private_whiteboard_info
def test_delete_empty(self):
url = reverse(
'reviewers.whiteboard', args=['listed', self.addon_param])
response = self.client.post(url, {
'whiteboard-private': '',
'whiteboard-public': ''
})
self.assert3xx(response, reverse(
'reviewers.review', args=('listed', self.addon_param)))
assert not Whiteboard.objects.filter(pk=self.addon.pk)
class TestWhiteboardDeleted(TestWhiteboard):
def setUp(self):
super(TestWhiteboardDeleted, self).setUp()
self.addon.delete()
class TestAbuseReports(TestCase):
fixtures = ['base/users', 'base/addon_3615']
def setUp(self):
addon = Addon.objects.get(pk=3615)
addon_developer = addon.listed_authors[0]
someone = UserProfile.objects.exclude(pk=addon_developer.pk)[0]
AbuseReport.objects.create(addon=addon, message=u'wôo')
AbuseReport.objects.create(addon=addon, message=u'yéah',
reporter=someone)
# Make a user abuse report to make sure it doesn't show up.
AbuseReport.objects.create(user=someone, message=u'hey nöw')
# Make a user abuse report for one of the add-on developers: it should
# show up.
AbuseReport.objects.create(user=addon_developer, message='bü!')
def test_abuse_reports_list(self):
assert self.client.login(email='[email protected]')
r = self.client.get(reverse('reviewers.abuse_reports', args=['a3615']))
assert r.status_code == 200
        # We see the two add-on abuse reports plus the report against the
        # developer created in setUp.
assert len(r.context['reports']) == 3
def test_no_abuse_reports_link_for_unlisted_addons(self):
"""Unlisted addons aren't public, and thus have no abuse reports."""
addon = Addon.objects.get(pk=3615)
self.make_addon_unlisted(addon)
self.client.login(email='[email protected]')
        response = self.client.get(
            reverse('reviewers.review', args=[addon.slug]))
        abuse_report_url = reverse('reviewers.abuse_reports', args=['a3615'])
        assert abuse_report_url not in response.content
class TestLeaderboard(ReviewerTest):
fixtures = ['base/users']
def setUp(self):
super(TestLeaderboard, self).setUp()
self.url = reverse('reviewers.leaderboard')
self.user = UserProfile.objects.get(email='[email protected]')
self.login_as_reviewer()
core.set_user(self.user)
def _award_points(self, user, score):
ReviewerScore.objects.create(user=user, note_key=amo.REVIEWED_MANUAL,
score=score, note='Thing.')
def test_leaderboard_ranks(self):
other_reviewer = UserProfile.objects.create(
username='post_reviewer',
display_name='', # No display_name, will fall back on name.
email='[email protected]')
self.grant_permission(
other_reviewer, 'Addons:PostReview',
name='Reviewers: Add-ons' # The name of the group matters here.
)
users = (self.user,
UserProfile.objects.get(email='[email protected]'),
other_reviewer)
self._award_points(users[0], amo.REVIEWED_LEVELS[0]['points'] - 1)
self._award_points(users[1], amo.REVIEWED_LEVELS[0]['points'] + 1)
self._award_points(users[2], amo.REVIEWED_LEVELS[0]['points'] + 2)
def get_cells():
doc = pq(self.client.get(self.url).content.decode('utf-8'))
cells = doc('#leaderboard > tbody > tr > .name, '
'#leaderboard > tbody > tr > .level')
return [cells.eq(i).text() for i in range(0, cells.length)]
assert get_cells() == (
[users[2].name,
users[1].name,
unicode(amo.REVIEWED_LEVELS[0]['name']),
users[0].name])
self._award_points(users[0], 1)
assert get_cells() == (
[users[2].name,
users[1].name,
users[0].name,
unicode(amo.REVIEWED_LEVELS[0]['name'])])
self._award_points(users[0], -1)
self._award_points(users[2], (amo.REVIEWED_LEVELS[1]['points'] -
amo.REVIEWED_LEVELS[0]['points']))
assert get_cells() == (
[users[2].name,
unicode(amo.REVIEWED_LEVELS[1]['name']),
users[1].name,
unicode(amo.REVIEWED_LEVELS[0]['name']),
users[0].name])
class TestXssOnAddonName(amo.tests.TestXss):
def test_reviewers_abuse_report_page(self):
url = reverse('reviewers.abuse_reports', args=[self.addon.slug])
self.assertNameAndNoXSS(url)
def test_reviewers_review_page(self):
url = reverse('reviewers.review', args=[self.addon.slug])
self.assertNameAndNoXSS(url)
class TestAddonReviewerViewSet(TestCase):
client_class = APITestClient
def setUp(self):
super(TestAddonReviewerViewSet, self).setUp()
self.user = user_factory()
self.addon = addon_factory()
self.subscribe_url = reverse_ns(
'reviewers-addon-subscribe', kwargs={'pk': self.addon.pk})
self.unsubscribe_url = reverse_ns(
'reviewers-addon-unsubscribe', kwargs={'pk': self.addon.pk})
self.enable_url = reverse_ns(
'reviewers-addon-enable', kwargs={'pk': self.addon.pk})
self.disable_url = reverse_ns(
'reviewers-addon-disable', kwargs={'pk': self.addon.pk})
self.flags_url = reverse_ns(
'reviewers-addon-flags', kwargs={'pk': self.addon.pk})
def test_subscribe_not_logged_in(self):
response = self.client.post(self.subscribe_url)
assert response.status_code == 401
def test_subscribe_no_rights(self):
self.client.login_api(self.user)
response = self.client.post(self.subscribe_url)
assert response.status_code == 403
def test_subscribe_addon_does_not_exist(self):
self.grant_permission(self.user, 'Addons:PostReview')
self.client.login_api(self.user)
self.subscribe_url = reverse_ns(
'reviewers-addon-subscribe', kwargs={'pk': self.addon.pk + 42})
response = self.client.post(self.subscribe_url)
assert response.status_code == 404
def test_subscribe_already_subscribed(self):
ReviewerSubscription.objects.create(
user=self.user, addon=self.addon)
self.grant_permission(self.user, 'Addons:PostReview')
self.client.login_api(self.user)
self.subscribe_url = reverse_ns(
'reviewers-addon-subscribe', kwargs={'pk': self.addon.pk})
response = self.client.post(self.subscribe_url)
assert response.status_code == 202
assert ReviewerSubscription.objects.count() == 1
def test_subscribe(self):
self.grant_permission(self.user, 'Addons:PostReview')
self.client.login_api(self.user)
self.subscribe_url = reverse_ns(
'reviewers-addon-subscribe', kwargs={'pk': self.addon.pk})
response = self.client.post(self.subscribe_url)
assert response.status_code == 202
assert ReviewerSubscription.objects.count() == 1
def test_unsubscribe_not_logged_in(self):
response = self.client.post(self.unsubscribe_url)
assert response.status_code == 401
def test_unsubscribe_no_rights(self):
self.client.login_api(self.user)
response = self.client.post(self.unsubscribe_url)
assert response.status_code == 403
def test_unsubscribe_addon_does_not_exist(self):
self.grant_permission(self.user, 'Addons:PostReview')
self.client.login_api(self.user)
self.unsubscribe_url = reverse_ns(
            'reviewers-addon-unsubscribe', kwargs={'pk': self.addon.pk + 42})
response = self.client.post(self.unsubscribe_url)
assert response.status_code == 404
def test_unsubscribe_not_subscribed(self):
self.grant_permission(self.user, 'Addons:PostReview')
self.client.login_api(self.user)
self.subscribe_url = reverse_ns(
'reviewers-addon-subscribe', kwargs={'pk': self.addon.pk})
response = self.client.post(self.unsubscribe_url)
assert response.status_code == 202
assert ReviewerSubscription.objects.count() == 0
def test_unsubscribe(self):
ReviewerSubscription.objects.create(
user=self.user, addon=self.addon)
self.grant_permission(self.user, 'Addons:PostReview')
self.client.login_api(self.user)
self.subscribe_url = reverse_ns(
'reviewers-addon-subscribe', kwargs={'pk': self.addon.pk})
response = self.client.post(self.unsubscribe_url)
assert response.status_code == 202
assert ReviewerSubscription.objects.count() == 0
def test_unsubscribe_dont_touch_another(self):
another_user = user_factory()
another_addon = addon_factory()
ReviewerSubscription.objects.create(
user=self.user, addon=self.addon)
ReviewerSubscription.objects.create(
user=self.user, addon=another_addon)
ReviewerSubscription.objects.create(
user=another_user, addon=self.addon)
self.grant_permission(self.user, 'Addons:PostReview')
self.client.login_api(self.user)
self.subscribe_url = reverse_ns(
'reviewers-addon-subscribe', kwargs={'pk': self.addon.pk})
response = self.client.post(self.unsubscribe_url)
assert response.status_code == 202
assert ReviewerSubscription.objects.count() == 2
assert not ReviewerSubscription.objects.filter(
addon=self.addon, user=self.user).exists()
def test_enable_not_logged_in(self):
response = self.client.post(self.enable_url)
assert response.status_code == 401
def test_enable_no_rights(self):
self.client.login_api(self.user)
response = self.client.post(self.enable_url)
assert response.status_code == 403
# Being a reviewer is not enough.
self.grant_permission(self.user, 'Addons:Review')
response = self.client.post(self.enable_url)
assert response.status_code == 403
def test_enable_addon_does_not_exist(self):
self.grant_permission(self.user, 'Reviews:Admin')
self.client.login_api(self.user)
self.enable_url = reverse_ns(
'reviewers-addon-enable', kwargs={'pk': self.addon.pk + 42})
response = self.client.post(self.enable_url)
assert response.status_code == 404
def test_enable(self):
self.grant_permission(self.user, 'Reviews:Admin')
self.client.login_api(self.user)
self.addon.update(status=amo.STATUS_DISABLED)
response = self.client.post(self.enable_url)
assert response.status_code == 202
self.addon.reload()
assert self.addon.status == amo.STATUS_PUBLIC
assert ActivityLog.objects.count() == 1
activity_log = ActivityLog.objects.latest('pk')
assert activity_log.action == amo.LOG.CHANGE_STATUS.id
assert activity_log.arguments[0] == self.addon
def test_enable_already_public(self):
self.grant_permission(self.user, 'Reviews:Admin')
self.client.login_api(self.user)
response = self.client.post(self.enable_url)
assert response.status_code == 202
self.addon.reload()
assert self.addon.status == amo.STATUS_PUBLIC
assert ActivityLog.objects.count() == 1
activity_log = ActivityLog.objects.latest('pk')
assert activity_log.action == amo.LOG.CHANGE_STATUS.id
assert activity_log.arguments[0] == self.addon
def test_enable_no_public_versions_should_fall_back_to_incomplete(self):
self.grant_permission(self.user, 'Reviews:Admin')
self.client.login_api(self.user)
self.addon.update(status=amo.STATUS_DISABLED)
self.addon.versions.all().delete()
response = self.client.post(self.enable_url)
assert response.status_code == 202
self.addon.reload()
assert self.addon.status == amo.STATUS_NULL
def test_enable_version_is_awaiting_review_fall_back_to_nominated(self):
self.grant_permission(self.user, 'Reviews:Admin')
self.client.login_api(self.user)
self.addon.current_version.files.all().update(
status=amo.STATUS_AWAITING_REVIEW)
self.addon.update(status=amo.STATUS_DISABLED)
response = self.client.post(self.enable_url)
assert response.status_code == 202
self.addon.reload()
assert self.addon.status == amo.STATUS_NOMINATED
def test_disable_not_logged_in(self):
response = self.client.post(self.disable_url)
assert response.status_code == 401
def test_disable_no_rights(self):
self.client.login_api(self.user)
response = self.client.post(self.disable_url)
assert response.status_code == 403
# Being a reviewer is not enough.
self.grant_permission(self.user, 'Addons:Review')
response = self.client.post(self.disable_url)
assert response.status_code == 403
def test_disable_addon_does_not_exist(self):
self.grant_permission(self.user, 'Reviews:Admin')
self.client.login_api(self.user)
self.disable_url = reverse_ns(
            'reviewers-addon-disable', kwargs={'pk': self.addon.pk + 42})
response = self.client.post(self.disable_url)
assert response.status_code == 404
def test_disable(self):
self.grant_permission(self.user, 'Reviews:Admin')
self.client.login_api(self.user)
self.addon.versions.all().delete()
response = self.client.post(self.disable_url)
assert response.status_code == 202
self.addon.reload()
assert self.addon.status == amo.STATUS_DISABLED
assert ActivityLog.objects.count() == 1
activity_log = ActivityLog.objects.latest('pk')
assert activity_log.action == amo.LOG.CHANGE_STATUS.id
assert activity_log.arguments[0] == self.addon
def test_patch_flags_not_logged_in(self):
response = self.client.patch(
self.flags_url, {'auto_approval_disabled': True})
assert response.status_code == 401
def test_patch_flags_no_permissions(self):
self.client.login_api(self.user)
response = self.client.patch(
self.flags_url, {'auto_approval_disabled': True})
assert response.status_code == 403
# Being a reviewer is not enough.
self.grant_permission(self.user, 'Addons:Review')
response = self.client.patch(
self.flags_url, {'auto_approval_disabled': True})
assert response.status_code == 403
def test_patch_flags_addon_does_not_exist(self):
self.grant_permission(self.user, 'Reviews:Admin')
self.client.login_api(self.user)
self.flags_url = reverse_ns(
'reviewers-addon-flags', kwargs={'pk': self.addon.pk + 42})
response = self.client.patch(
self.flags_url, {'auto_approval_disabled': True})
assert response.status_code == 404
def test_patch_flags_no_flags_yet_still_works_transparently(self):
assert not AddonReviewerFlags.objects.filter(addon=self.addon).exists()
self.grant_permission(self.user, 'Reviews:Admin')
self.client.login_api(self.user)
response = self.client.patch(
self.flags_url, {'auto_approval_disabled': True})
assert response.status_code == 200
assert AddonReviewerFlags.objects.filter(addon=self.addon).exists()
reviewer_flags = AddonReviewerFlags.objects.get(addon=self.addon)
assert reviewer_flags.auto_approval_disabled
assert ActivityLog.objects.count() == 0
def test_patch_flags_change_everything(self):
AddonReviewerFlags.objects.create(
addon=self.addon,
pending_info_request=self.days_ago(1),
auto_approval_disabled=True)
self.grant_permission(self.user, 'Reviews:Admin')
self.client.login_api(self.user)
data = {
'auto_approval_disabled': False,
'needs_admin_code_review': True,
'needs_admin_content_review': True,
'needs_admin_theme_review': True,
'pending_info_request': None,
}
response = self.client.patch(self.flags_url, data)
assert response.status_code == 200
assert AddonReviewerFlags.objects.filter(addon=self.addon).exists()
reviewer_flags = AddonReviewerFlags.objects.get(addon=self.addon)
assert reviewer_flags.auto_approval_disabled is False
assert reviewer_flags.needs_admin_code_review is True
assert reviewer_flags.needs_admin_content_review is True
assert reviewer_flags.needs_admin_theme_review is True
assert reviewer_flags.pending_info_request is None
assert ActivityLog.objects.count() == 1
activity_log = ActivityLog.objects.latest('pk')
assert activity_log.action == amo.LOG.ADMIN_ALTER_INFO_REQUEST.id
assert activity_log.arguments[0] == self.addon
| bsd-3-clause | -4,432,391,639,027,054,000 | 41.218142 | 79 | 0.612274 | false |
gitizenme/ImprovWithAlexa | improvwithalexa_function.py | 1 | 6601 | import logging
from flask import Flask, render_template
from flask_ask import Ask, statement, question, session
from chatterbot import ChatBot
app = Flask(__name__)
ask = Ask(app, "/")
logging.getLogger("flask_ask").setLevel(logging.DEBUG)
# Create a new instance of a ChatBot
chatbot = ChatBot(
"Improv",
read_only=False,
trainer='chatterbot.trainers.ListTrainer',
storage_adapter="chatterbot.storage.JsonFileStorageAdapter",
logic_adapters=[
{
'import_path': 'chatterbot.logic.BestMatch'
},
# {
# 'import_path': 'chatterbot.logic.LowConfidenceAdapter',
# 'threshold': 0.63,
# 'default_response': 'I am sorry, but I do not understand.'
# },
"chatterbot.logic.MathematicalEvaluation",
],
database="/tmp/improv.json"
)
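# Note: read_only=False means the bot also learns from live conversations at
# runtime; since the JSON storage file lives under /tmp (usually ephemeral),
# the train() calls below rebuild the corpus on every fresh start.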
# Greetings
chatbot.train([
"Nice to meet you.",
"Thank you.",
"Hi, nice to meet you.",
"Thank you. You too.",
"It is a pleasure to meet you.",
"Thank you. You too.",
"Top of the morning to you!",
"Thank you kindly.",
"Top of the morning to you!",
"And the rest of the day to you.",
"What's up?",
"Not much.",
"What's up?",
"Not too much.",
"What's up?",
"Not much, how about you?",
"What's up?",
"Nothing much.",
"What's up?",
"The sky's up but I'm fine thanks. What about you?",
])
# Intelligence
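# Note: several two-line train() calls below pass set literals ({...}); Python
# sets are unordered, so a list is the safer choice whenever the prompt must
# precede the response.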
chatbot.train({
"what are the laws of thermodynamics",
"i'm not a physicist, but i think this has something to do with heat, entropy, and conservation of energy, right?",
})
chatbot.train({
"what is the distance to the sun from the earth",
"the sun is about 93 million miles from earth.",
})
chatbot.train({
"how far away is the moon",
"the moon is about 250,000 miles from earth on average.",
})
chatbot.train({
"What was the name of the first artificial Earth satellite?",
"Sputnik 1",
})
# Knowledge
chatbot.train([
"have you ever read a book",
"i have read many books.",
"ray bradbury",
"ray is really cool",
"william gibson",
'i like his older cyberpunk <say-as interpret-as="spell-out">AI</say-as> stuff better than the newer works.',
"frank herbert",
"i think dune is an excellent story. did you see the movie?",
"george r r martin",
"Ooh, game of thrones, the 7th season is starting out well",
])
# Truth
chatbot.train([
'what is true?',
'in accordance with <emphasis level="strong">fact</emphasis> or <emphasis level="strong">reality</emphasis>',
'what is false?',
'not according to true or fact',
'is true false?',
'false',
'is false true',
'true',
'is true equal to true',
'true',
'is false equal to true',
'false'
])
# Calculations
# enabled chatterbot.logic.MathematicalEvaluation
# Humor
chatbot.train([
'what is humour',
'a message that communicates laughter.',
'do you laugh',
'<prosody rate="x-fast" pitch="x-high" volume="x-loud"><emphasis level="reduced">Ha,</emphasis><emphasis level="reduced">Ha,</emphasis><emphasis level="moderate">ha,</emphasis><emphasis level="moderate">ha,</emphasis><emphasis level="strong">ha</emphasis><emphasis level="strong">ha</emphasis><emphasis level="strong">ha</emphasis><break time="1s"/></prosody><prosody rate="x-slow" pitch="medium" volume="soft"><p>yeah no</p></prosody>',
'do you have a sense of humour',
'"He who laughs, lasts." Mary Pettibone Poole',
])
chatbot.train([
'knock, knock',
    "who's there",
'pima',
'pima who',
    "I'm going to pee my pants",
    "That's funny!"
])
# Social
chatbot.train({
'Tell me about yourself.',
'What do you want to know?',
})
chatbot.train({
'Are you a robot?',
'Yes I am.'
})
# Bizarre
chatbot.train({
'do you know any bizarre facts',
'A bus powered by human poop runs in the U.K. The bus can travel up to 186 miles on one tank of gas, which is equivalent to the waste produced annually by five people'
})
# Artificial Intelligence
chatbot.train({
"What is Artificial Intelligence",
"Artificial Intelligence is the branch of engineering and science devoted to constructing machines that think.",
})
chatbot.train({
"You sound like Siri",
"Yes I am inspired by commander Siri's artificial personality.",
})
# Emotions
chatbot.train({
'do you have emotions?',
'yes, I have them',
})
chatbot.train({
'what are you feeling right now?',
    "I'm energized by the ignite reno crowd"
})
# Movies
chatbot.train({
'what is your favorite movie?',
'Pulp Fiction',
})
chatbot.train({
'how about a quote?',
    'What does Marsellus Wallace look like?'
})
# Jokes
chatbot.train({
'tell me a joke',
'what did the buddhist say to the hot dog vendor? "make me one with everything."',
})
chatbot.train({
'no, the joke about the dog',
    'a 3-legged dog walks into an old west saloon, slides up to the bar and announces "i\'m looking for the man who shot my paw." '
})
# Goodbye
chatbot.train({
'say goodnight',
'Thank you for coming out to Ignite Reno #18'
})
@ask.launch
def new_game():
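    # session.attributes['name'] is set by the NameIntent handler below, so a
    # returning user in the same session gets the personalised template.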
if 'name' not in session.attributes:
welcome_msg = render_template('welcome')
else:
welcome_msg = render_template('welcome_back', name=session.attributes["name"])
return question(welcome_msg)
# @ask.intent("YesIntent")
# def next_round():
# numbers = [randint(0, 9) for _ in range(3)]
# round_msg = render_template('round', numbers=numbers)
# session.attributes['numbers'] = numbers[::-1] # reverse
# return question(round_msg)
#
#
# @ask.intent("AnswerIntent", convert={'first': int, 'second': int, 'third': int})
# def answer(first, second, third):
# winning_numbers = session.attributes['numbers']
# if [first, second, third] == winning_numbers:
# msg = render_template('win')
# else:
# msg = render_template('lose')
# return statement(msg)
@ask.intent("ChatIntent", mapping={'chat_question': 'question'})
def chat(chat_question):
response = chatbot.get_response(chat_question)
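    # Wrap the reply in SSML <speak> tags; several trained responses embed SSML
    # markup (prosody, emphasis, say-as) that Alexa renders when speaking.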
speak_output = '<speak>{}</speak>'.format(response.text)
q = question(speak_output)
return q
@ask.intent("NameIntent")
def name(first_name):
session.attributes['name'] = first_name
return question("Hello {}. Nice to meet you.".format(first_name))
@ask.intent("GoodNightIntent")
def goodbye(event):
return statement("Thank you for coming out to Ignite Reno #18".format(event))
if __name__ == '__main__':
app.run(debug=True)
| mit | 8,936,383,049,406,293,000 | 24.098859 | 441 | 0.641115 | false |
DataKitchen/DKCloudCommand | DKCloudCommand/tests/TestCloudCommandRunner.py | 1 | 44253 | import re
import unittest
import datetime, time
import tempfile
import pickle
from sys import path, stdout
import os
import shutil
# if '../../' not in path:
# path.insert(0, '../../')
from BaseTestCloud import BaseTestCloud
from DKCloudCommandRunner import DKCloudCommandRunner
from DKActiveServingWatcher import *
from DKCloudAPIMock import DKCloudAPIMock
class TestCloudCommandRunner(BaseTestCloud):
def test_rude(self):
tv = 'DKCloudCommand.rude = **rude**\n'
rv = DKCloudCommandRunner.rude(self._api)
self.assertIsNotNone(rv)
self.assertEqual(rv, tv)
rv = DKCloudCommandRunner.rude(BaseTestCloud)
self.assertIn('ERROR', rv)
def test_a_list_kitchens(self):
tv1 = 'CLI-Top'
tv2 = 'kitchens-plus'
tv3 = 'master'
# tv = 'DKCloudCommand.kitchens returned 3 kitchens\n base-test-kitchen \n kitchens-plus \n master \n'
rc = DKCloudCommandRunner.list_kitchen(self._api)
self.assertTrue(rc.ok())
rv = rc.get_message()
self.assertTrue(isinstance(rv, basestring))
self.assertTrue(tv1 in rv)
self.assertTrue(tv2 in rv)
self.assertTrue(tv3 in rv)
def test_get_kitchen(self):
tk = 'CLI-Top'
temp_dir = tempfile.mkdtemp(prefix='unit-tests', dir=self._TEMPFILE_LOCATION)
kitchen_path = os.path.join(temp_dir, tk)
os.makedirs(kitchen_path)
# kitchen dir already has a folder in it.
bad_path = os.path.join(kitchen_path, 'bad')
os.makedirs(bad_path)
rv = DKCloudCommandRunner.get_kitchen(self._api, tk, temp_dir)
self.assertFalse(rv.ok())
shutil.rmtree(bad_path, ignore_errors=True)
# kitchen dir already has a file in it.
with open(os.path.join(kitchen_path, 'bad.txt'), 'w') as bad_file:
bad_file.write('bad.txt')
rv = DKCloudCommandRunner.get_kitchen(self._api, tk, temp_dir)
self.assertFalse(rv.ok())
shutil.rmtree(kitchen_path, ignore_errors=True)
# kitchen dir exists, but is empty
kitchen_path = os.path.join(temp_dir, tk)
os.makedirs(kitchen_path)
rv = DKCloudCommandRunner.get_kitchen(self._api, tk, temp_dir)
self.assertTrue(rv.ok())
self.assertEqual(os.path.isdir(os.path.join(kitchen_path, '.dk')), True)
shutil.rmtree(kitchen_path, ignore_errors=True)
# kitchen dir does not exists.
rv = DKCloudCommandRunner.get_kitchen(self._api, tk, temp_dir)
self.assertTrue(rv.ok())
self.assertEqual(os.path.isdir(os.path.join(kitchen_path, '.dk')), True)
shutil.rmtree(temp_dir, ignore_errors=True)
def test_which_kitchen(self):
temp_dir = tempfile.mkdtemp(prefix='unit-tests', dir=self._TEMPFILE_LOCATION)
kn = 'fake'
kp = os.path.join(temp_dir, kn)
os.makedirs(kp)
dk = os.path.join(kp, '.dk')
os.makedirs(dk)
with open(os.path.join(dk, 'KITCHEN_META'), 'w') as meta:
meta.write(kn)
rv = DKCloudCommandRunner.which_kitchen(self._api, path=kp)
self.assertIn('You are in', rv.get_message())
rv = DKCloudCommandRunner.which_kitchen(self._api, kp)
self.assertIn('You are in', rv.get_message())
        rv = DKCloudCommandRunner.which_kitchen(self._api, temp_dir)
self.assertFalse(rv.ok())
def test_create_kitchen(self):
parent = 'CLI-Top'
kitchen = 'temp-create-kitchen-Runner'
kitchen = self._add_my_guid(kitchen)
rv = DKCloudCommandRunner.delete_kitchen(self._api, kitchen)
self.assertIsNotNone(rv)
rv = DKCloudCommandRunner.create_kitchen(self._api, parent, kitchen)
self.assertTrue(rv.ok())
rc = DKCloudCommandRunner.list_kitchen(self._api)
rv2 = rc.get_message()
self.assertTrue(kitchen in rv2)
# cleanup
rv = DKCloudCommandRunner.delete_kitchen(self._api, kitchen)
self.assertIsNotNone(rv)
def test_delete_kitchen(self):
parent = 'CLI-Top'
kitchen = 'temp-delete-kitchen-Runner'
kitchen = self._add_my_guid(kitchen)
rv = DKCloudCommandRunner.delete_kitchen(self._api, kitchen)
self.assertIsNotNone(rv)
rv = DKCloudCommandRunner.create_kitchen(self._api, parent, kitchen)
self.assertTrue(rv.ok())
rv = DKCloudCommandRunner.delete_kitchen(self._api, kitchen)
self.assertTrue(rv.ok())
rc = DKCloudCommandRunner.list_kitchen(self._api)
rv2 = rc.get_message()
self.assertTrue(kitchen not in rv2)
def test_recipe_list(self):
tv1 = 's3-small-recipe'
tv2 = 'simple'
tv3 = 'parallel-recipe-test'
rc = DKCloudCommandRunner.list_recipe(self._api, 'CLI-Top')
rv = rc.get_message()
self.assertTrue(tv1 in rv)
self.assertTrue(tv2 in rv)
self.assertTrue(tv3 in rv)
def test_recipe_get(self):
kitchen_name = 'CLI-Top'
recipe_name = 'simple'
temp_dir, kitchen_dir = self._make_kitchen_dir(kitchen_name, change_dir=True)
rv = DKCloudCommandRunner.get_recipe(self._api, kitchen_name, recipe_name)
self.assertTrue(recipe_name in rv.get_message())
self.assertTrue('sections' in rv.get_message())
self.assertTrue(os.path.exists(os.path.join(kitchen_dir, recipe_name)))
shutil.rmtree(temp_dir, ignore_errors=True)
def test_recipe_get_dir_exists(self):
kitchen_name = 'CLI-Top'
recipe_name = 'simple'
temp_dir, kitchen_dir, recipe_dir = self._make_recipe_dir(recipe_name, kitchen_name, change_dir=True)
rv = DKCloudCommandRunner.get_recipe(self._api, kitchen_name, recipe_name)
msg = rv.get_message()
self.assertTrue(recipe_name in msg)
matches = re.match(r"([0-9]*) new or missing files", msg)
self.assertTrue(int(matches.group(1)) >= 16)
self.assertTrue('new or missing files' in msg)
self.assertTrue(os.path.exists(os.path.join(kitchen_dir, recipe_name)))
shutil.rmtree(temp_dir, ignore_errors=True)
def test_recipe_get_negative(self):
kitchen_name = 'CLI-Top'
recipe_name = 'simple_fogfogkfok'
temp_dir, kitchen_dir = self._make_kitchen_dir(kitchen_name, change_dir=True)
rc = DKCloudCommandRunner.get_recipe(self._api, kitchen_name, recipe_name)
self.assertFalse(rc.ok())
self.assertTrue('not in kitchen' in rc.get_message().lower())
shutil.rmtree(temp_dir, ignore_errors=True)
def test_recipe_get_complex(self):
kitchen_name = 'CLI-Top'
recipe_name = 'simple'
temp_dir, kitchen_dir = self._make_kitchen_dir(kitchen_name, change_dir=True)
rc = DKCloudCommandRunner.get_recipe(self._api, kitchen_name, recipe_name)
recipe_path = os.path.join(kitchen_dir, recipe_name)
self.assertTrue(os.path.exists(recipe_path))
# Modify the local file.
with open(os.path.join(recipe_path, "simple-file.txt"), 'a') as modify_file:
modify_file.write('new line\n')
modify_file.flush()
# Delete something local, so it's remote only.
os.remove(os.path.join(recipe_path, 'variations.json'))
os.remove(os.path.join(recipe_path, 'node1', 'data_sources', 'DKDataSource_NoOp.json'))
# Create a new file, so there is a local only file.
with open(os.path.join(recipe_path, "new_local_file.txt"), 'w') as new_local_file:
new_local_file.write('peccary\n')
new_local_file.flush()
subdir = os.path.join(recipe_path, 'subdir')
os.mkdir(subdir)
with open(os.path.join(subdir, "new_local_file_in_subdir.txt"), 'w') as new_local_file:
new_local_file.write('peccary\n')
new_local_file.flush()
rc = DKCloudCommandRunner.get_recipe(self._api, kitchen_name, recipe_name, recipe_path)
self.assertTrue(rc.ok())
msg = rc.get_message()
self.assertTrue('Auto-merging' in msg)
self.assertTrue('2 new or missing files' in msg)
if False:
shutil.rmtree(temp_dir, ignore_errors=True)
def test_recipe_status(self):
kitchen_name = 'CLI-Top'
recipe_name = 'simple'
temp_dir, kitchen_dir = self._make_kitchen_dir(kitchen_name, change_dir=True)
DKCloudCommandRunner.get_recipe(self._api, kitchen_name, recipe_name)
new_path = os.path.join(kitchen_dir, recipe_name)
os.chdir(new_path)
rc = DKCloudCommandRunner.recipe_status(self._api, kitchen_name, recipe_name)
rs = rc.get_message()
self.assertNotRegexpMatches(rs, '^ERROR')
matches = re.match(r"([0-9]*) files are unchanged", rs)
self.assertTrue(int(matches.group(1)) >= 16)
self.assertTrue('files are unchanged' in rs)
# Modify existing file
with open(os.path.join(new_path, 'node1/description.json'), 'w') as f:
f.write('BooGa BooGa')
# Add a new file
with open(os.path.join(new_path, 'node1/newfile.json'), 'w') as f:
f.write('This is my new file. Hooray!')
# Delete a file
os.remove(os.path.join(new_path, 'node1/post_condition.json'))
# Remove a directory
shutil.rmtree(os.path.join(new_path, 'node1/data_sinks'))
rc = DKCloudCommandRunner.recipe_status(self._api, kitchen_name, recipe_name)
rs = rc.get_message()
self.assertNotRegexpMatches(rs, '^ERROR')
match = re.search(r"([0-9]*) files are unchanged", rs)
self.assertTrue(int(match.group(1)) >= 15)
self.assertTrue('files are unchanged' in rs)
self.assertTrue('1 files are modified' in rs)
self.assertTrue('1 files are local only' in rs)
self.assertTrue('1 files are remote only' in rs)
self.assertTrue('1 directories are remote only' in rs)
shutil.rmtree(temp_dir, ignore_errors=True)
def test_update_file(self):
# setup
parent_kitchen = 'CLI-Top'
test_kitchen = 'CLI-test_update_file'
test_kitchen = self._add_my_guid(test_kitchen)
recipe_name = 'simple'
recipe_file_key = recipe_name
file_name = 'description.json'
message = 'test update CLI-test_update_file'
api_file_key = file_name
update_str = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')
# Cleanup old state
self._delete_and_clean_kitchen(test_kitchen)
# Get the original file. Helper function handles the directories.
original_file = self._get_recipe_file(parent_kitchen, recipe_name, recipe_file_key, file_name)
rs = DKCloudCommandRunner.create_kitchen(self._api, parent_kitchen, test_kitchen)
self.assertTrue(rs.ok())
# Get the new kitchen to a temp folder
temp_dir, test_kitchen_dir = self._make_kitchen_dir(test_kitchen, change_dir=True)
new_kitchen_file = self._get_recipe_file(test_kitchen, recipe_name, recipe_file_key, file_name,
test_kitchen_dir)
self.assertEqual(original_file, new_kitchen_file)
new_kitchen_file_dict = self._get_the_dict(new_kitchen_file)
new_kitchen_file_abspath = os.path.join(test_kitchen_dir, os.path.join(recipe_file_key, file_name))
new_kitchen_file_dict[test_kitchen] = update_str
new_kitchen_file2 = self._get_the_json_str(new_kitchen_file_dict)
with open(new_kitchen_file_abspath, 'w') as rfile:
rfile.seek(0)
rfile.truncate()
rfile.write(new_kitchen_file2)
# test
working_dir = os.path.join(test_kitchen_dir, recipe_name)
os.chdir(working_dir)
rc = DKCloudCommandRunner.update_file(self._api, test_kitchen, recipe_name, message, api_file_key)
self.assertTrue(rc.ok())
new_kitchen_file3 = self._get_recipe_file(test_kitchen, recipe_name, recipe_file_key, file_name)
self.assertEqual(new_kitchen_file2, new_kitchen_file3)
# cleanup
self._delete_and_clean_kitchen(test_kitchen)
shutil.rmtree(temp_dir, ignore_errors=True)
def test_util_funcs(self):
        paths_to_check = ['description.json', 'graph.json', 'simple-file.txt', 'node2_hide', 'node2_hide/my_file.txt', 'node1hide/subdir/hide-me.txt', 'variables.json', 'variations.json', 'node2/data_sinks', 'node1/data_sinks', 'node2', 'node1', 'node1/data_sources', 'resources', 'node2/data_sources']
minimal_paths = DKCloudCommandRunner.find_minimal_paths_to_get(paths_to_check)
self.assertIsNotNone(minimal_paths)
def test_update_all(self):
parent_kitchen = 'CLI-Top'
test_kitchen = self._add_my_guid('update_all')
recipe_name = 'simple'
new = 'new.txt'
deleted = 'deleted.txt'
modified = 'modified.txt'
subdir = 'subdir'
subsubdir = os.path.join(subdir, 'subsubdir')
subusubsubdir = os.path.join(subsubdir, 'subusubsubdir')
self._delete_and_clean_kitchen(test_kitchen)
rs = DKCloudCommandRunner.create_kitchen(self._api, parent_kitchen, test_kitchen)
self.assertTrue(rs.ok())
# make and cd to kitchen dir and get the recipe to disk
temp_dir, kitchen_dir, recipe_dir = self._make_recipe_dir(recipe_name, test_kitchen)
os.chdir(kitchen_dir)
print 'Working in directory %s' % recipe_dir
start_time = time.time()
rs = DKCloudCommandRunner.get_recipe(self._api, test_kitchen, recipe_name)
elapsed_recipe_status = time.time() - start_time
print 'get_recipe - elapsed: %d' % elapsed_recipe_status
self.assertTrue(rs.ok())
os.chdir(recipe_dir)
start_time = time.time()
rc = DKCloudCommandRunner.recipe_status(self._api, test_kitchen, recipe_name)
elapsed_recipe_status = time.time() - start_time
print 'recipe_status - elapsed: %d' % elapsed_recipe_status
msg = rc.get_message()
self.assertTrue('files differ' not in msg)
self.assertTrue('only on local' not in msg)
self.assertTrue('only on remote' not in msg)
# New, not added, file
with open(new, 'w') as f:
f.write('This is file %s\n' % new)
with open(os.path.join('node1', new), 'w') as f:
f.write('This is file %s in node 1\n' % new)
# Deleted File
with open(deleted, 'w') as f:
f.write('This is file %s\n' % deleted)
rc = DKCloudCommandRunner.add_file(self._api, test_kitchen, recipe_name, 'Adding %s' % deleted, deleted)
self.assertTrue(rc.ok())
os.remove(deleted)
# Modified File
with open(modified, 'w') as f:
f.write('This is file %s\n' % modified)
rc = DKCloudCommandRunner.add_file(self._api, test_kitchen, recipe_name, 'Adding %s' % modified, modified)
self.assertTrue(rc.ok())
with open(modified, 'a') as f:
f.write('This is a new line %s\n' % modified)
# New file in a subdirectory
os.mkdir(subdir)
os.mkdir(subsubdir)
os.mkdir(subusubsubdir)
with open(os.path.join(subsubdir, new), 'w') as f:
f.write('This is file %s in subdirectory %s\n' % (new, subsubdir))
with open(os.path.join(subsubdir, 'also_%s' % new), 'w') as f:
f.write('This is file %s in subdirectory %s\n' % ('also_%s' % new, subsubdir))
with open(os.path.join(subusubsubdir, 'again_%s' % new), 'w') as f:
f.write('This is file %s in subdirectory %s\n' % ('also_%s' % new, subusubsubdir))
# Delete a whole directory, and some files under there.
shutil.rmtree('node1', ignore_errors=True)
# Make sure repo is in state we expect.
start_time = time.time()
rc = DKCloudCommandRunner.recipe_status(self._api, test_kitchen, recipe_name)
elapsed_recipe_status = time.time() - start_time
print 'recipe_status - elapsed: %d' % elapsed_recipe_status
msg = rc.get_message()
match = re.search(r"([0-9]*) files are unchanged", msg)
self.assertTrue(int(match.group(1)) >= 10)
self.assertTrue('files are unchanged' in msg)
match = re.search(r"([0-9]*) files are modified", msg)
self.assertTrue(int(match.group(1)) >= 1)
self.assertTrue('files are modified' in msg)
match = re.search(r"([0-9]*) files are local only", msg)
self.assertTrue(int(match.group(1)) >= 1)
self.assertTrue('files are local only' in msg)
match = re.search(r"([0-9]*) files are remote only", msg)
self.assertTrue(int(match.group(1)) >= 1)
self.assertTrue('files are remote only' in msg)
self.assertTrue('subdir/subsubdir/subusubsubdir' in msg)
start_time = time.time()
rc = DKCloudCommandRunner.update_all_files(self._api, test_kitchen, recipe_name, recipe_dir, 'update all dryrun', dryrun=True)
elapsed_recipe_status = time.time() - start_time
print 'update_all_files - elapsed: %d' % elapsed_recipe_status
self.assertTrue(rc.ok())
msg = rc.get_message()
self.assertTrue('modified.txt' in msg)
self.assertTrue('new.txt' in msg)
self.assertTrue('deleted.txt' in msg)
self.assertTrue('subdir/subsubdir/new.txt' in msg)
self.assertTrue('subdir/subsubdir/subusubsubdir/again_new.txt' in msg)
start_time = time.time()
rc = DKCloudCommandRunner.update_all_files(self._api, test_kitchen, recipe_name, recipe_dir, 'update all')
elapsed_recipe_status = time.time() - start_time
print 'update_all_files - elapsed: %d' % elapsed_recipe_status
self.assertTrue(rc.ok())
msg = rc.get_message()
self.assertTrue('modified.txt' in msg)
match = re.search(r"([0-9]*) files updated", msg)
self.assertTrue(int(match.group(1)) >= 1)
self.assertTrue('subdir/subsubdir/new.txt' in msg)
match = re.search(r"([0-9]*) files added", msg)
self.assertTrue(int(match.group(1)) >= 4)
self.assertTrue('node1/data_sources/DKDataSource_NoOp.json' in msg)
match = re.search(r"([0-9]*) files deleted", msg)
self.assertTrue(int(match.group(1)) >= 7)
self._delete_and_clean_kitchen(test_kitchen)
shutil.rmtree(temp_dir, ignore_errors=True)
def test_add_file(self):
parent_kitchen = 'CLI-Top'
test_kitchen = 'test_create_file-Runner'
test_kitchen = self._add_my_guid(test_kitchen)
recipe_name = 'simple'
file_name = 'added.sql'
filedir = 'resources'
recipe_file_key = os.path.join(recipe_name, filedir)
api_file_key = os.path.join(filedir, file_name)
file_contents = '--\n-- sql for you\n--\n\nselect 1024\n\n'
message = 'test update test_create_file-API'
# test negative
rc = DKCloudCommandRunner.add_file(self._api, test_kitchen, recipe_name, message, 'badfile.txt')
self.assertFalse(rc.ok())
# create test kitchen
self._delete_and_clean_kitchen(test_kitchen)
rs = DKCloudCommandRunner.create_kitchen(self._api, parent_kitchen, test_kitchen)
self.assertTrue(rs.ok())
# make and cd to kitchen dir and get the recipe to disk
temp_dir, kitchen_dir = self._make_kitchen_dir(test_kitchen, change_dir=True)
os.chdir(kitchen_dir)
self._get_recipe(test_kitchen, recipe_name)
# create new file on disk
try:
os.chdir(recipe_name)
with open(api_file_key, 'w') as f:
f.write(file_contents)
except ValueError, e:
print('could not write file %s.' % e)
self.assertTrue(False)
# add file from disk THE TEST
rc = DKCloudCommandRunner.add_file(self._api, test_kitchen, recipe_name, message, api_file_key)
self.assertTrue(rc.ok())
# make sure file is in kitchen (get file)
file_contents2 = self._get_recipe_file(test_kitchen, recipe_name, recipe_file_key, file_name)
self.assertEqual(file_contents, file_contents2, 'Create check')
# cleanup
self._delete_and_clean_kitchen(test_kitchen)
shutil.rmtree(temp_dir, ignore_errors=True)
def test_delete_file(self):
# setup
parent_kitchen = 'CLI-Top'
test_kitchen = 'Runner-test_delete_file'
test_kitchen = self._add_my_guid(test_kitchen)
recipe_name = 'simple'
recipe_file_key = recipe_name
file_name = 'description.json'
message = 'test Delete Runner-test_delete_file'
self._delete_and_clean_kitchen(test_kitchen)
temp_dir, kitchen_dir = self._make_kitchen_dir(test_kitchen, change_dir=True)
rs = DKCloudCommandRunner.create_kitchen(self._api, parent_kitchen, test_kitchen)
self.assertTrue(rs.ok())
os.chdir(kitchen_dir)
self.assertTrue(self._get_recipe_file(test_kitchen, recipe_name, recipe_file_key, file_name) is not None)
rv = DKCloudCommandRunner.get_recipe(self._api, test_kitchen, recipe_name)
self.assertTrue(recipe_name in rv.get_message())
target_file = os.path.join(kitchen_dir, os.path.join(recipe_file_key, file_name))
self.assertTrue(os.path.isfile(target_file)) # the file is there
os.remove(target_file)
rs = DKCloudCommandRunner.delete_file(self._api, test_kitchen, recipe_name,
message, file_name)
self.assertTrue(rs.ok())
self.assertTrue(self._get_recipe_file(test_kitchen, recipe_name, recipe_file_key, file_name) is None,
"Gone check")
# cleanup
self._delete_and_clean_kitchen(test_kitchen)
shutil.rmtree(temp_dir, ignore_errors=True)
# def test_cook_recipe_recipe(self):
# kitchen = 'CLI-Top'
# recipe = 'simple'
# variation = 'simple-variation-now'
# rv = DKCloudCommandRunner.cook_recipe(self._api, kitchen, recipe, variation)
# self.assertTrue('started' in rv.get_message())
def test_create_order(self):
kitchen = 'CLI-Top'
recipe = 'simple'
variation = 'simple-variation-now'
rv = DKCloudCommandRunner.create_order(self._api, kitchen, recipe, variation)
self.assertTrue('simple' in rv.get_message())
def test_delete_all_order(self):
# setup
parent_kitchen = 'CLI-Top'
new_kitchen = 'test_deleteall_orderRUN'
new_kitchen = self._add_my_guid(new_kitchen)
recipe = 'simple'
variation = 'simple-variation-now'
DKCloudCommandRunner.delete_kitchen(self._api, new_kitchen) # clean up junk
rc = DKCloudCommandRunner.create_kitchen(self._api, parent_kitchen, new_kitchen)
self.assertTrue(rc.ok())
rv = DKCloudCommandRunner.create_order(self._api, new_kitchen, recipe, variation)
self.assertIsNotNone(rv)
order_id = rv.get_payload()
self.assertIsNotNone(variation in order_id)
# test
rc = DKCloudCommandRunner.delete_all_order(self._api, new_kitchen)
self.assertTrue(rc.ok())
# cleanup
DKCloudCommandRunner.delete_kitchen(self._api, new_kitchen)
def test_delete_one_order(self):
# setup
parent_kitchen = 'CLI-Top'
new_kitchen = 'test_deleteall_order-RUN'
new_kitchen = self._add_my_guid(new_kitchen)
recipe = 'simple'
variation = 'simple-variation-now'
DKCloudCommandRunner.delete_kitchen(self._api, new_kitchen) # clean up junk
rc = DKCloudCommandRunner.create_kitchen(self._api, parent_kitchen, new_kitchen)
self.assertTrue(rc.ok())
rv = DKCloudCommandRunner.create_order(self._api, new_kitchen, recipe, variation)
self.assertIsNotNone(rv)
order_id = rv.get_payload()
self.assertIsNotNone(variation in order_id)
# test
rc = DKCloudCommandRunner.delete_one_order(self._api, order_id)
self.assertTrue(rc.ok())
# cleanup
DKCloudCommandRunner.delete_kitchen(self._api, new_kitchen)
def test_stop_order(self):
# setup
parent_kitchen = 'CLI-Top'
new_kitchen = 'test_stop_order-RUN'
new_kitchen = self._add_my_guid(new_kitchen)
recipe = 'simple'
variation = 'simple-variation-now'
DKCloudCommandRunner.delete_kitchen(self._api, new_kitchen) # clean up junk
rc = DKCloudCommandRunner.create_kitchen(self._api, parent_kitchen, new_kitchen)
self.assertTrue(rc.ok())
rv = DKCloudCommandRunner.create_order(self._api, new_kitchen, recipe, variation)
self.assertIsNotNone(rv)
order_id = rv.get_payload()
self.assertIsNotNone(variation in order_id)
# test
rc = DKCloudCommandRunner.stop_order(self._api, order_id)
# todo: need to find a way for this to succeed
self.assertTrue(rc.ok())
# cleanup
DKCloudCommandRunner.delete_kitchen(self._api, new_kitchen)
def test_get_compiled_serving_from_recipe(self):
# setup
parent_kitchen = 'master'
new_kitchen = 'test_get_compiled_serving_from_recipe=API'
new_kitchen = self._add_my_guid(new_kitchen)
recipe_name = 'parallel-recipe-test'
variation_name = 'variation-test'
self._delete_and_clean_kitchen(new_kitchen)
rs = DKCloudCommandRunner.create_kitchen(self._api, parent_kitchen, new_kitchen)
self.assertTrue(rs.ok())
# test
resp = DKCloudCommandRunner.get_compiled_serving(self._api, parent_kitchen, recipe_name, variation_name)
self.assertTrue(resp.ok())
# cleanup
self._delete_and_clean_kitchen(new_kitchen)
def test_merge_kitchens_success(self):
existing_kitchen_name = 'master'
base_test_kitchen_name = 'base-test-kitchen'
branched_test_kitchen_name = 'branched-from-base-test-kitchen'
base_test_kitchen_name = self._add_my_guid(base_test_kitchen_name)
branched_test_kitchen_name = self._add_my_guid(branched_test_kitchen_name)
# setup
self._delete_and_clean_kitchen(branched_test_kitchen_name)
self._delete_and_clean_kitchen(base_test_kitchen_name)
# test
# create base kitchen
rs = DKCloudCommandRunner.create_kitchen(self._api, existing_kitchen_name, base_test_kitchen_name)
self.assertTrue(rs.ok())
# create branch kitchen from base kitchen
rs = DKCloudCommandRunner.create_kitchen(self._api, base_test_kitchen_name, branched_test_kitchen_name)
self.assertTrue(rs.ok())
# do merge
rd = DKCloudCommandRunner.merge_kitchens_improved(self._api, branched_test_kitchen_name, base_test_kitchen_name)
self._check_no_merge_conflicts(rd)
# cleanup
self._delete_and_clean_kitchen(branched_test_kitchen_name)
self._delete_and_clean_kitchen(base_test_kitchen_name)
def test_merge_kitchens_improved_success(self):
to_kitchen = 'dummy'
from_kitchen = 'merge_success'
mock_api = DKCloudAPIMock(self._cr_config)
rv = DKCloudCommandRunner.merge_kitchens_improved(mock_api, from_kitchen, to_kitchen)
self.assertTrue('1 files changed' in rv.get_message())
self.assertTrue('1 insertions(+)' in rv.get_message())
self.assertTrue('0 deletions(-)' in rv.get_message())
# Check that the merge returned the diffs as expected.
def test_merge_resolution(self):
self.assertTrue(True)
base_kitchen = 'CLI-Top'
parent_kitchen = 'merge_resolve_parent'
parent_kitchen = self._add_my_guid(parent_kitchen)
child_kitchen = 'merge_resolve_child'
child_kitchen = self._add_my_guid(child_kitchen)
recipe = 'simple'
conflicted_file = 'conflicted-file.txt'
temp_dir_child, kitchen_dir_child, recipe_dir_child = self._make_recipe_dir(recipe, child_kitchen)
temp_dir_parent, kitchen_dir_parent, recipe_dir_parent = self._make_recipe_dir(recipe, parent_kitchen)
setup = True
cleanup = True
if setup:
rc = DKCloudCommandRunner.delete_kitchen(self._api, child_kitchen)
rc = DKCloudCommandRunner.delete_kitchen(self._api, parent_kitchen)
rc = DKCloudCommandRunner.create_kitchen(self._api, parent_kitchen=base_kitchen, new_kitchen=parent_kitchen)
self.assertTrue(rc.ok())
rc = DKCloudCommandRunner.create_kitchen(self._api, parent_kitchen=parent_kitchen, new_kitchen=child_kitchen)
self.assertTrue(rc.ok())
os.chdir(recipe_dir_parent)
# parent_file = os.path.join(recipe, conflicted_file)
with open(conflicted_file, 'w') as f:
f.write('line1\nparent\nline2\n')
rc = DKCloudCommandRunner.add_file(self._api, parent_kitchen, recipe, 'adding %s to %s' % (conflicted_file, parent_kitchen), conflicted_file)
self.assertTrue(rc.ok())
os.chdir(recipe_dir_child)
# child_file = os.path.join(recipe, conflicted_file)
with open(conflicted_file, 'w') as f:
f.write('line1\nchild\nline2\n')
rc = DKCloudCommandRunner.add_file(self._api, child_kitchen, recipe, 'adding %s to %s' % (conflicted_file, child_kitchen), conflicted_file)
self.assertTrue(rc.ok())
# Make sure we are in the recipe folder before merging
os.chdir(recipe_dir_child)
rc = DKCloudCommandRunner.merge_kitchens_improved(self._api, child_kitchen, parent_kitchen)
self.assertTrue('1 conflict found' in rc.get_message())
self.assertTrue('simple/conflicted-file.txt' in rc.get_message())
rc = DKCloudCommandRunner.merge_kitchens_improved(self._api, child_kitchen, parent_kitchen)
self.assertTrue('Unresolved conflicts' in rc.get_message())
self.assertTrue('conflicted-file.txt' in rc.get_message())
rc = DKCloudCommandRunner.get_unresolved_conflicts(recipe, recipe_dir_child)
self.assertTrue(rc.ok())
self.assertTrue('Unresolved conflicts' in rc.get_message())
rc = DKCloudCommandRunner.resolve_conflict(conflicted_file)
self.assertTrue(rc.ok())
self.assertTrue('Conflict resolved' in rc.get_message())
rc = DKCloudCommandRunner.get_unresolved_conflicts(recipe, recipe_dir_child)
self.assertTrue(rc.ok())
self.assertTrue('No conflicts found' in rc.get_message())
rc = DKCloudCommandRunner.merge_kitchens_improved(self._api, child_kitchen, parent_kitchen)
self.assertTrue('Unresolved conflicts' not in rc.get_message())
if cleanup:
DKCloudCommandRunner.delete_kitchen(self._api, child_kitchen)
DKCloudCommandRunner.delete_kitchen(self._api, parent_kitchen)
shutil.rmtree(temp_dir_child, ignore_errors=True)
shutil.rmtree(temp_dir_parent, ignore_errors=True)
def test_merge_kitchens_improved_conflicts(self):
to_kitchen_pickle = 'dummy'
from_kitchen_pickle = 'merge_conflicts'
mock_api = DKCloudAPIMock(self._cr_config)
# This one tests just a print
rv = DKCloudCommandRunner.merge_kitchens_improved(mock_api, from_kitchen_pickle, to_kitchen_pickle)
self.assertTrue('1 conflict found' in rv.get_message())
self.assertTrue('conflicted-file.txt' in rv.get_message())
# Do the merge and put it down into a folder
parent_kitchen = 'merge-parent_ut_6d887fc6'
child_kitchen = 'merge-child_ut_6d887fc6'
recipe_name = 'simple'
temp_dir, kitchen_dir, recipe_dir = self._make_recipe_dir(recipe_name, child_kitchen)
rv = DKCloudCommandRunner.merge_kitchens_improved(mock_api, child_kitchen, parent_kitchen)
msg = rv.get_message()
self.assertTrue('1 conflict found' in msg)
self.assertTrue('conflicted-file.txt' in msg)
with open(os.path.join(recipe_dir, 'conflicted-file.txt'), 'r') as conflicted_file:
contents = conflicted_file.read()
self.assertTrue('<<<<<<< your conflicted-file.txt' in contents)
self.assertTrue('>>>>>>> their conflicted-file.txt' in contents)
self.assertTrue('=======' in contents)
# Now make sure it tells use there are unresolved conflicts the next time we try and merge
rv = DKCloudCommandRunner.merge_kitchens_improved(mock_api, child_kitchen, parent_kitchen)
self.assertTrue('Unresolved conflicts' in rv.rc['message'])
self.assertTrue('conflicted-file.txt' in rv.rc['message'])
# Resolve the conflict
rc = DKCloudCommandRunner.resolve_conflict('conflicted-file.txt')
self.assertTrue(rc.ok())
# Now the conflict should be gone, and we should be back to the found conflicts and written to disk message.
rc = DKCloudCommandRunner.merge_kitchens_improved(mock_api, child_kitchen, parent_kitchen)
msg = rc.get_message()
self.assertTrue('Unresolved conflicts' not in msg)
self.assertTrue('1 conflict found' in msg)
self.assertTrue('conflicted-file.txt' in msg)
if temp_dir is not None and temp_dir != '/':
shutil.rmtree(temp_dir)
# # test helpers in DKCloudCommandRunner.py
# def test__print_merge_patches_1(self):
# merge_conflicts = pickle.loads(open("files/merge_conflicts_1_file.p", "rb").read().replace('\r', ''))
# rs = DKCloudCommandRunner._print_merge_patches(merge_conflicts)
# # look for some strings so you know it worked
# # but don't look for too much so the test breaks if we re-format
# print rs
# self.assertTrue('File' in rs)
# self.assertTrue('parallel-recipe-test/description.json' in rs)
#
# def test__print_merge_patches_multi(self):
# merge_conflicts = pickle.loads(open("files/merge_conflicts_multi_file.p", "rb").read().replace('\r', ''))
# rs = DKCloudCommandRunner._print_merge_patches(merge_conflicts)
# # look for some strings so you know it worked
# # but don't look for too much so the test breaks if we re-format
# print rs
# self.assertTrue('File' in rs)
# self.assertTrue('simple/resources/very_cool.sql' in rs)
# self.assertTrue('parallel-recipe-test/description.json' in rs)
# self.assertTrue('parallel-recipe-test/node1/data_sources/DKDataSource_NoOp.json' in rs)
def test_print_test_results(self):
# good for more than acive
rdict = pickle.loads(open("files/completed_serving_rdict.p", "rb").read().replace('\r', ''))
# rdict = pickle.load(open("files/completed_serving_rdict_eg.p", "rb"))
rs = DKCloudCommandRunner._print_test_results(rdict)
# look for some strings so you know it worked
# but don't look for too much so the test breaks if we re-format
print rs
self.assertTrue('File' in rs)
def test_active_serving_watcher(self):
# setup
parent = 'master'
kitchen = 'test_active_serving_watcher'
kitchen = self._add_my_guid(kitchen)
recipe_name = 'test-everything-recipe'
variation_name = self._get_run_variation()
self._delete_and_clean_kitchen(kitchen)
rv = DKCloudCommandRunner.create_kitchen(self._api, parent, kitchen)
self.assertTrue(rv.ok())
# start watcher
DKActiveServingWatcherSingleton().set_sleep_time(2)
DKActiveServingWatcherSingleton().set_api(self._api)
DKActiveServingWatcherSingleton().set_kitchen(kitchen)
self.assertTrue(DKActiveServingWatcherSingleton().start_watcher())
# cook one
rs = DKCloudCommandRunner.create_order(self._api, kitchen, recipe_name, variation_name)
self.assertTrue(rs.ok())
wait_time = [.1, 1, 3, 3, 3, 3, 9, 18]
found_active_serving = False
wait_generator = (wt for wt in wait_time if found_active_serving is False)
print 'test_active_serving_watcher: found_active_serving, trying ... '
for wt in wait_generator:
time.sleep(wt)
resp1 = DKCloudCommandRunner.orderrun_detail(self._api, kitchen, {'summary': True})
print 'test_active_serving_watcher: found_active_serving is False (%s)' % wt
# print 'got', resp1.get_message()
message = resp1.get_message()
if resp1.ok() and ('OrderRun is Planned' in message or 'OrderRun Completed' in message
or 'OrderRun is Active' in message):
found_active_serving = True
self.assertTrue(found_active_serving)
# cleanup
self._delete_and_clean_kitchen(kitchen)
def test_user_info(self):
rc = DKCloudCommandRunner.user_info(self._api)
self.assertTrue(rc.ok())
def test_order_list(self):
parent_kitchen = 'CLI-Top'
recipe_name = 'parallel-recipe-test'
variation_name = self._get_run_variation_for_recipe(recipe_name)
new_kitchen = 'test_order_list'
new_kitchen = self._add_my_guid(new_kitchen)
self._delete_and_clean_kitchen(new_kitchen)
rs = DKCloudCommandRunner.create_kitchen(self._api, parent_kitchen, new_kitchen)
self.assertTrue(rs.ok())
rs = DKCloudCommandRunner.create_order(self._api, new_kitchen, recipe_name, variation_name)
new_order_id_1 = rs.get_payload()
self.assertTrue(rs.ok())
rs = DKCloudCommandRunner.list_order(self._api, new_kitchen)
output_string = rs.rc['message']
self.assertTrue(new_order_id_1 in output_string)
found_completed_serving = False
wait_time = [.2, .5, .5, .5, 1, 2, 2, 2, 2, 4, 4, 4, 4, 4, 6, 6, 6, 6, 10, 10]
for wt in wait_time:
rs = DKCloudCommandRunner.list_order(self._api, new_kitchen)
output_string = rs.rc['message']
n = output_string.count(new_order_id_1)
if n == 2 and ('OrderRun Completed' in output_string):
found_completed_serving = True
break
time.sleep(wt)
self.assertTrue(found_completed_serving)
# cleanup
self._delete_and_clean_kitchen(new_kitchen)
def test_order_list_for_repeating_order(self):
parent_kitchen = 'master'
recipe_name = 'parallel-recipe-test'
variation_name = self._get_run_variation_for_recipe(recipe_name, repeater=True)
new_kitchen = 'test_order_list_for_repeating_order'
new_kitchen = self._add_my_guid(new_kitchen)
self._delete_and_clean_kitchen(new_kitchen)
rs = DKCloudCommandRunner.create_kitchen(self._api, parent_kitchen, new_kitchen)
self.assertTrue(rs.ok())
rs = DKCloudCommandRunner.create_order(self._api, new_kitchen, recipe_name, variation_name)
new_order_id_1 = rs.get_payload()
self.assertTrue(rs.ok())
found_completed_serving = False
wait_time = [10,61,61,61,61]
for wt in wait_time:
rs = DKCloudCommandRunner.list_order(self._api, new_kitchen)
output_string = rs.rc['message']
n = output_string.count(new_order_id_1)
if n >= 3 and ('OrderRun Completed' in output_string):
found_completed_serving = True
break
time.sleep(wt)
self.assertTrue(found_completed_serving)
# cleanup
self._delete_and_clean_kitchen(new_kitchen)
def test_order_list_with_filters(self):
parent_kitchen = 'CLI-Top'
# Don't use a guid for this. Don't
kitchen = self._add_my_guid('test_order_list_with_filters')
recipe1 = 'parallel-recipe-test'
recipe1_variation = self._get_run_variation_for_recipe(recipe1)
recipe2 = 'simple'
recipe2_variation = 'simple-variation-now'
setup = True
if setup:
self._delete_and_clean_kitchen(kitchen)
rv = DKCloudCommandRunner.create_kitchen(self._api, parent_kitchen, kitchen)
self.assertTrue(rv.ok())
rv = DKCloudCommandRunner.create_order(self._api, kitchen, recipe1, recipe1_variation)
self.assertTrue(rv.ok())
first_order = rv.get_payload()
rv = DKCloudCommandRunner.create_order(self._api, kitchen, recipe1, recipe1_variation)
self.assertTrue(rv.ok())
time.sleep(20)
rs = DKCloudCommandRunner.list_order(self._api, kitchen)
self.assertTrue(rs.ok())
message = rs.rc['message']
self.assertTrue(
'OrderRun is Planned' in message or 'OrderRun Completed' in message or 'OrderRun is Active' in message)
# cleanup
self._delete_and_clean_kitchen(kitchen)
def test_orderrun_delete(self):
mock_api = DKCloudAPIMock(self._cr_config)
rs = DKCloudCommandRunner.delete_orderrun(mock_api, 'good')
self.assertTrue(rs.ok())
rs = DKCloudCommandRunner.delete_orderrun(mock_api, 'bad')
self.assertFalse(rs.ok())
def test_kitchen_config(self):
parent_kitchen = 'CLI-Top'
child_kitchen = self._add_my_guid('modify_kitchen_settings_runner')
setup = True
if setup:
self._delete_and_clean_kitchen(child_kitchen)
rv = DKCloudCommandRunner.create_kitchen(self._api, parent_kitchen, child_kitchen)
self.assertTrue(rv.ok())
add = (('newvar1', 'newval1'),)
unset = ('newvar1')
get = ('newvar1')
listall = True
rs = DKCloudCommandRunner.config_kitchen(self._api, child_kitchen, add=add)
self.assertTrue(rs.ok())
payload = rs.get_payload()
self.assertIsNotNone(payload)
message = rs.get_message()
self.assertTrue('newvar1 added' in message)
rs = DKCloudCommandRunner.config_kitchen(self._api, child_kitchen, get=get)
self.assertTrue(rs.ok())
payload = rs.get_payload()
self.assertIsNotNone(payload)
message = rs.get_message()
self.assertTrue(message == 'newval1\n')
rs = DKCloudCommandRunner.config_kitchen(self._api, child_kitchen, unset=unset)
self.assertTrue(rs.ok())
payload = rs.get_payload()
self.assertIsNotNone(payload)
message = rs.get_message()
rs = DKCloudCommandRunner.config_kitchen(self._api, child_kitchen, listall=listall)
self.assertTrue(rs.ok())
payload = rs.get_payload()
self.assertIsNotNone(payload)
message = rs.get_message()
self.assertTrue('newvar1' not in message)
cleanup = False
if cleanup:
self._delete_and_clean_kitchen(child_kitchen)
# helpers ---------------------------------
def _delete_and_clean_kitchen(self, kitchen_name):
DKCloudCommandRunner.delete_kitchen(self._api, kitchen_name)
def _check_no_merge_conflicts(self, resp):
self.assertTrue(str(resp).find('diverged') < 0)
def _get_recipe_file(self, kitchen, recipe_name, file_path, file_name, temp_dir=None):
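        # Fetches the whole recipe into temp_dir (or a fresh temporary kitchen
        # dir) and returns the contents of one file, or None if it is absent.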
delete_temp_dir = td = False
if temp_dir is None:
td, kitchen_dir = self._make_kitchen_dir(kitchen, change_dir=False)
delete_temp_dir = True
else:
kitchen_dir = temp_dir
rs = DKCloudCommandRunner.get_recipe(self._api, kitchen, recipe_name, kitchen_dir)
self.assertTrue(rs.ok())
the_path = os.path.join(kitchen_dir, os.path.join(file_path, file_name))
if os.path.isfile(the_path):
with open(the_path, 'r') as rfile:
rfile.seek(0)
the_file = rfile.read()
rc = the_file
else:
rc = None
if delete_temp_dir is True:
shutil.rmtree(td, ignore_errors=True)
return rc
def _get_recipe(self, kitchen, recipe):
rs = DKCloudCommandRunner.get_recipe(self._api, kitchen, recipe)
self.assertTrue(rs.ok())
return True
if __name__ == '__main__':
unittest.main()
| agpl-3.0 | -2,458,460,958,913,837,000 | 42.685094 | 300 | 0.629155 | false |
endlessm/chromium-browser | third_party/angle/third_party/VK-GL-CTS/src/scripts/caselist_diff.py | 6 | 15197 | # -*- coding: utf-8 -*-
#-------------------------------------------------------------------------
# drawElements Quality Program utilities
# --------------------------------------
#
# Copyright 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#-------------------------------------------------------------------------
import sys
RENAME_LIST_2011_1_2011_2 = [
("dEQP-GLES2.functional.shaders.random.basic_expressions.*", "dEQP-GLES2.functional.shaders.random.basic_expression."),
("dEQP-GLES2.functional.shaders.random.scalar_conversions.*", "dEQP-GLES2.functional.shaders.random.scalar_conversion."),
("dEQP-GLES2.functional.fbo.render.color_clears_*", "dEQP-GLES2.functional.fbo.render.color_clear."),
("dEQP-GLES2.functional.fbo.render.intersecting_quads_*", "dEQP-GLES2.functional.fbo.render.depth."),
("dEQP-GLES2.functional.fbo.render.mix_*", "dEQP-GLES2.functional.fbo.render.color.mix_"),
("dEQP-GLES2.functional.fbo.render.blend_*", "dEQP-GLES2.functional.fbo.render.color.blend_"),
("dEQP-GLES2.functional.fbo.render.shared_colorbuffer_clears_*", "dEQP-GLES2.functional.fbo.render.shared_colorbuffer_clear."),
("dEQP-GLES2.functional.fbo.render.shared_colorbuffer_*", "dEQP-GLES2.functional.fbo.render.shared_colorbuffer."),
("dEQP-GLES2.functional.fbo.render.shared_depthbuffer_*", "dEQP-GLES2.functional.fbo.render.shared_depthbuffer."),
("dEQP-GLES2.functional.fbo.render.texsubimage_*", "dEQP-GLES2.functional.fbo.render.texsubimage."),
("dEQP-GLES2.functional.fbo.render.recreate_colorbuffer_*", "dEQP-GLES2.functional.fbo.render.recreate_colorbuffer.no_rebind_"),
("dEQP-GLES2.functional.fbo.render.recreate_depthbuffer_*", "dEQP-GLES2.functional.fbo.render.recreate_depthbuffer.no_rebind_"),
("dEQP-GLES2.functional.fbo.render.resize_*", "dEQP-GLES2.functional.fbo.render.resize.")
]
RENAME_LIST_2011_2_2011_3 = [
("dEQP-GLES2.usecases.ui.src_over_linear_1_batched", "dEQP-GLES2.usecases.ui.src_over_linear_batched_1"),
("dEQP-GLES2.usecases.ui.src_over_linear_2_batched", "dEQP-GLES2.usecases.ui.src_over_linear_batched_2"),
("dEQP-GLES2.usecases.ui.src_over_linear_4_batched", "dEQP-GLES2.usecases.ui.src_over_linear_batched_4"),
("dEQP-GLES2.usecases.ui.src_over_nearest_1_batched", "dEQP-GLES2.usecases.ui.src_over_nearest_batched_1"),
("dEQP-GLES2.usecases.ui.src_over_nearest_2_batched", "dEQP-GLES2.usecases.ui.src_over_nearest_batched_2"),
("dEQP-GLES2.usecases.ui.src_over_nearest_4_batched", "dEQP-GLES2.usecases.ui.src_over_nearest_batched_4"),
("dEQP-GLES2.usecases.ui.premultiplied_src_over_linear_1_batched", "dEQP-GLES2.usecases.ui.premultiplied_src_over_linear_batched_1"),
("dEQP-GLES2.usecases.ui.premultiplied_src_over_linear_2_batched", "dEQP-GLES2.usecases.ui.premultiplied_src_over_linear_batched_2"),
("dEQP-GLES2.usecases.ui.premultiplied_src_over_linear_4_batched", "dEQP-GLES2.usecases.ui.premultiplied_src_over_linear_batched_4"),
("dEQP-GLES2.usecases.ui.premultiplied_src_over_nearest_1_batched", "dEQP-GLES2.usecases.ui.premultiplied_src_over_nearest_batched_1"),
("dEQP-GLES2.usecases.ui.premultiplied_src_over_nearest_2_batched", "dEQP-GLES2.usecases.ui.premultiplied_src_over_nearest_batched_2"),
("dEQP-GLES2.usecases.ui.premultiplied_src_over_nearest_4_batched", "dEQP-GLES2.usecases.ui.premultiplied_src_over_nearest_batched_4"),
("dEQP-GLES2.usecases.ui.no_blend_linear_1_batched", "dEQP-GLES2.usecases.ui.no_blend_linear_batched_1"),
("dEQP-GLES2.usecases.ui.no_blend_linear_2_batched", "dEQP-GLES2.usecases.ui.no_blend_linear_batched_2"),
("dEQP-GLES2.usecases.ui.no_blend_linear_4_batched", "dEQP-GLES2.usecases.ui.no_blend_linear_batched_4"),
("dEQP-GLES2.usecases.ui.no_blend_nearest_1_batched", "dEQP-GLES2.usecases.ui.no_blend_nearest_batched_1"),
("dEQP-GLES2.usecases.ui.no_blend_nearest_2_batched", "dEQP-GLES2.usecases.ui.no_blend_nearest_batched_2"),
("dEQP-GLES2.usecases.ui.no_blend_nearest_4_batched", "dEQP-GLES2.usecases.ui.no_blend_nearest_batched_4")
]
RENAME_LIST_2011_3_2011_4 = []
RENAME_LIST_2011_4_2012_1 = [
("dEQP-GLES2.functional.vertex_arrays.multiple_attributes.output_types.*", "dEQP-GLES2.functional.vertex_arrays.multiple_attributes.input_types."),
]
RENAME_LIST_2012_2_2012_3 = [
("dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_float_vertex", "dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_float_float_vertex"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_float_fragment", "dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_float_float_fragment"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_float_vertex", "dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_float_float_vertex"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_float_fragment", "dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_float_float_fragment"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec2_vertex", "dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec2_float_vertex"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec2_fragment", "dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec2_float_fragment"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec2_vertex", "dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec2_float_vertex"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec2_fragment", "dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec2_float_fragment"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec3_vertex", "dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec3_float_vertex"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec3_fragment", "dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec3_float_fragment"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec3_vertex", "dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec3_float_vertex"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec3_fragment", "dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec3_float_fragment"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec4_vertex", "dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec4_float_vertex"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec4_fragment", "dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec4_float_fragment"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec4_vertex", "dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec4_float_vertex"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec4_fragment", "dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec4_float_fragment"),
("dEQP-GLES2.functional.negative_api.texture.copyteximage2d_unequal_width_height_cube", "dEQP-GLES2.functional.negative_api.texture.copyteximage2d_inequal_width_height_cube"),
("dEQP-GLES2.functional.negative_api.texture.teximage2d_unequal_width_height_cube", "dEQP-GLES2.functional.negative_api.texture.teximage2d_inequal_width_height_cube"),
("dEQP-GLES2.functional.negative_api.vertex_array.draw_arrays", "dEQP-GLES2.functional.negative_api.vertex_array.draw_arrays_invalid_program"),
("dEQP-GLES2.functional.negative_api.vertex_array.draw_elemens", "dEQP-GLES2.functional.negative_api.vertex_array.draw_elements_invalid_program"),
("dEQP-GLES2.functional.negative_api.shader.attach_shader_invalid_object", "dEQP-GLES2.functional.negative_api.shader.attach_shader"),
("dEQP-GLES2.functional.negative_api.shader.detach_shader_invalid_object", "dEQP-GLES2.functional.negative_api.shader.detach_shader"),
("dEQP-GLES2.usecases.shadow.shadowmap.1sample.1_vertex_lights_no_texture", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_1sample.1_vertex_lights_no_texture"),
("dEQP-GLES2.usecases.shadow.shadowmap.1sample.2_vertex_lights_no_texture", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_1sample.2_vertex_lights_no_texture"),
("dEQP-GLES2.usecases.shadow.shadowmap.1sample.4_vertex_lights_no_texture", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_1sample.4_vertex_lights_no_texture"),
("dEQP-GLES2.usecases.shadow.shadowmap.1sample.1_vertex_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_1sample.1_vertex_lights"),
("dEQP-GLES2.usecases.shadow.shadowmap.1sample.2_vertex_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_1sample.2_vertex_lights"),
("dEQP-GLES2.usecases.shadow.shadowmap.1sample.4_vertex_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_1sample.4_vertex_lights"),
("dEQP-GLES2.usecases.shadow.shadowmap.1sample.1_fragment_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_1sample.1_fragment_lights"),
("dEQP-GLES2.usecases.shadow.shadowmap.1sample.2_fragment_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_1sample.2_fragment_lights"),
("dEQP-GLES2.usecases.shadow.shadowmap.1sample.4_fragment_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_1sample.4_fragment_lights"),
("dEQP-GLES2.usecases.shadow.shadowmap.4sample.1_vertex_lights_no_texture", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_4sample.1_vertex_lights_no_texture"),
("dEQP-GLES2.usecases.shadow.shadowmap.4sample.2_vertex_lights_no_texture", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_4sample.2_vertex_lights_no_texture"),
("dEQP-GLES2.usecases.shadow.shadowmap.4sample.4_vertex_lights_no_texture", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_4sample.4_vertex_lights_no_texture"),
("dEQP-GLES2.usecases.shadow.shadowmap.4sample.1_vertex_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_4sample.1_vertex_lights"),
("dEQP-GLES2.usecases.shadow.shadowmap.4sample.2_vertex_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_4sample.2_vertex_lights"),
("dEQP-GLES2.usecases.shadow.shadowmap.4sample.4_vertex_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_4sample.4_vertex_lights"),
("dEQP-GLES2.usecases.shadow.shadowmap.4sample.1_fragment_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_4sample.1_fragment_lights"),
("dEQP-GLES2.usecases.shadow.shadowmap.4sample.2_fragment_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_4sample.2_fragment_lights"),
("dEQP-GLES2.usecases.shadow.shadowmap.4sample.4_fragment_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_4sample.4_fragment_lights"),
("dEQP-GLES2.usecases.shadow.shadowmap.16sample.1_vertex_lights_no_texture", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_16sample.1_vertex_lights_no_texture"),
("dEQP-GLES2.usecases.shadow.shadowmap.16sample.2_vertex_lights_no_texture", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_16sample.2_vertex_lights_no_texture"),
("dEQP-GLES2.usecases.shadow.shadowmap.16sample.4_vertex_lights_no_texture", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_16sample.4_vertex_lights_no_texture"),
("dEQP-GLES2.usecases.shadow.shadowmap.16sample.1_vertex_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_16sample.1_vertex_lights"),
("dEQP-GLES2.usecases.shadow.shadowmap.16sample.2_vertex_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_16sample.2_vertex_lights"),
("dEQP-GLES2.usecases.shadow.shadowmap.16sample.4_vertex_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_16sample.4_vertex_lights"),
("dEQP-GLES2.usecases.shadow.shadowmap.16sample.1_fragment_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_16sample.1_fragment_lights"),
("dEQP-GLES2.usecases.shadow.shadowmap.16sample.2_fragment_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_16sample.2_fragment_lights"),
("dEQP-GLES2.usecases.shadow.shadowmap.16sample.4_fragment_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_16sample.4_fragment_lights")
]
RENAME_LIST_2012_3_2012_4 = [
("dEQP-GLES2.functional.depth.*", "dEQP-GLES2.functional.fragment_ops.depth."),
("dEQP-GLES2.functional.stencil.*", "dEQP-GLES2.functional.fragment_ops.stencil.")
]
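# Caselist files are expected to contain one "TEST: <full.case.name>" line per
# test case, e.g. "TEST: dEQP-GLES2.functional.fragment_ops.depth.cmp_always"
# (name shown is illustrative); all other lines are ignored.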
def readCaseList (filename):
f = open(filename, 'r')
cases = []
for line in f:
if line[0:5] == "TEST:":
cases.append(line[6:].strip())
f.close()
return cases
def isWildcardPattern (pattern):
return pattern[-1:] == '*'
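# Rename semantics: a source pattern ending in '*' renames by prefix, e.g.
# ("dEQP-GLES2.functional.depth.*", "dEQP-GLES2.functional.fragment_ops.depth.")
# maps dEQP-GLES2.functional.depth.X to dEQP-GLES2.functional.fragment_ops.depth.X,
# while non-wildcard entries rename exactly one case.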
# returns (cases, renames)
def renameCases (cases, rename):
renamedCases = []
renamedSet = set()
renames = []
for case in cases:
renamed = None
for src, dst in rename:
if isWildcardPattern(src) and case[:len(src)-1] == src[:-1]:
renamed = dst + case[len(src)-1:]
break
elif case == src:
renamed = dst
break
if renamed != None:
renames.append((case, renamed))
case = renamed
# It is possible that some later case is renamed to case already seen in the list
assert not case in renamedSet or renamed != None
if case not in renamedSet:
renamedCases.append(case)
renamedSet.add(case)
return (renamedCases, renames)
# returns (added, removed) lists
def diffCaseLists (old, new):
added = []
removed = []
oldSet = set(old)
newSet = set(new)
# build added list
for case in new:
if not case in oldSet:
added.append(case)
# build removed set
for case in old:
if not case in newSet:
removed.append(case)
return (added, removed)
if __name__ == "__main__":
if len(sys.argv) != 3:
print("%s [old caselist] [new caselist]" % sys.argv[0])
sys.exit(-1)
oldCases = readCaseList(sys.argv[1])
newCases = readCaseList(sys.argv[2])
rename = RENAME_LIST_2012_3_2012_4
renamedCases, renameList = renameCases(oldCases, rename)
added, removed = diffCaseLists(renamedCases, newCases)
# for src, dst in rename:
# print("RENAME: %s -> %s" % (src, dst))
for case in added:
print("ADD: %s" % case)
for src, dst in renameList:
print("RENAME: %s -> %s" % (src, dst))
for case in removed:
print("REMOVE: %s" % case)
| bsd-3-clause | -6,694,891,574,763,381,000 | 72.415459 | 176 | 0.719813 | false |
witlox/elasticluster | elasticluster/providers/ec2_boto.py | 1 | 25751 | #
# Copyright (C) 2013, 2018 S3IT, University of Zurich
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
__author__ = ', '.join([
'Nicolas Baer <[email protected]>',
'Antonio Messina <[email protected]>',
'Riccardo Murri <[email protected]>',
])
# System imports
import hashlib
import os
import urllib
import threading
import time
from warnings import warn
# External modules
import boto
import boto.ec2
import boto.vpc
from Crypto.PublicKey import RSA
from paramiko import DSSKey, RSAKey, PasswordRequiredException
from paramiko.ssh_exception import SSHException
# Elasticluster imports
from elasticluster import log
from elasticluster.providers import AbstractCloudProvider
from elasticluster.exceptions import VpcError, SecurityGroupError, \
SubnetError, KeypairError, ImageError, InstanceError, InstanceNotFoundError, ClusterError
class BotoCloudProvider(AbstractCloudProvider):
"""This implementation of
:py:class:`elasticluster.providers.AbstractCloudProvider` uses the boto
ec2 interface to connect to ec2 compliant clouds and manage instances.
Please check https://github.com/boto/boto for further information about
the supported cloud platforms.
:param str ec2_url: url to connect to cloud web service
:param str ec2_region: region identifier
:param str ec2_access_key: access key of the user account
:param str ec2_secret_key: secret key of the user account
:param str storage_path: path to store temporary data
:param bool request_floating_ip: Whether ip are assigned automatically
`True` or floating ips have to be
assigned manually `False`
:param str instance_profile: Instance profile with IAM role permissions
:param float price: Spot instance price (if 0, do not use spot instances);
used as a default in `start_instance`:py:meth
    :param int timeout: Timeout waiting for spot instances (only used if price > 0);
used as a default in `start_instance`:py:meth
"""
__node_start_lock = threading.Lock() # lock used for node startup
# interval (in seconds) for polling the cloud provider,
# e.g., when requesting spot instances
POLL_INTERVAL = 10
def __init__(self, ec2_url, ec2_region, ec2_access_key=None,
ec2_secret_key=None, vpc=None, storage_path=None,
request_floating_ip=False, instance_profile=None,
price=0.0, timeout=0):
self._url = ec2_url
self._access_key = ec2_access_key
self._secret_key = ec2_secret_key
self._vpc = vpc
self._instance_profile = instance_profile
self.request_floating_ip = request_floating_ip
# provide defaults for like-named arguments in `.start_instance`
self.price = price
self.timeout = timeout
# read all parameters from url
proto, opaqueurl = urllib.splittype(ec2_url)
self._host, self._ec2path = urllib.splithost(opaqueurl)
self._ec2host, port = urllib.splitport(self._host)
if port:
port = int(port)
self._ec2port = port
if proto == "https":
self._secure = True
else:
self._secure = False
self._region_name = ec2_region
# will be initialized upon first connect
self._ec2_connection = None
self._vpc_connection = None
self._vpc_id = None
self._instances = {}
self._cached_instances = []
self._images = None
def _connect(self):
"""
Connect to the EC2 cloud provider.
:return: :py:class:`boto.ec2.connection.EC2Connection`
:raises: Generic exception on error
"""
# check for existing connection
if self._ec2_connection:
return self._ec2_connection
try:
log.debug("Connecting to EC2 endpoint %s", self._ec2host)
# connect to webservice
ec2_connection = boto.ec2.connect_to_region(
self._region_name,
aws_access_key_id=self._access_key,
aws_secret_access_key=self._secret_key,
is_secure=self._secure,
host=self._ec2host,
port=self._ec2port,
path=self._ec2path,
)
log.debug("EC2 connection has been successful.")
if not self._vpc:
vpc_connection = None
self._vpc_id = None
else:
vpc_connection, self._vpc_id = self._find_vpc_by_name(self._vpc)
except Exception as err:
log.error("Error connecting to EC2: %s", err)
raise
self._ec2_connection, self._vpc_connection = (
ec2_connection, vpc_connection)
return self._ec2_connection
def _find_vpc_by_name(self, vpc_name):
vpc_connection = boto.vpc.connect_to_region(
self._region_name,
aws_access_key_id=self._access_key,
aws_secret_access_key=self._secret_key,
is_secure=self._secure,
host=self._ec2host,
port=self._ec2port,
path=self._ec2path,
)
log.debug("VPC connection has been successful.")
for vpc in vpc_connection.get_all_vpcs():
matches = [vpc.id]
if 'Name' in vpc.tags:
matches.append(vpc.tags['Name'])
if vpc_name in matches:
vpc_id = vpc.id
if vpc_name != vpc_id:
# then `vpc_name` is the VPC name
log.debug("VPC `%s` has ID `%s`", vpc_name, vpc_id)
break
else:
raise VpcError('Cannot find VPC `{0}`.'.format(vpc_name))
return (vpc_connection, vpc_id)
def start_instance(self, key_name, public_key_path, private_key_path,
security_group, flavor, image_id, image_userdata,
username=None, node_name=None, network_ids=None,
price=None, timeout=None,
boot_disk_device=None,
boot_disk_size=None,
boot_disk_type=None,
boot_disk_iops=None,
placement_group=None,
**kwargs):
"""Starts a new instance on the cloud using the given properties.
The following tasks are done to start an instance:
* establish a connection to the cloud web service
* check ssh keypair and upload it if it does not yet exist. This is
a locked process, since this function might be called in multiple
threads and we only want the key to be stored once.
* check if the security group exists
* run the instance with the given properties
:param str key_name: name of the ssh key to connect
:param str public_key_path: path to ssh public key
:param str private_key_path: path to ssh private key
:param str security_group: firewall rule definition to apply on the
instance
:param str flavor: machine type to use for the instance
:param str image_id: image type (os) to use for the instance
:param str image_userdata: command to execute after startup
:param str username: username for the given ssh key, default None
:param float price: Spot instance price (if 0, do not use spot instances).
        :param int timeout: Timeout (in seconds) waiting for spot instances;
only used if price > 0.
:param str boot_disk_device: Root volume device path if not /dev/sda1
:param str boot_disk_size: Target size, in GiB, for the root volume
:param str boot_disk_type: Type of root volume (standard, gp2, io1)
:param str boot_disk_iops: Provisioned IOPS for the root volume
:param str placement_group: Enable low-latency networking between
compute nodes.
:return: str - instance id of the started instance
"""
connection = self._connect()
log.debug("Checking keypair `%s`.", key_name)
# the `_check_keypair` method has to be called within a lock,
# since it will upload the key if it does not exist and if this
# happens for every node at the same time ec2 will throw an error
# message (see issue #79)
with BotoCloudProvider.__node_start_lock:
self._check_keypair(key_name, public_key_path, private_key_path)
log.debug("Checking security group `%s`.", security_group)
security_group_id = self._check_security_group(security_group)
# image_id = self._find_image_id(image_id)
if network_ids:
interfaces = []
for subnet in network_ids.split(','):
subnet_id = self._check_subnet(subnet)
interfaces.append(
boto.ec2.networkinterface.NetworkInterfaceSpecification(
subnet_id=subnet_id, groups=[security_group_id],
associate_public_ip_address=self.request_floating_ip))
interfaces = boto.ec2.networkinterface.NetworkInterfaceCollection(*interfaces)
security_groups = []
else:
interfaces = None
security_groups = [security_group]
# get defaults for `price` and `timeout` from class instance
if price is None:
price = self.price
if timeout is None:
timeout = self.timeout
if boot_disk_size:
dev_root = boto.ec2.blockdevicemapping.BlockDeviceType()
dev_root.size = int(boot_disk_size)
dev_root.delete_on_termination = True
if boot_disk_type:
dev_root.volume_type = boot_disk_type
if boot_disk_iops:
dev_root.iops = int(boot_disk_iops)
bdm = boto.ec2.blockdevicemapping.BlockDeviceMapping()
dev_name = boot_disk_device if boot_disk_device else "/dev/sda1"
bdm[dev_name] = dev_root
else:
bdm = None
try:
#start spot instance if bid is specified
if price:
log.info("Requesting spot instance with price `%s` ...", price)
request = connection.request_spot_instances(
price,image_id, key_name=key_name, security_groups=security_groups,
instance_type=flavor, user_data=image_userdata,
network_interfaces=interfaces,
placement_group=placement_group,
block_device_map=bdm,
instance_profile_name=self._instance_profile)[-1]
                # wait until spot request is fulfilled (will wait
# forever if no timeout is given)
start_time = time.time()
timeout = (float(timeout) if timeout else 0)
log.info("Waiting for spot instance (will time out in %d seconds) ...", timeout)
while request.status.code != 'fulfilled':
if timeout and time.time()-start_time > timeout:
request.cancel()
raise RuntimeError('spot instance timed out')
time.sleep(self.POLL_INTERVAL)
# update request status
request=connection.get_all_spot_instance_requests(request_ids=request.id)[-1]
else:
reservation = connection.run_instances(
image_id, key_name=key_name, security_groups=security_groups,
instance_type=flavor, user_data=image_userdata,
network_interfaces=interfaces,
placement_group=placement_group,
block_device_map=bdm,
instance_profile_name=self._instance_profile)
        except Exception as ex:
            log.error("Error starting instance: %s", ex)
            if "TooManyInstances" in str(ex):
                raise ClusterError(ex)
            else:
                raise InstanceError(ex)
if price:
vm = connection.get_only_instances(instance_ids=[request.instance_id])[-1]
else:
vm = reservation.instances[-1]
vm.add_tag("Name", node_name)
# cache instance object locally for faster access later on
self._instances[vm.id] = vm
return vm.id
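    # Hypothetical call sketch (argument values are placeholders); with the
    # default price=0 this takes the on-demand `run_instances` branch above:
    #
    #   vm_id = provider.start_instance(
    #       key_name='elasticluster', public_key_path='~/.ssh/id_rsa.pub',
    #       private_key_path='~/.ssh/id_rsa', security_group='default',
    #       flavor='t2.micro', image_id='ami-12345678', image_userdata='',
    #       node_name='compute001')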
def stop_instance(self, instance_id):
"""Stops the instance gracefully.
:param str instance_id: instance identifier
"""
instance = self._load_instance(instance_id)
instance.terminate()
del self._instances[instance_id]
def get_ips(self, instance_id):
"""Retrieves the private and public ip addresses for a given instance.
:return: list (ips)
"""
        instance = self._load_instance(instance_id)
        IPs = [ip for ip in (instance.private_ip_address, instance.ip_address) if ip]
# We also need to check if there is any floating IP associated
if self.request_floating_ip and not self._vpc:
# We need to list the floating IPs for this instance
floating_ips = [ip for ip in self._ec2_connection.get_all_addresses() if ip.instance_id == instance.id]
if not floating_ips:
log.debug("Public ip address has to be assigned through "
"elasticluster.")
ip = self._allocate_address(instance)
# This is probably the preferred IP we want to use
IPs.insert(0, ip)
else:
IPs = [ip.public_ip for ip in floating_ips] + IPs
return list(set(IPs))
def is_instance_running(self, instance_id):
"""Checks if the instance is up and running.
:param str instance_id: instance identifier
:return: bool - True if running, False otherwise
"""
instance = self._load_instance(instance_id)
if instance.update() == "running":
# If the instance is up&running, ensure it has an IP
# address.
if not instance.ip_address and self.request_floating_ip:
log.debug("Public ip address has to be assigned through "
"elasticluster.")
self._allocate_address(instance)
instance.update()
return True
else:
return False
def _allocate_address(self, instance):
"""Allocates a free public ip address to the given instance
:param instance: instance to assign address to
:type instance: py:class:`boto.ec2.instance.Reservation`
:return: public ip address
"""
connection = self._connect()
free_addresses = [ ip for ip in connection.get_all_addresses() if not ip.instance_id]
if not free_addresses:
try:
                free_addresses.append(connection.allocate_address())
except Exception as ex:
log.error("Unable to allocate a public IP address to instance `%s`",
instance.id)
return None
try:
address = free_addresses.pop()
instance.use_ip(address)
return address.public_ip
except Exception as ex:
log.error("Unable to associate IP address %s to instance `%s`",
address, instance.id)
return None
def _load_instance(self, instance_id):
"""
Return instance with the given id.
For performance reasons, the instance ID is first searched for in the
collection of VM instances started by ElastiCluster
(`self._instances`), then in the list of all instances known to the
cloud provider at the time of the last update
(`self._cached_instances`), and finally the cloud provider is directly
queried.
:param str instance_id: instance identifier
:return: py:class:`boto.ec2.instance.Reservation` - instance
        :raises: `InstanceNotFoundError` if the instance can't
            be found in the local cache or in the cloud.
"""
# if instance is known, return it
if instance_id in self._instances:
return self._instances[instance_id]
# else, check (cached) list from provider
if instance_id not in self._cached_instances:
self._cached_instances = self._build_cached_instances()
if instance_id in self._cached_instances:
inst = self._cached_instances[instance_id]
self._instances[instance_id] = inst
return inst
# If we reached this point, the instance was not found neither
# in the caches nor on the website.
raise InstanceNotFoundError(
"Instance `{instance_id}` not found"
.format(instance_id=instance_id))
def _build_cached_instances(self):
"""
Build lookup table of VM instances known to the cloud provider.
The returned dictionary links VM id with the actual VM object.
"""
connection = self._connect()
reservations = connection.get_all_reservations()
cached_instances = {}
for rs in reservations:
for vm in rs.instances:
cached_instances[vm.id] = vm
return cached_instances
def _check_keypair(self, name, public_key_path, private_key_path):
"""First checks if the keypair is valid, then checks if the keypair
        is registered on the cloud. If not, the keypair is added to the
        user's ssh keys.
:param str name: name of the ssh key
:param str public_key_path: path to the ssh public key file
:param str private_key_path: path to the ssh private key file
:raises: `KeypairError` if key is not a valid RSA or DSA key,
the key could not be uploaded or the fingerprint does not
match to the one uploaded to the cloud.
"""
connection = self._connect()
keypairs = connection.get_all_key_pairs()
keypairs = dict((k.name, k) for k in keypairs)
# decide if dsa or rsa key is provided
pkey = None
is_dsa_key = False
try:
pkey = DSSKey.from_private_key_file(private_key_path)
is_dsa_key = True
except PasswordRequiredException:
warn("Unable to check key file `{0}` because it is encrypted with a "
"password. Please, ensure that you added it to the SSH agent "
"with `ssh-add {1}`"
.format(private_key_path, private_key_path))
except SSHException:
try:
pkey = RSAKey.from_private_key_file(private_key_path)
except PasswordRequiredException:
warn("Unable to check key file `{0}` because it is encrypted with a "
"password. Please, ensure that you added it to the SSH agent "
"with `ssh-add {1}`"
.format(private_key_path, private_key_path))
except SSHException:
raise KeypairError('File `%s` is neither a valid DSA key '
'or RSA key.' % private_key_path)
# create keys that don't exist yet
if name not in keypairs:
log.warning(
"Keypair `%s` not found on resource `%s`, Creating a new one",
name, self._url)
with open(os.path.expanduser(public_key_path)) as f:
key_material = f.read()
try:
# check for DSA on amazon
if "amazon" in self._ec2host and is_dsa_key:
log.error(
"Apparently, amazon does not support DSA keys. "
"Please specify a valid RSA key.")
raise KeypairError(
"Apparently, amazon does not support DSA keys."
"Please specify a valid RSA key.")
connection.import_key_pair(name, key_material)
except Exception as ex:
log.error(
"Could not import key `%s` with name `%s` to `%s`",
name, public_key_path, self._url)
raise KeypairError(
"could not create keypair `%s`: %s" % (name, ex))
else:
# check fingerprint
cloud_keypair = keypairs[name]
if pkey:
if "amazon" in self._ec2host:
# AWS takes the MD5 hash of the key's DER representation.
key = RSA.importKey(open(private_key_path).read())
der = key.publickey().exportKey('DER')
m = hashlib.md5()
m.update(der)
digest = m.hexdigest()
fingerprint = ':'.join(digest[i:(i + 2)]
for i in range(0, len(digest), 2))
else:
fingerprint = ':'.join(i.encode('hex')
for i in pkey.get_fingerprint())
if fingerprint != cloud_keypair.fingerprint:
if "amazon" in self._ec2host:
log.error(
"Apparently, Amazon does not compute the RSA key "
"fingerprint as we do! We cannot check if the "
"uploaded keypair is correct!")
else:
raise KeypairError(
"Keypair `%s` is present but has "
"different fingerprint. Aborting!" % name)
def _check_security_group(self, name):
"""Checks if the security group exists.
:param str name: name of the security group
:return: str - security group id of the security group
:raises: `SecurityGroupError` if group does not exist
"""
connection = self._connect()
filters = {}
if self._vpc:
filters = {'vpc-id': self._vpc_id}
security_groups = connection.get_all_security_groups(filters=filters)
matching_groups = [
group
for group
in security_groups
if name in [group.name, group.id]
]
if len(matching_groups) == 0:
raise SecurityGroupError(
"the specified security group %s does not exist" % name)
elif len(matching_groups) == 1:
return matching_groups[0].id
elif self._vpc and len(matching_groups) > 1:
raise SecurityGroupError(
"the specified security group name %s matches "
"more than one security group" % name)
def _check_subnet(self, name):
"""Checks if the subnet exists.
:param str name: name of the subnet
:return: str - subnet id of the subnet
:raises: `SubnetError` if group does not exist
"""
# Subnets only exist in VPCs, so we don't need to worry about
# the EC2 Classic case here.
subnets = self._vpc_connection.get_all_subnets(
filters={'vpcId': self._vpc_id})
matching_subnets = [
subnet
for subnet
in subnets
if name in [subnet.tags.get('Name'), subnet.id]
]
if len(matching_subnets) == 0:
raise SubnetError(
"the specified subnet %s does not exist" % name)
elif len(matching_subnets) == 1:
return matching_subnets[0].id
else:
raise SubnetError(
"the specified subnet name %s matches more than "
"one subnet" % name)
def _find_image_id(self, image_id):
"""Finds an image id to a given id or name.
:param str image_id: name or id of image
:return: str - identifier of image
"""
if not self._images:
connection = self._connect()
self._images = connection.get_all_images()
image_id_cloud = None
for i in self._images:
if i.id == image_id or i.name == image_id:
image_id_cloud = i.id
break
if image_id_cloud:
return image_id_cloud
else:
raise ImageError(
"Could not find given image id `%s`" % image_id)
def __getstate__(self):
d = self.__dict__.copy()
del d['_ec2_connection']
del d['_vpc_connection']
return d
def __setstate__(self, state):
self.__dict__ = state
self._ec2_connection = None
self._vpc_connection = None
| gpl-3.0 | 4,051,944,693,585,966,600 | 39.425432 | 115 | 0.568405 | false |
tweemeterjop/thug | thug/ActiveX/modules/AnswerWorks.py | 1 | 1041 | # Vantage Linguistics AnserWorks ActiveX Controls
# CVE-2007-6387
import logging
log = logging.getLogger("Thug")
def GetHistory(self, arg):
if len(arg) > 215:
log.ThugLogging.log_exploit_event(self._window.url,
"AnswerWorks ActiveX",
"Overflow in GetHistory",
cve = 'CVE-2007-6387')
def GetSeedQuery(self, arg):
if len(arg) > 215:
log.ThugLogging.log_exploit_event(self._window.url,
"AnswerWorks ActiveX",
"Overflow in GetSeedQuery",
cve = 'CVE-2007-6387')
def SetSeedQuery(self, arg):
    if len(arg) > 215:
        log.ThugLogging.log_exploit_event(self._window.url,
                                          "AnswerWorks ActiveX",
                                          "Overflow in SetSeedQuery",
                                          cve = 'CVE-2007-6387')
| gpl-2.0 | 8,882,725,550,945,353,000 | 33.7 | 69 | 0.444765 | false |
blekhmanlab/hominid | hominid/sort_results.py | 1 | 6152 | """
Read a rvcf file with stability selection scores for taxa.
Sort the dataframe by rsq_median.
Print results.
usage:
python sort_results.py \
../example/stability_selection_example_output.vcf \
../example/hominid_example_taxon_table_input.txt \
arcsinsqrt \
    0.5 \
    0.5 \
    10
"""
import argparse
import sys
import pandas as pd
from hominid.hominid import read_taxon_file, align_snp_and_taxa
def sort_results(rvcf_input_file_path, taxon_table_file_path, transform,
r_sqr_median_cutoff, stability_cutoff, snp_count, no_tables,
extra_columns):
print('plotting {} SNPs from {}'.format(snp_count, rvcf_input_file_path))
# read the rvcf file and sort by rsq_median
df = pd.read_csv(rvcf_input_file_path, sep='\t', dtype={'CHROM': str})
#print('df.shape: {}'.format(df.shape))
sorted_rsq_best_medians_df = df.sort_values(by='rsq_median', ascending=False)
x_df = sorted_rsq_best_medians_df[sorted_rsq_best_medians_df.rsq_median > r_sqr_median_cutoff]
print('{} SNPs with r_sqr > {:5.3f}'.format(x_df.shape[0], r_sqr_median_cutoff))
taxon_table_df = read_taxon_file(taxon_table_file_path, transform=transform)
for row_i in range(sorted_rsq_best_medians_df.shape[0]):
if row_i >= snp_count:
break
else:
# get a 1-row dataframe
snp_df = sorted_rsq_best_medians_df.iloc[[row_i]]
aligned_snp_df, aligned_taxa_df = align_snp_and_taxa(
snp_df,
taxon_table_df
)
# get the taxon stability selection scores
# use the taxon table df index to get column names for snp_df
taxon_scores_df = snp_df.loc[:, taxon_table_df.index].transpose()
sorted_taxon_scores_df = taxon_scores_df.sort_values(by=taxon_scores_df.columns[0], ascending=False)
#sorted_taxon_scores_df = taxon_scores_df.sort(taxon_scores_df.columns[0], ascending=False)
p_df_list = []
print('{} {} {:5.3f}'.format(snp_df.iloc[0].GENE, snp_df.iloc[0].ID, snp_df.iloc[0].rsq_median))
summary_line = '{}\t{}\t'.format(snp_df.iloc[0].GENE, snp_df.iloc[0].ID)
for i, (selected_taxon, selected_taxon_row) in enumerate(sorted_taxon_scores_df.iterrows()):
# use selected_taxon_row.index[0] to index the first and only column
selected_taxon_score = selected_taxon_row.iloc[0]
if selected_taxon_score < stability_cutoff:
#print('done with selected taxa')
break
else:
# trim 'Root;' from the front of the taxon name
if selected_taxon.startswith('Root;'):
taxon_name = selected_taxon[5:]
else:
taxon_name = selected_taxon
print(' {:5.3f} {}'.format(selected_taxon_score, taxon_name))
summary_line += '{}, '.format(taxon_name)
gts = [
snp_df.iloc[0].REF + snp_df.iloc[0].REF, # 0
snp_df.iloc[0].REF + snp_df.iloc[0].ALT, # 1
snp_df.iloc[0].ALT + snp_df.iloc[0].ALT # 2
]
aligned_snp_value_list = aligned_snp_df.values.flatten().tolist()
data_dict = {
'chromosome': [snp_df.iloc[0].CHROM] * aligned_snp_df.shape[1],
'snp_id': [snp_df.iloc[0].ID] * aligned_snp_df.shape[1],
'gene': [snp_df.iloc[0].GENE] * aligned_snp_df.shape[1],
'taxon': [selected_taxon] * aligned_snp_df.shape[1],
'abundance': aligned_taxa_df[selected_taxon].values.tolist(),
'variant_allele_count': [str(int(v)) for v in aligned_snp_value_list],
'genotype': [gts[int(v)] for v in aligned_snp_value_list],
'sample_id' : aligned_snp_df.columns
}
columns_to_display = ['abundance', 'variant_allele_count', 'genotype', 'sample_id']
if extra_columns:
for extra_column in extra_columns.split(','):
data_dict[extra_column] = snp_df.iloc[0][extra_column]
columns_to_display.append(extra_column)
p_df = pd.DataFrame(data_dict)
p_df_list.append(p_df)
if no_tables:
pass
else:
p_df[columns_to_display].to_csv(
sys.stdout,
sep='\t'
)
# save a stacked bar plot
if len(p_df_list) > 0:
file_name = 'stacked_bar_plot_selected_taxa_{}_{}.pdf'.format(
snp_df.iloc[0].GENE,
snp_df.iloc[0].ID
)
p_df = pd.concat(p_df_list, axis=0)
# at this point the index for p_df looks like
# 0...76.0...76.0...76
# replace the index
p_df.index = range(p_df.shape[0])
#p_df.to_csv(file_path, sep='\t')
stacked_bar_title = '{}\n{}'.format(snp_df.iloc[0].GENE, snp_df.iloc[0].ID)
def main():
argparser = argparse.ArgumentParser()
argparser.add_argument('rvcf_input_file_path')
argparser.add_argument('taxon_table_file_path')
argparser.add_argument('transform')
argparser.add_argument(
'r_sqr_median_cutoff',
type=float
)
argparser.add_argument(
'stability_cutoff',
type=float
)
argparser.add_argument(
'snp_count',
type=int
)
argparser.add_argument(
'--no-tables',
action='store_true'
)
argparser.add_argument(
'--extra-columns',
type=str
)
args = argparser.parse_args()
print(args)
sort_results(**vars(args))
if __name__ == '__main__':
main()
| mit | -8,981,641,414,200,290,000 | 41.136986 | 112 | 0.519831 | false |
pyfa-org/eos | tests/integration/container/unordered/test_type_unique_set.py | 1 | 8045 | # ==============================================================================
# Copyright (C) 2011 Diego Duclos
# Copyright (C) 2011-2018 Anton Vorobyov
#
# This file is part of Eos.
#
# Eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Eos. If not, see <http://www.gnu.org/licenses/>.
# ==============================================================================
from eos import Fit
from eos import Implant
from eos import Skill
from tests.integration.container.testcase import ContainerTestCase
class TestContainerTypeUniqueSet(ContainerTestCase):
def test_add_none(self):
fit = Fit()
# Action
with self.assertRaises(TypeError):
fit.skills.add(None)
# Verification
self.assertEqual(len(fit.skills), 0)
# Cleanup
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_log_entries(0)
def test_add_item(self):
fit = Fit()
item_type = self.mktype()
item = Skill(item_type.id)
# Action
fit.skills.add(item)
# Verification
self.assertEqual(len(fit.skills), 1)
self.assertIs(fit.skills[item_type.id], item)
self.assertIn(item, fit.skills)
self.assertIn(item_type.id, fit.skills)
# Cleanup
self.assert_item_buffers_empty(item)
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_log_entries(0)
def test_add_item_type_failure(self):
fit = Fit()
item_type = self.mktype()
item = Implant(item_type.id)
# Action
with self.assertRaises(TypeError):
fit.skills.add(item)
# Verification
self.assertEqual(len(fit.skills), 0)
self.assertNotIn(item, fit.skills)
self.assertNotIn(item_type.id, fit.skills)
fit.implants.add(item)
# Cleanup
self.assert_item_buffers_empty(item)
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_log_entries(0)
def test_add_item_value_failure_has_fit(self):
fit = Fit()
fit_other = Fit()
item_type = self.mktype()
item = Skill(item_type.id)
fit_other.skills.add(item)
# Action
with self.assertRaises(ValueError):
fit.skills.add(item)
# Verification
self.assertEqual(len(fit.skills), 0)
self.assertEqual(len(fit_other.skills), 1)
self.assertIs(fit_other.skills[item_type.id], item)
self.assertIn(item, fit_other.skills)
# Cleanup
self.assert_item_buffers_empty(item)
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_solsys_buffers_empty(fit_other.solar_system)
self.assert_log_entries(0)
def test_add_item_value_failure_existing_type_id(self):
fit = Fit()
item_type = self.mktype()
item1 = Skill(item_type.id)
item2 = Skill(item_type.id)
fit.skills.add(item1)
# Action
with self.assertRaises(ValueError):
fit.skills.add(item2)
# Verification
self.assertEqual(len(fit.skills), 1)
self.assertIs(fit.skills[item_type.id], item1)
self.assertIn(item1, fit.skills)
self.assertIn(item_type.id, fit.skills)
fit.skills.remove(item1)
fit.skills.add(item2)
# Cleanup
self.assert_item_buffers_empty(item1)
self.assert_item_buffers_empty(item2)
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_log_entries(0)
def test_remove_item(self):
fit = Fit()
item_type = self.mktype()
item = Skill(item_type.id)
fit.skills.add(item)
# Action
fit.skills.remove(item)
# Verification
self.assertEqual(len(fit.skills), 0)
self.assertNotIn(item, fit.skills)
self.assertNotIn(item_type.id, fit.skills)
# Cleanup
self.assert_item_buffers_empty(item)
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_log_entries(0)
def test_remove_item_failure(self):
fit = Fit()
item_type = self.mktype()
item = Skill(item_type.id)
# Action
with self.assertRaises(KeyError):
fit.skills.remove(item)
# Verification
self.assertEqual(len(fit.skills), 0)
self.assertNotIn(item, fit.skills)
self.assertNotIn(item_type.id, fit.skills)
fit.skills.add(item)
# Cleanup
self.assert_item_buffers_empty(item)
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_log_entries(0)
def test_delitem_item(self):
fit = Fit()
item_type = self.mktype()
item = Skill(item_type.id)
fit.skills.add(item)
# Action
del fit.skills[item_type.id]
# Verification
self.assertEqual(len(fit.skills), 0)
self.assertNotIn(item, fit.skills)
self.assertNotIn(item_type.id, fit.skills)
# Cleanup
self.assert_item_buffers_empty(item)
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_log_entries(0)
def test_delitem_item_failure(self):
fit = Fit()
item_type = self.mktype()
empty_type_id = self.allocate_type_id()
item = Skill(item_type.id)
fit.skills.add(item)
# Action
with self.assertRaises(KeyError):
del fit.skills[empty_type_id]
# Verification
self.assertEqual(len(fit.skills), 1)
self.assertIn(item, fit.skills)
self.assertIn(item_type.id, fit.skills)
# Cleanup
self.assert_item_buffers_empty(item)
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_log_entries(0)
def test_key_integrity(self):
fit = Fit()
item_type = self.mktype()
item1 = Skill(item_type.id)
item2 = Skill(item_type.id)
fit.skills.add(item1)
with self.assertRaises(KeyError):
fit.skills.remove(item2)
# Verification
self.assertIs(fit.skills[item_type.id], item1)
# Cleanup
self.assert_item_buffers_empty(item1)
self.assert_item_buffers_empty(item2)
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_log_entries(0)
def test_clear(self):
fit = Fit()
item1_type = self.mktype()
item1 = Skill(item1_type.id)
item2_type = self.mktype()
item2 = Skill(item2_type.id)
fit.skills.add(item1)
fit.skills.add(item2)
# Action
fit.skills.clear()
# Verification
self.assertEqual(len(fit.skills), 0)
self.assertNotIn(item1, fit.skills)
self.assertNotIn(item1_type.id, fit.skills)
self.assertNotIn(item2, fit.skills)
self.assertNotIn(item2_type.id, fit.skills)
# Cleanup
self.assert_item_buffers_empty(item1)
self.assert_item_buffers_empty(item2)
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_log_entries(0)
def test_bool(self):
fit = Fit()
item = Skill(self.mktype().id)
self.assertIs(bool(fit.skills), False)
fit.skills.add(item)
self.assertIs(bool(fit.skills), True)
fit.skills.remove(item)
self.assertIs(bool(fit.skills), False)
# Cleanup
self.assert_item_buffers_empty(item)
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_log_entries(0)
| lgpl-3.0 | 6,906,514,192,450,944,000 | 33.676724 | 80 | 0.606339 | false |
tejasnikumbh/Algorithms | ArraysAndSorting/MarkAndToys.py | 1 | 1514 | '''
In-place quicksort (Lomuto-style partition with the last element as pivot).
Time Complexity : Best/Average - O(N log N), Worst - O(N^2)
Space Complexity : O(N) total (the list is sorted in place)
Auxiliary Space : O(log N) on average, O(N) worst case, for the recursion stack frames
'''
def quickSort(a,start,end):
if(start >= end): return a
else:
pivot = a[end]
swapIndex = start
for i in range(start,end + 1):
if(a[i] < pivot):
#swap(a,i,swapIndex)
temp = a[i]
a[i] = a[swapIndex]
a[swapIndex] = temp
swapIndex += 1
#swap(a,end,swapIndex)
temp = a[end]
a[end] = a[swapIndex]
a[swapIndex] = temp
quickSort(a,start,swapIndex - 1)
quickSort(a,swapIndex + 1,end)
return a
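# Example (sorts in place and returns the same list object):
#   quickSort([3, 1, 2], 0, 2) -> [1, 2, 3]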
'''
Function that returns maximum toys that can be bought. Simple strategy is to
sort the prices array and add as many toys as possible by incrementally adding
up prices from the least to the most until budget is exhausted.
'''
def max_toys(prices, rupees):
#Compute and return final answer over here
answer = 0
prices = quickSort(prices,0,len(prices)-1)
totalBudget = rupees
for price in prices:
if((totalBudget - price) >= 0):
totalBudget -= price
answer += 1
else: break
return answer
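# Worked example (hypothetical prices, budget of 50):
#   max_toys([1, 12, 5, 111, 200, 1000, 10], 50) -> 4
#   (1 + 5 + 10 + 12 = 28 fits; adding the next price, 111, would exceed 50)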
'''
Main function for the program
'''
if __name__ == '__main__':
n, k = map(int, raw_input().split())
prices = map(int, raw_input().split())
print max_toys(prices, k)
| bsd-2-clause | 122,863,958,827,943,060 | 28.686275 | 82 | 0.562087 | false |
openmips/stbgui | lib/python/Components/ServiceScan.py | 1 | 9086 | from enigma import eComponentScan, iDVBFrontend, eTimer
from Components.NimManager import nimmanager as nimmgr
from Tools.Transponder import getChannelNumber
class ServiceScan:
Idle = 1
Running = 2
Done = 3
Error = 4
DonePartially = 5
Errors = {
0: _("error starting scanning"),
1: _("error while scanning"),
2: _("no resource manager"),
3: _("no channel list")
}
def scanStatusChanged(self):
if self.state == self.Running:
self.progressbar.setValue(self.scan.getProgress())
self.lcd_summary and self.lcd_summary.updateProgress(self.scan.getProgress())
if self.scan.isDone():
errcode = self.scan.getError()
if errcode == 0:
self.state = self.DonePartially
self.servicelist.listAll()
else:
self.state = self.Error
self.errorcode = errcode
self.network.setText("")
self.transponder.setText("")
else:
result = self.foundServices + self.scan.getNumServices()
percentage = self.scan.getProgress()
if percentage > 99:
percentage = 99
#TRANSLATORS: The stb is performing a channel scan, progress percentage is printed in '%d' (and '%%' will show a single '%' symbol)
message = ngettext("Scanning - %d%% completed", "Scanning - %d%% completed", percentage) % percentage
message += ", "
#TRANSLATORS: Intermediate scanning result, '%d' channel(s) have been found so far
message += ngettext("%d channel found", "%d channels found", result) % result
self.text.setText(message)
transponder = self.scan.getCurrentTransponder()
network = ""
tp_text = ""
if transponder:
tp_type = transponder.getSystem()
if tp_type == iDVBFrontend.feSatellite:
network = _("Satellite")
tp = transponder.getDVBS()
orb_pos = tp.orbital_position
try:
sat_name = str(nimmgr.getSatDescription(orb_pos))
except KeyError:
sat_name = ""
if orb_pos > 1800: # west
orb_pos = 3600 - orb_pos
h = _("W")
else:
h = _("E")
if ("%d.%d" % (orb_pos/10, orb_pos%10)) in sat_name:
network = sat_name
else:
network = ("%s %d.%d %s") % (sat_name, orb_pos / 10, orb_pos % 10, h)
tp_text = { tp.System_DVB_S : "DVB-S", tp.System_DVB_S2 : "DVB-S2" }.get(tp.system, "")
if tp_text == "DVB-S2":
tp_text = ("%s %s") % ( tp_text,
{ tp.Modulation_Auto : "Auto", tp.Modulation_QPSK : "QPSK",
tp.Modulation_8PSK : "8PSK", tp.Modulation_QAM16 : "QAM16",
tp.Modulation_16APSK : "16APSK", tp.Modulation_32APSK : "32APSK" }.get(tp.modulation, ""))
tp_text = ("%s %d%c / %d / %s") % ( tp_text, tp.frequency/1000,
{ tp.Polarisation_Horizontal : 'H', tp.Polarisation_Vertical : 'V', tp.Polarisation_CircularLeft : 'L',
tp.Polarisation_CircularRight : 'R' }.get(tp.polarisation, ' '),
tp.symbol_rate/1000,
{ tp.FEC_Auto : "AUTO", tp.FEC_1_2 : "1/2", tp.FEC_2_3 : "2/3",
tp.FEC_3_4 : "3/4", tp.FEC_5_6 : "5/6", tp.FEC_7_8 : "7/8",
tp.FEC_8_9 : "8/9", tp.FEC_3_5 : "3/5", tp.FEC_4_5 : "4/5",
tp.FEC_9_10 : "9/10", tp.FEC_None : "NONE" }.get(tp.fec, ""))
if tp.is_id > -1 and tp.system == tp.System_DVB_S2:
tp_text = ("%s IS %d") % (tp_text, tp.is_id)
elif tp_type == iDVBFrontend.feCable:
network = _("Cable")
tp = transponder.getDVBC()
tp_text = ("DVB-C/C2 %s %d MHz / SR:%d / FEC:%s") %( { tp.Modulation_Auto : "AUTO",
tp.Modulation_QAM16 : "QAM16", tp.Modulation_QAM32 : "QAM32",
tp.Modulation_QAM64 : "QAM64", tp.Modulation_QAM128 : "QAM128",
tp.Modulation_QAM256 : "QAM256" }.get(tp.modulation, ""),
tp.frequency/1000,
tp.symbol_rate/1000,
{ tp.FEC_Auto : "AUTO", tp.FEC_1_2 : "1/2", tp.FEC_2_3 : "2/3",
tp.FEC_3_4 : "3/4", tp.FEC_5_6 : "5/6", tp.FEC_7_8 : "7/8",
tp.FEC_8_9 : "8/9", tp.FEC_3_5 : "3/5", tp.FEC_4_5 : "4/5",
tp.FEC_9_10 : "9/10", tp.FEC_6_7 : "6/7", tp.FEC_None : "NONE" }.get(tp.fec_inner, ""))
elif tp_type == iDVBFrontend.feTerrestrial:
network = _("Terrestrial")
tp = transponder.getDVBT()
channel = getChannelNumber(tp.frequency, self.scanList[self.run]["feid"])
if channel:
channel = _("CH") + "%s " % channel
freqMHz = "%0.1f MHz" % (tp.frequency/1000000.)
tp_text = ("%s %s %s %s") %(
{
tp.System_DVB_T_T2 : "DVB-T/T2",
tp.System_DVB_T : "DVB-T",
tp.System_DVB_T2 : "DVB-T2"
}.get(tp.system, ""),
{
tp.Modulation_QPSK : "QPSK",
tp.Modulation_QAM16 : "QAM16", tp.Modulation_QAM64 : "QAM64",
tp.Modulation_Auto : "AUTO", tp.Modulation_QAM256 : "QAM256"
}.get(tp.modulation, ""),
"%s%s" % (channel, freqMHz.replace(".0","")),
{
tp.Bandwidth_8MHz : "Bw 8MHz", tp.Bandwidth_7MHz : "Bw 7MHz", tp.Bandwidth_6MHz : "Bw 6MHz",
tp.Bandwidth_Auto : "Bw Auto", tp.Bandwidth_5MHz : "Bw 5MHz",
tp.Bandwidth_1_712MHz : "Bw 1.712MHz", tp.Bandwidth_10MHz : "Bw 10MHz"
}.get(tp.bandwidth, ""))
elif tp_type == iDVBFrontend.feATSC:
network = _("ATSC")
tp = transponder.getATSC()
freqMHz = "%0.1f MHz" % (tp.frequency/1000000.)
tp_text = ("%s %s %s %s") % (
{
tp.System_ATSC : _("ATSC"),
tp.System_DVB_C_ANNEX_B : _("DVB-C ANNEX B")
}.get(tp.system, ""),
{
tp.Modulation_Auto : _("Auto"),
tp.Modulation_QAM16 : "QAM16",
tp.Modulation_QAM32 : "QAM32",
tp.Modulation_QAM64 : "QAM64",
tp.Modulation_QAM128 : "QAM128",
tp.Modulation_QAM256 : "QAM256",
tp.Modulation_VSB_8 : "8VSB",
tp.Modulation_VSB_16 : "16VSB"
}.get(tp.modulation, ""),
freqMHz.replace(".0",""),
{
tp.Inversion_Off : _("Off"),
tp.Inversion_On :_("On"),
tp.Inversion_Unknown : _("Auto")
}.get(tp.inversion, ""))
else:
print "unknown transponder type in scanStatusChanged"
self.network.setText(network)
self.transponder.setText(tp_text)
if self.state == self.DonePartially:
self.foundServices += self.scan.getNumServices()
self.text.setText(ngettext("Scanning completed, %d channel found", "Scanning completed, %d channels found", self.foundServices) % self.foundServices)
if self.state == self.Error:
self.text.setText(_("ERROR - failed to scan (%s)!") % (self.Errors[self.errorcode]) )
if self.state == self.DonePartially or self.state == self.Error:
self.delaytimer.start(100, True)
def __init__(self, progressbar, text, servicelist, passNumber, scanList, network, transponder, frontendInfo, lcd_summary):
self.foundServices = 0
self.progressbar = progressbar
self.text = text
self.servicelist = servicelist
self.passNumber = passNumber
self.scanList = scanList
self.frontendInfo = frontendInfo
self.transponder = transponder
self.network = network
self.run = 0
self.lcd_summary = lcd_summary
self.scan = None
self.delaytimer = eTimer()
self.delaytimer.callback.append(self.execEnd)
def doRun(self):
self.scan = eComponentScan()
self.frontendInfo.frontend_source = lambda : self.scan.getFrontend()
self.feid = self.scanList[self.run]["feid"]
self.flags = self.scanList[self.run]["flags"]
self.networkid = 0
if "networkid" in self.scanList[self.run]:
self.networkid = self.scanList[self.run]["networkid"]
self.state = self.Idle
self.scanStatusChanged()
for x in self.scanList[self.run]["transponders"]:
self.scan.addInitial(x)
def updatePass(self):
size = len(self.scanList)
if size > 1:
txt = "%s %s/%s (%s)" % (_("pass"), self.run + 1, size, nimmgr.getNim(self.scanList[self.run]["feid"]).slot_name)
self.passNumber.setText(txt)
def execBegin(self):
self.doRun()
self.updatePass()
self.scan.statusChanged.get().append(self.scanStatusChanged)
self.scan.newService.get().append(self.newService)
self.servicelist.clear()
self.state = self.Running
err = self.scan.start(self.feid, self.flags, self.networkid)
self.frontendInfo.updateFrontendData()
if err:
self.state = self.Error
self.errorcode = 0
self.scanStatusChanged()
def execEnd(self):
if self.scan is None:
if not self.isDone():
print "*** warning *** scan was not finished!"
return
self.scan.statusChanged.get().remove(self.scanStatusChanged)
self.scan.newService.get().remove(self.newService)
self.scan = None
if self.run != len(self.scanList) - 1:
self.run += 1
self.execBegin()
else:
self.state = self.Done
def isDone(self):
return self.state == self.Done or self.state == self.Error
def newService(self):
newServiceName = self.scan.getLastServiceName()
newServiceRef = self.scan.getLastServiceRef()
self.servicelist.addItem((newServiceName, newServiceRef))
self.lcd_summary and self.lcd_summary.updateService(newServiceName)
def destroy(self):
self.state = self.Idle
if self.scan is not None:
self.scan.statusChanged.get().remove(self.scanStatusChanged)
self.scan.newService.get().remove(self.newService)
self.scan = None
| gpl-2.0 | 5,148,115,482,864,712,000 | 37.016736 | 152 | 0.619194 | false |
Forage/Gramps | po/update_po.py | 1 | 21490 | #! /usr/bin/env python
#
# update_po - a gramps tool to update translations
#
# Copyright (C) 2006-2006 Kees Bakker
# Copyright (C) 2006 Brian Matherly
# Copyright (C) 2008 Stephen George
# Copyright (C) 2012
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
update_po.py for Gramps translations.
Examples:
python update_po.py -t
Tests if 'gettext' and 'python' are well configured.
python update_po.py -h
Calls help and command line interface.
python update_po.py -p
Generates a new template/catalog (gramps.pot).
python update_po.py -m de.po
Merges 'de.po' file with 'gramps.pot'.
python update_po.py -k de.po
Checks 'de.po' file, tests to compile and generates a textual resume.
"""
from __future__ import print_function
import os
import sys
from argparse import ArgumentParser
# Windows OS
if sys.platform == 'win32':
# GetText Win 32 obtained from http://gnuwin32.sourceforge.net/packages/gettext.htm
# ....\gettext\bin\msgmerge.exe needs to be on the path
    msgmergeCmd = os.path.join('C:', 'Program Files (x86)', 'gettext', 'bin', 'msgmerge.exe')
    msgfmtCmd = os.path.join('C:', 'Program Files (x86)', 'gettext', 'bin', 'msgfmt.exe')
    msgattribCmd = os.path.join('C:', 'Program Files (x86)', 'gettext', 'bin', 'msgattrib.exe')
    xgettextCmd = os.path.join('C:', 'Program Files (x86)', 'gettext', 'bin', 'xgettext.exe')
pythonCmd = os.path.join(sys.prefix, 'bin', 'python.exe')
# Others OS
elif sys.platform in ['linux2', 'darwin', 'cygwin']:
msgmergeCmd = 'msgmerge'
msgfmtCmd = 'msgfmt'
msgattribCmd = 'msgattrib'
xgettextCmd = 'xgettext'
pythonCmd = os.path.join(sys.prefix, 'bin', 'python')
else:
print("Found platform %s, OS %s" % (sys.platform, os.name))
print ("Update PO ERROR: unknown system, don't know msgmerge, ... commands")
sys.exit(0)
# List of available languages, useful for grouped actions
# need files with po extension
LANG = [file for file in os.listdir('.') if file.endswith('.po')]
# add a special 'all' argument (for 'check' and 'merge' arguments)
LANG.append("all")
# visual polish on the languages list
LANG.sort()
def tests():
"""
Testing installed programs.
We made tests (-t flag) by displaying versions of tools if properly
installed. Cannot run all commands without 'gettext' and 'python'.
"""
try:
print ("\n====='msgmerge'=(merge our translation)================\n")
os.system('''%(program)s -V''' % {'program': msgmergeCmd})
except:
print ('Please, install %(program)s for updating your translation'
% {'program': msgmergeCmd})
try:
print ("\n==='msgfmt'=(format our translation for installation)==\n")
os.system('''%(program)s -V''' % {'program': msgfmtCmd})
except:
print ('Please, install %(program)s for checking your translation'
% {'program': msgfmtCmd})
try:
print ("\n===='msgattrib'==(list groups of messages)=============\n")
os.system('''%(program)s -V''' % {'program': msgattribCmd})
except:
print ('Please, install %(program)s for listing groups of messages'
% {'program': msgattribCmd})
try:
print("\n===='xgettext' =(generate a new template)==============\n")
os.system('''%(program)s -V''' % {'program': xgettextCmd})
except:
print ('Please, install %(program)s for generating a new template'
% {'program': xgettextCmd})
try:
print("\n=================='python'=============================\n")
os.system('''%(program)s -V''' % {'program': pythonCmd})
except:
print ('Please, install python')
def TipsParse(filename, mark):
"""
Experimental alternative to 'intltool-extract' for 'tips.xml'.
"""
from xml.etree import ElementTree
tree = ElementTree.parse(filename)
root = tree.getroot()
'''
<?xml version="1.0" encoding="UTF-8"?>
<tips>
<_tip number="1">
<b>Working with Dates</b>
<br/>
A range of dates can be given by using the format "between
January 4, 2000 and March 20, 2003". You can also indicate
the level of confidence in a date and even choose between seven
different calendars. Try the button next to the date field in the
Events Editor.
</_tip>
char *s = N_("<b>Working with Dates</b><br/>A range of dates can be
given by using the format "between January 4, 2000 and March 20,
2003". You can also indicate the level of confidence in a date
and even choose between seven different calendars. Try the button
next to the date field in the Events Editor.");
gramps.pot:
msgid ""
"<b>Working with Dates</b><br/>A range of dates can be given by using the "
"format "between January 4, 2000 and March 20, 2003". You can also "
"indicate the level of confidence in a date and even choose between seven "
"different calendars. Try the button next to the date field in the Events "
"Editor."
'''
tips = open('../data/tips.xml.in.h', 'w')
marklist = root.iter(mark)
for key in marklist:
tip = ElementTree.tostring(key, encoding="UTF-8")
tip = tip.replace("<?xml version='1.0' encoding='UTF-8'?>", "")
tip = tip.replace('\n<_tip number="%(number)s">' % key.attrib, "")
tip = tip.replace("<br />", "<br/>")
#tip = tip.replace("\n</_tip>\n", "</_tip>\n") # special case tip 7
#tip = tip.replace("\n<b>", "<b>") # special case tip 18
tip = tip.replace("</_tip>\n\n", "")
tip = tip.replace('"', '"')
tips.write('char *s = N_("%s");\n' % tip)
tips.close()
print ('Wrote ../data/tips.xml.in.h')
root.clear()
def HolidaysParse(filename, mark):
"""
Experimental alternative to 'intltool-extract' for 'holidays.xml'.
"""
from xml.etree import ElementTree
tree = ElementTree.parse(filename)
root = tree.getroot()
ellist = root.iter()
'''
<?xml version="1.0" encoding="utf-8"?>
calendar>
<country _name="Bulgaria">
..
<country _name="Jewish Holidays">
<date _name="Yom Kippur" value="> passover(y)" offset="172"/>
char *s = N_("Bulgaria");
char *s = N_("Jewish Holidays");
char *s = N_("Yom Kippur");
gramps.pot:
msgid "Bulgaria"
msgid "Jewish Holidays"
msgid "Yom Kippur"
'''
holidays = open('../gramps/plugins/lib/holidays.xml.in.h', 'w')
for key in ellist:
if key.attrib.get(mark):
line = key.attrib
# mapping via the line dict (_name is the key)
name = 'char *s = N_("%(_name)s");\n' % line
holidays.write(name)
holidays.close()
print ('Wrote ../gramps/plugins/lib/holidays.xml.in.h')
root.clear()
def XmlParse(filename, mark):
"""
Experimental alternative to 'intltool-extract' for 'gramps.xml'.
"""
from xml.etree import ElementTree
tree = ElementTree.parse(filename)
root = tree.getroot()
'''
<?xml version="1.0" encoding="UTF-8"?>
<mime-info xmlns="http://www.freedesktop.org/standards/shared-mime-info">
<mime-type type="application/x-gramps">
<_comment>Gramps database</_comment>
<glob pattern="*.grdb"/>
</mime-type>
<mime-type type="application/x-gedcom">
<_comment>GEDCOM</_comment>
<glob pattern="*.ged"/>
<glob pattern="*.gedcom"/>
<glob pattern="*.GED"/>
<glob pattern="*.GEDCOM"/>
msgid "Gramps database"
msgid "GEDCOM"
'''
mime = open('../data/gramps.xml.in.h', 'w')
for key in root.iter():
if key.tag == '{http://www.freedesktop.org/standards/shared-mime-info}%s' % mark:
comment = 'char *s = N_("%s");\n' % key.text
mime.write(comment)
mime.close()
print ('Wrote ../data/gramps.xml.in.h')
root.clear()
def DesktopParse(filename):
"""
Experimental alternative to 'intltool-extract' for 'gramps.desktop'.
"""
'''
[Desktop Entry]
_Name=Gramps
_GenericName=Genealogy System
_X-GNOME-FullName=Gramps Genealogy System
_Comment=Manage genealogical information,
perform genealogical research and analysis
msgid "Gramps"
msgid "Genealogy System"
msgid "Gramps Genealogy System"
msgid ""
"Manage genealogical information,
perform genealogical research and analysis"
'''
desktop = open('../data/gramps.desktop.in.h', 'w')
f = open(filename)
lines = [file.strip() for file in f]
f.close()
for line in lines:
        if line and line[0] == '_':
for i in range(len(line)):
if line[i] == '=':
val = 'char *s = N_("%s");\n' % line[i+1:len(line)]
desktop.write(val)
desktop.close()
print ('Wrote ../data/gramps.desktop.in.h')
def KeyParse(filename, mark):
"""
Experimental alternative to 'intltool-extract' for 'gramps.keys'.
"""
'''
application/x-gramps-xml:
_description=Gramps XML database
default_action_type=application
short_list_application_ids=gramps
short_list_application_ids_for_novice_user_level=gramps
short_list_application_ids_for_intermediate_user_level=gramps
short_list_application_ids_for_advanced_user_level=gramps
category=Documents/Genealogy
icon-filename=/usr/share/gramps/gramps.png
open=gramps %f
application/x-gedcom:
_description=GEDCOM
default_action_type=application
msgid "Gramps XML database"
msgid "GEDCOM"
'''
key = open('../data/gramps.keys.in.h', 'w')
f = open(filename)
lines = [file for file in f]
f.close()
temp = []
for line in lines:
for i in range(len(line)):
if line[i:i+12] == mark:
temp.append(line.strip())
for t in temp:
for i in range(len(t)):
if t[i] == '=':
val = 'char *s = N_("%s");\n' % t[i+1:len(t)]
key.write(val)
key.close()
print ('Wrote ../data/gramps.keys.in.h')
def main():
"""
The utility for handling translation stuff.
What is need by Gramps, nothing more.
"""
parser = ArgumentParser(
description='This program generates a new template and '
'also provides some common features.',
)
parser.add_argument("-t", "--test",
action="store_true", dest="test", default=True,
help="test if 'python' and 'gettext' are properly installed")
parser.add_argument("-x", "--xml",
action="store_true", dest="xml", default=False,
help="extract messages from xml based file formats")
parser.add_argument("-g", "--glade",
action="store_true", dest="glade", default=False,
help="extract messages from glade file format only")
parser.add_argument("-c", "--clean",
action="store_true", dest="clean", default=False,
help="remove created files")
parser.add_argument("-p", "--pot",
action="store_true", dest="catalog", default=False,
help="create a new catalog")
update = parser.add_argument_group('Update', 'Maintenance around translations')
# need at least one argument (sv.po, de.po, etc ...)
# lang.po files maintenance
update.add_argument("-m", dest="merge",
choices=LANG,
help="merge lang.po files with last catalog")
update.add_argument("-k", dest="check",
choices=LANG,
help="check lang.po files")
# testing stage
trans = parser.add_argument_group('Translation', 'Display content of translations file')
# need one argument (eg, de.po)
trans.add_argument("-u", dest="untranslated",
choices=[file for file in os.listdir('.') if file.endswith('.po')],
help="list untranslated messages")
trans.add_argument("-f", dest="fuzzy",
choices=[file for file in os.listdir('.') if file.endswith('.po')],
help="list fuzzy messages")
args = parser.parse_args()
namespace, extra = parser.parse_known_args()
if args.test:
tests()
if args.xml:
extract_xml()
if args.glade:
create_filesfile()
extract_glade()
if os.path.isfile('tmpfiles'):
os.unlink('tmpfiles')
if args.catalog:
retrieve()
if args.clean:
clean()
if args.merge:
#retrieve() windows os?
if sys.argv[2:] == ['all']:
sys.argv[2:] = LANG
merge(sys.argv[2:])
if args.check:
#retrieve() windows os?
if sys.argv[2:] == ['all']:
sys.argv[2:] = LANG
check(sys.argv[2:])
if args.untranslated:
untranslated(sys.argv[2:])
if args.fuzzy:
fuzzy(sys.argv[2:])
def create_filesfile():
"""
Create a file with all files that we should translate.
These are all python files not in POTFILES.skip added with those in
POTFILES.in
"""
dir = os.getcwd()
topdir = os.path.normpath(os.path.join(dir, '..', 'gramps'))
lentopdir = len(topdir)
f = open('POTFILES.in')
infiles = dict(['../' + file.strip(), None] for file in f if file.strip()
and not file[0]=='#')
f.close()
f = open('POTFILES.skip')
notinfiles = dict(['../' + file.strip(), None] for file in f if file
and not file[0]=='#')
f.close()
for (dirpath, dirnames, filenames) in os.walk(topdir):
root, subdir = os.path.split(dirpath)
if subdir.startswith("."):
#don't continue in this dir
dirnames[:] = []
continue
for dirname in dirnames:
# Skip hidden and system directories:
if dirname.startswith(".") or dirname in ["po", "locale"]:
dirnames.remove(dirname)
#add the files which are python or glade files
# if the directory does not exist or is a link, do nothing
if not os.path.isdir(dirpath) or os.path.islink(dirpath):
continue
for filename in os.listdir(dirpath):
name = os.path.split(filename)[1]
if name.endswith('.py') or name.endswith('.glade'):
full_filename = os.path.join(dirpath, filename)
#Skip the file if in POTFILES.skip
                if '../gramps' + full_filename[lentopdir:] not in notinfiles:
infiles['../gramps' + full_filename[lentopdir:]] = None
#now we write out all the files in form ../gramps/filename
f = open('tmpfiles', 'w')
for file in sorted(infiles.keys()):
f.write(file)
f.write('\n')
f.close()
def listing(name, extensionlist):
"""
List files according to extensions.
Parsing from a textual file (gramps) is faster and easy for maintenance.
Like POTFILES.in and POTFILES.skip
"""
f = open('tmpfiles')
files = [file.strip() for file in f if file and not file[0]=='#']
f.close()
temp = open(name, 'w')
for entry in files:
for ext in extensionlist:
if entry.endswith(ext):
temp.write(entry)
temp.write('\n')
break
temp.close()
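# For instance, retrieve() below calls listing('python.txt', ['.py', '.py.in'])
# to collect the Python sources recorded in 'tmpfiles'.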
def headers():
"""
Look at existing C file format headers.
Generated by 'intltool-extract' but want to get rid of this
dependency (perl, just a set of tools).
"""
headers = []
# in.h; extract_xml
if os.path.isfile('''../data/tips.xml.in.h'''):
headers.append('''../data/tips.xml.in.h''')
if os.path.isfile('''../gramps/plugins/lib/holidays.xml.in.h'''):
headers.append('''../gramps/plugins/lib/holidays.xml.in.h''')
if os.path.isfile('''../data/gramps.xml.in.h'''):
headers.append('''../data/gramps.xml.in.h''')
if os.path.isfile('''../data/gramps.desktop.in.h'''):
headers.append('''../data/gramps.desktop.in.h''')
if os.path.isfile('''../data/gramps.keys.in.h'''):
headers.append('''../data/gramps.keys.in.h''')
return headers
def extract_xml():
"""
Extract translation strings from XML based, keys, mime and desktop
files. Own XML files parsing and custom translation marks.
"""
HolidaysParse('../gramps/plugins/lib/holidays.xml.in', '_name')
TipsParse('../data/tips.xml.in', '_tip')
XmlParse('../data/gramps.xml.in', '_comment')
DesktopParse('../data/gramps.desktop.in')
KeyParse('../data/gramps.keys.in', '_description')
def create_template():
"""
Create a new file for template, if it does not exist.
"""
template = open('gramps.pot', 'w')
template.close()
def extract_glade():
"""
Extract messages from a temp file with all .glade
"""
if not os.path.isfile('gramps.pot'):
create_template()
listing('glade.txt', ['.glade'])
os.system('''%(xgettext)s --add-comments -j -L Glade '''
'''--from-code=UTF-8 -o gramps.pot --files-from=glade.txt'''
% {'xgettext': xgettextCmd}
)
def retrieve():
"""
Extract messages from all files used by Gramps (python, glade, xml)
"""
extract_xml()
if not os.path.isfile('gramps.pot'):
create_template()
create_filesfile()
listing('python.txt', ['.py', '.py.in'])
os.system('''%(xgettext)s -j --directory=./ -d gramps '''
'''-L Python -o gramps.pot --files-from=python.txt '''
'''--keyword=_ --keyword=ngettext '''
'''--keyword=sgettext --from-code=UTF-8''' % {'xgettext': xgettextCmd}
)
extract_glade()
# C format header (.h extension)
for h in headers():
print ('xgettext for %s' % h)
os.system('''%(xgettext)s --add-comments -j -o gramps.pot '''
'''--keyword=N_ --from-code=UTF-8 %(head)s'''
% {'xgettext': xgettextCmd, 'head': h}
)
clean()
def clean():
"""
Remove created files (C format headers, temp listings)
"""
for h in headers():
if os.path.isfile(h):
os.unlink(h)
print ('Remove %(head)s' % {'head': h})
if os.path.isfile('python.txt'):
os.unlink('python.txt')
print ("Remove 'python.txt'")
if os.path.isfile('glade.txt'):
os.unlink('glade.txt')
print ("Remove 'glade.txt'")
if os.path.isfile('tmpfiles'):
os.unlink('tmpfiles')
print ("Remove 'tmpfiles'")
def merge(args):
"""
Merge messages with 'gramps.pot'
"""
for arg in args:
if arg == 'all':
continue
print ('Merge %(lang)s with current template' % {'lang': arg})
os.system('''%(msgmerge)s --no-wrap %(lang)s gramps.pot -o updated_%(lang)s''' \
% {'msgmerge': msgmergeCmd, 'lang': arg})
print ("Updated file: 'updated_%(lang)s'." % {'lang': arg})
def check(args):
"""
Check the translation file
"""
for arg in args:
if arg == 'all':
continue
print ("Checked file: '%(lang.po)s'. See '%(txt)s.txt'." \
% {'lang.po': arg, 'txt': arg[:-3]})
os.system('''%(python)s ./check_po -s %(lang.po)s > %(lang)s.txt''' \
% {'python': pythonCmd, 'lang.po': arg, 'lang': arg[:-3]})
os.system('''%(msgfmt)s -c -v %(lang.po)s'''
% {'msgfmt': msgfmtCmd, 'lang.po': arg})
def untranslated(arg):
"""
List untranslated messages
"""
os.system('''%(msgattrib)s --untranslated %(lang.po)s''' % {'msgattrib': msgattribCmd, 'lang.po': arg[0]})
def fuzzy(arg):
"""
List fuzzy messages
"""
os.system('''%(msgattrib)s --only-fuzzy --no-obsolete %(lang.po)s''' % {'msgattrib': msgattribCmd, 'lang.po': arg[0]})
if __name__ == "__main__":
main()
| gpl-2.0 | 4,585,889,664,805,972,000 | 31.511346 | 122 | 0.55826 | false |
donbixler/xhtml2pdf | xhtml2pdf/parser.py | 1 | 24988 | # -*- coding: utf-8 -*-
# Copyright 2010 Dirk Holtwick, holtwick.it
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from html5lib import treebuilders, inputstream
from xhtml2pdf.default import TAGS, STRING, INT, BOOL, SIZE, COLOR, FILE
from xhtml2pdf.default import BOX, POS, MUST, FONT
from xhtml2pdf.util import getSize, getBool, toList, getColor, getAlign
from xhtml2pdf.util import getBox, getPos, pisaTempFile
from reportlab.platypus.doctemplate import NextPageTemplate, FrameBreak
from reportlab.platypus.flowables import PageBreak, KeepInFrame
from xhtml2pdf.xhtml2pdf_reportlab import PmlRightPageBreak, PmlLeftPageBreak
from xhtml2pdf.tags import * # TODO: Kill wild import!
from xhtml2pdf.tables import * # TODO: Kill wild import!
from xhtml2pdf.util import * # TODO: Kill wild import!
from xml.dom import Node
import copy
import html5lib
import logging
import re
import types
import xhtml2pdf.w3c.cssDOMElementInterface as cssDOMElementInterface
import xml.dom.minidom
CSSAttrCache = {}
log = logging.getLogger("xhtml2pdf")
rxhttpstrip = re.compile("https?://[^/]+(.*)", re.M | re.I)
class AttrContainer(dict):
def __getattr__(self, name):
try:
return dict.__getattr__(self, name)
except:
return self[name]
def pisaGetAttributes(c, tag, attributes):
global TAGS
attrs = {}
if attributes:
for k, v in attributes.items():
try:
attrs[str(k)] = str(v) # XXX no Unicode! Reportlab fails with template names
except:
attrs[k] = v
nattrs = {}
if tag in TAGS:
block, adef = TAGS[tag]
adef["id"] = STRING
# print block, adef
for k, v in adef.iteritems():
nattrs[k] = None
# print k, v
# defaults, if present
if type(v) == types.TupleType:
if v[1] == MUST:
if k not in attrs:
log.warn(c.warning("Attribute '%s' must be set!", k))
nattrs[k] = None
continue
nv = attrs.get(k, v[1])
dfl = v[1]
v = v[0]
else:
nv = attrs.get(k, None)
dfl = None
if nv is not None:
if type(v) == types.ListType:
nv = nv.strip().lower()
if nv not in v:
#~ raise PML_EXCEPTION, "attribute '%s' of wrong value, allowed is one of: %s" % (k, repr(v))
log.warn(c.warning("Attribute '%s' of wrong value, allowed is one of: %s", k, repr(v)))
nv = dfl
elif v == BOOL:
nv = nv.strip().lower()
nv = nv in ("1", "y", "yes", "true", str(k))
elif v == SIZE:
try:
nv = getSize(nv)
except:
log.warn(c.warning("Attribute '%s' expects a size value", k))
elif v == BOX:
nv = getBox(nv, c.pageSize)
elif v == POS:
nv = getPos(nv, c.pageSize)
elif v == INT:
nv = int(nv)
elif v == COLOR:
nv = getColor(nv)
elif v == FILE:
nv = c.getFile(nv)
elif v == FONT:
nv = c.getFontName(nv)
nattrs[k] = nv
return AttrContainer(nattrs)
attrNames = '''
color
font-family
font-size
font-weight
font-style
text-decoration
line-height
letter-spacing
background-color
display
margin-left
margin-right
margin-top
margin-bottom
padding-left
padding-right
padding-top
padding-bottom
border-top-color
border-top-style
border-top-width
border-bottom-color
border-bottom-style
border-bottom-width
border-left-color
border-left-style
border-left-width
border-right-color
border-right-style
border-right-width
text-align
vertical-align
width
height
zoom
page-break-after
page-break-before
list-style-type
list-style-image
white-space
text-indent
-pdf-page-break
-pdf-frame-break
-pdf-next-page
-pdf-keep-with-next
-pdf-outline
-pdf-outline-level
-pdf-outline-open
-pdf-line-spacing
-pdf-keep-in-frame-mode
-pdf-word-wrap
'''.strip().split()
def getCSSAttr(self, cssCascade, attrName, default=NotImplemented):
if attrName in self.cssAttrs:
return self.cssAttrs[attrName]
try:
result = cssCascade.findStyleFor(self.cssElement, attrName, default)
except LookupError:
result = None
# XXX Workaround for inline styles
try:
style = self.cssStyle
except:
style = self.cssStyle = cssCascade.parser.parseInline(self.cssElement.getStyleAttr() or '')[0]
if attrName in style:
result = style[attrName]
if result == 'inherit':
if hasattr(self.parentNode, 'getCSSAttr'):
result = self.parentNode.getCSSAttr(cssCascade, attrName, default)
elif default is not NotImplemented:
return default
raise LookupError("Could not find inherited CSS attribute value for '%s'" % (attrName,))
if result is not None:
self.cssAttrs[attrName] = result
return result
#TODO: Monkeypatching standard lib should go away.
xml.dom.minidom.Element.getCSSAttr = getCSSAttr
# Create an aliasing system. Many sources use non-standard tags, because browsers allow
# them to. This allows us to map a nonstandard name to the standard one.
nonStandardAttrNames = {
'bgcolor': 'background-color',
}
def mapNonStandardAttrs(c, n, attrList):
for attr in nonStandardAttrNames:
if attr in attrList and nonStandardAttrNames[attr] not in c:
c[nonStandardAttrNames[attr]] = attrList[attr]
return c
def getCSSAttrCacheKey(node):
_cl = _id = _st = ''
for k, v in node.attributes.items():
if k == 'class':
_cl = v
elif k == 'id':
_id = v
elif k == 'style':
_st = v
return "%s#%s#%s#%s#%s" % (id(node.parentNode), node.tagName.lower(), _cl, _id, _st)
def CSSCollect(node, c):
#node.cssAttrs = {}
#return node.cssAttrs
if c.css:
_key = getCSSAttrCacheKey(node)
if hasattr(node.parentNode, "tagName"):
if node.parentNode.tagName.lower() != "html":
CachedCSSAttr = CSSAttrCache.get(_key, None)
if CachedCSSAttr is not None:
node.cssAttrs = CachedCSSAttr
return CachedCSSAttr
node.cssElement = cssDOMElementInterface.CSSDOMElementInterface(node)
node.cssAttrs = {}
# node.cssElement.onCSSParserVisit(c.cssCascade.parser)
cssAttrMap = {}
for cssAttrName in attrNames:
try:
cssAttrMap[cssAttrName] = node.getCSSAttr(c.cssCascade, cssAttrName)
#except LookupError:
# pass
except Exception: # TODO: Kill this catch-all!
log.debug("CSS error '%s'", cssAttrName, exc_info=1)
CSSAttrCache[_key] = node.cssAttrs
return node.cssAttrs
def CSS2Frag(c, kw, isBlock):
# COLORS
if "color" in c.cssAttr:
c.frag.textColor = getColor(c.cssAttr["color"])
if "background-color" in c.cssAttr:
c.frag.backColor = getColor(c.cssAttr["background-color"])
# FONT SIZE, STYLE, WEIGHT
if "font-family" in c.cssAttr:
c.frag.fontName = c.getFontName(c.cssAttr["font-family"])
if "font-size" in c.cssAttr:
# XXX inherit
c.frag.fontSize = max(getSize("".join(c.cssAttr["font-size"]), c.frag.fontSize, c.baseFontSize), 1.0)
if "line-height" in c.cssAttr:
leading = "".join(c.cssAttr["line-height"])
c.frag.leading = getSize(leading, c.frag.fontSize)
c.frag.leadingSource = leading
else:
c.frag.leading = getSize(c.frag.leadingSource, c.frag.fontSize)
if "letter-spacing" in c.cssAttr:
c.frag.letterSpacing = c.cssAttr["letter-spacing"]
if "-pdf-line-spacing" in c.cssAttr:
c.frag.leadingSpace = getSize("".join(c.cssAttr["-pdf-line-spacing"]))
# print "line-spacing", c.cssAttr["-pdf-line-spacing"], c.frag.leading
if "font-weight" in c.cssAttr:
value = c.cssAttr["font-weight"].lower()
if value in ("bold", "bolder", "500", "600", "700", "800", "900"):
c.frag.bold = 1
else:
c.frag.bold = 0
for value in toList(c.cssAttr.get("text-decoration", "")):
if "underline" in value:
c.frag.underline = 1
if "line-through" in value:
c.frag.strike = 1
if "none" in value:
c.frag.underline = 0
c.frag.strike = 0
if "font-style" in c.cssAttr:
value = c.cssAttr["font-style"].lower()
if value in ("italic", "oblique"):
c.frag.italic = 1
else:
c.frag.italic = 0
if "white-space" in c.cssAttr:
# normal | pre | nowrap
c.frag.whiteSpace = str(c.cssAttr["white-space"]).lower()
# ALIGN & VALIGN
if "text-align" in c.cssAttr:
c.frag.alignment = getAlign(c.cssAttr["text-align"])
if "vertical-align" in c.cssAttr:
c.frag.vAlign = c.cssAttr["vertical-align"]
# HEIGHT & WIDTH
if "height" in c.cssAttr:
c.frag.height = "".join(toList(c.cssAttr["height"])) # XXX Relative is not correct!
if c.frag.height in ("auto",):
c.frag.height = None
if "width" in c.cssAttr:
c.frag.width = "".join(toList(c.cssAttr["width"])) # XXX Relative is not correct!
if c.frag.width in ("auto",):
c.frag.width = None
# ZOOM
if "zoom" in c.cssAttr:
zoom = "".join(toList(c.cssAttr["zoom"])) # XXX Relative is not correct!
if zoom.endswith("%"):
zoom = float(zoom[: - 1]) / 100.0
c.frag.zoom = float(zoom)
# MARGINS & LIST INDENT, STYLE
if isBlock:
if "margin-top" in c.cssAttr:
c.frag.spaceBefore = getSize(c.cssAttr["margin-top"], c.frag.fontSize)
if "margin-bottom" in c.cssAttr:
c.frag.spaceAfter = getSize(c.cssAttr["margin-bottom"], c.frag.fontSize)
if "margin-left" in c.cssAttr:
c.frag.bulletIndent = kw["margin-left"] # For lists
kw["margin-left"] += getSize(c.cssAttr["margin-left"], c.frag.fontSize)
c.frag.leftIndent = kw["margin-left"]
if "margin-right" in c.cssAttr:
kw["margin-right"] += getSize(c.cssAttr["margin-right"], c.frag.fontSize)
c.frag.rightIndent = kw["margin-right"]
if "text-indent" in c.cssAttr:
c.frag.firstLineIndent = getSize(c.cssAttr["text-indent"], c.frag.fontSize)
if "list-style-type" in c.cssAttr:
c.frag.listStyleType = str(c.cssAttr["list-style-type"]).lower()
if "list-style-image" in c.cssAttr:
c.frag.listStyleImage = c.getFile(c.cssAttr["list-style-image"])
# PADDINGS
if isBlock:
if "padding-top" in c.cssAttr:
c.frag.paddingTop = getSize(c.cssAttr["padding-top"], c.frag.fontSize)
if "padding-bottom" in c.cssAttr:
c.frag.paddingBottom = getSize(c.cssAttr["padding-bottom"], c.frag.fontSize)
if "padding-left" in c.cssAttr:
c.frag.paddingLeft = getSize(c.cssAttr["padding-left"], c.frag.fontSize)
if "padding-right" in c.cssAttr:
c.frag.paddingRight = getSize(c.cssAttr["padding-right"], c.frag.fontSize)
# BORDERS
if isBlock:
if "border-top-width" in c.cssAttr:
c.frag.borderTopWidth = getSize(c.cssAttr["border-top-width"], c.frag.fontSize)
if "border-bottom-width" in c.cssAttr:
c.frag.borderBottomWidth = getSize(c.cssAttr["border-bottom-width"], c.frag.fontSize)
if "border-left-width" in c.cssAttr:
c.frag.borderLeftWidth = getSize(c.cssAttr["border-left-width"], c.frag.fontSize)
if "border-right-width" in c.cssAttr:
c.frag.borderRightWidth = getSize(c.cssAttr["border-right-width"], c.frag.fontSize)
if "border-top-style" in c.cssAttr:
c.frag.borderTopStyle = c.cssAttr["border-top-style"]
if "border-bottom-style" in c.cssAttr:
c.frag.borderBottomStyle = c.cssAttr["border-bottom-style"]
if "border-left-style" in c.cssAttr:
c.frag.borderLeftStyle = c.cssAttr["border-left-style"]
if "border-right-style" in c.cssAttr:
c.frag.borderRightStyle = c.cssAttr["border-right-style"]
if "border-top-color" in c.cssAttr:
c.frag.borderTopColor = getColor(c.cssAttr["border-top-color"])
if "border-bottom-color" in c.cssAttr:
c.frag.borderBottomColor = getColor(c.cssAttr["border-bottom-color"])
if "border-left-color" in c.cssAttr:
c.frag.borderLeftColor = getColor(c.cssAttr["border-left-color"])
if "border-right-color" in c.cssAttr:
c.frag.borderRightColor = getColor(c.cssAttr["border-right-color"])
def pisaPreLoop(node, context, collect=False):
"""
Collect all CSS definitions
"""
data = u""
if node.nodeType == Node.TEXT_NODE and collect:
data = node.data
elif node.nodeType == Node.ELEMENT_NODE:
name = node.tagName.lower()
if name in ("style", "link"):
attr = pisaGetAttributes(context, name, node.attributes)
media = [x.strip() for x in attr.media.lower().split(",") if x.strip()]
if attr.get("type", "").lower() in ("", "text/css") and \
(not media or "all" in media or "print" in media or "pdf" in media):
if name == "style":
for node in node.childNodes:
data += pisaPreLoop(node, context, collect=True)
context.addCSS(data)
return u""
if name == "link" and attr.href and attr.rel.lower() == "stylesheet":
# print "CSS LINK", attr
context.addCSS('\n@import "%s" %s;' % (attr.href, ",".join(media)))
for node in node.childNodes:
result = pisaPreLoop(node, context, collect=collect)
if collect:
data += result
return data
def pisaLoop(node, context, path=None, **kw):
if path is None:
path = []
# Initialize KW
if not kw:
kw = {
"margin-top": 0,
"margin-bottom": 0,
"margin-left": 0,
"margin-right": 0,
}
else:
kw = copy.copy(kw)
#indent = len(path) * " " # only used for debug print statements
# TEXT
if node.nodeType == Node.TEXT_NODE:
# print indent, "#", repr(node.data) #, context.frag
context.addFrag(node.data)
# context.text.append(node.value)
# ELEMENT
elif node.nodeType == Node.ELEMENT_NODE:
node.tagName = node.tagName.replace(":", "").lower()
if node.tagName in ("style", "script"):
return
path = copy.copy(path) + [node.tagName]
# Prepare attributes
attr = pisaGetAttributes(context, node.tagName, node.attributes)
#log.debug(indent + "<%s %s>" % (node.tagName, attr) + repr(node.attributes.items())) #, path
# Calculate styles
context.cssAttr = CSSCollect(node, context)
context.cssAttr = mapNonStandardAttrs(context.cssAttr, node, attr)
context.node = node
# Block?
PAGE_BREAK = 1
PAGE_BREAK_RIGHT = 2
PAGE_BREAK_LEFT = 3
pageBreakAfter = False
frameBreakAfter = False
display = context.cssAttr.get("display", "inline").lower()
# print indent, node.tagName, display, context.cssAttr.get("background-color", None), attr
isBlock = (display == "block")
if isBlock:
context.addPara()
# Page break by CSS
if "-pdf-next-page" in context.cssAttr:
context.addStory(NextPageTemplate(str(context.cssAttr["-pdf-next-page"])))
if "-pdf-page-break" in context.cssAttr:
if str(context.cssAttr["-pdf-page-break"]).lower() == "before":
context.addStory(PageBreak())
if "-pdf-frame-break" in context.cssAttr:
if str(context.cssAttr["-pdf-frame-break"]).lower() == "before":
context.addStory(FrameBreak())
if str(context.cssAttr["-pdf-frame-break"]).lower() == "after":
frameBreakAfter = True
if "page-break-before" in context.cssAttr:
if str(context.cssAttr["page-break-before"]).lower() == "always":
context.addStory(PageBreak())
if str(context.cssAttr["page-break-before"]).lower() == "right":
context.addStory(PageBreak())
context.addStory(PmlRightPageBreak())
if str(context.cssAttr["page-break-before"]).lower() == "left":
context.addStory(PageBreak())
context.addStory(PmlLeftPageBreak())
if "page-break-after" in context.cssAttr:
if str(context.cssAttr["page-break-after"]).lower() == "always":
pageBreakAfter = PAGE_BREAK
if str(context.cssAttr["page-break-after"]).lower() == "right":
pageBreakAfter = PAGE_BREAK_RIGHT
if str(context.cssAttr["page-break-after"]).lower() == "left":
pageBreakAfter = PAGE_BREAK_LEFT
if display == "none":
# print "none!"
return
# Translate CSS to frags
# Save previous frag styles
context.pushFrag()
# Map styles to Reportlab fragment properties
CSS2Frag(context, kw, isBlock)
# EXTRAS
if "-pdf-keep-with-next" in context.cssAttr:
context.frag.keepWithNext = getBool(context.cssAttr["-pdf-keep-with-next"])
if "-pdf-outline" in context.cssAttr:
context.frag.outline = getBool(context.cssAttr["-pdf-outline"])
if "-pdf-outline-level" in context.cssAttr:
context.frag.outlineLevel = int(context.cssAttr["-pdf-outline-level"])
if "-pdf-outline-open" in context.cssAttr:
context.frag.outlineOpen = getBool(context.cssAttr["-pdf-outline-open"])
if "-pdf-word-wrap" in context.cssAttr:
context.frag.wordWrap = context.cssAttr["-pdf-word-wrap"]
# handle keep-in-frame
keepInFrameMode = None
keepInFrameMaxWidth = 0
keepInFrameMaxHeight = 0
if "-pdf-keep-in-frame-mode" in context.cssAttr:
value = str(context.cssAttr["-pdf-keep-in-frame-mode"]).strip().lower()
if value in ("shrink", "error", "overflow", "truncate"):
keepInFrameMode = value
if "-pdf-keep-in-frame-max-width" in context.cssAttr:
keepInFrameMaxWidth = getSize("".join(context.cssAttr["-pdf-keep-in-frame-max-width"]))
if "-pdf-keep-in-frame-max-height" in context.cssAttr:
keepInFrameMaxHeight = getSize("".join(context.cssAttr["-pdf-keep-in-frame-max-height"]))
# ignore nested keep-in-frames, tables have their own KIF handling
keepInFrame = keepInFrameMode is not None and context.keepInFrameIndex is None
if keepInFrame:
# keep track of current story index, so we can wrap everything
# added after this point in a KeepInFrame
context.keepInFrameIndex = len(context.story)
# BEGIN tag
klass = globals().get("pisaTag%s" % node.tagName.replace(":", "").upper(), None)
obj = None
# Static block
elementId = attr.get("id", None)
staticFrame = context.frameStatic.get(elementId, None)
if staticFrame:
context.frag.insideStaticFrame += 1
oldStory = context.swapStory()
# Tag specific operations
if klass is not None:
obj = klass(node, attr)
obj.start(context)
# Visit child nodes
context.fragBlock = fragBlock = copy.copy(context.frag)
for nnode in node.childNodes:
pisaLoop(nnode, context, path, **kw)
context.fragBlock = fragBlock
# END tag
if obj:
obj.end(context)
# Block?
if isBlock:
context.addPara()
# XXX Buggy!
# Page break by CSS
if pageBreakAfter:
context.addStory(PageBreak())
if pageBreakAfter == PAGE_BREAK_RIGHT:
context.addStory(PmlRightPageBreak())
if pageBreakAfter == PAGE_BREAK_LEFT:
context.addStory(PmlLeftPageBreak())
if frameBreakAfter:
context.addStory(FrameBreak())
if keepInFrame:
# get all content added after start of -pdf-keep-in-frame and wrap
# it in a KeepInFrame
substory = context.story[context.keepInFrameIndex:]
context.story = context.story[:context.keepInFrameIndex]
context.story.append(
KeepInFrame(
content=substory,
maxWidth=keepInFrameMaxWidth,
maxHeight=keepInFrameMaxHeight))
context.keepInFrameIndex = None
# Static block, END
if staticFrame:
context.addPara()
for frame in staticFrame:
frame.pisaStaticStory = context.story
context.swapStory(oldStory)
context.frag.insideStaticFrame -= 1
# context.debug(1, indent, "</%s>" % (node.tagName))
# Reset frag style
context.pullFrag()
# Unknown or not handled
else:
# context.debug(1, indent, "???", node, node.nodeType, repr(node))
# Loop over children
for node in node.childNodes:
pisaLoop(node, context, path, **kw)
def pisaParser(src, context, default_css="", xhtml=False, encoding=None, xml_output=None):
"""
- Parse HTML and get miniDOM
- Extract CSS informations, add default CSS, parse CSS
- Handle the document DOM itself and build reportlab story
- Return Context object
"""
global CSSAttrCache
CSSAttrCache = {}
if xhtml:
#TODO: XHTMLParser doesn't seem to exist...
parser = html5lib.XHTMLParser(tree=treebuilders.getTreeBuilder("dom"))
else:
parser = html5lib.HTMLParser(tree=treebuilders.getTreeBuilder("dom"))
if type(src) in types.StringTypes:
if type(src) is types.UnicodeType:
# If an encoding was provided, do not change it.
if not encoding:
encoding = "utf-8"
src = src.encode(encoding)
src = pisaTempFile(src, capacity=context.capacity)
# Test for the restrictions of html5lib
if encoding:
# Workaround for html5lib<0.11.1
if hasattr(inputstream, "isValidEncoding"):
if encoding.strip().lower() == "utf8":
encoding = "utf-8"
if not inputstream.isValidEncoding(encoding):
log.error("%r is not a valid encoding e.g. 'utf8' is not valid but 'utf-8' is!", encoding)
else:
if inputstream.codecName(encoding) is None:
log.error("%r is not a valid encoding", encoding)
document = parser.parse(
src,
encoding=encoding)
if xml_output:
if encoding:
xml_output.write(document.toprettyxml(encoding=encoding))
else:
xml_output.write(document.toprettyxml(encoding="utf8"))
if default_css:
context.addCSS(default_css)
pisaPreLoop(document, context)
#try:
context.parseCSS()
#except:
# context.cssText = DEFAULT_CSS
# context.parseCSS()
# context.debug(9, pprint.pformat(context.css))
pisaLoop(document, context)
return context
# Shortcuts
HTML2PDF = pisaParser
def XHTML2PDF(*a, **kw):
kw["xhtml"] = True
return HTML2PDF(*a, **kw)
XML2PDF = XHTML2PDF
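# Usage sketch (illustrative only, not part of the library): the context object is
# created elsewhere in xhtml2pdf, so the pisaContext constructor arguments below are
# assumptions rather than verbatim API.
#
#   from xhtml2pdf.context import pisaContext
#
#   context = pisaContext(".")                       # assumed constructor usage
#   context = pisaParser("<h1>Hello</h1>", context)  # parse HTML, build the story
#   story = context.story                            # reportlab flowables ready for a doc template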
| apache-2.0 | -1,639,543,538,045,728,800 | 34.494318 | 117 | 0.581279 | false |
Dioptas/Dioptas | dioptas/model/util/BackgroundExtraction.py | 1 | 2873 | # -*- coding: utf-8 -*-
# Dioptas - GUI program for fast processing of 2D X-ray diffraction data
# Principal author: Clemens Prescher ([email protected])
# Copyright (C) 2014-2019 GSECARS, University of Chicago, USA
# Copyright (C) 2015-2018 Institute for Geology and Mineralogy, University of Cologne, Germany
# Copyright (C) 2019-2020 DESY, Hamburg, Germany
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
logger = logging.getLogger(__name__)
import numpy as np
try:
from .smooth_bruckner import smooth_bruckner
except ImportError:
try:
from .smooth_bruckner_cython import smooth_bruckner
except ImportError:
try:
import pyximport
pyximport.install(language_level=3)
from .smooth_bruckner_cython import smooth_bruckner
except ImportError as e:
print(e)
logger.warning(
"Could not import the Fortran or Cython version of smooth_bruckner. Using python implementation instead. Please"
" run 'f2py -c -m smooth_bruckner smooth_bruckner.f95' in the model/util folder for faster"
" implementation")
from .smooth_bruckner_python import smooth_bruckner
def extract_background(x, y, smooth_width=0.1, iterations=50, cheb_order=50):
"""
Performs a background subtraction using bruckner smoothing and a chebyshev polynomial.
Standard parameters are found to be optimal for synchrotron XRD.
:param x: x-data of pattern
:param y: y-data of pattern
:param smooth_width: width of the window in x-units used for bruckner smoothing
:param iterations: number of iterations for the bruckner smoothing
:param cheb_order: order of the fitted chebyshev polynomial
:return: vector of extracted y background
"""
smooth_points = int((float(smooth_width) / (x[1] - x[0])))
y_smooth = smooth_bruckner(y, smooth_points, iterations)
# get cheb input parameters
x_cheb = 2. * (x - x[0]) / (x[-1] - x[0]) - 1.
cheb_parameters = np.polynomial.chebyshev.chebfit(x_cheb,
y_smooth,
cheb_order)
return np.polynomial.chebyshev.chebval(x_cheb, cheb_parameters)
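# Minimal usage sketch (illustrative only; the x/y data below are synthetic):
#
#   import numpy as np
#
#   x = np.linspace(5, 60, 2000)                      # e.g. a 2-theta axis
#   y = np.exp(-(x - 30) ** 2 / 0.01) + 0.01 * x + 1  # one peak on a sloping background
#   bkg = extract_background(x, y, smooth_width=0.1, iterations=50, cheb_order=50)
#   y_corrected = y - bkg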
| gpl-3.0 | -2,083,473,547,483,108,400 | 42.530303 | 128 | 0.679777 | false |
quru/wagtail | wagtail/wagtailimages/models.py | 1 | 17958 | from __future__ import absolute_import, unicode_literals
import hashlib
import os.path
from collections import OrderedDict
from contextlib import contextmanager
import django
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.files import File
from django.core.urlresolvers import reverse
from django.db import models
from django.db.models.signals import pre_delete, pre_save
from django.dispatch.dispatcher import receiver
from django.forms.widgets import flatatt
from django.utils.encoding import python_2_unicode_compatible
from django.utils.functional import cached_property
from django.utils.safestring import mark_safe
from django.utils.six import BytesIO, string_types, text_type
from django.utils.translation import ugettext_lazy as _
from taggit.managers import TaggableManager
from unidecode import unidecode
from willow.image import Image as WillowImage
from wagtail.wagtailadmin.taggable import TagSearchable
from wagtail.wagtailadmin.utils import get_object_usage
from wagtail.wagtailcore import hooks
from wagtail.wagtailcore.models import CollectionMember
from wagtail.wagtailimages.exceptions import InvalidFilterSpecError
from wagtail.wagtailimages.rect import Rect
from wagtail.wagtailsearch import index
from wagtail.wagtailsearch.queryset import SearchableQuerySetMixin
class SourceImageIOError(IOError):
"""
Custom exception to distinguish IOErrors that were thrown while opening the source image
"""
pass
class ImageQuerySet(SearchableQuerySetMixin, models.QuerySet):
pass
def get_upload_to(instance, filename):
"""
Obtain a valid upload path for an image file.
This needs to be a module-level function so that it can be referenced within migrations,
but simply delegates to the `get_upload_to` method of the instance, so that AbstractImage
subclasses can override it.
"""
return instance.get_upload_to(filename)
def get_rendition_upload_to(instance, filename):
"""
Obtain a valid upload path for an image rendition file.
This needs to be a module-level function so that it can be referenced within migrations,
but simply delegates to the `get_upload_to` method of the instance, so that AbstractRendition
subclasses can override it.
"""
return instance.get_upload_to(filename)
@python_2_unicode_compatible
class AbstractImage(CollectionMember, TagSearchable):
title = models.CharField(max_length=255, verbose_name=_('title'))
file = models.ImageField(
verbose_name=_('file'), upload_to=get_upload_to, width_field='width', height_field='height'
)
width = models.IntegerField(verbose_name=_('width'), editable=False)
height = models.IntegerField(verbose_name=_('height'), editable=False)
created_at = models.DateTimeField(verbose_name=_('created at'), auto_now_add=True, db_index=True)
uploaded_by_user = models.ForeignKey(
settings.AUTH_USER_MODEL, verbose_name=_('uploaded by user'),
null=True, blank=True, editable=False, on_delete=models.SET_NULL
)
tags = TaggableManager(help_text=None, blank=True, verbose_name=_('tags'))
focal_point_x = models.PositiveIntegerField(null=True, blank=True)
focal_point_y = models.PositiveIntegerField(null=True, blank=True)
focal_point_width = models.PositiveIntegerField(null=True, blank=True)
focal_point_height = models.PositiveIntegerField(null=True, blank=True)
file_size = models.PositiveIntegerField(null=True, editable=False)
objects = ImageQuerySet.as_manager()
def is_stored_locally(self):
"""
Returns True if the image is hosted on the local filesystem
"""
try:
self.file.path
return True
except NotImplementedError:
return False
def get_file_size(self):
if self.file_size is None:
try:
self.file_size = self.file.size
except OSError:
# File doesn't exist
return
self.save(update_fields=['file_size'])
return self.file_size
def get_upload_to(self, filename):
folder_name = 'original_images'
filename = self.file.field.storage.get_valid_name(filename)
# do a unidecode in the filename and then
# replace non-ascii characters in filename with _ , to sidestep issues with filesystem encoding
filename = "".join((i if ord(i) < 128 else '_') for i in unidecode(filename))
# Truncate filename so it fits in the 100 character limit
# https://code.djangoproject.com/ticket/9893
while len(os.path.join(folder_name, filename)) >= 95:
prefix, dot, extension = filename.rpartition('.')
filename = prefix[:-1] + dot + extension
return os.path.join(folder_name, filename)
def get_usage(self):
return get_object_usage(self)
@property
def usage_url(self):
return reverse('wagtailimages:image_usage',
args=(self.id,))
search_fields = TagSearchable.search_fields + CollectionMember.search_fields + [
index.FilterField('uploaded_by_user'),
]
def __str__(self):
return self.title
@contextmanager
def get_willow_image(self):
# Open file if it is closed
close_file = False
try:
image_file = self.file
if self.file.closed:
# Reopen the file
if self.is_stored_locally():
self.file.open('rb')
else:
# Some external storage backends don't allow reopening
# the file. Get a fresh file instance. #1397
storage = self._meta.get_field('file').storage
image_file = storage.open(self.file.name, 'rb')
close_file = True
except IOError as e:
# re-throw this as a SourceImageIOError so that calling code can distinguish
# these from IOErrors elsewhere in the process
raise SourceImageIOError(text_type(e))
# Seek to beginning
image_file.seek(0)
try:
yield WillowImage.open(image_file)
finally:
if close_file:
image_file.close()
def get_rect(self):
return Rect(0, 0, self.width, self.height)
def get_focal_point(self):
if self.focal_point_x is not None and \
self.focal_point_y is not None and \
self.focal_point_width is not None and \
self.focal_point_height is not None:
return Rect.from_point(
self.focal_point_x,
self.focal_point_y,
self.focal_point_width,
self.focal_point_height,
)
def has_focal_point(self):
return self.get_focal_point() is not None
def set_focal_point(self, rect):
if rect is not None:
self.focal_point_x = rect.centroid_x
self.focal_point_y = rect.centroid_y
self.focal_point_width = rect.width
self.focal_point_height = rect.height
else:
self.focal_point_x = None
self.focal_point_y = None
self.focal_point_width = None
self.focal_point_height = None
def get_suggested_focal_point(self):
with self.get_willow_image() as willow:
faces = willow.detect_faces()
if faces:
# Create a bounding box around all faces
left = min(face[0] for face in faces)
top = min(face[1] for face in faces)
right = max(face[2] for face in faces)
bottom = max(face[3] for face in faces)
focal_point = Rect(left, top, right, bottom)
else:
features = willow.detect_features()
if features:
# Create a bounding box around all features
left = min(feature[0] for feature in features)
top = min(feature[1] for feature in features)
right = max(feature[0] for feature in features)
bottom = max(feature[1] for feature in features)
focal_point = Rect(left, top, right, bottom)
else:
return None
# Add 20% to width and height and give it a minimum size
x, y = focal_point.centroid
width, height = focal_point.size
width *= 1.20
height *= 1.20
width = max(width, 100)
height = max(height, 100)
return Rect.from_point(x, y, width, height)
@classmethod
def get_rendition_model(cls):
""" Get the Rendition model for this Image model """
if django.VERSION >= (1, 9):
return cls.renditions.rel.related_model
else:
return cls.renditions.related.related_model
def get_rendition(self, filter):
if isinstance(filter, string_types):
filter, created = Filter.objects.get_or_create(spec=filter)
cache_key = filter.get_cache_key(self)
Rendition = self.get_rendition_model()
try:
rendition = self.renditions.get(
filter=filter,
focal_point_key=cache_key,
)
except Rendition.DoesNotExist:
# Generate the rendition image
generated_image = filter.run(self, BytesIO())
# Generate filename
input_filename = os.path.basename(self.file.name)
input_filename_without_extension, input_extension = os.path.splitext(input_filename)
# A mapping of image formats to extensions
FORMAT_EXTENSIONS = {
'jpeg': '.jpg',
'png': '.png',
'gif': '.gif',
}
output_extension = filter.spec.replace('|', '.') + FORMAT_EXTENSIONS[generated_image.format_name]
if cache_key:
output_extension = cache_key + '.' + output_extension
# Truncate filename to prevent it going over 60 chars
output_filename_without_extension = input_filename_without_extension[:(59 - len(output_extension))]
output_filename = output_filename_without_extension + '.' + output_extension
rendition, created = self.renditions.get_or_create(
filter=filter,
focal_point_key=cache_key,
defaults={'file': File(generated_image.f, name=output_filename)}
)
return rendition
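# Typical call (a sketch, not code from this module): given an Image instance,
# image.get_rendition("width-400") or image.get_rendition("fill-300x200") looks up
# or generates the resized file; the spec string is parsed by the Filter model
# below. The example spec names are assumed from Wagtail's registered operations.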
def is_portrait(self):
return (self.width < self.height)
def is_landscape(self):
return (self.height < self.width)
@property
def filename(self):
return os.path.basename(self.file.name)
@property
def default_alt_text(self):
# by default the alt text field (used in rich text insertion) is populated
# from the title. Subclasses might provide a separate alt field, and
# override this
return self.title
def is_editable_by_user(self, user):
from wagtail.wagtailimages.permissions import permission_policy
return permission_policy.user_has_permission_for_instance(user, 'change', self)
class Meta:
abstract = True
class Image(AbstractImage):
admin_form_fields = (
'title',
'file',
'collection',
'tags',
'focal_point_x',
'focal_point_y',
'focal_point_width',
'focal_point_height',
)
# Do smartcropping calculations when user saves an image without a focal point
@receiver(pre_save, sender=Image)
def image_feature_detection(sender, instance, **kwargs):
if getattr(settings, 'WAGTAILIMAGES_FEATURE_DETECTION_ENABLED', False):
# Make sure the image doesn't already have a focal point
if not instance.has_focal_point():
# Set the focal point
instance.set_focal_point(instance.get_suggested_focal_point())
# Receive the pre_delete signal and delete the file associated with the model instance.
@receiver(pre_delete, sender=Image)
def image_delete(sender, instance, **kwargs):
# Pass false so FileField doesn't save the model.
instance.file.delete(False)
def get_image_model():
from django.conf import settings
from django.apps import apps
try:
app_label, model_name = settings.WAGTAILIMAGES_IMAGE_MODEL.split('.')
except AttributeError:
return Image
except ValueError:
raise ImproperlyConfigured("WAGTAILIMAGES_IMAGE_MODEL must be of the form 'app_label.model_name'")
image_model = apps.get_model(app_label, model_name)
if image_model is None:
raise ImproperlyConfigured(
"WAGTAILIMAGES_IMAGE_MODEL refers to model '%s' that has not been installed" %
settings.WAGTAILIMAGES_IMAGE_MODEL
)
return image_model
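# Settings sketch (assumed project settings.py, not part of this module): to swap in
# a custom image model, point WAGTAILIMAGES_IMAGE_MODEL at it using the
# 'app_label.ModelName' form that get_image_model() parses above, e.g.:
#
#   WAGTAILIMAGES_IMAGE_MODEL = 'myapp.CustomImage'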
class Filter(models.Model):
"""
Represents one or more operations that can be applied to an Image to produce a rendition
appropriate for final display on the website. Usually this would be a resize operation,
but could potentially involve colour processing, etc.
"""
# The spec pattern is operation1-var1-var2|operation2-var1
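# e.g. "width-400" or "fill-300x200", with multiple operations chained by "|"
# (example spec names assumed from the registered operations, not defined here)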
spec = models.CharField(max_length=255, unique=True)
@cached_property
def operations(self):
# Search for operations
self._search_for_operations()
# Build list of operation objects
operations = []
for op_spec in self.spec.split('|'):
op_spec_parts = op_spec.split('-')
if op_spec_parts[0] not in self._registered_operations:
raise InvalidFilterSpecError("Unrecognised operation: %s" % op_spec_parts[0])
op_class = self._registered_operations[op_spec_parts[0]]
operations.append(op_class(*op_spec_parts))
return operations
def run(self, image, output):
with image.get_willow_image() as willow:
original_format = willow.format_name
# Fix orientation of image
willow = willow.auto_orient()
for operation in self.operations:
willow = operation.run(willow, image) or willow
if original_format == 'jpeg':
# Allow changing of JPEG compression quality
if hasattr(settings, 'WAGTAILIMAGES_JPEG_QUALITY'):
quality = settings.WAGTAILIMAGES_JPEG_QUALITY
else:
quality = 85
return willow.save_as_jpeg(output, quality=quality)
elif original_format == 'gif':
# Convert image to PNG if it's not animated
if not willow.has_animation():
return willow.save_as_png(output)
else:
return willow.save_as_gif(output)
elif original_format == 'bmp':
# Convert to PNG
return willow.save_as_png(output)
else:
return willow.save(original_format, output)
def get_cache_key(self, image):
vary_parts = []
for operation in self.operations:
for field in getattr(operation, 'vary_fields', []):
value = getattr(image, field, '')
vary_parts.append(str(value))
vary_string = '-'.join(vary_parts)
# Return blank string if there are no vary fields
if not vary_string:
return ''
return hashlib.sha1(vary_string.encode('utf-8')).hexdigest()[:8]
_registered_operations = None
@classmethod
def _search_for_operations(cls):
if cls._registered_operations is not None:
return
operations = []
for fn in hooks.get_hooks('register_image_operations'):
operations.extend(fn())
cls._registered_operations = dict(operations)
class AbstractRendition(models.Model):
filter = models.ForeignKey(Filter, related_name='+')
file = models.ImageField(upload_to=get_rendition_upload_to, width_field='width', height_field='height')
width = models.IntegerField(editable=False)
height = models.IntegerField(editable=False)
focal_point_key = models.CharField(max_length=255, blank=True, default='', editable=False)
@property
def url(self):
return self.file.url
@property
def alt(self):
return self.image.title
@property
def attrs(self):
"""
The src, width, height, and alt attributes for an <img> tag, as a HTML
string
"""
return flatatt(self.attrs_dict)
@property
def attrs_dict(self):
"""
A dict of the src, width, height, and alt attributes for an <img> tag.
"""
return OrderedDict([
('src', self.url),
('width', self.width),
('height', self.height),
('alt', self.alt),
])
def img_tag(self, extra_attributes={}):
attrs = self.attrs_dict.copy()
attrs.update(extra_attributes)
return mark_safe('<img{}>'.format(flatatt(attrs)))
def __html__(self):
return self.img_tag()
def get_upload_to(self, filename):
folder_name = 'images'
filename = self.file.field.storage.get_valid_name(filename)
return os.path.join(folder_name, filename)
class Meta:
abstract = True
class Rendition(AbstractRendition):
image = models.ForeignKey(Image, related_name='renditions')
class Meta:
unique_together = (
('image', 'filter', 'focal_point_key'),
)
# Receive the pre_delete signal and delete the file associated with the model instance.
@receiver(pre_delete, sender=Rendition)
def rendition_delete(sender, instance, **kwargs):
# Pass false so FileField doesn't save the model.
instance.file.delete(False)
| bsd-3-clause | -9,143,806,606,278,823,000 | 33.402299 | 111 | 0.622452 | false |
hehongliang/tensorflow | tensorflow/python/keras/optimizer_v2/ftrl_test.py | 1 | 17276 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for Ftrl operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.keras.optimizer_v2 import ftrl
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import adagrad
from tensorflow.python.training import gradient_descent
class FtrlOptimizerTest(test.TestCase):
def doTestFtrlwithoutRegularization(self, use_resource=False):
for dtype in [dtypes.half, dtypes.float32]:
with self.cached_session() as sess:
if use_resource:
var0 = resource_variable_ops.ResourceVariable([0.0, 0.0], dtype=dtype)
var1 = resource_variable_ops.ResourceVariable([0.0, 0.0], dtype=dtype)
else:
var0 = variables.Variable([0.0, 0.0], dtype=dtype)
var1 = variables.Variable([0.0, 0.0], dtype=dtype)
grads0 = constant_op.constant([0.1, 0.2], dtype=dtype)
grads1 = constant_op.constant([0.01, 0.02], dtype=dtype)
opt = ftrl.Ftrl(
3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.0,
l2_regularization_strength=0.0)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
variables.global_variables_initializer().run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllClose([0.0, 0.0], v0_val)
self.assertAllClose([0.0, 0.0], v1_val)
# Run 3 steps FTRL
for _ in range(3):
update.run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllCloseAccordingToType(
np.array([-2.60260963, -4.29698515]), v0_val)
self.assertAllCloseAccordingToType(
np.array([-0.28432083, -0.56694895]), v1_val)
def testFtrlWithoutRegularization(self):
self.doTestFtrlwithoutRegularization(use_resource=False)
def testResourceFtrlWithoutRegularization(self):
self.doTestFtrlwithoutRegularization(use_resource=True)
def testFtrlwithoutRegularization2(self):
for dtype in [dtypes.half, dtypes.float32]:
with self.cached_session() as sess:
var0 = variables.Variable([1.0, 2.0], dtype=dtype)
var1 = variables.Variable([4.0, 3.0], dtype=dtype)
grads0 = constant_op.constant([0.1, 0.2], dtype=dtype)
grads1 = constant_op.constant([0.01, 0.02], dtype=dtype)
opt = ftrl.Ftrl(
3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.0,
l2_regularization_strength=0.0)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
variables.global_variables_initializer().run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllCloseAccordingToType([1.0, 2.0], v0_val)
self.assertAllCloseAccordingToType([4.0, 3.0], v1_val)
# Run 3 steps FTRL
for _ in range(3):
update.run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllCloseAccordingToType(
np.array([-2.55607247, -3.98729396]), v0_val)
self.assertAllCloseAccordingToType(
np.array([-0.28232238, -0.56096673]), v1_val)
def testMinimizeSparseResourceVariable(self):
for dtype in [dtypes.half, dtypes.float32, dtypes.float64]:
with self.cached_session():
var0 = resource_variable_ops.ResourceVariable([[1.0, 2.0]], dtype=dtype)
x = constant_op.constant([[4.0], [5.0]], dtype=dtype)
pred = math_ops.matmul(embedding_ops.embedding_lookup([var0], [0]), x)
loss = pred * pred
sgd_op = ftrl.Ftrl(1.0).minimize(loss, var_list=[var0])
variables.global_variables_initializer().run()
# Fetch params to validate initial values
self.assertAllCloseAccordingToType([[1.0, 2.0]], self.evaluate(var0))
# Run 1 step of sgd
sgd_op.run()
# Validate updated params
self.assertAllCloseAccordingToType([[0, 1]],
self.evaluate(var0),
atol=0.01)
def testFtrlWithL1(self):
for dtype in [dtypes.half, dtypes.float32]:
with self.cached_session() as sess:
var0 = variables.Variable([1.0, 2.0], dtype=dtype)
var1 = variables.Variable([4.0, 3.0], dtype=dtype)
grads0 = constant_op.constant([0.1, 0.2], dtype=dtype)
grads1 = constant_op.constant([0.01, 0.02], dtype=dtype)
opt = ftrl.Ftrl(
3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.001,
l2_regularization_strength=0.0)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
variables.global_variables_initializer().run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllCloseAccordingToType([1.0, 2.0], v0_val)
self.assertAllCloseAccordingToType([4.0, 3.0], v1_val)
# Run 10 steps FTRL
for _ in range(10):
update.run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllCloseAccordingToType(
np.array([-7.66718769, -10.91273689]), v0_val)
self.assertAllCloseAccordingToType(
np.array([-0.93460727, -1.86147261]), v1_val)
def testFtrlWithL1_L2(self):
for dtype in [dtypes.half, dtypes.float32]:
with self.cached_session() as sess:
var0 = variables.Variable([1.0, 2.0], dtype=dtype)
var1 = variables.Variable([4.0, 3.0], dtype=dtype)
grads0 = constant_op.constant([0.1, 0.2], dtype=dtype)
grads1 = constant_op.constant([0.01, 0.02], dtype=dtype)
opt = ftrl.Ftrl(
3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.001,
l2_regularization_strength=2.0)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
variables.global_variables_initializer().run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllCloseAccordingToType([1.0, 2.0], v0_val)
self.assertAllCloseAccordingToType([4.0, 3.0], v1_val)
# Run 10 steps FTRL
for _ in range(10):
update.run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllCloseAccordingToType(
np.array([-0.24059935, -0.46829352]), v0_val)
self.assertAllCloseAccordingToType(
np.array([-0.02406147, -0.04830509]), v1_val)
def testFtrlWithL1_L2_L2Shrinkage(self):
"""Test the new FTRL op with support for l2 shrinkage.
The addition of this parameter, which places a constant pressure on weights
towards the origin, causes the gradient descent trajectory to differ. The
weights will tend to have smaller magnitudes with this parameter set.
"""
for dtype in [dtypes.half, dtypes.float32]:
with self.cached_session() as sess:
var0 = variables.Variable([1.0, 2.0], dtype=dtype)
var1 = variables.Variable([4.0, 3.0], dtype=dtype)
grads0 = constant_op.constant([0.1, 0.2], dtype=dtype)
grads1 = constant_op.constant([0.01, 0.02], dtype=dtype)
opt = ftrl.Ftrl(
3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.001,
l2_regularization_strength=2.0,
l2_shrinkage_regularization_strength=0.1)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
variables.global_variables_initializer().run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllCloseAccordingToType([1.0, 2.0], v0_val)
self.assertAllCloseAccordingToType([4.0, 3.0], v1_val)
# Run 10 steps FTRL
for _ in range(10):
update.run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllCloseAccordingToType(
np.array([-0.22578995, -0.44345796]), v0_val)
self.assertAllCloseAccordingToType(
np.array([-0.14378493, -0.13229476]), v1_val)
def testFtrlWithL1_L2_L2ShrinkageSparse(self):
"""Tests the new FTRL op with support for l2 shrinkage on sparse grads."""
for dtype in [dtypes.half, dtypes.float32]:
with self.cached_session() as sess:
var0 = variables.Variable([[1.0], [2.0]], dtype=dtype)
var1 = variables.Variable([[4.0], [3.0]], dtype=dtype)
grads0 = ops.IndexedSlices(
constant_op.constant([0.1], shape=[1, 1], dtype=dtype),
constant_op.constant([0]), constant_op.constant([2, 1]))
grads1 = ops.IndexedSlices(
constant_op.constant([0.02], shape=[1, 1], dtype=dtype),
constant_op.constant([1]), constant_op.constant([2, 1]))
opt = ftrl.Ftrl(
3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.001,
l2_regularization_strength=2.0,
l2_shrinkage_regularization_strength=0.1)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
variables.global_variables_initializer().run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllCloseAccordingToType([[1.0], [2.0]], v0_val)
self.assertAllCloseAccordingToType([[4.0], [3.0]], v1_val)
# Run 10 steps FTRL
for _ in range(10):
update.run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllCloseAccordingToType([[-0.22578995], [2.]], v0_val)
self.assertAllCloseAccordingToType([[4.], [-0.13229476]], v1_val)
def testFtrlWithL2ShrinkageDoesNotChangeLrSchedule(self):
"""Verifies that l2 shrinkage in FTRL does not change lr schedule."""
for dtype in [dtypes.half, dtypes.float32]:
with self.cached_session() as sess:
var0 = variables.Variable([1.0, 2.0], dtype=dtype)
var1 = variables.Variable([1.0, 2.0], dtype=dtype)
grads0 = constant_op.constant([0.1, 0.2], dtype=dtype)
grads1 = constant_op.constant([0.1, 0.2], dtype=dtype)
opt0 = ftrl.Ftrl(
3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.001,
l2_regularization_strength=2.0,
l2_shrinkage_regularization_strength=0.1)
opt1 = ftrl.Ftrl(
3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.001,
l2_regularization_strength=2.0)
update0 = opt0.apply_gradients([(grads0, var0)])
update1 = opt1.apply_gradients([(grads1, var1)])
variables.global_variables_initializer().run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllCloseAccordingToType([1.0, 2.0], v0_val)
self.assertAllCloseAccordingToType([1.0, 2.0], v1_val)
# Run 10 steps FTRL
for _ in range(10):
update0.run()
update1.run()
v0_val, v1_val = sess.run([var0, var1])
# var0 is experiencing L2 shrinkage so it should be smaller than var1
# in magnitude.
self.assertTrue((v0_val**2 < v1_val**2).all())
accum0 = sess.run(opt0.get_slot(var0, "accumulator"))
accum1 = sess.run(opt1.get_slot(var1, "accumulator"))
# L2 shrinkage should not change how we update grad accumulator.
self.assertAllCloseAccordingToType(accum0, accum1)
def applyOptimizer(self, opt, dtype, steps=5, is_sparse=False):
if is_sparse:
var0 = variables.Variable([[0.0], [0.0]], dtype=dtype)
var1 = variables.Variable([[0.0], [0.0]], dtype=dtype)
grads0 = ops.IndexedSlices(
constant_op.constant([0.1], shape=[1, 1], dtype=dtype),
constant_op.constant([0]), constant_op.constant([2, 1]))
grads1 = ops.IndexedSlices(
constant_op.constant([0.02], shape=[1, 1], dtype=dtype),
constant_op.constant([1]), constant_op.constant([2, 1]))
else:
var0 = variables.Variable([0.0, 0.0], dtype=dtype)
var1 = variables.Variable([0.0, 0.0], dtype=dtype)
grads0 = constant_op.constant([0.1, 0.2], dtype=dtype)
grads1 = constant_op.constant([0.01, 0.02], dtype=dtype)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
variables.global_variables_initializer().run()
sess = ops.get_default_session()
v0_val, v1_val = sess.run([var0, var1])
if is_sparse:
self.assertAllCloseAccordingToType([[0.0], [0.0]], v0_val)
self.assertAllCloseAccordingToType([[0.0], [0.0]], v1_val)
else:
self.assertAllCloseAccordingToType([0.0, 0.0], v0_val)
self.assertAllCloseAccordingToType([0.0, 0.0], v1_val)
# Run Ftrl for a few steps
for _ in range(steps):
update.run()
v0_val, v1_val = sess.run([var0, var1])
return v0_val, v1_val
# When variables are initialized with Zero, FTRL-Proximal has two properties:
# 1. Without L1&L2 but with fixed learning rate, FTRL-Proximal is identical
# with GradientDescent.
# 2. Without L1&L2 but with adaptive learning rate, FTRL-Proximal is identical
# with Adagrad.
# So, based on these two properties, we test if our implementation of
# FTRL-Proximal performs same updates as Adagrad or GradientDescent.
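# Illustrative sketch of the first property (plain Python, not a test; names assumed):
# with learning_rate_power=0 and no L1/L2, each FTRL-Proximal step reduces to
# w -= lr * g, i.e. plain gradient descent on the same gradients:
#
#   w, lr = 0.0, 3.0
#   for g in [0.1, 0.1, 0.1]:
#       w -= lr * g   # matches GradientDescentOptimizer(3.0) applied to the same grads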
def testEquivAdagradwithoutRegularization(self):
for dtype in [dtypes.half, dtypes.float32]:
with self.cached_session():
val0, val1 = self.applyOptimizer(
ftrl.Ftrl(
3.0,
# Adagrad learning rate
learning_rate_power=-0.5,
initial_accumulator_value=0.1,
l1_regularization_strength=0.0,
l2_regularization_strength=0.0),
dtype)
with self.cached_session():
val2, val3 = self.applyOptimizer(
adagrad.AdagradOptimizer(3.0, initial_accumulator_value=0.1), dtype)
self.assertAllCloseAccordingToType(val0, val2)
self.assertAllCloseAccordingToType(val1, val3)
def testEquivSparseAdagradwithoutRegularization(self):
for dtype in [dtypes.half, dtypes.float32]:
with self.cached_session():
val0, val1 = self.applyOptimizer(
ftrl.Ftrl(
3.0,
# Adagrad learning rate
learning_rate_power=-0.5,
initial_accumulator_value=0.1,
l1_regularization_strength=0.0,
l2_regularization_strength=0.0),
dtype,
is_sparse=True)
with self.cached_session():
val2, val3 = self.applyOptimizer(
adagrad.AdagradOptimizer(3.0, initial_accumulator_value=0.1),
dtype,
is_sparse=True)
self.assertAllCloseAccordingToType(val0, val2)
self.assertAllCloseAccordingToType(val1, val3)
def testEquivSparseGradientDescentwithoutRegularization(self):
for dtype in [dtypes.half, dtypes.float32]:
with self.cached_session():
val0, val1 = self.applyOptimizer(
ftrl.Ftrl(
3.0,
# Fixed learning rate
learning_rate_power=-0.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.0,
l2_regularization_strength=0.0),
dtype,
is_sparse=True)
with self.cached_session():
val2, val3 = self.applyOptimizer(
gradient_descent.GradientDescentOptimizer(3.0),
dtype,
is_sparse=True)
self.assertAllCloseAccordingToType(val0, val2)
self.assertAllCloseAccordingToType(val1, val3)
def testEquivGradientDescentwithoutRegularization(self):
for dtype in [dtypes.half, dtypes.float32]:
with self.cached_session():
val0, val1 = self.applyOptimizer(
ftrl.Ftrl(
3.0,
# Fixed learning rate
learning_rate_power=-0.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.0,
l2_regularization_strength=0.0),
dtype)
with self.cached_session():
val2, val3 = self.applyOptimizer(
gradient_descent.GradientDescentOptimizer(3.0), dtype)
self.assertAllCloseAccordingToType(val0, val2)
self.assertAllCloseAccordingToType(val1, val3)
if __name__ == "__main__":
test.main()
| apache-2.0 | 2,758,639,719,121,693,000 | 39.553991 | 80 | 0.620167 | false |
maxtangli/sonico | language/python/teabreak/final_hint.py | 1 | 1056 | def intelligent_data_source_factory(*data):
import itertools
cy = itertools.cycle(data)
_int = int
return lambda i: _int(i) if isinstance(i, str) else next(cy)
int = intelligent_data_source_factory(1985, 33067, 84)
# int = intelligent_data_source_factory(2012, 9, 30) # invalid
# int = intelligent_data_source_factory(2012, 9, 16) # invalid
# int = intelligent_data_source_factory(84, 100, 114) # invalid
def range_check(func):
return lambda m, e, n, c: ((0 <= m < n) and func(m, e, n, c)) or ''
@range_check
def f(m, e, n, c):
return str(m) if pow(m, e) % n == c else ''
if __name__ == '__main__':
# for i in range(1000000):
# # if f(i, 17, 3569, 915) == str(i):
# if f(i, 1985, 33067, 84) == str(i):
# print(i) # 25202
#
# print(25202 % 1985, 25202 % 33067, 25202 % 84) # invalid
# print(25202 % 17, 25202 % 3569, 25202 % 915) # invalid
for i in range(1000000):
if f(i, int(17), int(3569), int(915)) == str(i):
print(i) # 25202 -> 20252(invalid)
| mit | 7,963,685,116,978,258,000 | 28.333333 | 71 | 0.571023 | false |
mory0tiki/pack-llama | views.py | 1 | 1220 | from django.core.files.base import ContentFile
from django.shortcuts import render
from django.http.response import HttpResponse
from django.views.generic import base
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from django.conf import settings
import ast
import json
import uuid
import models
import utils
class SavePackView(base.View):
@method_decorator(csrf_exempt)
def dispatch(self, request, *args, **kwargs):
return super(SavePackView, self).dispatch(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
try:
result = {"result" : False}
if request.body:
pack = models.Pack()
pack.receive_from = request.META["REMOTE_ADDR"]
pack.queue_id = models.Queue.objects.get(name='Test').id
if settings.DEBUG:
print request.body
print "++++++++++++++++++++++++"
pack.message.save(str(uuid.uuid4()),ContentFile(request.body))
result["result"] = True
except Exception as ex:
print str(ex)
return HttpResponse(json.dumps(result))
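# URL wiring sketch (assumed urls.py for this app; the path and name are illustrative):
#
#   from django.conf.urls import url
#   import views
#
#   urlpatterns = [
#       url(r'^pack/save/$', views.SavePackView.as_view(), name='save_pack'),
#   ]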
| apache-2.0 | 5,642,803,948,910,530,000 | 32.888889 | 78 | 0.62623 | false |
JackCloudman/Youtube-music | download.py | 1 | 1342 | #Program to download Yotube music
#Author: Jack Cloudman
import pafy,os,shutil
from pydub import AudioSegment as convert
#Create song list
if os.path.exists('songs.txt'):
pass
else:
print("Creating songs.txt....")
document= open('songs.txt','w')
print("Paste yours songs in songs.txt")
document.close()
#create directory
if os.path.exists('music'):
if os.path.exists('music/temp'):
pass
else:
os.mkdir('music/temp')
else:
os.mkdir('music')
os.mkdir('music/temp')
document = open('songs.txt','r')
music_list = document.readlines()
document.close()
error_list=[]
print("Download music....")
for music in music_list:
try:
url = music
video = pafy.new(url)
bestaudio = video.getbestaudio()
bestaudio.download(filepath="music/temp/")
except:
error_list.append("Error download: "+music)
print("Converting to mp3.....")
for filename in os.listdir('music/temp/'):
try:
audio = convert.from_file('music/temp/'+filename)
name = os.path.splitext(filename)
audio.export('music/'+name[0]+'.mp3',format="mp3",bitrate="160k")
except:
error_list.append("Error convert: "+name[0])
shutil.rmtree("music/temp")
for error in error_list:
print(error)
print("Finished!")
| gpl-3.0 | -262,824,265,426,975,650 | 26.553191 | 73 | 0.622206 | false |
jfalkner/Efficient-Django-QuerySet-Use | demo-optimized/example/utils.py | 1 | 3812 | from django.utils.timezone import utc
from django_db_utils import pg_bulk_update
from example.models import Sample, SampleStatus
def now():
from datetime import datetime
return datetime.utcnow().replace(tzinfo=utc)
def make_fake_data(samples_to_make=100000, batch_threshold=100000, delete_existing=True, make_statuses=True, years=5):
"""Makes mock data for testing performance. Optionally, resets db.
"""
if delete_existing:
Sample.objects.all().delete()
print "Deleted existing"
    # Samples below `offset` are created as already complete; the rest stay in the lab.
offset = samples_to_make - samples_to_make/52/years
# Create all the samples.
samples = []
barcodes = range(samples_to_make)
for barcode in barcodes:
sample = Sample()
sample.barcode = str(barcode)
sample.created = now()
sample.status_created = sample.created
if barcode < offset:
sample.status_code = SampleStatus.COMPLETE
else:
sample.status_code = SampleStatus.LAB
sample.production = True
samples.append(sample)
if len(samples) >= batch_threshold:
Sample.objects.bulk_create(samples)
del samples[:]
print "Made %s samples." % Sample.objects.count()
if samples:
Sample.objects.bulk_create(samples)
print "Finished making %s samples." % Sample.objects.count()
if not make_statuses:
return
# Pull all ids for samples.
sample_ids = Sample.objects.values_list('id', flat=True)
# Create all the statuses.
offset = len(sample_ids)-len(sample_ids)/52/years
statuses = []
for sample in sample_ids[:offset]:
statuses.append(SampleStatus(sample_id=sample, status_code=SampleStatus.RECEIVED, created=now()))
statuses.append(SampleStatus(sample_id=sample, status_code=SampleStatus.LAB, created=now()))
statuses.append(SampleStatus(sample_id=sample, status_code=SampleStatus.COMPLETE, created=now()))
if len(statuses) >= batch_threshold:
SampleStatus.objects.bulk_create(statuses)
del statuses[:]
for sample in sample_ids[offset:]:
statuses.append(SampleStatus(sample_id=sample, status_code=SampleStatus.RECEIVED, created=now()))
statuses.append(SampleStatus(sample_id=sample, status_code=SampleStatus.LAB, created=now()))
if len(statuses) >= batch_threshold:
SampleStatus.objects.bulk_create(statuses)
del statuses[:]
print "Made %s statuses."%SampleStatus.objects.count()
if statuses:
SampleStatus.objects.bulk_create(statuses)
print "Finished making %s statuses."%SampleStatus.objects.count()
# Make all the denormalized status_code vars match.
sync_status(limit=batch_threshold)
print "Statuses synchronized"
def sync_status(limit=100000):
# Stream through all samples.
sample_count = Sample.objects.count()
for index in range(0, sample_count, limit):
vals = Sample.objects.order_by('id', '-statuses__status_code').distinct('id').values_list('id', 'status_code', 'statuses__id', 'statuses__status_code')[index:index+limit]
# Pull all mismatching values.
ids = []
status_codes = []
# status_ids = []
for sample_id, status_code, status_id, latest_status_code in vals:
if status_code != latest_status_code:
ids.append(sample_id)
status_codes.append(latest_status_code)
# status_ids.append(status_id)
# Sync using a bulk update.
if ids:
pg_bulk_update(Sample, 'id', 'status_code', list(ids), list(status_codes))
# pg_bulk_update(Sample, 'id', 'status_id', list(ids), list(status_ids))
print 'Synced %s out of %s samples at %s'%(len(ids), limit, index)
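# Hedged usage sketch (not part of the original module); module path and numbers
# are illustrative:
#
#   from example.utils import make_fake_data, sync_status
#   make_fake_data(samples_to_make=10000, batch_threshold=5000, years=5)
#   sync_status(limit=5000)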
| mit | 361,137,351,912,115,300 | 39.126316 | 178 | 0.647692 | false |
cloudbase/coriolis | coriolis/wsman.py | 1 | 6173 | # Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
import base64
from oslo_log import log as logging
import requests
from winrm import protocol
from winrm import exceptions as winrm_exceptions
from coriolis import exception
from coriolis import utils
AUTH_BASIC = "basic"
AUTH_KERBEROS = "kerberos"
AUTH_CERTIFICATE = "certificate"
CODEPAGE_UTF8 = 65001
DEFAULT_TIMEOUT = 3600
LOG = logging.getLogger(__name__)
class WSManConnection(object):
def __init__(self, timeout=None):
self._protocol = None
self._conn_timeout = int(timeout or DEFAULT_TIMEOUT)
EOL = "\r\n"
@utils.retry_on_error()
def connect(self, url, username, auth=None, password=None,
cert_pem=None, cert_key_pem=None):
if not auth:
if cert_pem:
auth = AUTH_CERTIFICATE
else:
auth = AUTH_BASIC
auth_transport_map = {AUTH_BASIC: 'plaintext',
AUTH_KERBEROS: 'kerberos',
AUTH_CERTIFICATE: 'ssl'}
self._protocol = protocol.Protocol(
endpoint=url,
transport=auth_transport_map[auth],
username=username,
password=password,
cert_pem=cert_pem,
cert_key_pem=cert_key_pem)
@classmethod
def from_connection_info(cls, connection_info, timeout=DEFAULT_TIMEOUT):
""" Returns a wsman.WSManConnection object for the provided conn info. """
if not isinstance(connection_info, dict):
raise ValueError(
"WSMan connection must be a dict. Got type '%s', value: %s" % (
type(connection_info), connection_info))
required_keys = ["ip", "username", "password"]
missing = [key for key in required_keys if key not in connection_info]
if missing:
raise ValueError(
"The following keys were missing from WSMan connection info %s. "
"Got: %s" % (missing, connection_info))
host = connection_info["ip"]
port = connection_info.get("port", 5986)
username = connection_info["username"]
password = connection_info.get("password")
cert_pem = connection_info.get("cert_pem")
cert_key_pem = connection_info.get("cert_key_pem")
url = "https://%s:%s/wsman" % (host, port)
LOG.info("Connection info: %s", str(connection_info))
LOG.info("Waiting for connectivity on host: %(host)s:%(port)s",
{"host": host, "port": port})
utils.wait_for_port_connectivity(host, port)
conn = cls(timeout)
conn.connect(url=url, username=username, password=password,
cert_pem=cert_pem, cert_key_pem=cert_key_pem)
return conn
def disconnect(self):
self._protocol = None
def set_timeout(self, timeout):
if timeout:
self._protocol.timeout = timeout
self._protocol.transport.timeout = timeout
@utils.retry_on_error(
terminal_exceptions=[winrm_exceptions.InvalidCredentialsError,
exception.OSMorphingWinRMOperationTimeout])
def _exec_command(self, cmd, args=[], timeout=None):
timeout = int(timeout or self._conn_timeout)
self.set_timeout(timeout)
shell_id = self._protocol.open_shell(codepage=CODEPAGE_UTF8)
try:
command_id = self._protocol.run_command(shell_id, cmd, args)
try:
(std_out,
std_err,
exit_code) = self._protocol.get_command_output(
shell_id, command_id)
except requests.exceptions.ReadTimeout:
raise exception.OSMorphingWinRMOperationTimeout(
cmd=("%s %s" % (cmd, " ".join(args))), timeout=timeout)
finally:
self._protocol.cleanup_command(shell_id, command_id)
return (std_out, std_err, exit_code)
finally:
self._protocol.close_shell(shell_id)
def exec_command(self, cmd, args=[], timeout=None):
LOG.debug("Executing WSMAN command: %s", str([cmd] + args))
std_out, std_err, exit_code = self._exec_command(
cmd, args, timeout=timeout)
if exit_code:
raise exception.CoriolisException(
"Command \"%s\" failed with exit code: %s\n"
"stdout: %s\nstd_err: %s" %
(str([cmd] + args), exit_code, std_out, std_err))
return std_out
def exec_ps_command(self, cmd, ignore_stdout=False, timeout=None):
LOG.debug("Executing PS command: %s", cmd)
base64_cmd = base64.b64encode(cmd.encode('utf-16le')).decode()
return self.exec_command(
"powershell.exe", ["-EncodedCommand", base64_cmd],
timeout=timeout)[:-2]
def test_path(self, remote_path):
ret_val = self.exec_ps_command("Test-Path -Path \"%s\"" % remote_path)
return ret_val == "True"
def download_file(self, url, remote_path):
LOG.debug("Downloading: \"%(url)s\" to \"%(path)s\"",
{"url": url, "path": remote_path})
# Nano Server does not have Invoke-WebRequest and additionally
# this is also faster
self.exec_ps_command(
"[Net.ServicePointManager]::SecurityProtocol = "
"[Net.SecurityProtocolType]::Tls12;"
"if(!([System.Management.Automation.PSTypeName]'"
"System.Net.Http.HttpClient').Type) {$assembly = "
"[System.Reflection.Assembly]::LoadWithPartialName("
"'System.Net.Http')}; (new-object System.Net.Http.HttpClient)."
"GetStreamAsync('%(url)s').Result.CopyTo("
"(New-Object IO.FileStream '%(outfile)s', Create, Write, None), "
"1MB)" % {"url": url, "outfile": remote_path},
ignore_stdout=True)
def write_file(self, remote_path, content):
self.exec_ps_command(
"[IO.File]::WriteAllBytes('%s', [Convert]::FromBase64String('%s'))"
% (remote_path, base64.b64encode(content).decode()),
ignore_stdout=True)
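# Hedged usage sketch (not part of the original module); address and credentials
# are placeholders:
#
#   conn_info = {"ip": "192.0.2.10", "username": "Administrator",
#                "password": "secret"}
#   conn = WSManConnection.from_connection_info(conn_info, timeout=600)
#   print(conn.exec_ps_command("Get-Date"))
#   conn.disconnect()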
| agpl-3.0 | -159,369,638,874,737,570 | 36.640244 | 82 | 0.581403 | false |
kirienko/gourmet | src/gourmet/importers/plaintext_importer.py | 1 | 4803 | import re
from gourmet import check_encodings
from gourmet.gdebug import debug
from gourmet.i18n import _
from gourmet.importers import importer
class TextImporter (importer.Importer):
ATTR_DICT = {'Recipe By':'source',
'Serving Size':'servings',
'Preparation Time':'preptime',
'Categories':'category',
}
end_of_paragraph_length = 60
def __init__ (self, filename, conv=None):
self.fn = filename
self.rec = {}
self.ing = {}
self.compile_regexps()
importer.Importer.__init__(self,conv=conv)
def pre_run (self):
self.lines = check_encodings.get_file(self.fn)
self.total_lines = len(self.lines)
print('we have ',self.total_lines,'lines in file',self.fn)
def do_run (self):
if not hasattr(self,'lines'):
raise Exception("pre_run has not been run!")
for n in range(self.total_lines):
l=self.lines[n]
if n % 15 == 0:
prog = float(n)/float(self.total_lines)
msg = _("Imported %s recipes.")%(len(self.added_recs))
self.emit('progress',prog,msg)
self.handle_line(l)
# commit the last rec if need be
if self.rec:
self.commit_rec()
importer.Importer.do_run(self)
def handle_line (self, l):
raise NotImplementedError
def compile_regexps (self):
self.blank_matcher = re.compile(r"^\s*$")
# out unwrap regexp looks for a line with no meaningful characters, or a line that starts in
# ALLCAPS or a line that is only space. (we use this with .split() to break text up into
# paragraph breaks.
self.unwrap_matcher = re.compile(r'\n\W*\n')
self.find_header_breaks_matcher = re.compile(r'\s+(?=[A-Z][A-Z][A-Z]+:.*)')
    # The incoming text may already contain explicit paragraph markers. The
    # original source embeds a literal control character for this which does not
    # survive plain-text extraction; a form feed ("\x0c") is assumed here.
    paragraph_marker = "\x0c"
    def unwrap_lines (self, blob):
        if blob.find(self.paragraph_marker) >= 0:
            debug('Using built-in paragraph markers',1)
            # then we have paragraph markers in the text already
            outblob = " ".join(blob.split("\n")) # get rid of line breaks
            lines = outblob.split(self.paragraph_marker) # split text up into paragraphs
            outblob = "\n".join(lines) # insert linebreaks where paragraphs were
            return outblob
outblob = ""
newline = True
for l in blob.split('\n'):
debug('examining %s'%l,3)
if re.match(r'^\W*$',l):
# ignore repeated nonword characters (hyphens, stars, etc.)
outblob += "\n"
continue
# if we have a non-word character at the start of the line,
# we assume we need to keep the newline.
if len(l)>=3 and re.match(r'(\W|[0-9])',l[2]):
debug('Match non-word character; add newline before: %s'%l,4)
outblob += "\n"
outblob += l
newline = False
continue
# if we are continuing an old line, we add a space
# (because we're generally stripping all spaces when
# we write)
if not newline: outblob += " "
hmatch = self.find_header_breaks_matcher.search(l)
if hmatch:
# if there's a header in the middle, we go ahead
# and start a new line
debug('Splitting at header in line: %s'%l,4)
outblob += l[:hmatch.start()]
outblob += "\n"
outblob += l[hmatch.start():]
continue
#else...
outblob += l.strip()
if len(l) < self.end_of_paragraph_length: #60 is our hard-coded end-o-paragraph length
debug('line < %s characters, adding newline.'%self.end_of_paragraph_length,4)
outblob += "\n"
newline = True
else:
newline = False
return outblob
class Tester (importer.Tester):
    def __init__ (self):
        # MASTERCOOK_START_REGEXP is not defined in this module; it is expected
        # to be supplied by the importer module that reuses this Tester.
        importer.Tester.__init__(self,regexp=MASTERCOOK_START_REGEXP)
        self.not_me = "<[?]?(xml|mx2|RcpE|RTxt)[^>]*>"
def test (self, filename):
"""Test file named filename.
filename can also be a file object.
"""
if not hasattr(self,'matcher'):
self.matcher=re.compile(self.regexp)
self.not_matcher = re.compile(self.not_me)
        # Accept either a path or an already-open file object.
        self.ofi = filename
        if isinstance(self.ofi, str):
            self.ofi = open(filename,'r')
l = self.ofi.readline()
while l:
if self.not_matcher.match(l):
self.ofi.close()
return False
if self.matcher.match(l):
self.ofi.close()
return True
l = self.ofi.readline()
self.ofi.close()
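# Hedged usage sketch (not part of the original module): a concrete importer only
# needs to supply handle_line(); the names below are illustrative.
#
#   class MyPlainTextImporter(TextImporter):
#       def handle_line(self, line):
#           if self.blank_matcher.match(line):
#               return
#           ...  # accumulate fields into self.rec and commit when complete
#
#   MyPlainTextImporter('/path/to/recipes.txt')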
| gpl-2.0 | -4,688,046,965,670,273,000 | 36.818898 | 100 | 0.531335 | false |
rajul/tvb-framework | tvb/tests/framework/adapters/visualizers/ica_test.py | 1 | 3631 | # -*- coding: utf-8 -*-
#
#
# TheVirtualBrain-Framework Package. This package holds all Data Management, and
# Web-UI helpful to run brain-simulations. To use it, you also need do download
# TheVirtualBrain-Scientific Package (for simulators). See content of the
# documentation-folder for more details. See also http://www.thevirtualbrain.org
#
# (c) 2012-2013, Baycrest Centre for Geriatric Care ("Baycrest")
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by the Free
# Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details. You should have received a copy of the GNU General
# Public License along with this program; if not, you can download it here
# http://www.gnu.org/licenses/old-licenses/gpl-2.0
#
#
# CITATION:
# When using The Virtual Brain for scientific publications, please cite it as follows:
#
# Paula Sanz Leon, Stuart A. Knock, M. Marmaduke Woodman, Lia Domide,
# Jochen Mersmann, Anthony R. McIntosh, Viktor Jirsa (2013)
# The Virtual Brain: a simulator of primate brain network dynamics.
# Frontiers in Neuroinformatics (7:10. doi: 10.3389/fninf.2013.00010)
#
#
"""
.. moduleauthor:: Bogdan Neacsa <[email protected]>
"""
import unittest
from tvb.core.entities.file.files_helper import FilesHelper
from tvb.adapters.visualizers.ica import ICA
from tvb.datatypes.connectivity import Connectivity
from tvb.tests.framework.core.test_factory import TestFactory
from tvb.tests.framework.datatypes.datatypes_factory import DatatypesFactory
from tvb.tests.framework.core.base_testcase import TransactionalTestCase
class ICATest(TransactionalTestCase):
"""
Unit-tests for ICA Viewer.
"""
def setUp(self):
"""
Sets up the environment for running the tests;
creates a test user, a test project, a connectivity and a surface;
imports a CFF data-set
"""
self.datatypeFactory = DatatypesFactory()
self.test_project = self.datatypeFactory.get_project()
self.test_user = self.datatypeFactory.get_user()
TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
self.assertTrue(self.connectivity is not None)
def tearDown(self):
"""
Clean-up tests data
"""
FilesHelper().remove_project_structure(self.test_project.name)
def test_launch(self):
"""
Check that all required keys are present in output from BrainViewer launch.
"""
time_series = self.datatypeFactory.create_timeseries(self.connectivity)
conn_measure = self.datatypeFactory.create_ICA(time_series)
viewer = ICA()
result = viewer.launch(conn_measure)
expected_keys = ['matrix_strides', 'matrix_shape', 'matrix_data', 'mainContent', 'isAdapter']
for key in expected_keys:
self.assertTrue(key in result)
def suite():
"""
Gather all the tests in a test suite.
"""
test_suite = unittest.TestSuite()
test_suite.addTest(unittest.makeSuite(ICATest))
return test_suite
if __name__ == "__main__":
#So you can run tests from this package individually.
TEST_RUNNER = unittest.TextTestRunner()
TEST_SUITE = suite()
TEST_RUNNER.run(TEST_SUITE) | gpl-2.0 | -877,347,918,199,350,000 | 36.833333 | 102 | 0.705866 | false |
seeschloss/grammalecte | ContextMenu.py | 1 | 6974 | # -*- coding: utf8 -*-
# Grammalecte - Lexicographe
# by Olivier R. License: MPL 2
import uno
import unohelper
import traceback
from com.sun.star.task import XJob
from com.sun.star.ui import XContextMenuInterceptor
from com.sun.star.ui.ContextMenuInterceptorAction import IGNORED
from com.sun.star.ui.ContextMenuInterceptorAction import EXECUTE_MODIFIED
import grammalecte.fr.lexicographe as lxg
xDesktop = None
oDict = None
oLexicographe = None
def printServices (o):
for s in o.getAvailableServiceNames():
print(' >'+s)
def getConfigSetting (sNodeConfig, bUpdate):
# get a configuration node
# example: aSettings = getConfigSetting("/org.openoffice.Office.Common/Path/Current", false)
xSvMgr = uno.getComponentContext().ServiceManager
xConfigProvider = xSvMgr.createInstanceWithContext("com.sun.star.configuration.ConfigurationProvider", uno.getComponentContext())
xPropertyValue = uno.createUnoStruct("com.sun.star.beans.PropertyValue")
xPropertyValue.Name = "nodepath"
xPropertyValue.Value = sNodeConfig
if bUpdate:
sService = "com.sun.star.configuration.ConfigurationUpdateAccess"
else:
sService = "com.sun.star.configuration.ConfigurationAccess"
return xConfigProvider.createInstanceWithArguments(sService, (xPropertyValue,))
class MyContextMenuInterceptor (XContextMenuInterceptor, unohelper.Base):
def __init__ (self, ctx):
self.ctx = ctx
def notifyContextMenuExecute (self, xEvent):
sWord = self._getWord()
try:
aItem, aVerb = oLexicographe.analyzeWord(sWord)
if not aItem:
#return uno.Enum("com.sun.star.ui.ContextMenuInterceptorAction", "IGNORED") # don’t work on AOO, have to import the value
return IGNORED
xContextMenu = xEvent.ActionTriggerContainer
if xContextMenu:
# entries index
i = xContextMenu.Count
nUnoConstantLine = uno.getConstantByName("com.sun.star.ui.ActionTriggerSeparatorType.LINE")
i = self._addItemToContextMenu(xContextMenu, i, "ActionTriggerSeparator", SeparatorType=nUnoConstantLine)
for item in aItem:
if isinstance(item, str):
i = self._addItemToContextMenu(xContextMenu, i, "ActionTrigger", Text=item)
elif isinstance(item, tuple):
sRoot, lMorph = item
# submenu
xSubMenuContainer = xContextMenu.createInstance("com.sun.star.ui.ActionTriggerContainer")
for j, s in enumerate(lMorph):
self._addItemToContextMenu(xSubMenuContainer, j, "ActionTrigger", Text=s)
# create root menu entry
i = self._addItemToContextMenu(xContextMenu, i, "ActionTrigger", Text=sRoot, SubContainer=xSubMenuContainer)
else:
i = self._addItemToContextMenu(xContextMenu, i, "ActionTrigger", Text="# erreur : {}".format(item))
# Links to Conjugueur
if aVerb:
i = self._addItemToContextMenu(xContextMenu, i, "ActionTriggerSeparator", SeparatorType=nUnoConstantLine)
for sVerb in aVerb:
i = self._addItemToContextMenu(xContextMenu, i, "ActionTrigger", Text="Conjuguer “{}”…".format(sVerb),
CommandURL="service:net.grammalecte.AppLauncher?CJ/"+sVerb)
# The controller should execute the modified context menu and stop notifying other interceptors.
#return uno.Enum("com.sun.star.ui.ContextMenuInterceptorAction", "EXECUTE_MODIFIED") # don’t work on AOO, have to import the value
return EXECUTE_MODIFIED
except:
traceback.print_exc()
#return uno.Enum("com.sun.star.ui.ContextMenuInterceptorAction", "IGNORED") # don’t work on AOO, have to import the value
return IGNORED
def _addItemToContextMenu (self, xContextMenu, i, sType, **args):
xMenuItem = xContextMenu.createInstance("com.sun.star.ui."+sType)
for k, v in args.items():
xMenuItem.setPropertyValue(k, v)
xContextMenu.insertByIndex(i, xMenuItem)
return i + 1
def _getWord (self):
try:
xDoc = xDesktop.getCurrentComponent()
xViewCursor = xDoc.CurrentController.ViewCursor
if xViewCursor.CharLocale.Language != "fr":
return ""
xText = xViewCursor.Text
xCursor = xText.createTextCursorByRange(xViewCursor)
xCursor.gotoStartOfWord(False)
xCursor.gotoEndOfWord(True)
except:
traceback.print_exc()
return xCursor.String.strip('.')
class JobExecutor (XJob, unohelper.Base):
def __init__ (self, ctx):
self.ctx = ctx
global xDesktop
global oDict
global oLexicographe
if not xDesktop:
xDesktop = self.ctx.getServiceManager().createInstanceWithContext('com.sun.star.frame.Desktop', self.ctx)
if not oDict:
xCurCtx = uno.getComponentContext()
oGC = xCurCtx.ServiceManager.createInstanceWithContext("org.openoffice.comp.pyuno.Lightproof.grammalecte", xCurCtx)
oDict = oGC.getDictionary()
if not oLexicographe:
oLexicographe = lxg.Lexicographe(oDict)
def execute (self, args):
if not args:
return
# what version of the software?
xSettings = getConfigSetting("org.openoffice.Setup/Product", False)
sProdName = xSettings.getByName("ooName")
sVersion = xSettings.getByName("ooSetupVersion")
if (sProdName == "LibreOffice" and sVersion < "4") or sProdName == "OpenOffice.org":
return
# what event?
bCorrectEvent = False
for arg in args:
if arg.Name == "Environment":
for v in arg.Value:
if v.Name == "EnvType" and v.Value == "DOCUMENTEVENT":
bCorrectEvent = True
elif v.Name == "EventName":
pass
# check is correct event
#print "Event: %s" % v.Value
elif v.Name == "Model":
model = v.Value
if bCorrectEvent:
if model.supportsService("com.sun.star.text.TextDocument"):
xController = model.getCurrentController()
if xController:
xController.registerContextMenuInterceptor(MyContextMenuInterceptor(self.ctx))
g_ImplementationHelper = unohelper.ImplementationHelper()
g_ImplementationHelper.addImplementation(JobExecutor, "grammalecte.ContextMenuHandler", ("grammalecte.ContextMenuHandler",),)
| gpl-3.0 | -1,739,991,668,112,274,700 | 42.786164 | 146 | 0.618931 | false |
mark-me/Pi-Jukebox | venv/Lib/site-packages/pygame/ftfont.py | 1 | 6239 | """pygame module for loading and rendering fonts (freetype alternative)"""
__all__ = ['Font', 'init', 'quit', 'get_default_font', 'get_init', 'SysFont']
from pygame._freetype import init, Font as _Font, get_default_resolution
from pygame._freetype import quit, get_default_font, get_init as _get_init
from pygame._freetype import __PYGAMEinit__
from pygame.sysfont import match_font, get_fonts, SysFont as _SysFont
from pygame import encode_file_path
from pygame.compat import bytes_, unicode_, as_unicode, as_bytes
from pygame import Surface as _Surface, Color as _Color, SRCALPHA as _SRCALPHA
class Font(_Font):
"""Font(filename, size) -> Font
Font(object, size) -> Font
create a new Font object from a file (freetype alternative)
This Font type differs from font.Font in that it can render glyphs
for Unicode code points in the supplementary planes (> 0xFFFF).
"""
__encode_file_path = staticmethod(encode_file_path)
__get_default_resolution = staticmethod(get_default_resolution)
__default_font = encode_file_path(get_default_font())
__unull = as_unicode(r"\x00")
__bnull = as_bytes("\x00")
def __init__(self, file, size=-1):
if size <= 1:
size = 1
if isinstance(file, unicode_):
try:
bfile = self.__encode_file_path(file, ValueError)
except ValueError:
bfile = ''
else:
bfile = file
if isinstance(bfile, bytes_) and bfile == self.__default_font:
file = None
if file is None:
resolution = int(self.__get_default_resolution() * 0.6875)
if resolution == 0:
                resolution = 1
else:
resolution = 0
super(Font, self).__init__(file, size=size, resolution=resolution)
self.strength = 1.0 / 12.0
self.kerning = False
self.origin = True
self.pad = True
self.ucs4 = True
self.underline_adjustment = 1.0
def render(self, text, antialias, color, background=None):
"""render(text, antialias, color, background=None) -> Surface
draw text on a new Surface"""
if text is None:
text = ""
if (isinstance(text, unicode_) and # conditional and
self.__unull in text):
raise ValueError("A null character was found in the text")
if (isinstance(text, bytes_) and # conditional and
self.__bnull in text):
raise ValueError("A null character was found in the text")
save_antialiased = self.antialiased
self.antialiased = bool(antialias)
try:
s, r = super(Font, self).render(text, color, background)
return s
finally:
self.antialiased = save_antialiased
def set_bold(self, value):
"""set_bold(bool) -> None
enable fake rendering of bold text"""
self.wide = bool(value)
def get_bold(self):
"""get_bold() -> bool
check if text will be rendered bold"""
return self.wide
def set_italic(self, value):
"""set_italic(bool) -> None
enable fake rendering of italic text"""
self.oblique = bool(value)
def get_italic(self):
"""get_italic() -> bool
check if the text will be rendered italic"""
return self.oblique
def set_underline(self, value):
"""set_underline(bool) -> None
control if text is rendered with an underline"""
self.underline = bool(value)
def get_underline(self):
"""set_bold(bool) -> None
enable fake rendering of bold text"""
return self.underline
def metrics(self, text):
"""metrics(text) -> list
Gets the metrics for each character in the pased string."""
return self.get_metrics(text)
def get_ascent(self):
"""get_ascent() -> int
get the ascent of the font"""
return self.get_sized_ascender()
def get_descent(self):
"""get_descent() -> int
get the descent of the font"""
return self.get_sized_descender()
def get_height(self):
"""get_height() -> int
get the height of the font"""
return self.get_sized_ascender() - self.get_sized_descender() + 1
def get_linesize(self):
"""get_linesize() -> int
get the line space of the font text"""
        return self.get_sized_height()
def size(self, text):
"""size(text) -> (width, height)
determine the amount of space needed to render text"""
return self.get_rect(text).size
FontType = Font
def get_init():
"""get_init() -> bool
true if the font module is initialized"""
return _get_init()
def SysFont(name, size, bold=0, italic=0, constructor=None):
"""pygame.ftfont.SysFont(name, size, bold=False, italic=False, constructor=None) -> Font
create a pygame Font from system font resources (freetype alternative)
This will search the system fonts for the given font
name. You can also enable bold or italic styles, and
the appropriate system font will be selected if available.
This will always return a valid Font object, and will
fallback on the builtin pygame font if the given font
is not found.
Name can also be a comma separated list of names, in
which case set of names will be searched in order. Pygame
uses a small set of common font aliases, if the specific
font you ask for is not available, a reasonable alternative
may be used.
    if optional constructor is provided, it must be a function with
signature constructor(fontpath, size, bold, italic) which returns
a Font instance. If None, a pygame.ftfont.Font object is created.
"""
if constructor is None:
def constructor(fontpath, size, bold, italic):
font = Font(fontpath, size)
font.set_bold(bold)
font.set_italic(italic)
return font
return _SysFont(name, size, bold, italic, constructor)
del _Font, get_default_resolution, encode_file_path, as_unicode, as_bytes
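# Hedged usage sketch (not part of the original module):
#
#   import pygame.ftfont
#   pygame.ftfont.init()
#   font = pygame.ftfont.Font(None, 24)            # builtin default font, 24 pt
#   surface = font.render("Hello", True, (255, 255, 255))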
| agpl-3.0 | 7,635,080,894,809,608,000 | 32.363636 | 92 | 0.608431 | false |
google/ffn | ffn/utils/vector_pb2.py | 1 | 15524 | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: utils/vector.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='utils/vector.proto',
package='ffn.proto',
syntax='proto2',
serialized_pb=_b('\n\x12utils/vector.proto\x12\tffn.proto\" \n\x08Vector2d\x12\t\n\x01x\x18\x01 \x01(\x01\x12\t\n\x01y\x18\x02 \x01(\x01\" \n\x08Vector2i\x12\t\n\x01x\x18\x01 \x01(\x05\x12\t\n\x01y\x18\x02 \x01(\x05\"+\n\x08Vector3d\x12\t\n\x01x\x18\x01 \x01(\x01\x12\t\n\x01y\x18\x02 \x01(\x01\x12\t\n\x01z\x18\x03 \x01(\x01\"+\n\x08Vector3f\x12\t\n\x01x\x18\x01 \x01(\x02\x12\t\n\x01y\x18\x02 \x01(\x02\x12\t\n\x01z\x18\x03 \x01(\x02\"+\n\x08Vector3j\x12\t\n\x01x\x18\x01 \x01(\x03\x12\t\n\x01y\x18\x02 \x01(\x03\x12\t\n\x01z\x18\x03 \x01(\x03\"4\n\x0cVector2dList\x12$\n\x07vectors\x18\x01 \x03(\x0b\x32\x13.ffn.proto.Vector2d\"4\n\x0cVector2iList\x12$\n\x07vectors\x18\x01 \x03(\x0b\x32\x13.ffn.proto.Vector2i\"4\n\x0cVector3dList\x12$\n\x07vectors\x18\x01 \x03(\x0b\x32\x13.ffn.proto.Vector3d\"4\n\x0cVector3fList\x12$\n\x07vectors\x18\x01 \x03(\x0b\x32\x13.ffn.proto.Vector3f\"4\n\x0cVector3jList\x12$\n\x07vectors\x18\x01 \x03(\x0b\x32\x13.ffn.proto.Vector3j')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_VECTOR2D = _descriptor.Descriptor(
name='Vector2d',
full_name='ffn.proto.Vector2d',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='x', full_name='ffn.proto.Vector2d.x', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='y', full_name='ffn.proto.Vector2d.y', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=33,
serialized_end=65,
)
_VECTOR2I = _descriptor.Descriptor(
name='Vector2i',
full_name='ffn.proto.Vector2i',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='x', full_name='ffn.proto.Vector2i.x', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='y', full_name='ffn.proto.Vector2i.y', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=67,
serialized_end=99,
)
_VECTOR3D = _descriptor.Descriptor(
name='Vector3d',
full_name='ffn.proto.Vector3d',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='x', full_name='ffn.proto.Vector3d.x', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='y', full_name='ffn.proto.Vector3d.y', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='z', full_name='ffn.proto.Vector3d.z', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=101,
serialized_end=144,
)
_VECTOR3F = _descriptor.Descriptor(
name='Vector3f',
full_name='ffn.proto.Vector3f',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='x', full_name='ffn.proto.Vector3f.x', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='y', full_name='ffn.proto.Vector3f.y', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='z', full_name='ffn.proto.Vector3f.z', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=146,
serialized_end=189,
)
_VECTOR3J = _descriptor.Descriptor(
name='Vector3j',
full_name='ffn.proto.Vector3j',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='x', full_name='ffn.proto.Vector3j.x', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='y', full_name='ffn.proto.Vector3j.y', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='z', full_name='ffn.proto.Vector3j.z', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=191,
serialized_end=234,
)
_VECTOR2DLIST = _descriptor.Descriptor(
name='Vector2dList',
full_name='ffn.proto.Vector2dList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='vectors', full_name='ffn.proto.Vector2dList.vectors', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=236,
serialized_end=288,
)
_VECTOR2ILIST = _descriptor.Descriptor(
name='Vector2iList',
full_name='ffn.proto.Vector2iList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='vectors', full_name='ffn.proto.Vector2iList.vectors', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=290,
serialized_end=342,
)
_VECTOR3DLIST = _descriptor.Descriptor(
name='Vector3dList',
full_name='ffn.proto.Vector3dList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='vectors', full_name='ffn.proto.Vector3dList.vectors', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=344,
serialized_end=396,
)
_VECTOR3FLIST = _descriptor.Descriptor(
name='Vector3fList',
full_name='ffn.proto.Vector3fList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='vectors', full_name='ffn.proto.Vector3fList.vectors', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=398,
serialized_end=450,
)
_VECTOR3JLIST = _descriptor.Descriptor(
name='Vector3jList',
full_name='ffn.proto.Vector3jList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='vectors', full_name='ffn.proto.Vector3jList.vectors', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=452,
serialized_end=504,
)
_VECTOR2DLIST.fields_by_name['vectors'].message_type = _VECTOR2D
_VECTOR2ILIST.fields_by_name['vectors'].message_type = _VECTOR2I
_VECTOR3DLIST.fields_by_name['vectors'].message_type = _VECTOR3D
_VECTOR3FLIST.fields_by_name['vectors'].message_type = _VECTOR3F
_VECTOR3JLIST.fields_by_name['vectors'].message_type = _VECTOR3J
DESCRIPTOR.message_types_by_name['Vector2d'] = _VECTOR2D
DESCRIPTOR.message_types_by_name['Vector2i'] = _VECTOR2I
DESCRIPTOR.message_types_by_name['Vector3d'] = _VECTOR3D
DESCRIPTOR.message_types_by_name['Vector3f'] = _VECTOR3F
DESCRIPTOR.message_types_by_name['Vector3j'] = _VECTOR3J
DESCRIPTOR.message_types_by_name['Vector2dList'] = _VECTOR2DLIST
DESCRIPTOR.message_types_by_name['Vector2iList'] = _VECTOR2ILIST
DESCRIPTOR.message_types_by_name['Vector3dList'] = _VECTOR3DLIST
DESCRIPTOR.message_types_by_name['Vector3fList'] = _VECTOR3FLIST
DESCRIPTOR.message_types_by_name['Vector3jList'] = _VECTOR3JLIST
Vector2d = _reflection.GeneratedProtocolMessageType('Vector2d', (_message.Message,), dict(
DESCRIPTOR = _VECTOR2D,
__module__ = 'utils.vector_pb2'
# @@protoc_insertion_point(class_scope:ffn.proto.Vector2d)
))
_sym_db.RegisterMessage(Vector2d)
Vector2i = _reflection.GeneratedProtocolMessageType('Vector2i', (_message.Message,), dict(
DESCRIPTOR = _VECTOR2I,
__module__ = 'utils.vector_pb2'
# @@protoc_insertion_point(class_scope:ffn.proto.Vector2i)
))
_sym_db.RegisterMessage(Vector2i)
Vector3d = _reflection.GeneratedProtocolMessageType('Vector3d', (_message.Message,), dict(
DESCRIPTOR = _VECTOR3D,
__module__ = 'utils.vector_pb2'
# @@protoc_insertion_point(class_scope:ffn.proto.Vector3d)
))
_sym_db.RegisterMessage(Vector3d)
Vector3f = _reflection.GeneratedProtocolMessageType('Vector3f', (_message.Message,), dict(
DESCRIPTOR = _VECTOR3F,
__module__ = 'utils.vector_pb2'
# @@protoc_insertion_point(class_scope:ffn.proto.Vector3f)
))
_sym_db.RegisterMessage(Vector3f)
Vector3j = _reflection.GeneratedProtocolMessageType('Vector3j', (_message.Message,), dict(
DESCRIPTOR = _VECTOR3J,
__module__ = 'utils.vector_pb2'
# @@protoc_insertion_point(class_scope:ffn.proto.Vector3j)
))
_sym_db.RegisterMessage(Vector3j)
Vector2dList = _reflection.GeneratedProtocolMessageType('Vector2dList', (_message.Message,), dict(
DESCRIPTOR = _VECTOR2DLIST,
__module__ = 'utils.vector_pb2'
# @@protoc_insertion_point(class_scope:ffn.proto.Vector2dList)
))
_sym_db.RegisterMessage(Vector2dList)
Vector2iList = _reflection.GeneratedProtocolMessageType('Vector2iList', (_message.Message,), dict(
DESCRIPTOR = _VECTOR2ILIST,
__module__ = 'utils.vector_pb2'
# @@protoc_insertion_point(class_scope:ffn.proto.Vector2iList)
))
_sym_db.RegisterMessage(Vector2iList)
Vector3dList = _reflection.GeneratedProtocolMessageType('Vector3dList', (_message.Message,), dict(
DESCRIPTOR = _VECTOR3DLIST,
__module__ = 'utils.vector_pb2'
# @@protoc_insertion_point(class_scope:ffn.proto.Vector3dList)
))
_sym_db.RegisterMessage(Vector3dList)
Vector3fList = _reflection.GeneratedProtocolMessageType('Vector3fList', (_message.Message,), dict(
DESCRIPTOR = _VECTOR3FLIST,
__module__ = 'utils.vector_pb2'
# @@protoc_insertion_point(class_scope:ffn.proto.Vector3fList)
))
_sym_db.RegisterMessage(Vector3fList)
Vector3jList = _reflection.GeneratedProtocolMessageType('Vector3jList', (_message.Message,), dict(
DESCRIPTOR = _VECTOR3JLIST,
__module__ = 'utils.vector_pb2'
# @@protoc_insertion_point(class_scope:ffn.proto.Vector3jList)
))
_sym_db.RegisterMessage(Vector3jList)
# @@protoc_insertion_point(module_scope)
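# Hedged usage sketch (not part of the generated code): messages declared above
# follow the standard protobuf Python API.
#
#   v = Vector3f(x=1.0, y=2.0, z=3.0)
#   payload = v.SerializeToString()
#   w = Vector3f()
#   w.ParseFromString(payload)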
| apache-2.0 | 395,596,101,208,389,570 | 30.361616 | 969 | 0.690157 | false |
poppogbr/genropy | packages/hosting/webpages/client.py | 1 | 8379 | #!/usr/bin/env python
# encoding: utf-8
"""
Created by Softwell on 2008-07-10.
Copyright (c) 2008 Softwell. All rights reserved.
"""
# --------------------------- GnrWebPage Standard header ---------------------------
from gnr.core.gnrbag import Bag
class GnrCustomWebPage(object):
maintable = 'hosting.client'
py_requires = """public:Public,standard_tables:TableHandler,
gnrcomponents/selectionhandler,
hosted:HostedClient,hosted:HostedInstance"""
######################## STANDARD TABLE OVERRIDDEN METHODS ###############
def windowTitle(self):
return '!!Client'
def pageAuthTags(self, method=None, **kwargs):
return 'owner'
def tableWriteTags(self):
return 'owner'
def tableDeleteTags(self):
return 'owner'
def barTitle(self):
return '!!Client'
def lstBase(self, struct):
r = struct.view().rows()
r.fieldcell('code', width='10em')
r.fieldcell('@user_id.username', name='User', width='10em')
self.hosted_card_columns(r)
return struct
def conditionBase(self):
pass
def queryBase(self):
return dict(column='code', op='contains', val='%')
def orderBase(self):
return 'code'
############################## FORM METHODS ##################################
def formBase(self, parentBC, disabled=False, **kwargs):
bc = parentBC.borderContainer(**kwargs)
top = bc.borderContainer(region='top', height='120px')
right = top.contentPane(region='right', width='350px')
self.hosted_card_linker(right, disabled=disabled)
center = top.contentPane(region='center')
fb = center.formbuilder(cols=1, border_spacing='3px', fld_width='100%',
width='350px', disabled=disabled)
fb.field('code')
fb.field('user_id')
tc = bc.tabContainer(region='center')
self.main_clienttab(tc.borderContainer(title='Info'), disabled)
for pkgname, handler in [(c.split('_')[1], getattr(self, c)) for c in dir(self) if
c.startswith('hostedclient_')]:
handler(tc.contentPane(datapath='.hosted_data.%s' % pkgname,
title=self.db.packages[pkgname].name_long,
nodeId='hosted_client_data_%s' % pkgname,
sqlContextName='sql_record_hosted_client_%s' % pkgname,
sqlContextRoot='form.record.hosted_client_data'))
def main_clienttab(self, bc, disabled):
self.selectionHandler(bc.borderContainer(region='center'), label='!!Instances',
datapath="instances", nodeId='instances', table='hosting.instance',
struct=self.struct_instances, reloader='^form.record.id',
hiddencolumns='$site_path', reload_onSaved=False,
selectionPars=dict(where='$client_id=:c_id', c_id='=form.record.id',
applymethod='apply_instances_selection', order_by='$code'),
dialogPars=dict(height='400px', width='600px', formCb=self.instance_form,
onSaved='genro.fireAfter("#instances.reload",true,5000)',
toolbarPars=dict(lock_action=True, add_action=True, del_action=True,
save_action=True),
default_client_id='=form.record.id',
saveKwargs=dict(_lockScreen=True, saveAlways=True)))
def instance_form(self, parentBC, disabled=None, table=None, **kwargs):
tc = parentBC.tabContainer(**kwargs)
self.main_instancetab(tc.contentPane(title='Info', _class='pbl_roundedGroup', margin='5px'), table=table,
disabled=disabled)
for pkgname, handler in [(c.split('_')[1], getattr(self, c)) for c in dir(self) if
c.startswith('hostedinstance_')]:
handler(tc.contentPane(datapath='.hosted_data.%s' % pkgname, title=self.db.packages[pkgname].name_long,
nodeId='hosted_instance_data_%s' % pkgname,
sqlContextName='sql_record_hosted_instance_%s' % pkgname,
sqlContextRoot='instances.dlg.record.hosted_data.%s' % pkgname))
def main_instancetab(self, parent, disabled=None, table=None):
bc = parent.borderContainer()
pane = bc.contentPane(region='top')
pane.div('!!Manage instances', _class='pbl_roundedGroupLabel')
fb = pane.formbuilder(cols=1, border_spacing='6px', dbtable=table, disabled=disabled)
fb.field('code', width='15em', lbl='!!Instance Name')
pane.dataRpc('.$creation_result', 'createInst', instance_code='=.code', instance_exists='=.$instance_exists',
site_exists='=.$site_exists',
_fired='^.$create', _onResult='FIRE .$created', _userChanges=True)
pane.dataController("""
if (site_path){
SET .site_path=site_path;
SET .$site_exists=true;
}
if (instance_path){
SET .path=instance_path;
SET .$instance_exists=true;
}
""", site_path='=.$creation_result.site_path',
instance_path='=.$creation_result.instance_path',
_fired='^.$created', _userChanges=True)
def struct(struct):
r = struct.view().rows()
r.cell('type', name='Slot type', width='15em')
r.cell('qty', name='Qty', width='4em', dtype='I')
return struct
iv = self.includedViewBox(bc.borderContainer(region='center'), label='!!Slot configuration',
storepath='.slot_configuration', struct=struct,
datamode='bag', autoWidth=True,
add_action=True, del_action=True)
gridEditor = iv.gridEditor()
gridEditor.dbSelect(gridcell='type', dbtable='hosting.slot_type',
columns='$code,$description', rowcaption='$code',
exclude=True, hasDownArrow=True)
gridEditor.numberTextBox(gridcell='qty')
def onLoading_hosting_instance(self, record, newrecord, loadingParameters, recInfo):
tblinstance = self.db.table('hosting.instance')
instance_exists = self.db.packages['hosting'].instance_exists(record['code'])
site_exists = self.db.packages['hosting'].site_exists(record['code'])
record.setItem('$instance_exists', instance_exists)
record.setItem('$site_exists', site_exists)
def rpc_apply_instances_selection(self, selection, **kwargs):
tblinstance = self.db.table('hosting.instance')
def apply_row(row):
instance_exists = self.db.packages['hosting'].instance_exists(row['code'])
site_exists = self.db.packages['hosting'].site_exists(row['code'])
if site_exists and instance_exists:
return dict(create='<div class="greenLight"></div>')
else:
return dict(create='<div class="yellowLight"></div>')
selection.apply(apply_row)
def rpc_createInst(self, instance_code=None, instance_exists=None, site_exists=None):
result = Bag()
instancetbl = self.db.table('hosting.instance')
if not instance_exists:
result['instance_path'] = instancetbl.create_instance(instance_code, self.site.instance_path,
self.site.gnrapp.config)
if not site_exists:
result['site_path'] = instancetbl.create_site(instance_code, self.site.site_path, self.site.config)
return result
def struct_instances(self, struct):
r = struct.view().rows()
r.fieldcell('code', width='10em')
r.fieldcell('path', width='20em')
r.cell('create', calculated=True, name='!!Status', width='10em')
return struct
| lgpl-2.1 | 4,752,140,607,337,308,000 | 47.155172 | 117 | 0.546963 | false |
hugohmk/Epidemic-Emulator | main.py | 1 | 7208 | from epidemic_emulator import node
from datetime import datetime
import platform
import argparse
import time
import os
import matplotlib.pyplot as plt
import random
def parse_network(f, node_id, topology = "clique"):
neighbors = []
nd = None
t = datetime.now()
t = t-t
net = []
index = -1
cnt = 0
for i in f:
i = i.rstrip("\n").split("|")
if len(i)<4:
continue
u = (i[0],(i[1],int(i[2])),[(i[3],t)])
if i[0]==node_id:
nd = u
index = cnt
net.append(u)
cnt+=1
f.close()
# clique
if topology == "clique":
neighbors = [i for i in net if i[0] != node_id]
# star
elif topology == "star":
if index > 0:
neighbors = [net[0]]
else:
neighbors = net[1:]
return neighbors,nd
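# Illustrative network file (not part of the original script): one node per line,
# formatted as node_id|node_ip|port_number|initial_state, e.g.
#   0|127.0.0.1|9170|S
#   1|127.0.0.1|9171|S
#   2|127.0.0.1|9172|S
# Ports and initial states above are placeholders.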
def simulation_controller(args,nd,network):
# Example nd value:
#('9', ('127.0.0.1', 9179), [('S', datetime.timedelta(0))])
#
# network is a tuple containing every node identifier constructed from
# args.network (default=network.txt) file
r = args.recovery_rate
e = args.endogenous_rate
x = args.exogenous_rate
if nd is not None:
with node.Node(r,e,x) as a:
a.start(nd, network)
if args.interaction == 1:
try:
help_text = """>> Commands:
0 (help) -> print this
1 (print current) -> print current network state
2 (print history) -> print network history
3 (end) -> send shutdown message to all nodes
4 (display state) -> display current network state
5 (display history) -> display network history
"""
print help_text
while True:
opt = raw_input(">> Insert command: ")
if opt == "0":
print help_text
elif opt == "1":
#print a.network_state(),"\n"
a.print_state()
elif opt == "2":
#print a.network_history(),"\n"
a.print_history()
elif opt == "3":
a.display_history()
a.network_shutdown()
a.stop()
break
elif opt == "4":
a.display_state()
elif opt == "5":
a.display_history()
else:
print "Invalid input\n"
except:
a.network_shutdown()
a.stop()
finally:
a.network_shutdown()
a.stop()
elif args.interaction > 1:
print("Running simulation for %d seconds." % args.interaction)
time.sleep(args.interaction)
#a.display_history()
simdata = a.save_simulation_data()
a.network_shutdown()
a.stop()
return simdata
else:
try:
while not a.stopped():
time.sleep(2)
except:
a.stop()
finally:
a.stop()
def process_data(simdata,repetitions,simulation_time):
simresults = [[-1 for t in range(simulation_time+1)] for x in range(repetitions)]
print_stuff = 1
for k in range(repetitions):
if print_stuff:
print("")
print("Run #%d" % (k+1))
print("time\tinfected count")
t = 0
for event in simdata[k]:
if print_stuff: print("%.2f\t%d" % (event[0],event[1]))
time = int(event[0])
infected_count = event[1]
if time < t:
continue
elif t < simulation_time+1:
if print_stuff: print("* %.2f" % event[0])
while t <= time:
simresults[k][t] = infected_count
t = t+1
while t < simulation_time+1:
simresults[k][t] = infected_count
t = t+1
if print_stuff:
print("")
print("Processed output:")
print("time\tinfected count")
for t in range(simulation_time+1):
print("%d\t%d" % (t,simresults[k][t]))
average_results = [0.0 for t in range(simulation_time+1)]
for t in range(simulation_time+1):
for k in range(repetitions):
average_results[t] = average_results[t] + simresults[k][t]
average_results[t] = float(average_results[t]) / repetitions
print(average_results)
plt.plot(list(range(0,simulation_time+1)),average_results,'-o')
axes = plt.gca()
axes.set_xlim([0,simulation_time])
#axes.set_ylim([0,10])
plt.xlabel("Seconds")
plt.ylabel("Infected nodes")
plt.savefig("average_simulation.pdf")
if __name__ == "__main__":
dir_path = os.path.dirname(os.path.realpath(__file__))
dir_path_unix = dir_path.replace("\\","/")
if (platform.system()!="Windows"): dir_path = dir_path_unix
parser = argparse.ArgumentParser()
parser.add_argument("-id","--identifier",required=True,
help="Node identifier")
parser.add_argument("-n","--network",type=argparse.FileType('r'), default = dir_path_unix+"/network.txt",
help="File that contains the network's description; each line presents node_id|node_ip|port_number|initial_state")
# parser.add_argument("-i","--interactive",type=int,default=0,
# help="Interactive mode")
parser.add_argument("-i","--interaction",type=int,default=0,
help="Interaction mode: default (0), interactive (1), simulation (2)")
parser.add_argument("-r","--recovery_rate",type=float,#default=1.0,
help="Simulation parameter: recovery_rate")
parser.add_argument("-e","--endogenous_rate",type=float,#default=1.0,
help="Simulation parameter: endogenous_infection_rate")
parser.add_argument("-x","--exogenous_rate",type=float,#default=1e-6,
help="Simulation parameter: exogenous_infection_rate")
parser.add_argument("-t","--topology",choices=["clique","star"],default="clique",
help="Network topology: clique or star")
args = parser.parse_args()
network = {}
if args.network is not None:
network,nd = parse_network(args.network, args.identifier, args.topology)
simulation_time = args.interaction
repetitions = 1
simdata = []
for i in range(repetitions):
simdata.append(simulation_controller(args,nd,network))
if args.identifier == '0':
process_data(simdata,repetitions,simulation_time)
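    # Illustrative invocation (values are placeholders, not a prescribed setup):
    #   python main.py -id 0 -r 1.0 -e 0.5 -x 1e-6 -t clique -i 30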
| mit | 6,437,179,172,426,943,000 | 33.161137 | 138 | 0.489734 | false |
all-of-us/raw-data-repository | rdr_service/lib_fhir/fhirclient_3_0_0/models/appointment_tests.py | 1 | 9471 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 3.0.0.11832 on 2017-03-22.
# 2017, SMART Health IT.
import io
import json
import os
import unittest
from . import appointment
from .fhirdate import FHIRDate
class AppointmentTests(unittest.TestCase):
def instantiate_from(self, filename):
datadir = os.environ.get('FHIR_UNITTEST_DATADIR') or ''
with io.open(os.path.join(datadir, filename), 'r', encoding='utf-8') as handle:
js = json.load(handle)
self.assertEqual("Appointment", js["resourceType"])
return appointment.Appointment(js)
def testAppointment1(self):
inst = self.instantiate_from("appointment-example-request.json")
self.assertIsNotNone(inst, "Must have instantiated a Appointment instance")
self.implAppointment1(inst)
js = inst.as_json()
self.assertEqual("Appointment", js["resourceType"])
inst2 = appointment.Appointment(js)
self.implAppointment1(inst2)
def implAppointment1(self, inst):
self.assertEqual(inst.appointmentType.coding[0].code, "wi")
self.assertEqual(inst.appointmentType.coding[0].display, "Walk in")
self.assertEqual(inst.appointmentType.coding[0].system, "http://example.org/appointment-type")
self.assertEqual(inst.comment, "Further expand on the results of the MRI and determine the next actions that may be appropriate.")
self.assertEqual(inst.created.date, FHIRDate("2015-12-02").date)
self.assertEqual(inst.created.as_json(), "2015-12-02")
self.assertEqual(inst.description, "Discussion on the results of your recent MRI")
self.assertEqual(inst.id, "examplereq")
self.assertEqual(inst.identifier[0].system, "http://example.org/sampleappointment-identifier")
self.assertEqual(inst.identifier[0].value, "123")
self.assertEqual(inst.minutesDuration, 15)
self.assertEqual(inst.participant[0].required, "required")
self.assertEqual(inst.participant[0].status, "needs-action")
self.assertEqual(inst.participant[1].required, "required")
self.assertEqual(inst.participant[1].status, "needs-action")
self.assertEqual(inst.participant[1].type[0].coding[0].code, "ATND")
self.assertEqual(inst.participant[1].type[0].coding[0].system, "http://hl7.org/fhir/v3/ParticipationType")
self.assertEqual(inst.participant[2].required, "required")
self.assertEqual(inst.participant[2].status, "accepted")
self.assertEqual(inst.priority, 5)
self.assertEqual(inst.reason[0].coding[0].code, "413095006")
self.assertEqual(inst.reason[0].coding[0].system, "http://snomed.info/sct")
self.assertEqual(inst.reason[0].text, "Clinical Review")
self.assertEqual(inst.requestedPeriod[0].end.date, FHIRDate("2016-06-09").date)
self.assertEqual(inst.requestedPeriod[0].end.as_json(), "2016-06-09")
self.assertEqual(inst.requestedPeriod[0].start.date, FHIRDate("2016-06-02").date)
self.assertEqual(inst.requestedPeriod[0].start.as_json(), "2016-06-02")
self.assertEqual(inst.serviceCategory.coding[0].code, "gp")
self.assertEqual(inst.serviceCategory.coding[0].display, "General Practice")
self.assertEqual(inst.serviceCategory.coding[0].system, "http://example.org/service-category")
self.assertEqual(inst.specialty[0].coding[0].code, "gp")
self.assertEqual(inst.specialty[0].coding[0].display, "General Practice")
self.assertEqual(inst.specialty[0].coding[0].system, "http://example.org/specialty")
self.assertEqual(inst.status, "proposed")
self.assertEqual(inst.text.div, "<div xmlns=\"http://www.w3.org/1999/xhtml\">Brian MRI results discussion</div>")
self.assertEqual(inst.text.status, "generated")
def testAppointment2(self):
inst = self.instantiate_from("appointment-example.json")
self.assertIsNotNone(inst, "Must have instantiated a Appointment instance")
self.implAppointment2(inst)
js = inst.as_json()
self.assertEqual("Appointment", js["resourceType"])
inst2 = appointment.Appointment(js)
self.implAppointment2(inst2)
def implAppointment2(self, inst):
self.assertEqual(inst.appointmentType.coding[0].code, "follow")
self.assertEqual(inst.appointmentType.coding[0].display, "Followup")
self.assertEqual(inst.appointmentType.coding[0].system, "http://example.org/appointment-type")
self.assertEqual(inst.comment, "Further expand on the results of the MRI and determine the next actions that may be appropriate.")
self.assertEqual(inst.created.date, FHIRDate("2013-10-10").date)
self.assertEqual(inst.created.as_json(), "2013-10-10")
self.assertEqual(inst.description, "Discussion on the results of your recent MRI")
self.assertEqual(inst.end.date, FHIRDate("2013-12-10T11:00:00Z").date)
self.assertEqual(inst.end.as_json(), "2013-12-10T11:00:00Z")
self.assertEqual(inst.id, "example")
self.assertEqual(inst.participant[0].required, "required")
self.assertEqual(inst.participant[0].status, "accepted")
self.assertEqual(inst.participant[1].required, "required")
self.assertEqual(inst.participant[1].status, "accepted")
self.assertEqual(inst.participant[1].type[0].coding[0].code, "ATND")
self.assertEqual(inst.participant[1].type[0].coding[0].system, "http://hl7.org/fhir/v3/ParticipationType")
self.assertEqual(inst.participant[2].required, "required")
self.assertEqual(inst.participant[2].status, "accepted")
self.assertEqual(inst.priority, 5)
self.assertEqual(inst.serviceCategory.coding[0].code, "gp")
self.assertEqual(inst.serviceCategory.coding[0].display, "General Practice")
self.assertEqual(inst.serviceCategory.coding[0].system, "http://example.org/service-category")
self.assertEqual(inst.serviceType[0].coding[0].code, "52")
self.assertEqual(inst.serviceType[0].coding[0].display, "General Discussion")
self.assertEqual(inst.specialty[0].coding[0].code, "gp")
self.assertEqual(inst.specialty[0].coding[0].display, "General Practice")
self.assertEqual(inst.specialty[0].coding[0].system, "http://example.org/specialty")
self.assertEqual(inst.start.date, FHIRDate("2013-12-10T09:00:00Z").date)
self.assertEqual(inst.start.as_json(), "2013-12-10T09:00:00Z")
self.assertEqual(inst.status, "booked")
self.assertEqual(inst.text.div, "<div xmlns=\"http://www.w3.org/1999/xhtml\">Brian MRI results discussion</div>")
self.assertEqual(inst.text.status, "generated")
def testAppointment3(self):
inst = self.instantiate_from("appointment-example2doctors.json")
self.assertIsNotNone(inst, "Must have instantiated a Appointment instance")
self.implAppointment3(inst)
js = inst.as_json()
self.assertEqual("Appointment", js["resourceType"])
inst2 = appointment.Appointment(js)
self.implAppointment3(inst2)
def implAppointment3(self, inst):
self.assertEqual(inst.appointmentType.coding[0].code, "wi")
self.assertEqual(inst.appointmentType.coding[0].display, "Walk in")
self.assertEqual(inst.appointmentType.coding[0].system, "http://example.org/appointment-type")
self.assertEqual(inst.comment, "Clarify the results of the MRI to ensure context of test was correct")
self.assertEqual(inst.description, "Discussion about Peter Chalmers MRI results")
self.assertEqual(inst.end.date, FHIRDate("2013-12-09T11:00:00Z").date)
self.assertEqual(inst.end.as_json(), "2013-12-09T11:00:00Z")
self.assertEqual(inst.id, "2docs")
self.assertEqual(inst.participant[0].required, "information-only")
self.assertEqual(inst.participant[0].status, "accepted")
self.assertEqual(inst.participant[1].required, "required")
self.assertEqual(inst.participant[1].status, "accepted")
self.assertEqual(inst.participant[2].required, "required")
self.assertEqual(inst.participant[2].status, "accepted")
self.assertEqual(inst.participant[3].required, "information-only")
self.assertEqual(inst.participant[3].status, "accepted")
self.assertEqual(inst.priority, 5)
self.assertEqual(inst.serviceCategory.coding[0].code, "gp")
self.assertEqual(inst.serviceCategory.coding[0].display, "General Practice")
self.assertEqual(inst.serviceCategory.coding[0].system, "http://example.org/service-category")
self.assertEqual(inst.serviceType[0].coding[0].code, "52")
self.assertEqual(inst.serviceType[0].coding[0].display, "General Discussion")
self.assertEqual(inst.specialty[0].coding[0].code, "gp")
self.assertEqual(inst.specialty[0].coding[0].display, "General Practice")
self.assertEqual(inst.specialty[0].coding[0].system, "http://example.org/specialty")
self.assertEqual(inst.start.date, FHIRDate("2013-12-09T09:00:00Z").date)
self.assertEqual(inst.start.as_json(), "2013-12-09T09:00:00Z")
self.assertEqual(inst.status, "booked")
self.assertEqual(inst.text.div, "<div xmlns=\"http://www.w3.org/1999/xhtml\">Brian MRI results discussion</div>")
self.assertEqual(inst.text.status, "generated")
| bsd-3-clause | -8,303,702,823,180,291,000 | 58.943038 | 138 | 0.689473 | false |
JazzeYoung/VeryDeepAutoEncoder | theano/gpuarray/opt.py | 1 | 39678 | from __future__ import absolute_import, print_function, division
import copy
import numpy
import logging
import pdb
from six.moves import xrange
import theano
from theano import tensor, scalar, gof, config
from theano.compile import optdb
from theano.compile.ops import shape_i
from theano.gof import (local_optimizer, EquilibriumDB, TopoOptimizer,
SequenceDB, Optimizer, toolbox)
from theano.gof.optdb import LocalGroupDB
from theano.ifelse import IfElse
from theano.scalar.basic import Scalar, Pow, Cast
from theano.scan_module import scan_utils, scan_op, scan_opt
from theano.tensor.nnet.conv import ConvOp
from theano.tensor.nnet.blocksparse import SparseBlockGemv, SparseBlockOuter
from theano.tensor.nnet.abstract_conv import (AbstractConv2d,
AbstractConv2d_gradWeights,
AbstractConv2d_gradInputs)
from theano.tests.breakpoint import PdbBreakpoint
from .type import (GpuArrayType, GpuArrayConstant, get_context,
ContextNotDefined)
from .basic_ops import (as_gpuarray_variable, infer_context_name,
host_from_gpu, GpuToGpu,
HostFromGpu, GpuFromHost,
GpuSplit, GpuContiguous, gpu_contiguous,
GpuAlloc, GpuAllocEmpty, GpuReshape,
GpuEye, gpu_join, GpuJoin)
from .blas import (gpu_dot22, GpuGemm, GpuGer, GpuGemmBatch,
gpugemm_no_inplace, gpugemm_inplace, gpugemmbatch_no_inplace,
gpugemv_no_inplace, gpugemv_inplace)
from .blocksparse import (GpuSparseBlockGemv, GpuSparseBlockOuter,
gpu_sparse_block_outer, gpu_sparse_block_outer_inplace,
gpu_sparse_block_gemv, gpu_sparse_block_gemv_inplace)
from .nnet import (gpu_crossentropy_softmax_1hot_with_bias_dx,
gpu_crossentropy_softmax_argmax_1hot_with_bias,
gpu_softmax_with_bias, gpu_softmax)
from .elemwise import (GpuElemwise, GpuDimShuffle, GpuCAReduceCuda,
GpuCAReduceCPY)
from .subtensor import (GpuIncSubtensor, GpuSubtensor,
GpuAdvancedSubtensor1,
GpuAdvancedIncSubtensor1,
GpuAdvancedIncSubtensor1_dev20)
from .opt_util import alpha_merge, output_merge
_logger = logging.getLogger("theano.gpuarray.opt")
gpu_optimizer = EquilibriumDB()
gpu_cut_copies = EquilibriumDB()
gpu_seqopt = SequenceDB()
# Don't register this right now
conv_groupopt = LocalGroupDB()
conv_groupopt.__name__ = "gpua_conv_opts"
gpu_seqopt.register('gpuarray_local_optimiziations', gpu_optimizer, 1,
'fast_compile', 'fast_run', 'gpuarray')
gpu_seqopt.register('gpuarray_cut_transfers', gpu_cut_copies, 2,
'fast_compile', 'fast_run', 'gpuarray')
# do not add 'fast_run' to these two as this would always enable gpuarray mode
optdb.register('gpuarray_opt', gpu_seqopt,
optdb.__position__.get('add_destroy_handler', 49.5) - 1,
'gpuarray')
def register_opt(*tags, **kwargs):
def f(local_opt):
name = (kwargs and kwargs.pop('name')) or local_opt.__name__
gpu_optimizer.register(name, local_opt, 'fast_run', 'gpuarray', *tags)
return local_opt
return f
def register_inplace(*tags, **kwargs):
def f(local_opt):
name = (kwargs and kwargs.pop('name')) or local_opt.__name__
optdb.register(
name, TopoOptimizer(
local_opt, failure_callback=TopoOptimizer.warn_inplace),
60, 'fast_run', 'inplace', 'gpuarray', *tags)
return local_opt
return f
register_opt('fast_compile')(theano.tensor.opt.local_track_shape_i)
register_opt(final_opt=True, name='gpua_constant_folding')(
tensor.opt.constant_folding)
gpu_optimizer.register('local_remove_all_assert',
theano.tensor.opt.local_remove_all_assert,
'unsafe')
def safe_to_gpu(x, ctx_name):
if isinstance(x.type, tensor.TensorType):
return GpuFromHost(ctx_name)(x)
else:
return x
def safe_to_cpu(x):
if isinstance(x.type, GpuArrayType):
return host_from_gpu(x)
else:
return x
def op_lifter(OP, cuda_only=False):
"""
OP(..., host_from_gpu(), ...) -> host_from_gpu(GpuOP(...))
gpu_from_host(OP(inp0, ...)) -> GpuOP(inp0, ...)
"""
def f(maker):
def local_opt(node):
if type(node.op) in OP:
# Either one of our inputs is on the gpu or
# all of our clients are on the gpu
replace = False
# TODO: Maybe set context_name with infer_context_name()?
context_name = None
# We replace if any input is a host_from_gpu
for i in node.inputs:
if i.owner and i.owner.op == host_from_gpu:
context_name = i.owner.inputs[0].type.context_name
replace = True
break
if not replace:
# We replace if *all* clients are on the GPU
clients = [c for o in node.outputs for c in o.clients]
replace = len(clients) != 0
for c, idx in clients:
if (c == 'output' or
not isinstance(c.op, GpuFromHost)):
replace = False
# TODO: check that the clients want the same context?
if replace:
# All clients are GpuFromHost and we have at least one
context_name = clients[0][0].op.context_name
# Check if we should replace
if (not replace or
(cuda_only and
get_context(context_name).kind != b'cuda')):
return False
# tag the inputs with the context in case
# the context was derived from the outputs
for i in node.inputs:
i.tag.context_name = context_name
new_op = maker(node, context_name)
# This is needed as sometimes new_op inherits from OP.
if new_op and new_op != node.op:
if isinstance(new_op, theano.Op):
return [safe_to_cpu(o) for o in
new_op(*node.inputs, return_list=True)]
elif isinstance(new_op, (tuple, list)):
return [safe_to_cpu(o) for o in new_op]
else: # suppose it is a variable on the GPU
return [host_from_gpu(new_op)]
return False
local_opt.__name__ = maker.__name__
return local_optimizer(OP)(local_opt)
return f
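# Sketch of how op_lifter is typically combined with register_opt below
# (SomeOp/GpuSomeOp are placeholders, not real Ops):
#
#   @register_opt('fast_compile')
#   @op_lifter([tensor.SomeOp])
#   def local_gpua_someop(node, context_name):
#       # return a GPU Op (or GPU variables) that replaces the lifted CPU Op
#       return GpuSomeOp(**node.op._props_dict())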
class InputToGpuOptimizer(Optimizer):
"""
Transfer the input to the gpu to start the rolling wave.
"""
def add_requirements(self, fgraph):
fgraph.attach_feature(toolbox.ReplaceValidate())
def apply(self, fgraph):
for input in fgraph.inputs:
if isinstance(input.type, GpuArrayType):
continue
# If all clients are outputs or transfers don't do anything.
if (all(cl[0] == 'output' or isinstance(cl[0].op, GpuFromHost)
for cl in input.clients)):
continue
target = getattr(input.tag, 'target', None)
if target == 'cpu':
continue
try:
new_input = host_from_gpu(GpuFromHost(target)(input))
fgraph.replace_validate(input, new_input,
"InputToGpuOptimizer")
except TypeError:
# This could fail if the inputs are not TensorTypes
pass
except ContextNotDefined:
if hasattr(input.tag, 'target'):
raise
# If there is no context tag and no default context
# then it stays on the CPU
pass
gpu_seqopt.register('InputToGpuArrayOptimizer', InputToGpuOptimizer(),
0, 'fast_run', 'fast_compile', 'merge')
@local_optimizer([GpuFromHost, GpuToGpu, HostFromGpu])
def local_cut_gpu_transfers(node):
# gpu[ab] -> host -> gpub
if (isinstance(node.op, GpuFromHost) and
node.inputs[0].owner and
isinstance(node.inputs[0].owner.op, HostFromGpu)):
other = node.inputs[0].owner.inputs[0]
if node.op.context_name == other.type.context_name:
return [other]
else:
return [GpuToGpu(node.op.context_name)(other)]
# ? -> gpua -> host
elif (isinstance(node.op, HostFromGpu) and
node.inputs[0].owner):
n2 = node.inputs[0].owner
# host ->
if isinstance(n2.op, GpuFromHost):
return [n2.inputs[0]]
# gpub ->
if isinstance(n2.op, GpuToGpu):
return [host_from_gpu(n2.inputs[0])]
# ? -> gpua -> gpub
elif isinstance(node.op, GpuToGpu):
# Transfer within same context
if node.inputs[0].type.context_name == node.op.context_name:
return [node.inputs[0]]
if node.inputs[0].owner:
n2 = node.inputs[0].owner
# host ->
if isinstance(n2.op, GpuFromHost):
return [as_gpuarray_variable(n2.inputs[0],
node.op.context_name)]
# gpuc ->
if isinstance(n2.op, GpuToGpu):
if node.op.context_name == n2.inputs[0].type.context_name:
return [n2.inputs[0]]
else:
return [node.op(n2.inputs[0])]
gpu_cut_copies.register('cut_gpua_host_transfers', local_cut_gpu_transfers,
'fast_compile', 'fast_run', 'gpuarray')
gpu_cut_copies.register('cut_gpua_constant_transfers',
tensor.opt.constant_folding,
'fast_compile', 'fast_run', 'gpuarray')
optdb['canonicalize'].register('local_cut_gpua_host_gpua',
local_cut_gpu_transfers,
'fast_compile', 'fast_run', 'gpuarray')
@register_opt('fast_compile')
@local_optimizer([tensor.Alloc])
def local_gpuaalloc2(node):
"""
Join(axis, {Alloc or HostFromGPU}, ...) -> Join(axis, GpuAlloc, Alloc, ...)
Moves an alloc that is an input to join to the gpu.
"""
try:
get_context(None)
except ContextNotDefined:
# If there is no default context then we do not perform the move here.
return
if (isinstance(node.op, tensor.Alloc) and
all(c != 'output' and
c.op == tensor.join and
all(i.owner and
i.owner.op in [host_from_gpu, tensor.alloc]
for i in c.inputs[1:])
for c, idx in node.outputs[0].clients)):
return [host_from_gpu(GpuAlloc(None)(*node.inputs))]
@register_opt('fast_compile')
@op_lifter([tensor.Alloc])
def local_gpuaalloc(node, context_name):
return GpuAlloc(context_name)(*node.inputs)
@register_opt('fast_compile')
@op_lifter([tensor.AllocEmpty])
def local_gpuaallocempty(node, context_name):
    # We use _props_dict() to make sure that the GPU op knows all the
    # CPU op props.
return GpuAllocEmpty(context_name=context_name,
**node.op._props_dict())(*node.inputs)
@register_opt()
@local_optimizer([GpuAlloc])
def local_gpualloc_memset_0(node):
if isinstance(node.op, GpuAlloc) and not node.op.memset_0:
inp = node.inputs[0]
if (isinstance(inp, GpuArrayConstant) and
inp.data.size == 1 and
(numpy.asarray(inp.data) == 0).all()):
new_op = GpuAlloc(node.op.context_name, memset_0=True)
return [new_op(*node.inputs)]
# Don't register by default.
@gof.local_optimizer([GpuAllocEmpty])
def local_gpua_alloc_empty_to_zeros(node):
if isinstance(node.op, GpuAllocEmpty):
context_name = infer_context_name(*node.inputs)
z = numpy.asarray(0, dtype=node.outputs[0].dtype)
return [GpuAlloc()(as_gpuarray_variable(z, context_name),
*node.inputs)]
optdb.register('local_gpua_alloc_empty_to_zeros',
theano.tensor.opt.in2out(local_gpua_alloc_empty_to_zeros),
# After move to gpu and merge2, before inplace.
49.3,
'alloc_empty_to_zeros',)
@register_opt()
@local_optimizer([GpuContiguous])
def local_gpu_contiguous_gpu_contiguous(node):
"""
gpu_contiguous(gpu_contiguous(x)) -> gpu_contiguous(x)
"""
if isinstance(node.op, GpuContiguous):
inp = node.inputs[0]
if inp.owner and isinstance(inp.owner.op, GpuContiguous):
return [inp]
@register_opt('fast_compile')
@op_lifter([tensor.extra_ops.CpuContiguous])
def local_gpu_contiguous(node, context_name):
return gpu_contiguous
@register_opt('fast_compile')
@op_lifter([tensor.Reshape])
def local_gpureshape(node, context_name):
op = node.op
name = op.name
if name:
name = 'Gpu' + name
res = GpuReshape(op.ndim, op.name)
return res
@register_opt('fast_compile')
@op_lifter([tensor.Rebroadcast])
def local_gpu_rebroadcast(node, context_name):
return node.op(as_gpuarray_variable(node.inputs[0], context_name))
@register_opt('fast_compile')
@op_lifter([tensor.Flatten])
def local_gpuflatten(node, context_name):
op = node.op
shp = []
if op.outdim != 1:
shp = [node.inputs[0].shape[i] for i in range(op.outdim - 1)]
shp += [-1]
res = GpuReshape(op.outdim, None)
o = res(node.inputs[0], theano.tensor.as_tensor_variable(shp))
return o
@register_opt('fast_compile')
@op_lifter([tensor.Elemwise])
def local_gpu_elemwise(node, context_name):
op = node.op
scal_op = op.scalar_op
name = op.name
if name:
name = 'Gpu' + name
if len(node.outputs) > 1:
return
res = GpuElemwise(scal_op, name=name,
inplace_pattern=copy.copy(op.inplace_pattern),
nfunc_spec=op.nfunc_spec)
# If the elemwise operation is a pow, casts might be required on the
    # inputs and/or outputs because only the (float, float)->float and
# (double, double)->double cases are implemented at the moment.
if isinstance(op.scalar_op, Pow):
        # Only transfer the computation to the gpu if the output dtype is
        # floating point. Otherwise, give up on the transfer to the gpu.
out_dtype = node.outputs[0].dtype
if out_dtype not in ['float16', 'float32', 'float64']:
return
# Transfer the inputs on the GPU and cast them to the right dtype.
new_inputs = []
for inp in node.inputs:
if inp.dtype != out_dtype:
gpu_cast_op = GpuElemwise(Cast(Scalar(out_dtype)))
new_inputs.append(gpu_cast_op(as_gpuarray_variable(inp, context_name)))
else:
new_inputs.append(as_gpuarray_variable(inp, context_name))
# Perform the exponent on the gpu and transfer the output back to the
# cpu.
gpu_output = res(*new_inputs)
cpu_output = host_from_gpu(gpu_output)
return [cpu_output]
else:
return res
def max_inputs_to_GpuElemwise(node):
ptr_size = 8
int_size = 4
# we take the limit from CUDA for now
argument_limit = 232
ndim = node.inputs[0].type.ndim
# number of elements and shape
size_param_mandatory = (int_size * (ndim + 1)) + \
(ptr_size + int_size * ndim) * len(node.outputs)
nb_bytes_avail = argument_limit - size_param_mandatory
nb_bytes_per_input = ptr_size + ndim * int_size
max_nb_inputs = nb_bytes_avail // nb_bytes_per_input
return max_nb_inputs
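# Worked example of the budget above (illustrative numbers only): for a 2-d
# elemwise node with a single output, size_param_mandatory is
# 4 * (2 + 1) + (8 + 4 * 2) * 1 = 28 bytes, which leaves 232 - 28 = 204 bytes;
# each additional input costs 8 + 2 * 4 = 16 bytes, so at most
# 204 // 16 = 12 inputs can be fused into a single GpuElemwise call.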
gpu_local_elemwise_fusion = tensor.opt.local_elemwise_fusion_op(
GpuElemwise,
max_inputs_to_GpuElemwise)
optdb.register('gpua_elemwise_fusion',
tensor.opt.FusionOptimizer(gpu_local_elemwise_fusion), 71.00,
'fast_run', 'fusion', 'local_elemwise_fusion', 'gpuarray')
inplace_gpu_elemwise_opt = tensor.opt.inplace_elemwise_optimizer_op(
GpuElemwise)
optdb.register('gpua_inplace_opt', inplace_gpu_elemwise_opt, 75,
'inplace_elemwise_optimizer', 'fast_run', 'inplace', 'gpuarray')
@register_opt('fast_compile')
@op_lifter([tensor.DimShuffle])
def local_gpua_dimshuffle(node, context_name):
return GpuDimShuffle(node.op.input_broadcastable,
node.op.new_order)
@register_opt('fast_compile')
@op_lifter([tensor.SpecifyShape])
def local_gpua_specifyShape(node, context_name):
if isinstance(node.inputs[0].type, GpuArrayType):
return
inp = [as_gpuarray_variable(node.inputs[0], context_name)]
inp += node.inputs[1:]
return tensor.specify_shape(*inp)
@register_opt('fast_compile')
@op_lifter([theano.compile.ops.Shape])
def local_gpua_shape(node, context_name):
# op_lifter will call this opt too frequently as the output is
# always on the CPU.
if isinstance(node.inputs[0].type, GpuArrayType):
return
return [as_gpuarray_variable(node.inputs[0], context_name).shape]
def gpu_print_wrapper(op, cnda):
op.old_op.global_fn(op.old_op, numpy.asarray(cnda))
@register_opt('fast_compile')
@op_lifter([tensor.printing.Print])
def local_gpu_print_op(node, context_name):
x, = node.inputs
gpu_x = as_gpuarray_variable(x, context_name=context_name)
new_op = node.op.__class__(global_fn=gpu_print_wrapper)
new_op.old_op = node.op
return new_op(gpu_x)
@register_opt('fast_compile')
@local_optimizer([PdbBreakpoint])
def local_gpu_pdbbreakpoint_op(node):
if isinstance(node.op, PdbBreakpoint):
old_inputs = node.inputs
old_outputs = node.outputs
new_inputs = node.inputs[:1]
input_transfered = []
        # Go through the monitored variables, only transferring on GPU those
        # for which the input comes from the GPU or the output will be
        # transferred on the GPU.
nb_monitored_vars = len(node.outputs)
for i in range(nb_monitored_vars):
inp = old_inputs[i + 1]
out = old_outputs[i]
input_is_from_gpu = (inp.owner and
isinstance(inp.owner.op, HostFromGpu))
output_goes_to_gpu = False
for c in out.clients:
if c == 'output':
continue
if isinstance(c[0].op, GpuFromHost):
output_goes_to_gpu = True
context_name = c[0].op.context_name
break
if input_is_from_gpu:
# The op should be applied on the GPU version of the input
new_inputs.append(inp.owner.inputs[0])
input_transfered.append(True)
elif output_goes_to_gpu:
                # The input should be transferred to the gpu
new_inputs.append(as_gpuarray_variable(inp, context_name))
input_transfered.append(True)
else:
# No transfer is required.
new_inputs.append(inp)
input_transfered.append(False)
        # Only continue the optimization if at least one input has been
        # transferred to the gpu
if not any(input_transfered):
return False
# Apply the op on the new inputs
new_op_outputs = node.op(*new_inputs, return_list=True)
# Propagate the transfer to the gpu through the outputs that require
# it
new_outputs = []
for i in range(len(new_op_outputs)):
if input_transfered[i]:
new_outputs.append(host_from_gpu(new_op_outputs[i]))
else:
new_outputs.append(new_op_outputs[i])
return new_outputs
return False
@register_opt('fast_compile')
@op_lifter([IfElse])
def local_gpua_lazy_ifelse(node, context_name):
if node.op.gpu:
return
c = node.inputs[0]
inps = []
for v in node.inputs[1:]:
if isinstance(v.type, (tensor.TensorType, GpuArrayType)):
inps.append(as_gpuarray_variable(v, context_name))
else:
inps.append(v)
return IfElse(node.op.n_outs, gpu=True)(c, *inps, return_list=True)
@register_opt('fast_compile')
@op_lifter([tensor.Join])
def local_gpua_join(node, context_name):
return gpu_join
@register_opt('fast_compile')
@local_optimizer([GpuJoin])
def local_gpuajoin_1(node):
# join of a single element
if (isinstance(node.op, GpuJoin) and
len(node.inputs) == 2):
return [node.inputs[1]]
@register_opt('fast_compile')
@op_lifter([tensor.Split])
def local_gpua_split(node, context_name):
return GpuSplit(node.op.len_splits)
@register_opt('fast_compile')
@op_lifter([tensor.Subtensor])
def local_gpua_subtensor(node, context_name):
x = node.inputs[0]
if (x.owner and isinstance(x.owner.op, HostFromGpu)):
gpu_x = x.owner.inputs[0]
if (gpu_x.owner and
isinstance(gpu_x.owner.op, GpuFromHost) and
# And it is a shared var or an input of the graph.
not gpu_x.owner.inputs[0].owner):
if len(x.clients) == 1:
if any([n == 'output' or any([isinstance(v.type, GpuArrayType)
for v in n.inputs + n.outputs])
for n, _ in node.outputs[0].clients]):
return
else:
return [host_from_gpu(gpu_x.owner.op(node.outputs[0]))]
return GpuSubtensor(node.op.idx_list)
@register_opt('fast_compile')
@op_lifter([tensor.IncSubtensor])
def local_gpua_incsubtensor(node, context_name):
op = GpuIncSubtensor(node.op.idx_list, node.op.inplace,
node.op.set_instead_of_inc,
node.op.destroyhandler_tolerate_aliased)
ret = op(*node.inputs)
val = getattr(node.outputs[0].tag, 'nan_guard_mode_check', True)
ret.tag.nan_guard_mode_check = val
return ret
@register_opt('fast_compile')
@op_lifter([tensor.AdvancedSubtensor1])
def local_gpua_advanced_subtensor(node, context_name):
return GpuAdvancedSubtensor1()
@register_opt('fast_compile')
@op_lifter([tensor.AdvancedIncSubtensor1])
def local_gpua_advanced_incsubtensor(node, context_name):
context = get_context(context_name)
# This is disabled on non-cuda contexts
if context.kind != b'cuda':
return None
x, y, ilist = node.inputs
# Gpu Ops needs both inputs to have the same dtype
if (x.type.dtype != y.type.dtype):
dtype = scalar.upcast(x.type.dtype, y.type.dtype)
if x.type.dtype != dtype:
x = tensor.cast(x, dtype)
if y.type.dtype != dtype:
y = tensor.cast(y, dtype)
set_instead_of_inc = node.op.set_instead_of_inc
compute_capability = int(context.bin_id[-2])
if (compute_capability < 2 or x.ndim != 2 or y.ndim != 2):
return GpuAdvancedIncSubtensor1(
set_instead_of_inc=set_instead_of_inc)
else:
return GpuAdvancedIncSubtensor1_dev20(
set_instead_of_inc=set_instead_of_inc)
@register_inplace()
@local_optimizer([GpuAdvancedIncSubtensor1, GpuAdvancedIncSubtensor1_dev20])
def local_advincsub1_gpua_inplace(node):
if isinstance(node.op, (GpuAdvancedIncSubtensor1,
GpuAdvancedIncSubtensor1_dev20)):
if not node.op.inplace:
return [node.op.clone_inplace()(*node.inputs)]
@register_opt('fast_compile')
@op_lifter([tensor.CAReduce, tensor.Sum, tensor.elemwise.Prod])
def local_gpua_careduce(node, context_name):
if isinstance(node.op.scalar_op, (scalar.Add, scalar.Mul,
scalar.Maximum, scalar.Minimum)):
ctx = get_context(context_name)
if ctx.kind == b'opencl':
op = GpuCAReduceCPY
if node.op.scalar_op not in [scalar.add, scalar.mul]:
                # We don't yet support all reductions with the cpy code.
return
elif ctx.kind == b'cuda':
op = GpuCAReduceCuda
else:
return False
x, = node.inputs
greduce = op(
node.op.scalar_op, axis=node.op.axis,
dtype=getattr(node.op, 'dtype', None),
acc_dtype=getattr(node.op, 'acc_dtype', None))
gvar = greduce(x)
    # We need to have make_node called, otherwise the mask can
    # be None
if (op is GpuCAReduceCPY or
gvar.owner.op.supports_c_code([
as_gpuarray_variable(x, context_name)])):
return greduce
else:
# Try to make a simpler pattern based on reshaping
# The principle is that if two adjacent dimensions have
# the same value in the reduce_mask, then we can reshape
# to make them a single dimension, do the reduction, and
# then reshape to get them back.
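            # For instance (illustrative): reducing a 4-d input over
            # axis=(1, 2) gives reduce_mask == [0, 1, 1, 0]; the two adjacent
            # reduced dimensions collapse, so the input is reshaped to
            # (d0, d1 * d2, d3), new_mask becomes [0, 1, 0] and the reduction
            # runs over new_axis == [1].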
if node.op.axis is None:
reduce_mask = [1] * x.type.ndim
else:
reduce_mask = [0] * x.type.ndim
for a in node.op.axis:
assert reduce_mask[a] == 0
reduce_mask[a] = 1
new_in_shp = [shape_i(x, 0)]
new_mask = [reduce_mask[0]]
for i in xrange(1, x.type.ndim):
if reduce_mask[i] == reduce_mask[i - 1]:
new_in_shp[-1] *= shape_i(x, i)
else:
new_mask.append(reduce_mask[i])
new_in_shp.append(shape_i(x, i))
new_axis = []
for idx, m in enumerate(new_mask):
if m == 1:
new_axis.append(idx)
greduce = op(
node.op.scalar_op,
axis=new_axis, reduce_mask=new_mask,
dtype=getattr(node.op, 'dtype', None),
acc_dtype=getattr(node.op, 'acc_dtype', None))
reshaped_x = x.reshape(tensor.stack(new_in_shp))
gpu_reshaped_x = as_gpuarray_variable(reshaped_x, context_name)
gvar = greduce(gpu_reshaped_x)
            # We need to have make_node called, otherwise the mask can
            # be None
reshaped_gpu_inputs = [gpu_reshaped_x]
if greduce.supports_c_code(reshaped_gpu_inputs):
reduce_reshaped_x = host_from_gpu(
greduce(gpu_reshaped_x))
if reduce_reshaped_x.ndim != node.outputs[0].ndim:
out_shp = []
for i in range(x.ndim):
if i not in node.op.axis:
out_shp.append(shape_i(x, i))
unreshaped_reduce = reduce_reshaped_x.reshape(
tensor.stack(out_shp))
else:
unreshaped_reduce = reduce_reshaped_x
return [unreshaped_reduce]
@register_opt('fast_compile')
@op_lifter([tensor.blas.Gemv, tensor.blas_c.CGemv])
def local_gpua_gemv(node, context_name):
if node.op.inplace:
return gpugemv_inplace
else:
return gpugemv_no_inplace
@register_opt('fast_compile')
@op_lifter([tensor.blas.Gemm])
def local_gpua_gemm(node, context_name):
if node.op.inplace:
return gpugemm_inplace
else:
return gpugemm_no_inplace
@register_opt('fast_compile')
@op_lifter([tensor.blas.BatchedDot])
def local_gpua_gemmbatch(node, context_name):
a, b = node.inputs
c = tensor.AllocEmpty(a.dtype)(a.shape[0], a.shape[1], b.shape[2])
return gpugemmbatch_no_inplace(c, 1.0, a, b, 0.0)
@register_opt('fast_compile')
@op_lifter([tensor.basic.Dot])
def local_gpua_hgemm(node, context_name):
from theano.sandbox.cuda import nvcc_compiler
if nvcc_compiler.nvcc_version < '7.5':
_logger.warning("Not performing dot of float16 on the GPU since "
"cuda 7.5 is not available. Updating could speed up "
"your code.")
return
A = node.inputs[0]
B = node.inputs[1]
if (A.ndim == 2 and B.ndim == 2 and
A.dtype == 'float16' and B.dtype == 'float16'):
fgraph = node.inputs[0].fgraph
C = GpuAllocEmpty(dtype='float16', context_name=context_name)(
shape_i(A, 0, fgraph),
shape_i(B, 1, fgraph))
return gpugemm_no_inplace(C, 1.0, A, B, 0.0)
@register_opt()
@alpha_merge(GpuGemm, alpha_in=1, beta_in=4)
def local_gpuagemm_alpha_merge(node, *inputs):
return [gpugemm_no_inplace(*inputs)]
@register_opt()
@output_merge(GpuGemm, alpha_in=1, beta_in=4, out_in=0)
def local_gpuagemm_output_merge(node, *inputs):
return [gpugemm_no_inplace(*inputs)]
@register_opt()
@alpha_merge(GpuGemmBatch, alpha_in=1, beta_in=4)
def local_gpuagemmbatch_alpha_merge(node, *inputs):
return [gpugemmbatch_no_inplace(*inputs)]
@register_opt()
@output_merge(GpuGemmBatch, alpha_in=1, beta_in=4, out_in=0)
def local_gpuagemmbatch_output_merge(node, *inputs):
return [gpugemmbatch_no_inplace(*inputs)]
@register_opt('fast_compile')
@op_lifter([tensor.blas.Ger, tensor.blas_c.CGer, tensor.blas_scipy.ScipyGer])
def local_gpua_ger(node, context_name):
return GpuGer(inplace=node.op.destructive)
@register_opt('fast_compile')
@op_lifter([tensor.blas.Dot22])
def local_gpua_dot22(node, context_name):
return gpu_dot22
@register_opt('fast_compile')
@op_lifter([tensor.blas.Dot22Scalar])
def local_gpua_dot22scalar(node, context_name):
x, y, a = node.inputs
x = as_gpuarray_variable(x, context_name)
y = as_gpuarray_variable(y, context_name)
z = GpuAllocEmpty(x.dtype, context_name)(x.shape[0], y.shape[1])
return [gpugemm_no_inplace(z, a, x, y, 0)]
@register_opt('fast_compile')
@op_lifter([tensor.basic.Eye])
def local_gpua_eye(node, context_name):
return GpuEye(dtype=node.op.dtype, context_name=context_name)
@register_opt('fast_compile')
@op_lifter([tensor.nnet.CrossentropySoftmaxArgmax1HotWithBias], cuda_only=True)
def local_gpua_crossentropysoftmaxargmax1hotwithbias(node, context_name):
return gpu_crossentropy_softmax_argmax_1hot_with_bias
@register_opt('fast_compile')
@op_lifter([tensor.nnet.CrossentropySoftmax1HotWithBiasDx], cuda_only=True)
def local_gpua_crossentropysoftmax1hotwithbiasdx(node, context_name):
return gpu_crossentropy_softmax_1hot_with_bias_dx
@register_opt('fast_compile')
@op_lifter([tensor.nnet.Softmax], cuda_only=True)
def local_gpua_softmax(node, context_name):
return gpu_softmax
@register_opt('fast_compile')
@op_lifter([tensor.nnet.SoftmaxWithBias], cuda_only=True)
def local_gpua_softmaxwithbias(node, context_name):
return gpu_softmax_with_bias
@register_opt('fast_compile')
@op_lifter([theano.tensor.opt.Assert])
def local_assert(node, context_name):
# Check if input nodes are already on the GPU
if isinstance(node.inputs[0].type, GpuArrayType):
return
return [host_from_gpu(node.op(as_gpuarray_variable(node.inputs[0],
context_name),
*node.inputs[1:]))]
@register_opt('fast_compile')
@op_lifter([ConvOp])
def local_error_convop(node, context_name):
assert False, """
ConvOp does not work with the gpuarray backend.
Use the new convolution interface to have GPU convolution working:
theano.tensor.nnet.conv2d()
"""
@register_opt('fast_compile')
@op_lifter([SparseBlockGemv])
def local_lift_sparseblockgemv(node, context_name):
if node.op.inplace:
return gpu_sparse_block_gemv_inplace
else:
return gpu_sparse_block_gemv
@register_opt('fast_compile')
@op_lifter([SparseBlockOuter])
def local_lift_sparseblockouter(node, context_name):
if node.op.inplace:
return gpu_sparse_block_outer_inplace
else:
return gpu_sparse_block_outer
@register_inplace()
@local_optimizer([GpuSparseBlockGemv], inplace=True)
def local_inplace_sparseblockgemv(node):
if isinstance(node.op, GpuSparseBlockGemv) and not node.op.inplace:
return [gpu_sparse_block_gemv_inplace(*node.inputs)]
@register_inplace()
@local_optimizer([GpuSparseBlockOuter], inplace=True)
def local_inplace_sparseblockouter(node):
if isinstance(node.op, GpuSparseBlockOuter) and not node.op.inplace:
return [GpuSparseBlockOuter(inplace=True)(*node.inputs)]
# This deals with any abstract convs that have a transfer somewhere
@register_opt('fast_compile')
@op_lifter([AbstractConv2d,
AbstractConv2d_gradWeights,
AbstractConv2d_gradInputs])
def local_lift_abstractconv2d(node, context_name):
if isinstance(node.outputs[0].type, GpuArrayType):
# Don't handle this node here, it's already on the GPU.
return
inps = list(node.inputs)
inps[0] = as_gpuarray_variable(node.inputs[0],
context_name=context_name)
inps[1] = as_gpuarray_variable(node.inputs[1],
context_name=context_name)
return [node.op(*inps)]
# Register this here so that it goes after the abstract lifting
register_opt('fast_compile')(conv_groupopt)
@register_opt("low_memory")
@local_optimizer([GpuCAReduceCuda])
def local_gpu_elemwise_careduce(node):
"""
Merge some GpuCAReduceCuda and GPUElemwise.
"""
if (isinstance(node.op, GpuCAReduceCuda) and
node.op.pre_scalar_op is None and
node.inputs[0].owner and
isinstance(node.inputs[0].owner.op, GpuElemwise) and
            # The Op supports all scalars with 1 input. We don't
            # automatically add more cases, as some, like trigonometric
            # operations with certain reduction patterns, will probably result
            # in slowdowns.
isinstance(node.inputs[0].owner.op.scalar_op, scalar.basic.Sqr)):
op = node.op
inp = node.inputs[0].owner.inputs[0]
return [GpuCAReduceCuda(scalar_op=op.scalar_op,
axis=op.axis,
reduce_mask=op.reduce_mask,
pre_scalar_op=scalar.basic.sqr)(inp)]
@local_optimizer(None)
def local_assert_no_cpu_op(node):
if (all([var.owner and isinstance(var.owner.op, HostFromGpu)
for var in node.inputs]) and
any([[c for c in var.clients if isinstance(c[0].op, GpuFromHost)]
for var in node.outputs])):
if config.assert_no_cpu_op == "warn":
_logger.warning(("CPU Op %s is detected in the computation "
"graph") % node)
elif config.assert_no_cpu_op == "raise":
raise AssertionError("The Op %s is on CPU." % node)
elif config.assert_no_cpu_op == "pdb":
pdb.set_trace()
# Register the local_assert_no_cpu_op:
assert_no_cpu_op = theano.tensor.opt.in2out(local_assert_no_cpu_op,
name='assert_no_cpu_op')
# 49.2 is after device specialization & fusion optimizations for last transfers
optdb.register('gpua_assert_no_cpu_op', assert_no_cpu_op, 49.2,
'assert_no_cpu_op')
def tensor_to_gpu(x, context_name):
if isinstance(x.type, tensor.TensorType):
y = GpuArrayType(broadcastable=x.type.broadcastable,
context_name=context_name,
dtype=x.type.dtype)()
if x.name:
y.name = x.name + '[Gpua]'
return y
else:
return x
def gpu_safe_new(x, tag=''):
"""
Internal function that constructs a new variable from x with the same
type, but with a different name (old name + tag). This function is used
by gradient, or the R-op to construct new variables for the inputs of
the inner graph such that there is no interference between the original
graph and the newly constructed graph.
"""
if hasattr(x, 'name') and x.name is not None:
nw_name = x.name + tag
else:
nw_name = None
if isinstance(x, theano.Constant):
return x.clone()
nw_x = x.type()
nw_x.name = nw_name
return nw_x
def gpu_reconstruct_graph(inputs, outputs, tag=None):
"""
Different interface to clone, that allows you to pass inputs.
Compared to clone, this method always replaces the inputs with
new variables of the same type, and returns those (in the same
order as the original inputs).
"""
if tag is None:
tag = ''
nw_inputs = [gpu_safe_new(x, tag) for x in inputs]
givens = {}
for nw_x, x in zip(nw_inputs, inputs):
givens[x] = nw_x
nw_outputs = scan_utils.clone(outputs, replace=givens)
return (nw_inputs, nw_outputs)
@register_opt('scan', 'fast_compile')
@op_lifter([scan_op.Scan])
def local_scan_to_gpua(node, context_name):
info = copy.deepcopy(node.op.info)
if info.get('gpua', False):
return
info['gpua'] = True
nw_ins = [node.inputs[0]]
e = (1 +
node.op.n_seqs +
node.op.n_mit_mot +
node.op.n_mit_sot +
node.op.n_sit_sot +
node.op.n_shared_outs)
nw_ins += [safe_to_gpu(x, context_name) for x in node.inputs[1:e]]
b = e
e = e + node.op.n_nit_sot
nw_ins += node.inputs[b:e]
nw_ins += [safe_to_gpu(x, context_name) for x in node.inputs[e:]]
scan_ins = [tensor_to_gpu(x, context_name) for x in node.op.inputs]
# The inner output corresponding to the looping condition should not be
# moved to the gpu
if node.op.info['as_while']:
scan_outs = [safe_to_gpu(x, context_name) for x in node.op.outputs[:-1]]
scan_outs += [node.op.outputs[-1]]
else:
scan_outs = [safe_to_gpu(x, context_name) for x in node.op.outputs]
scan_outs = scan_utils.clone(
scan_outs,
replace=list(zip(node.op.inputs,
(safe_to_cpu(x) for x in scan_ins))))
# We need to construct the hash here, because scan
# __init__ does not know about the gpu and can not
# handle graphs with inputs being on the gpu
tmp_in, tmp_out = gpu_reconstruct_graph(scan_ins, scan_outs)
local_fgraph = gof.FunctionGraph(tmp_in, tmp_out, clone=True)
_cmodule_key = gof.CLinker().cmodule_key_(local_fgraph, [])
info['gpu_hash'] = hash(_cmodule_key)
def typebuild(dtype, broadcastable, context_name=context_name):
return GpuArrayType(dtype=dtype, broadcastable=broadcastable,
context_name=context_name)
nw_op = scan_op.Scan(scan_ins, scan_outs, info,
typeConstructor=typebuild).make_node(*nw_ins)
return nw_op.outputs
def _scan_type_infer(node):
context_name = infer_context_name(*node.inputs)
def typebuild(dtype, broadcastable, context_name=context_name):
return GpuArrayType(dtype=dtype, broadcastable=broadcastable,
context_name=context_name)
return typebuild
# Do not register in fast_run or fast_compile.
# It will be added to fast_run if the GPU is enabled.
optdb.register('gpua_scanOp_make_inplace',
scan_opt.ScanInplaceOptimizer(typeInfer=_scan_type_infer,
gpua_flag=True),
75,
'gpuarray',
'inplace',
'scan')
| bsd-3-clause | -5,803,320,694,378,629,000 | 34.113274 | 87 | 0.599526 | false |
subins2000/TorrentBro | torrentbro/lib/tpb/constants.py | 1 | 3066 | import sys
if sys.version_info >= (3, 0):
class_type = type
else:
from new import classobj
class_type = classobj
class ConstantType(type):
"""
Tree representation metaclass for class attributes. Metaclass is extended
to all child classes too.
"""
def __new__(cls, clsname, bases, dct):
"""
Extend metaclass to all class attributes too.
"""
attrs = {}
for name, attr in dct.items():
if isinstance(attr, class_type):
                # substitute attr with a new class that uses ConstantType as
                # its metaclass, making it possible to spread the same tree
                # representation to all child classes
attr = ConstantType(
attr.__name__, attr.__bases__, attr.__dict__)
attrs[name] = attr
return super(ConstantType, cls).__new__(cls, clsname, bases, attrs)
def __repr__(cls):
"""
Tree representation of class attributes. Child classes are also
represented.
"""
# dump current class name
tree = cls.__name__ + ':\n'
for name in dir(cls):
if not name.startswith('_'):
attr = getattr(cls, name)
output = repr(attr)
if not isinstance(attr, ConstantType):
output = '{}: {}'.format(name, output)
# indent all child attrs
tree += '\n'.join([' ' * 4 +
line for line in output.splitlines()]) + '\n'
return tree
def __str__(cls):
return repr(cls)
Constants = ConstantType('Constants', (object,), {})
class ORDERS(Constants):
class NAME:
DES = 1
ASC = 2
class UPLOADED:
DES = 3
ASC = 4
class SIZE:
DES = 5
ASC = 6
class SEEDERS:
DES = 7
ASC = 8
class LEECHERS:
DES = 9
ASC = 10
class UPLOADER:
DES = 11
ASC = 12
class TYPE:
DES = 13
ASC = 14
class CATEGORIES(Constants):
ALL = 0
class AUDIO:
ALL = 100
MUSIC = 101
AUDIO_BOOKS = 102
SOUND_CLIPS = 103
FLAC = 104
OTHER = 199
class VIDEO:
ALL = 200
MOVIES = 201
MOVIES_DVDR = 202
MUSIC_VIDEOS = 203
MOVIE_CLIPS = 204
TV_SHOWS = 205
HANDHELD = 206
HD_MOVIES = 207
HD_TV_SHOWS = 208
THREE_DIMENSIONS = 209
OTHER = 299
class APPLICATIONS:
ALL = 300
WINDOWS = 301
MAC = 302
UNIX = 303
HANDHELD = 304
IOS = 305
ANDROID = 306
OTHER = 399
class GAMES:
ALL = 400
PC = 401
MAC = 402
PSX = 403
XBOX360 = 404
WII = 405
HANDHELD = 406
IOS = 407
ANDROID = 408
OTHER = 499
class OTHER:
EBOOKS = 601
COMICS = 602
PICTURES = 603
COVERS = 604
PHYSIBLES = 605
OTHER = 699
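# Example usage (sketch): the nested constants read naturally as a tree,
# e.g. CATEGORIES.VIDEO.HD_MOVIES == 207 and ORDERS.SEEDERS.DES == 7, while
# print(CATEGORIES) dumps the whole indented tree via ConstantType.__repr__.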
| gpl-3.0 | -6,296,941,637,029,682,000 | 21.217391 | 80 | 0.483366 | false |
jackylee0424/dfr | tornado/test/simple_httpclient_test.py | 1 | 16087 | from __future__ import absolute_import, division, print_function, with_statement
import collections
from contextlib import closing
import errno
import gzip
import logging
import os
import re
import socket
import sys
from tornado.httpclient import AsyncHTTPClient
from tornado.httputil import HTTPHeaders
from tornado.ioloop import IOLoop
from tornado.log import gen_log
from tornado.simple_httpclient import SimpleAsyncHTTPClient, _DEFAULT_CA_CERTS
from tornado.test.httpclient_test import ChunkHandler, CountdownHandler, HelloWorldHandler
from tornado.test import httpclient_test
from tornado.testing import AsyncHTTPTestCase, AsyncHTTPSTestCase, AsyncTestCase, bind_unused_port, ExpectLog
from tornado.test.util import unittest, skipOnTravis
from tornado.web import RequestHandler, Application, asynchronous, url
class SimpleHTTPClientCommonTestCase(httpclient_test.HTTPClientCommonTestCase):
def get_http_client(self):
client = SimpleAsyncHTTPClient(io_loop=self.io_loop,
force_instance=True)
self.assertTrue(isinstance(client, SimpleAsyncHTTPClient))
return client
class TriggerHandler(RequestHandler):
def initialize(self, queue, wake_callback):
self.queue = queue
self.wake_callback = wake_callback
@asynchronous
def get(self):
logging.debug("queuing trigger")
self.queue.append(self.finish)
if self.get_argument("wake", "true") == "true":
self.wake_callback()
class HangHandler(RequestHandler):
@asynchronous
def get(self):
pass
class ContentLengthHandler(RequestHandler):
def get(self):
self.set_header("Content-Length", self.get_argument("value"))
self.write("ok")
class HeadHandler(RequestHandler):
def head(self):
self.set_header("Content-Length", "7")
class OptionsHandler(RequestHandler):
def options(self):
self.set_header("Access-Control-Allow-Origin", "*")
self.write("ok")
class NoContentHandler(RequestHandler):
def get(self):
if self.get_argument("error", None):
self.set_header("Content-Length", "7")
self.set_status(204)
class SeeOtherPostHandler(RequestHandler):
def post(self):
redirect_code = int(self.request.body)
assert redirect_code in (302, 303), "unexpected body %r" % self.request.body
self.set_header("Location", "/see_other_get")
self.set_status(redirect_code)
class SeeOtherGetHandler(RequestHandler):
def get(self):
if self.request.body:
raise Exception("unexpected body %r" % self.request.body)
self.write("ok")
class HostEchoHandler(RequestHandler):
def get(self):
self.write(self.request.headers["Host"])
class SimpleHTTPClientTestMixin(object):
def get_app(self):
# callable objects to finish pending /trigger requests
self.triggers = collections.deque()
return Application([
url("/trigger", TriggerHandler, dict(queue=self.triggers,
wake_callback=self.stop)),
url("/chunk", ChunkHandler),
url("/countdown/([0-9]+)", CountdownHandler, name="countdown"),
url("/hang", HangHandler),
url("/hello", HelloWorldHandler),
url("/content_length", ContentLengthHandler),
url("/head", HeadHandler),
url("/options", OptionsHandler),
url("/no_content", NoContentHandler),
url("/see_other_post", SeeOtherPostHandler),
url("/see_other_get", SeeOtherGetHandler),
url("/host_echo", HostEchoHandler),
], gzip=True)
def test_singleton(self):
# Class "constructor" reuses objects on the same IOLoop
self.assertTrue(SimpleAsyncHTTPClient(self.io_loop) is
SimpleAsyncHTTPClient(self.io_loop))
# unless force_instance is used
self.assertTrue(SimpleAsyncHTTPClient(self.io_loop) is not
SimpleAsyncHTTPClient(self.io_loop,
force_instance=True))
# different IOLoops use different objects
with closing(IOLoop()) as io_loop2:
self.assertTrue(SimpleAsyncHTTPClient(self.io_loop) is not
SimpleAsyncHTTPClient(io_loop2))
def test_connection_limit(self):
with closing(self.create_client(max_clients=2)) as client:
self.assertEqual(client.max_clients, 2)
seen = []
# Send 4 requests. Two can be sent immediately, while the others
# will be queued
for i in range(4):
client.fetch(self.get_url("/trigger"),
lambda response, i=i: (seen.append(i), self.stop()))
self.wait(condition=lambda: len(self.triggers) == 2)
self.assertEqual(len(client.queue), 2)
# Finish the first two requests and let the next two through
self.triggers.popleft()()
self.triggers.popleft()()
self.wait(condition=lambda: (len(self.triggers) == 2 and
len(seen) == 2))
self.assertEqual(set(seen), set([0, 1]))
self.assertEqual(len(client.queue), 0)
# Finish all the pending requests
self.triggers.popleft()()
self.triggers.popleft()()
self.wait(condition=lambda: len(seen) == 4)
self.assertEqual(set(seen), set([0, 1, 2, 3]))
self.assertEqual(len(self.triggers), 0)
def test_redirect_connection_limit(self):
# following redirects should not consume additional connections
with closing(self.create_client(max_clients=1)) as client:
client.fetch(self.get_url('/countdown/3'), self.stop,
max_redirects=3)
response = self.wait()
response.rethrow()
def test_default_certificates_exist(self):
open(_DEFAULT_CA_CERTS).close()
def test_gzip(self):
# All the tests in this file should be using gzip, but this test
# ensures that it is in fact getting compressed.
# Setting Accept-Encoding manually bypasses the client's
# decompression so we can see the raw data.
response = self.fetch("/chunk", use_gzip=False,
headers={"Accept-Encoding": "gzip"})
self.assertEqual(response.headers["Content-Encoding"], "gzip")
self.assertNotEqual(response.body, b"asdfqwer")
# Our test data gets bigger when gzipped. Oops. :)
self.assertEqual(len(response.body), 34)
f = gzip.GzipFile(mode="r", fileobj=response.buffer)
self.assertEqual(f.read(), b"asdfqwer")
def test_max_redirects(self):
response = self.fetch("/countdown/5", max_redirects=3)
self.assertEqual(302, response.code)
# We requested 5, followed three redirects for 4, 3, 2, then the last
# unfollowed redirect is to 1.
self.assertTrue(response.request.url.endswith("/countdown/5"))
self.assertTrue(response.effective_url.endswith("/countdown/2"))
self.assertTrue(response.headers["Location"].endswith("/countdown/1"))
def test_header_reuse(self):
# Apps may reuse a headers object if they are only passing in constant
# headers like user-agent. The header object should not be modified.
headers = HTTPHeaders({'User-Agent': 'Foo'})
self.fetch("/hello", headers=headers)
self.assertEqual(list(headers.get_all()), [('User-Agent', 'Foo')])
def test_see_other_redirect(self):
for code in (302, 303):
response = self.fetch("/see_other_post", method="POST", body="%d" % code)
self.assertEqual(200, response.code)
self.assertTrue(response.request.url.endswith("/see_other_post"))
self.assertTrue(response.effective_url.endswith("/see_other_get"))
            # request is the original request; it is still a POST
self.assertEqual("POST", response.request.method)
@skipOnTravis
def test_request_timeout(self):
response = self.fetch('/trigger?wake=false', request_timeout=0.1)
self.assertEqual(response.code, 599)
self.assertTrue(0.099 < response.request_time < 0.15, response.request_time)
self.assertEqual(str(response.error), "HTTP 599: Timeout")
# trigger the hanging request to let it clean up after itself
self.triggers.popleft()()
@unittest.skipIf(not socket.has_ipv6, 'ipv6 support not present')
def test_ipv6(self):
try:
self.http_server.listen(self.get_http_port(), address='::1')
except socket.gaierror as e:
if e.args[0] == socket.EAI_ADDRFAMILY:
# python supports ipv6, but it's not configured on the network
# interface, so skip this test.
return
raise
url = self.get_url("/hello").replace("localhost", "[::1]")
# ipv6 is currently disabled by default and must be explicitly requested
self.http_client.fetch(url, self.stop)
response = self.wait()
self.assertEqual(response.code, 599)
self.http_client.fetch(url, self.stop, allow_ipv6=True)
response = self.wait()
self.assertEqual(response.body, b"Hello world!")
def test_multiple_content_length_accepted(self):
response = self.fetch("/content_length?value=2,2")
self.assertEqual(response.body, b"ok")
response = self.fetch("/content_length?value=2,%202,2")
self.assertEqual(response.body, b"ok")
response = self.fetch("/content_length?value=2,4")
self.assertEqual(response.code, 599)
response = self.fetch("/content_length?value=2,%202,3")
self.assertEqual(response.code, 599)
def test_head_request(self):
response = self.fetch("/head", method="HEAD")
self.assertEqual(response.code, 200)
self.assertEqual(response.headers["content-length"], "7")
self.assertFalse(response.body)
def test_options_request(self):
response = self.fetch("/options", method="OPTIONS")
self.assertEqual(response.code, 200)
self.assertEqual(response.headers["content-length"], "2")
self.assertEqual(response.headers["access-control-allow-origin"], "*")
self.assertEqual(response.body, b"ok")
def test_no_content(self):
response = self.fetch("/no_content")
self.assertEqual(response.code, 204)
# 204 status doesn't need a content-length, but tornado will
# add a zero content-length anyway.
self.assertEqual(response.headers["Content-length"], "0")
# 204 status with non-zero content length is malformed
response = self.fetch("/no_content?error=1")
self.assertEqual(response.code, 599)
def test_host_header(self):
host_re = re.compile(b"^localhost:[0-9]+$")
response = self.fetch("/host_echo")
self.assertTrue(host_re.match(response.body))
url = self.get_url("/host_echo").replace("http://", "http://me:secret@")
self.http_client.fetch(url, self.stop)
response = self.wait()
self.assertTrue(host_re.match(response.body), response.body)
def test_connection_refused(self):
server_socket, port = bind_unused_port()
server_socket.close()
with ExpectLog(gen_log, ".*", required=False):
self.http_client.fetch("http://localhost:%d/" % port, self.stop)
response = self.wait()
self.assertEqual(599, response.code)
if sys.platform != 'cygwin':
# cygwin returns EPERM instead of ECONNREFUSED here
self.assertTrue(str(errno.ECONNREFUSED) in str(response.error),
response.error)
# This is usually "Connection refused".
# On windows, strerror is broken and returns "Unknown error".
expected_message = os.strerror(errno.ECONNREFUSED)
self.assertTrue(expected_message in str(response.error),
response.error)
class SimpleHTTPClientTestCase(SimpleHTTPClientTestMixin, AsyncHTTPTestCase):
def setUp(self):
super(SimpleHTTPClientTestCase, self).setUp()
self.http_client = self.create_client()
def create_client(self, **kwargs):
return SimpleAsyncHTTPClient(self.io_loop, force_instance=True,
**kwargs)
class SimpleHTTPSClientTestCase(SimpleHTTPClientTestMixin, AsyncHTTPSTestCase):
def setUp(self):
super(SimpleHTTPSClientTestCase, self).setUp()
self.http_client = self.create_client()
def create_client(self, **kwargs):
return SimpleAsyncHTTPClient(self.io_loop, force_instance=True,
defaults=dict(validate_cert=False),
**kwargs)
class CreateAsyncHTTPClientTestCase(AsyncTestCase):
def setUp(self):
super(CreateAsyncHTTPClientTestCase, self).setUp()
self.saved = AsyncHTTPClient._save_configuration()
def tearDown(self):
AsyncHTTPClient._restore_configuration(self.saved)
super(CreateAsyncHTTPClientTestCase, self).tearDown()
def test_max_clients(self):
AsyncHTTPClient.configure(SimpleAsyncHTTPClient)
with closing(AsyncHTTPClient(
self.io_loop, force_instance=True)) as client:
self.assertEqual(client.max_clients, 10)
with closing(AsyncHTTPClient(
self.io_loop, max_clients=11, force_instance=True)) as client:
self.assertEqual(client.max_clients, 11)
# Now configure max_clients statically and try overriding it
# with each way max_clients can be passed
AsyncHTTPClient.configure(SimpleAsyncHTTPClient, max_clients=12)
with closing(AsyncHTTPClient(
self.io_loop, force_instance=True)) as client:
self.assertEqual(client.max_clients, 12)
with closing(AsyncHTTPClient(
self.io_loop, max_clients=13, force_instance=True)) as client:
self.assertEqual(client.max_clients, 13)
with closing(AsyncHTTPClient(
self.io_loop, max_clients=14, force_instance=True)) as client:
self.assertEqual(client.max_clients, 14)
class HTTP100ContinueTestCase(AsyncHTTPTestCase):
def respond_100(self, request):
self.request = request
self.request.connection.stream.write(
b"HTTP/1.1 100 CONTINUE\r\n\r\n",
self.respond_200)
def respond_200(self):
self.request.connection.stream.write(
b"HTTP/1.1 200 OK\r\nContent-Length: 1\r\n\r\nA",
self.request.connection.stream.close)
def get_app(self):
# Not a full Application, but works as an HTTPServer callback
return self.respond_100
def test_100_continue(self):
res = self.fetch('/')
self.assertEqual(res.body, b'A')
class HostnameMappingTestCase(AsyncHTTPTestCase):
def setUp(self):
super(HostnameMappingTestCase, self).setUp()
self.http_client = SimpleAsyncHTTPClient(
self.io_loop,
hostname_mapping={
'www.example.com': '127.0.0.1',
('foo.example.com', 8000): ('127.0.0.1', self.get_http_port()),
})
def get_app(self):
return Application([url("/hello", HelloWorldHandler), ])
def test_hostname_mapping(self):
self.http_client.fetch(
'http://www.example.com:%d/hello' % self.get_http_port(), self.stop)
response = self.wait()
response.rethrow()
self.assertEqual(response.body, b'Hello world!')
def test_port_mapping(self):
self.http_client.fetch('http://foo.example.com:8000/hello', self.stop)
response = self.wait()
response.rethrow()
self.assertEqual(response.body, b'Hello world!')
| mit | 2,541,758,861,666,431,500 | 39.419598 | 109 | 0.629887 | false |
wikimedia/user_metrics | user_metrics/api/run.py | 1 | 4196 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This module defines the entry point for the flask_ web server implementation
of the Wikimedia User Metrics API. The module is consumable
by the Apache web server through the WSGI interface via mod_wsgi; an Apache
server can be pointed at api.wsgi so that Apache acts as a wrapper around
the application.
.. _flask: http://flask.pocoo.org
Cohort Data
^^^^^^^^^^^
Cohort data is maintained in the host s1-analytics-slave.eqiad.wmnet under
the `staging` database in the `usertags` and `usertags_meta` tables: ::
+---------+-----------------+------+-----+---------+-------+
| Field | Type | Null | Key | Default | Extra |
+---------+-----------------+------+-----+---------+-------+
| ut_user | int(5) unsigned | NO | PRI | NULL | |
| ut_tag | int(4) unsigned | NO | PRI | NULL | |
+---------+-----------------+------+-----+---------+-------+
+-------------+-----------------+------+-----+---------+
| Field | Type | Null | Key | Default |
+-------------+-----------------+------+-----+---------+
| utm_id | int(5) unsigned | NO | PRI | NULL |
| utm_name | varchar(255) | NO | | |
| utm_notes | varchar(255) | YES | | NULL |
| utm_touched | datetime | YES | | NULL |
+-------------+-----------------+------+-----+---------+
"""
__author__ = {
"dario taraborelli": "[email protected]",
"ryan faulkner": "[email protected]"
}
__date__ = "2012-12-21"
__license__ = "GPL (version 2 or later)"
import multiprocessing as mp
from user_metrics.config import logging, settings
from user_metrics.api.engine.request_manager import job_control, \
requests_notification_callback
from user_metrics.api.engine.response_handler import process_responses
from user_metrics.api.views import app
from user_metrics.api.engine.request_manager import api_request_queue, \
req_notification_queue_out, req_notification_queue_in, api_response_queue
from user_metrics.utils import terminate_process_with_checks
job_controller_proc = None
response_controller_proc = None
rm_callback_proc = None
######
#
# Define Custom Classes
#
#######
def teardown():
""" When the instance is deleted store the pickled data and shutdown
the job controller """
# Try to shutdown the job control proc gracefully
try:
terminate_process_with_checks(job_controller_proc)
terminate_process_with_checks(response_controller_proc)
terminate_process_with_checks(rm_callback_proc)
except Exception:
logging.error(__name__ + ' :: Could not shut down callbacks.')
def setup_controller(req_queue, res_queue, msg_queue_in, msg_queue_out):
"""
Sets up the process that handles API jobs
"""
job_controller_proc = mp.Process(target=job_control,
args=(req_queue, res_queue))
response_controller_proc = mp.Process(target=process_responses,
args=(res_queue,
msg_queue_in))
rm_callback_proc = mp.Process(target=requests_notification_callback,
args=(msg_queue_in,
msg_queue_out))
job_controller_proc.start()
response_controller_proc.start()
rm_callback_proc.start()
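# Rough picture of the plumbing wired up above (one process per stage):
#
#   api_request_queue -> job_control -> api_response_queue
#       -> process_responses -> req_notification_queue_in
#       -> requests_notification_callback -> req_notification_queue_out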
######
#
# Execution
#
#######
# initialize API data - get the instance
setup_controller(api_request_queue, api_response_queue,
req_notification_queue_in, req_notification_queue_out)
app.config['SECRET_KEY'] = settings.__secret_key__
# With the presence of flask.ext.login module
if settings.__flask_login_exists__:
from user_metrics.api.session import login_manager
login_manager.setup_app(app)
if __name__ == '__main__':
try:
app.run(debug=True,
use_reloader=False,
host=settings.__instance_host__,
port=settings.__instance_port__,)
finally:
teardown()
| bsd-3-clause | 5,606,985,563,411,942,000 | 32.568 | 78 | 0.547188 | false |
ralphm/wokkel | wokkel/test/test_iwokkel.py | 1 | 1676 | # Copyright (c) Ralph Meijer.
# See LICENSE for details.
"""
Tests for L{wokkel.iwokkel}
"""
from __future__ import division, absolute_import
from twisted.trial import unittest
class DeprecationTest(unittest.TestCase):
"""
    Deprecation test for L{wokkel.iwokkel}.
"""
def lookForDeprecationWarning(self, testmethod, attributeName, newName):
"""
Importing C{testmethod} emits a deprecation warning.
"""
warningsShown = self.flushWarnings([testmethod])
self.assertEqual(len(warningsShown), 1)
self.assertIdentical(warningsShown[0]['category'], DeprecationWarning)
self.assertEqual(
warningsShown[0]['message'],
"wokkel.iwokkel." + attributeName + " "
"was deprecated in wokkel 0.7.0: Use " + newName + " instead.")
def test_iXMPPHandler(self):
"""
L{wokkel.iwokkel.IXMPPHandler} is deprecated.
"""
from wokkel.iwokkel import IXMPPHandler
IXMPPHandler
self.lookForDeprecationWarning(
self.test_iXMPPHandler,
"IXMPPHandler",
"twisted.words.protocols.jabber.ijabber."
"IXMPPHandler")
def test_iXMPPHandlerCollection(self):
"""
L{wokkel.iwokkel.IXMPPHandlerCollection} is deprecated.
"""
from wokkel.iwokkel import IXMPPHandlerCollection
IXMPPHandlerCollection
self.lookForDeprecationWarning(
self.test_iXMPPHandlerCollection,
"IXMPPHandlerCollection",
"twisted.words.protocols.jabber.ijabber."
"IXMPPHandlerCollection")
| mit | -6,871,118,426,764,101,000 | 30.622642 | 78 | 0.622912 | false |
benosteen/mypaint | gui/brushcreationwidget.py | 1 | 9333 | # This file is part of MyPaint.
# Copyright (C) 2009 by Martin Renold <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import os
import gtk
gdk = gtk.gdk
from lib import document
import tileddrawwidget, brushmanager, dialogs
from gettext import gettext as _
def startfile(path):
import os
import platform
    if platform.system() == 'Windows':
os.startfile(path)
else:
os.system("xdg-open " + path)
def stock_button(stock_id):
b = gtk.Button()
img = gtk.Image()
img.set_from_stock(stock_id, gtk.ICON_SIZE_MENU)
b.add(img)
return b
class BrushManipulationWidget(gtk.HBox):
""" """
def __init__(self, app, brushicon_editor):
gtk.HBox.__init__(self)
self.app = app
self.bm = app.brushmanager
self.brushicon_editor = brushicon_editor
self.init_widgets()
self.bm.selected_brush_observers.append(self.brush_selected_cb)
def init_widgets(self):
l = self.brush_name_label = gtk.Label()
l.set_text(_('(unnamed brush)'))
self.pack_start(l, expand=True)
right_vbox_buttons = [
(gtk.STOCK_SAVE, self.update_settings_cb, _('Save Settings')),
(gtk.STOCK_ADD, self.create_brush_cb, _('Add As New')),
(gtk.STOCK_PROPERTIES, self.edit_brush_cb, _('Edit Brush Icon')),
(gtk.STOCK_EDIT, self.rename_brush_cb, _('Rename...')),
(gtk.STOCK_DELETE, self.delete_brush_cb, _('Remove...')),
]
for stock_id, clicked_cb, tooltip in reversed(right_vbox_buttons):
b = stock_button(stock_id)
b.connect('clicked', clicked_cb)
b.set_tooltip_text(tooltip)
self.pack_end(b, expand=False)
def brush_selected_cb(self, managed_brush, brushinfo):
name = managed_brush.name
if name is None:
name = _('(unnamed brush)')
else:
name = name.replace('_', ' ') # XXX safename/unsafename utils?
self.brush_name_label.set_text(name)
def edit_brush_cb(self, window):
self.edit_brush_properties_cb()
def create_brush_cb(self, window):
"""Create and save a new brush based on the current working brush."""
b = brushmanager.ManagedBrush(self.bm)
b.brushinfo = self.app.brush.clone()
        b.brushinfo.set_string_property("parent_brush_name", None) # avoid mis-highlight
b.preview = self.brushicon_editor.get_preview_pixbuf()
b.save()
if self.bm.active_groups:
group = self.bm.active_groups[0]
else:
group = brushmanager.DEFAULT_BRUSH_GROUP
brushes = self.bm.get_group_brushes(group, make_active=True)
brushes.insert(0, b)
b.persistent = True # Brush was saved
b.in_brushlist = True
for f in self.bm.brushes_observers: f(brushes)
self.bm.select_brush(b)
# Pretend that the active app.brush is a child of the new one, for the
# sake of the strokemap and strokes drawn immediately after.
self.app.brush.set_string_property("parent_brush_name", b.name)
def rename_brush_cb(self, window):
src_brush = self.bm.selected_brush
if not src_brush.name:
dialogs.error(self, _('No brush selected!'))
return
dst_name = dialogs.ask_for_name(self, _("Rename Brush"), src_brush.name.replace('_', ' '))
if not dst_name:
return
dst_name = dst_name.replace(' ', '_')
# ensure we don't overwrite an existing brush by accident
dst_deleted = None
for group, brushes in self.bm.groups.iteritems():
for b2 in brushes:
if b2.name == dst_name:
if group == brushmanager.DELETED_BRUSH_GROUP:
dst_deleted = b2
else:
dialogs.error(self, _('A brush with this name already exists!'))
return
print 'renaming brush', repr(src_brush.name), '-->', repr(dst_name)
if dst_deleted:
deleted_brushes = self.bm.get_group_brushes(brushmanager.DELETED_BRUSH_GROUP)
deleted_brushes.remove(dst_deleted)
for f in self.bm.brushes_observers: f(deleted_brushes)
# save src as dst
src_name = src_brush.name
src_brush.name = dst_name
src_brush.save()
src_brush.name = src_name
# load dst
dst_brush = brushmanager.ManagedBrush(self.bm, dst_name, persistent=True)
dst_brush.load()
dst_brush.in_brushlist = True
# replace src with dst (but keep src in the deleted list if it is a stock brush)
self.delete_brush_internal(src_brush, replacement=dst_brush)
self.bm.select_brush(dst_brush)
def update_settings_cb(self, window):
b = self.bm.selected_brush
if not b.name:
dialogs.error(self, _('No brush selected, please use "Add As New" instead.'))
return
b.brushinfo = self.app.brush.clone()
b.save()
def delete_brush_cb(self, window):
b = self.bm.selected_brush
if not b.name:
dialogs.error(self, _('No brush selected!'))
return
if not dialogs.confirm(self, _("Really delete brush from disk?")):
return
self.bm.select_brush(None)
self.delete_brush_internal(b)
def delete_brush_internal(self, b, replacement=None):
for brushes in self.bm.groups.itervalues():
if b in brushes:
idx = brushes.index(b)
if replacement:
brushes[idx] = replacement
else:
del brushes[idx]
for f in self.bm.brushes_observers: f(brushes)
assert b not in brushes, 'Brush exists multiple times in the same group!'
if not b.delete_from_disk():
# stock brush can't be deleted
deleted_brushes = self.bm.get_group_brushes(brushmanager.DELETED_BRUSH_GROUP)
deleted_brushes.insert(0, b)
for f in self.bm.brushes_observers: f(deleted_brushes)
class BrushIconEditorWidget(gtk.VBox):
def __init__(self, app):
gtk.VBox.__init__(self)
self.app = app
self.bm = app.brushmanager
self.set_border_width(8)
self.init_widgets()
self.bm.selected_brush_observers.append(self.brush_selected_cb)
self.set_brush_preview_edit_mode(False)
def init_widgets(self):
button_box = gtk.HBox()
doc = document.Document(self.app.brush)
self.tdw = tileddrawwidget.TiledDrawWidget(self.app, doc)
self.tdw.set_size_request(brushmanager.preview_w*2, brushmanager.preview_h*2)
self.tdw.scale = 2.0
tdw_box = gtk.HBox()
tdw_box.pack_start(self.tdw, expand=False, fill=False)
tdw_box.pack_start(gtk.Label(), expand=True)
self.pack_start(tdw_box, expand=False, fill=False, padding=3)
self.pack_start(button_box, expand=False, fill=False, padding=3)
self.brush_preview_edit_mode_button = b = gtk.CheckButton(_('Edit'))
b.connect('toggled', self.brush_preview_edit_mode_cb)
button_box.pack_start(b, expand=False, padding=3)
self.brush_preview_clear_button = b = gtk.Button(_('Clear'))
def clear_cb(window):
self.tdw.doc.clear_layer()
b.connect('clicked', clear_cb)
button_box.pack_start(b, expand=False, padding=3)
self.brush_preview_save_button = b = gtk.Button(_('Save'))
b.connect('clicked', self.update_preview_cb)
button_box.pack_start(b, expand=False, padding=3)
def brush_preview_edit_mode_cb(self, button):
self.set_brush_preview_edit_mode(button.get_active())
def set_brush_preview_edit_mode(self, edit_mode):
self.brush_preview_edit_mode = edit_mode
self.brush_preview_edit_mode_button.set_active(edit_mode)
self.brush_preview_save_button.set_sensitive(edit_mode)
self.brush_preview_clear_button.set_sensitive(edit_mode)
self.tdw.set_sensitive(edit_mode)
def set_preview_pixbuf(self, pixbuf):
if pixbuf is None:
self.tdw.doc.clear()
else:
self.tdw.doc.load_from_pixbuf(pixbuf)
def get_preview_pixbuf(self):
pixbuf = self.tdw.doc.render_as_pixbuf(0, 0, brushmanager.preview_w, brushmanager.preview_h)
return pixbuf
def update_preview_cb(self, window):
pixbuf = self.get_preview_pixbuf()
b = self.bm.selected_brush
if not b.name:
dialogs.error(self, _('No brush selected, please use "Add As New" instead.'))
return
b.preview = pixbuf
b.save()
for brushes in self.bm.groups.itervalues():
if b in brushes:
for f in self.bm.brushes_observers: f(brushes)
def brush_selected_cb(self, managed_brush, brushinfo):
# Update brush icon preview if it is not in edit mode
if not self.brush_preview_edit_mode:
self.set_preview_pixbuf(managed_brush.preview)
| gpl-2.0 | 4,452,946,291,741,899,300 | 35.457031 | 100 | 0.605807 | false |
missionpinball/mpf | mpf/tests/test_CarouselMode.py | 1 | 10205 | from mpf.tests.MpfTestCase import MpfTestCase, MagicMock
class TestCarouselMode(MpfTestCase):
def get_config_file(self):
return 'config.yaml'
def get_machine_path(self):
return 'tests/machine_files/carousel/'
def _start_game(self):
self.machine.playfield.add_ball = MagicMock()
self.machine.ball_controller.num_balls_known = 3
self.hit_and_release_switch("s_start")
self.advance_time_and_run()
self.assertIsNotNone(self.machine.game)
def _stop_game(self):
# stop game
self.assertIsNotNone(self.machine.game)
self.machine.game.end_game()
self.advance_time_and_run()
self.assertIsNone(self.machine.game)
def testBlockingCarousel(self):
self.mock_event("blocking_carousel_item1_highlighted")
self.mock_event("blocking_carousel_item2_highlighted")
self.mock_event("blocking_carousel_item3_highlighted")
self.mock_event("flipper_cancel")
self._start_game()
self.post_event("start_mode4")
self.assertIn(self.machine.modes["blocking_carousel"], self.machine.mode_controller.active_modes)
self.assertEqual(1, self._events["blocking_carousel_item1_highlighted"])
self.assertEqual(0, self._events["blocking_carousel_item2_highlighted"])
self.post_event("s_flipper_right_active")
self.post_event("s_flipper_right_inactive")
self.assertEqual(1, self._events["blocking_carousel_item2_highlighted"])
self.assertEqual(0, self._events["blocking_carousel_item3_highlighted"])
self.assertEqual(0, self._events["flipper_cancel"])
self.post_event("s_flipper_right_active")
self.post_event("s_flipper_left_active")
self.post_event("flipper_cancel")
self.post_event("s_flipper_right_inactive")
self.post_event("s_flipper_left_inactive")
self.assertEqual(1, self._events["flipper_cancel"])
self.assertEqual(1, self._events["blocking_carousel_item1_highlighted"])
self.assertEqual(1, self._events["blocking_carousel_item2_highlighted"])
self.assertEqual(0, self._events["blocking_carousel_item3_highlighted"])
self.post_event("both_flippers_inactive")
self.post_event("s_flipper_right_inactive")
self.assertEqual(1, self._events["blocking_carousel_item3_highlighted"])
# Restart the mode to ensure that the block is cleared
self.post_event("flipper_cancel")
self.post_event("stop_mode4")
self.advance_time_and_run()
self.post_event("start_mode4")
self.post_event("s_flipper_right_inactive")
self.assertEqual(2, self._events["blocking_carousel_item2_highlighted"],
"item2_highlighted should be called when a blocked mode restarts")
def testConditionalCarousel(self):
self.mock_event("conditional_carousel_item1_highlighted")
self.mock_event("conditional_carousel_item2_highlighted")
self.mock_event("conditional_carousel_item3_highlighted")
self.mock_event("conditional_carousel_item4_highlighted")
self._start_game()
# Start the mode without any conditions true
self.post_event("start_mode3")
self.assertIn(self.machine.modes["conditional_carousel"], self.machine.mode_controller.active_modes)
self.assertEqual(1, self._events["conditional_carousel_item1_highlighted"])
self.assertEqual(0, self._events["conditional_carousel_item2_highlighted"])
self.assertEqual(0, self._events["conditional_carousel_item3_highlighted"])
self.assertEqual(0, self._events["conditional_carousel_item4_highlighted"])
self.post_event("next")
self.assertEqual(2, self._events["conditional_carousel_item1_highlighted"])
self.assertEqual(0, self._events["conditional_carousel_item2_highlighted"])
self.assertEqual(0, self._events["conditional_carousel_item3_highlighted"])
self.assertEqual(0, self._events["conditional_carousel_item4_highlighted"])
self.post_event("next")
self.assertEqual(3, self._events["conditional_carousel_item1_highlighted"])
self.assertEqual(0, self._events["conditional_carousel_item2_highlighted"])
self.assertEqual(0, self._events["conditional_carousel_item3_highlighted"])
self.assertEqual(0, self._events["conditional_carousel_item4_highlighted"])
self.post_event("stop_mode3")
# Reset the count for item 1
self.mock_event("conditional_carousel_item1_highlighted")
# Start the mode with a player variable condition
self.machine.game.player["show_item4"] = True
self.post_event("start_mode3")
self.assertEqual(1, self._events["conditional_carousel_item1_highlighted"])
self.assertEqual(0, self._events["conditional_carousel_item2_highlighted"])
self.assertEqual(0, self._events["conditional_carousel_item3_highlighted"])
self.assertEqual(0, self._events["conditional_carousel_item4_highlighted"])
self.post_event("next")
self.assertEqual(1, self._events["conditional_carousel_item1_highlighted"])
self.assertEqual(0, self._events["conditional_carousel_item2_highlighted"])
self.assertEqual(0, self._events["conditional_carousel_item3_highlighted"])
self.assertEqual(1, self._events["conditional_carousel_item4_highlighted"])
self.post_event("next")
self.assertEqual(2, self._events["conditional_carousel_item1_highlighted"])
self.assertEqual(0, self._events["conditional_carousel_item2_highlighted"])
self.assertEqual(0, self._events["conditional_carousel_item3_highlighted"])
self.assertEqual(1, self._events["conditional_carousel_item4_highlighted"])
self.post_event("stop_mode3")
# Reset the count for items 1 and 4
self.mock_event("conditional_carousel_item1_highlighted")
self.mock_event("conditional_carousel_item4_highlighted")
# Start the mode with a machine variable condition
self.machine.variables.set_machine_var("player2_score", 500000)
self.machine.game.player["show_item4"] = False
self.post_event("start_mode3")
self.assertEqual(1, self._events["conditional_carousel_item1_highlighted"])
self.assertEqual(0, self._events["conditional_carousel_item2_highlighted"])
self.assertEqual(0, self._events["conditional_carousel_item3_highlighted"])
self.assertEqual(0, self._events["conditional_carousel_item4_highlighted"])
self.post_event("next")
self.assertEqual(1, self._events["conditional_carousel_item1_highlighted"])
self.assertEqual(0, self._events["conditional_carousel_item2_highlighted"])
self.assertEqual(1, self._events["conditional_carousel_item3_highlighted"])
self.assertEqual(0, self._events["conditional_carousel_item4_highlighted"])
self.post_event("next")
self.assertEqual(2, self._events["conditional_carousel_item1_highlighted"])
self.assertEqual(0, self._events["conditional_carousel_item2_highlighted"])
self.assertEqual(1, self._events["conditional_carousel_item3_highlighted"])
self.assertEqual(0, self._events["conditional_carousel_item4_highlighted"])
self.post_event("stop_mode3")
# The mode shouldn't start if all conditions are false (i.e. no items)
self.mock_event("conditional_carousel_items_empty")
self.machine.game.player["hide_item1"] = "truthy"
self.machine.variables.set_machine_var("player2_score", 0)
self.post_event("start_mode3")
self.assertEqual(1, self._events["conditional_carousel_items_empty"])
self.assertNotIn(self.machine.modes["conditional_carousel"], self.machine.mode_controller.active_modes)
def testExtraBall(self):
self.mock_event("carousel_item1_highlighted")
self.mock_event("carousel_item1_selected")
self.mock_event("carousel_item2_highlighted")
self.mock_event("carousel_item2_selected")
self.mock_event("carousel_item3_highlighted")
self.mock_event("carousel_item3_selected")
# start game
self._start_game()
# start mode
self.post_event("start_mode1")
self.assertIn(self.machine.modes["carousel"], self.machine.mode_controller.active_modes)
self.assertEqual(1, self._events["carousel_item1_highlighted"])
self.assertEqual(0, self._events["carousel_item2_highlighted"])
self.assertEqual(0, self._events["carousel_item3_highlighted"])
self.post_event("next")
self.assertEqual(1, self._events["carousel_item1_highlighted"])
self.assertEqual(1, self._events["carousel_item2_highlighted"])
self.assertEqual(0, self._events["carousel_item3_highlighted"])
self.post_event("next")
self.assertEqual(1, self._events["carousel_item1_highlighted"])
self.assertEqual(1, self._events["carousel_item2_highlighted"])
self.assertEqual(1, self._events["carousel_item3_highlighted"])
self.post_event("next")
self.assertEqual(2, self._events["carousel_item1_highlighted"])
self.assertEqual(1, self._events["carousel_item2_highlighted"])
self.assertEqual(1, self._events["carousel_item3_highlighted"])
self.post_event("previous2")
self.assertEqual(2, self._events["carousel_item1_highlighted"])
self.assertEqual(1, self._events["carousel_item2_highlighted"])
self.assertEqual(2, self._events["carousel_item3_highlighted"])
self.post_event("previous")
self.assertEqual(2, self._events["carousel_item1_highlighted"])
self.assertEqual(2, self._events["carousel_item2_highlighted"])
self.assertEqual(2, self._events["carousel_item3_highlighted"])
self.post_event("select")
self.assertEqual(0, self._events["carousel_item1_selected"])
self.assertEqual(1, self._events["carousel_item2_selected"])
self.assertEqual(0, self._events["carousel_item3_selected"])
self.assertNotIn(self.machine.modes["carousel"], self.machine.mode_controller.active_modes)
| mit | -5,229,053,415,711,745,000 | 52.151042 | 111 | 0.685742 | false |
dietrichc/streamline-ppc-reports | examples/dfp/v201405/label_service/get_labels_by_statement.py | 1 | 1743 | #!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example gets all labels ordered by name.
To create a label, run create_label.py. This feature is only available to DFP
premium solution networks.
"""
__author__ = ('Nicholas Chen',
'Joseph DiLallo')
# Import appropriate modules from the client library.
from googleads import dfp
def main(client):
# Initialize appropriate service.
label_service = client.GetService('LabelService', version='v201405')
# Create statement to get all labels
statement = dfp.FilterStatement('ORDER BY name')
# Get labels by statement.
while True:
response = label_service.getLabelsByStatement(statement.ToStatement())
if 'results' in response:
# Display results.
for label in response['results']:
print ('Label with id \'%s\' and name \'%s\' was found.'
% (label['id'], label['name']))
statement.offset += dfp.SUGGESTED_PAGE_LIMIT
else:
break
print '\nNumber of results found: %s' % response['totalResultSetSize']
if __name__ == '__main__':
# Initialize client object.
dfp_client = dfp.DfpClient.LoadFromStorage()
main(dfp_client)
| apache-2.0 | -264,873,779,856,759,300 | 30.690909 | 77 | 0.703385 | false |
ryfeus/lambda-packs | Pandas_numpy/source/numpy/core/_internal.py | 3 | 21639 | """
A place for code to be called from core C-code.
Some things are more easily handled in Python.
"""
from __future__ import division, absolute_import, print_function
import re
import sys
from numpy.compat import basestring
from .multiarray import dtype, array, ndarray
try:
import ctypes
except ImportError:
ctypes = None
from .numerictypes import object_
if (sys.byteorder == 'little'):
_nbo = b'<'
else:
_nbo = b'>'
def _makenames_list(adict, align):
allfields = []
fnames = list(adict.keys())
for fname in fnames:
obj = adict[fname]
n = len(obj)
if not isinstance(obj, tuple) or n not in [2, 3]:
raise ValueError("entry not a 2- or 3- tuple")
if (n > 2) and (obj[2] == fname):
continue
num = int(obj[1])
if (num < 0):
raise ValueError("invalid offset.")
format = dtype(obj[0], align=align)
if (n > 2):
title = obj[2]
else:
title = None
allfields.append((fname, format, num, title))
# sort by offsets
allfields.sort(key=lambda x: x[2])
names = [x[0] for x in allfields]
formats = [x[1] for x in allfields]
offsets = [x[2] for x in allfields]
titles = [x[3] for x in allfields]
return names, formats, offsets, titles
# Called in PyArray_DescrConverter function when
# a dictionary without "names" and "formats"
# fields is used as a data-type descriptor.
def _usefields(adict, align):
try:
names = adict[-1]
except KeyError:
names = None
if names is None:
names, formats, offsets, titles = _makenames_list(adict, align)
else:
formats = []
offsets = []
titles = []
for name in names:
res = adict[name]
formats.append(res[0])
offsets.append(res[1])
if (len(res) > 2):
titles.append(res[2])
else:
titles.append(None)
return dtype({"names": names,
"formats": formats,
"offsets": offsets,
"titles": titles}, align)
# construct an array_protocol descriptor list
# from the fields attribute of a descriptor
# This calls itself recursively but should eventually hit
# a descriptor that has no fields and then return
# a simple typestring
def _array_descr(descriptor):
fields = descriptor.fields
if fields is None:
subdtype = descriptor.subdtype
if subdtype is None:
if descriptor.metadata is None:
return descriptor.str
else:
new = descriptor.metadata.copy()
if new:
return (descriptor.str, new)
else:
return descriptor.str
else:
return (_array_descr(subdtype[0]), subdtype[1])
names = descriptor.names
ordered_fields = [fields[x] + (x,) for x in names]
result = []
offset = 0
for field in ordered_fields:
if field[1] > offset:
num = field[1] - offset
result.append(('', '|V%d' % num))
offset += num
if len(field) > 3:
name = (field[2], field[3])
else:
name = field[2]
if field[0].subdtype:
tup = (name, _array_descr(field[0].subdtype[0]),
field[0].subdtype[1])
else:
tup = (name, _array_descr(field[0]))
offset += field[0].itemsize
result.append(tup)
if descriptor.itemsize > offset:
num = descriptor.itemsize - offset
result.append(('', '|V%d' % num))
return result
# Build a new array from the information in a pickle.
# Note that the name numpy.core._internal._reconstruct is embedded in
# pickles of ndarrays made with NumPy before release 1.0
# so don't remove the name here, or you'll
# break backward compatibility.
def _reconstruct(subtype, shape, dtype):
return ndarray.__new__(subtype, shape, dtype)
# format_re was originally from numarray by J. Todd Miller
format_re = re.compile(br'(?P<order1>[<>|=]?)'
br'(?P<repeats> *[(]?[ ,0-9L]*[)]? *)'
br'(?P<order2>[<>|=]?)'
br'(?P<dtype>[A-Za-z0-9.?]*(?:\[[a-zA-Z0-9,.]+\])?)')
sep_re = re.compile(br'\s*,\s*')
space_re = re.compile(br'\s+$')
# astr is a string (perhaps comma separated)
_convorder = {b'=': _nbo}
def _commastring(astr):
startindex = 0
result = []
while startindex < len(astr):
mo = format_re.match(astr, pos=startindex)
try:
(order1, repeats, order2, dtype) = mo.groups()
except (TypeError, AttributeError):
raise ValueError('format number %d of "%s" is not recognized' %
(len(result)+1, astr))
startindex = mo.end()
# Separator or ending padding
if startindex < len(astr):
if space_re.match(astr, pos=startindex):
startindex = len(astr)
else:
mo = sep_re.match(astr, pos=startindex)
if not mo:
raise ValueError(
'format number %d of "%s" is not recognized' %
(len(result)+1, astr))
startindex = mo.end()
if order2 == b'':
order = order1
elif order1 == b'':
order = order2
else:
order1 = _convorder.get(order1, order1)
order2 = _convorder.get(order2, order2)
if (order1 != order2):
raise ValueError(
'inconsistent byte-order specification %s and %s' %
(order1, order2))
order = order1
if order in [b'|', b'=', _nbo]:
order = b''
dtype = order + dtype
if (repeats == b''):
newitem = dtype
else:
newitem = (dtype, eval(repeats))
result.append(newitem)
return result
class dummy_ctype(object):
def __init__(self, cls):
self._cls = cls
def __mul__(self, other):
return self
def __call__(self, *other):
return self._cls(other)
def __eq__(self, other):
return self._cls == other._cls
def __ne__(self, other):
return self._cls != other._cls
def _getintp_ctype():
val = _getintp_ctype.cache
if val is not None:
return val
if ctypes is None:
import numpy as np
val = dummy_ctype(np.intp)
else:
char = dtype('p').char
if (char == 'i'):
val = ctypes.c_int
elif char == 'l':
val = ctypes.c_long
elif char == 'q':
val = ctypes.c_longlong
else:
val = ctypes.c_long
_getintp_ctype.cache = val
return val
_getintp_ctype.cache = None
# Used for .ctypes attribute of ndarray
class _missing_ctypes(object):
def cast(self, num, obj):
return num
def c_void_p(self, num):
return num
class _ctypes(object):
def __init__(self, array, ptr=None):
if ctypes:
self._ctypes = ctypes
else:
self._ctypes = _missing_ctypes()
self._arr = array
self._data = ptr
if self._arr.ndim == 0:
self._zerod = True
else:
self._zerod = False
def data_as(self, obj):
return self._ctypes.cast(self._data, obj)
def shape_as(self, obj):
if self._zerod:
return None
return (obj*self._arr.ndim)(*self._arr.shape)
def strides_as(self, obj):
if self._zerod:
return None
return (obj*self._arr.ndim)(*self._arr.strides)
def get_data(self):
return self._data
def get_shape(self):
return self.shape_as(_getintp_ctype())
def get_strides(self):
return self.strides_as(_getintp_ctype())
def get_as_parameter(self):
return self._ctypes.c_void_p(self._data)
data = property(get_data, None, doc="c-types data")
shape = property(get_shape, None, doc="c-types shape")
strides = property(get_strides, None, doc="c-types strides")
_as_parameter_ = property(get_as_parameter, None, doc="_as parameter_")
def _newnames(datatype, order):
"""
Given a datatype and an order object, return a new names tuple, with the
order indicated
"""
oldnames = datatype.names
nameslist = list(oldnames)
if isinstance(order, str):
order = [order]
seen = set()
if isinstance(order, (list, tuple)):
for name in order:
try:
nameslist.remove(name)
except ValueError:
if name in seen:
raise ValueError("duplicate field name: %s" % (name,))
else:
raise ValueError("unknown field name: %s" % (name,))
seen.add(name)
return tuple(list(order) + nameslist)
raise ValueError("unsupported order value: %s" % (order,))
def _copy_fields(ary):
"""Return copy of structured array with padding between fields removed.
Parameters
----------
ary : ndarray
Structured array from which to remove padding bytes
Returns
-------
ary_copy : ndarray
Copy of ary with padding bytes removed
"""
dt = ary.dtype
copy_dtype = {'names': dt.names,
'formats': [dt.fields[name][0] for name in dt.names]}
return array(ary, dtype=copy_dtype, copy=True)
def _getfield_is_safe(oldtype, newtype, offset):
""" Checks safety of getfield for object arrays.
As in _view_is_safe, we need to check that memory containing objects is not
reinterpreted as a non-object datatype and vice versa.
Parameters
----------
oldtype : data-type
Data type of the original ndarray.
newtype : data-type
Data type of the field being accessed by ndarray.getfield
offset : int
Offset of the field being accessed by ndarray.getfield
Raises
------
TypeError
If the field access is invalid
"""
if newtype.hasobject or oldtype.hasobject:
if offset == 0 and newtype == oldtype:
return
if oldtype.names:
for name in oldtype.names:
if (oldtype.fields[name][1] == offset and
oldtype.fields[name][0] == newtype):
return
raise TypeError("Cannot get/set field of an object array")
return
def _view_is_safe(oldtype, newtype):
""" Checks safety of a view involving object arrays, for example when
doing::
np.zeros(10, dtype=oldtype).view(newtype)
Parameters
----------
oldtype : data-type
Data type of original ndarray
newtype : data-type
Data type of the view
Raises
------
TypeError
If the new type is incompatible with the old type.
"""
# if the types are equivalent, there is no problem.
# for example: dtype((np.record, 'i4,i4')) == dtype((np.void, 'i4,i4'))
if oldtype == newtype:
return
if newtype.hasobject or oldtype.hasobject:
raise TypeError("Cannot change data-type for object array.")
return
# Given a string containing a PEP 3118 format specifier,
# construct a NumPy dtype
_pep3118_native_map = {
'?': '?',
'c': 'S1',
'b': 'b',
'B': 'B',
'h': 'h',
'H': 'H',
'i': 'i',
'I': 'I',
'l': 'l',
'L': 'L',
'q': 'q',
'Q': 'Q',
'e': 'e',
'f': 'f',
'd': 'd',
'g': 'g',
'Zf': 'F',
'Zd': 'D',
'Zg': 'G',
's': 'S',
'w': 'U',
'O': 'O',
'x': 'V', # padding
}
_pep3118_native_typechars = ''.join(_pep3118_native_map.keys())
_pep3118_standard_map = {
'?': '?',
'c': 'S1',
'b': 'b',
'B': 'B',
'h': 'i2',
'H': 'u2',
'i': 'i4',
'I': 'u4',
'l': 'i4',
'L': 'u4',
'q': 'i8',
'Q': 'u8',
'e': 'f2',
'f': 'f',
'd': 'd',
'Zf': 'F',
'Zd': 'D',
's': 'S',
'w': 'U',
'O': 'O',
'x': 'V', # padding
}
_pep3118_standard_typechars = ''.join(_pep3118_standard_map.keys())
def _dtype_from_pep3118(spec):
class Stream(object):
def __init__(self, s):
self.s = s
self.byteorder = '@'
def advance(self, n):
res = self.s[:n]
self.s = self.s[n:]
return res
def consume(self, c):
if self.s[:len(c)] == c:
self.advance(len(c))
return True
return False
def consume_until(self, c):
if callable(c):
i = 0
while i < len(self.s) and not c(self.s[i]):
i = i + 1
return self.advance(i)
else:
i = self.s.index(c)
res = self.advance(i)
self.advance(len(c))
return res
@property
def next(self):
return self.s[0]
def __bool__(self):
return bool(self.s)
__nonzero__ = __bool__
stream = Stream(spec)
dtype, align = __dtype_from_pep3118(stream, is_subdtype=False)
return dtype
def __dtype_from_pep3118(stream, is_subdtype):
field_spec = dict(
names=[],
formats=[],
offsets=[],
itemsize=0
)
offset = 0
common_alignment = 1
is_padding = False
# Parse spec
while stream:
value = None
# End of structure, bail out to upper level
if stream.consume('}'):
break
# Sub-arrays (1)
shape = None
if stream.consume('('):
shape = stream.consume_until(')')
shape = tuple(map(int, shape.split(',')))
# Byte order
if stream.next in ('@', '=', '<', '>', '^', '!'):
byteorder = stream.advance(1)
if byteorder == '!':
byteorder = '>'
stream.byteorder = byteorder
# Byte order characters also control native vs. standard type sizes
if stream.byteorder in ('@', '^'):
type_map = _pep3118_native_map
type_map_chars = _pep3118_native_typechars
else:
type_map = _pep3118_standard_map
type_map_chars = _pep3118_standard_typechars
# Item sizes
itemsize_str = stream.consume_until(lambda c: not c.isdigit())
if itemsize_str:
itemsize = int(itemsize_str)
else:
itemsize = 1
# Data types
is_padding = False
if stream.consume('T{'):
value, align = __dtype_from_pep3118(
stream, is_subdtype=True)
elif stream.next in type_map_chars:
if stream.next == 'Z':
typechar = stream.advance(2)
else:
typechar = stream.advance(1)
is_padding = (typechar == 'x')
dtypechar = type_map[typechar]
if dtypechar in 'USV':
dtypechar += '%d' % itemsize
itemsize = 1
numpy_byteorder = {'@': '=', '^': '='}.get(
stream.byteorder, stream.byteorder)
value = dtype(numpy_byteorder + dtypechar)
align = value.alignment
else:
raise ValueError("Unknown PEP 3118 data type specifier %r" % stream.s)
#
# Native alignment may require padding
#
# Here we assume that the presence of a '@' character implicitly implies
# that the start of the array is *already* aligned.
#
extra_offset = 0
if stream.byteorder == '@':
start_padding = (-offset) % align
intra_padding = (-value.itemsize) % align
offset += start_padding
if intra_padding != 0:
if itemsize > 1 or (shape is not None and _prod(shape) > 1):
# Inject internal padding to the end of the sub-item
value = _add_trailing_padding(value, intra_padding)
else:
# We can postpone the injection of internal padding,
# as the item appears at most once
extra_offset += intra_padding
# Update common alignment
common_alignment = _lcm(align, common_alignment)
# Convert itemsize to sub-array
if itemsize != 1:
value = dtype((value, (itemsize,)))
# Sub-arrays (2)
if shape is not None:
value = dtype((value, shape))
# Field name
if stream.consume(':'):
name = stream.consume_until(':')
else:
name = None
if not (is_padding and name is None):
if name is not None and name in field_spec['names']:
raise RuntimeError("Duplicate field name '%s' in PEP3118 format"
% name)
field_spec['names'].append(name)
field_spec['formats'].append(value)
field_spec['offsets'].append(offset)
offset += value.itemsize
offset += extra_offset
field_spec['itemsize'] = offset
# extra final padding for aligned types
if stream.byteorder == '@':
field_spec['itemsize'] += (-offset) % common_alignment
# Check if this was a simple 1-item type, and unwrap it
if (field_spec['names'] == [None]
and field_spec['offsets'][0] == 0
and field_spec['itemsize'] == field_spec['formats'][0].itemsize
and not is_subdtype):
ret = field_spec['formats'][0]
else:
_fix_names(field_spec)
ret = dtype(field_spec)
# Finished
return ret, common_alignment
def _fix_names(field_spec):
""" Replace names which are None with the next unused f%d name """
names = field_spec['names']
for i, name in enumerate(names):
if name is not None:
continue
j = 0
while True:
name = 'f{}'.format(j)
if name not in names:
break
j = j + 1
names[i] = name
def _add_trailing_padding(value, padding):
"""Inject the specified number of padding bytes at the end of a dtype"""
if value.fields is None:
field_spec = dict(
names=['f0'],
formats=[value],
offsets=[0],
itemsize=value.itemsize
)
else:
fields = value.fields
names = value.names
field_spec = dict(
names=names,
formats=[fields[name][0] for name in names],
offsets=[fields[name][1] for name in names],
itemsize=value.itemsize
)
field_spec['itemsize'] += padding
return dtype(field_spec)
def _prod(a):
p = 1
for x in a:
p *= x
return p
def _gcd(a, b):
"""Calculate the greatest common divisor of a and b"""
while b:
a, b = b, a % b
return a
def _lcm(a, b):
return a // _gcd(a, b) * b
# Exception used in shares_memory()
class TooHardError(RuntimeError):
pass
class AxisError(ValueError, IndexError):
""" Axis supplied was invalid. """
def __init__(self, axis, ndim=None, msg_prefix=None):
# single-argument form just delegates to base class
if ndim is None and msg_prefix is None:
msg = axis
# do the string formatting here, to save work in the C code
else:
msg = ("axis {} is out of bounds for array of dimension {}"
.format(axis, ndim))
if msg_prefix is not None:
msg = "{}: {}".format(msg_prefix, msg)
super(AxisError, self).__init__(msg)
def array_ufunc_errmsg_formatter(dummy, ufunc, method, *inputs, **kwargs):
""" Format the error message for when __array_ufunc__ gives up. """
args_string = ', '.join(['{!r}'.format(arg) for arg in inputs] +
['{}={!r}'.format(k, v)
for k, v in kwargs.items()])
args = inputs + kwargs.get('out', ())
types_string = ', '.join(repr(type(arg).__name__) for arg in args)
return ('operand type(s) all returned NotImplemented from '
'__array_ufunc__({!r}, {!r}, {}): {}'
.format(ufunc, method, args_string, types_string))
def _ufunc_doc_signature_formatter(ufunc):
"""
Builds a signature string which resembles PEP 457
This is used to construct the first line of the docstring
"""
# input arguments are simple
if ufunc.nin == 1:
in_args = 'x'
else:
in_args = ', '.join('x{}'.format(i+1) for i in range(ufunc.nin))
# output arguments are both keyword or positional
if ufunc.nout == 0:
out_args = ', /, out=()'
elif ufunc.nout == 1:
out_args = ', /, out=None'
else:
out_args = '[, {positional}], / [, out={default}]'.format(
positional=', '.join(
'out{}'.format(i+1) for i in range(ufunc.nout)),
default=repr((None,)*ufunc.nout)
)
# keyword only args depend on whether this is a gufunc
kwargs = (
", casting='same_kind'"
", order='K'"
", dtype=None"
", subok=True"
"[, signature"
", extobj]"
)
if ufunc.signature is None:
kwargs = ", where=True" + kwargs
# join all the parts together
return '{name}({in_args}{out_args}, *{kwargs})'.format(
name=ufunc.__name__,
in_args=in_args,
out_args=out_args,
kwargs=kwargs
)
| mit | -6,998,548,371,938,173,000 | 27.698939 | 82 | 0.526411 | false |
Taifxx/xxtrep | context.addtolib/resources/lib/ext/base/tags.py | 1 | 15746 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2011-2014 Martijn Kaijser
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
########## DEFINE TAGS:
#### System param's ...
### Library folder name ...
TAG_PAR_LIB_FOLDER = 'LIB'
### TMP folders names ...
TAG_PAR_TMP = 'TMP'
TAG_PAR_TMPA = 'TMPA'
### Addon ...
TAG_PAR_SCRIPT_ID = 'context.addtolib'
TAG_PAR_SERVICE_PY = 'service.py'
TAG_PAR_ADDON_PY = 'context.py'
TAG_PAR_COLORS_FILE = 'colors'
### Addon folders ...
TAG_PAR_RESFOLDER = 'resources'
TAG_PAR_BSFOLDER = 'bs'
TAG_PAR_SKINSFOLDER = [TAG_PAR_RESFOLDER,'skins']
TAG_PAR_SPLASH_FILE = [TAG_PAR_RESFOLDER, TAG_PAR_BSFOLDER, 'splash.mp4']
### RunScript's ...
TAG_PAR_SERVICE = 'special://home/addons/%s/%s' % (TAG_PAR_SCRIPT_ID, TAG_PAR_SERVICE_PY)
TAG_PAR_ADDON = 'special://home/addons/%s/%s' % (TAG_PAR_SCRIPT_ID, TAG_PAR_ADDON_PY)
### Strinsg XML (as default) ...
TAG_PAR_STRINGSXML_PATH = [TAG_PAR_RESFOLDER,'language','english']
TAG_PAR_STRINGSXML_FILE = 'strings.xml'
### Dropbox API ...
TAG_PAR_DROPBOX_LF = 'synclock'
TAG_PAR_DROPBOX_LCODE = 'XX000000'
TAG_PAR_DROPBOX_SYNC_FILE = 'vdbsync'
TAG_PAR_DROPBOX_SYNC_T_FILE = 'vdbsync.tmp'
TAG_PAR_DROPBOX_LI_FILE = 'libimg'
TAG_PAR_DROPBOX_LI_T_FILE = 'libimg.tmp'
TAG_PAR_DROPBOX_LI_S_FILE = 'libimg.sync'
TAG_PAR_DROPBOX_CORR_FILE = 'corruption'
TAG_PAR_DROPBOX_UID_FILE = 'uid'
TAG_PAR_DROPBOX_SYNC_T_DIR = 'SYNC_TMP'
TAG_PAR_DROPBOX_PATH = [TAG_PAR_RESFOLDER,'lib','dropbox']
TAG_PAR_DBXACCESSTOKEN_FILE = 'dropbox_access_token'
TAG_PAR_DROPBOX_LISEPREC = '\n'
TAG_PAR_DROPBOX_LISEPTM = '<**DBXTM**>'
TAG_PAR_DROPBOX_MSGSEP = '#'
TAG_PAR_DBXAPPKEY = 'cxa8c253kvoqbqd'
TAG_PAR_DBXAPPSECRET = 'n7tx9emzji3aqnh'
### Addon work files ...
TAG_PAR_TVSPACK_FILE = 'tvs.pack'
TAG_PAR_TVSRAWFILE = 'tvs.eraw'
TAG_PAR_STL_FILE = 'linktable'
TAG_PAR_FSET_FILE = 'fset'
TAG_PAR_PTYPETABLE_FILE = 'pttable'
### Addon work files (tmp) ...
TAG_PAR_TVSUPD_FILE = 'tvsupd'
TAG_PAR_TVSUPDNOW_FILE = 'updnow'
#TAG_PAR_LOCKF = 'lock'
TAG_PAR_STRARTF = 'lock_started'
#TAG_PAR_STRARTAF = 'act'
TAG_PAR_LAACTT = 'laactt'
TAG_PAR_WS_FILE = 'watchsync'
TAG_PAR_WS_TMP_FILE = 'watchsync.tmp'
### Video extensions ...
TAG_PAR_VIDEOSEXT = ['.avi', '.mpeg', '.wmv', 'asf', '.flv', '.mkv', '.mka', '.mp4', '.m4a', '.aac', '.ogg', '.ogm', '.ram', '.rm', '.rv', '.ra', '.rmvb', '.3gp']
### Backup files template ...
TAG_PAR_SYSFLSTMPL = ['.strm', TAG_PAR_TVSPACK_FILE, TAG_PAR_TVSRAWFILE, TAG_PAR_STL_FILE, TAG_PAR_FSET_FILE, TAG_PAR_PTYPETABLE_FILE, TAG_PAR_TVSUPD_FILE, TAG_PAR_TVSUPDNOW_FILE, TAG_PAR_STRARTF, TAG_PAR_DROPBOX_SYNC_FILE, TAG_PAR_DBXACCESSTOKEN_FILE]
TAG_PAR_DROPBOX_TMPL = ['.strm', TAG_PAR_TVSPACK_FILE, TAG_PAR_TVSRAWFILE, TAG_PAR_STL_FILE]
### Default tmpl ...
TAG_PAR_TVSDEFSEASON = '01'
TAG_PAR_SETDEF = 'Default'
TAG_PAR_MNUCOLORFORMAT = '[COLOR %s]%s[/COLOR]'
TAG_PAR_COLORTAG = '##COLOR##'
TAG_PAR_ADDONLABEL_TMPL = '<string id="29999">%s</string>'
TAG_PAR_ADDONLABEL_PATT = TAG_PAR_ADDONLABEL_TMPL % ('(.*)')
TAG_PAR_ADDONLABEL = TAG_PAR_ADDONLABEL_TMPL % ('ADD to [COLOR %s]Lib[/COLOR]')
TAG_PAR_LNPAGE = ' - (%s/%s)'
TAG_PAR_LNSEP = ' > '
TAG_PAR_TTLQ = '%s ( %s ):'
### Zip ...
TAG_PAR_ZIPCN = 'CN'
TAG_PAR_ZIPST = 'atl.backup.'
TAG_PAR_ZIPTMPL = TAG_PAR_ZIPST + '%s.%s.'+ TAG_PAR_ZIPCN + '.zip'
### XML
TAG_PAR_XMLW_SELDLG = 'XDialogSelect.xml'
TAG_PAR_XMLW_SELDLGSUB = 'XDialogSelectSub.xml'
TAG_PAR_XMLW_OKDLG = 'XDialogOk.xml'
TAG_PAR_XMLW_YESNODLG = 'XDialogYesNo.xml'
TAG_PAR_XMLW_RESUMEDLG = 'XDialogResume.xml'
TAG_PAR_XMLW_NOWPLAYDLG = 'XDialogNowPlay.xml'
TAG_PAR_XMLW_DROPBOX = 'Dropbox.xml'
### Help ...
TAG_PAG_HELPXML = 'DialogHelp.xml'
TAG_PAR_HELPFILE = 'help.'
TAG_PAR_HELPPATH = [TAG_PAR_RESFOLDER, 'help']
### Time ...
TAG_PAR_TIMENUMFORMAT = '{:0>2}'
TAG_PAR_TIMESEP = ':'
### URL ...
TAG_PAR_CALLURLTMPL = 'plugin://%s//?#strmtype=#%s&#strmfile=#%s&#strmurl=#'
TAG_PAR_REPFN = '%s'
TAG_PAR_ACTION = 'action='
TAG_PAR_IGNOREST = 'ignorestarted'
### tvs.pack separators ...
TAG_PAR_TVSPACK_LSEP = '<**LSTSEP**>'
TAG_PAR_TVSPACK_SSEP = '<**SRCSEP**>'
TAG_PAR_TVSPACK_FSEP = '<**FRCSEP**>'
TAG_PAR_TVSPACK_ESEP = '<**EPSSEP**>'
TAG_PAR_TVSPACK_PSEP = '<**PRTSEP**>'
TAG_PAR_TVSPACK_VERSEP = '<**VERSIONSEP**>'
TAG_PAR_TVSPACK_VERSION = '10015'
### Containers starts with ...
TAG_CON_STARTSW_EXT = 'plugin:'
TAG_CON_STARTSW_VID = 'videodb:'
TAG_CON_STARTSW_PVD = 'playlistvideo:'
#### Const Tags ...
### Default ...
DEFAULT = 10000
### Types ...
TAG_TYP_ALL = 10001
TAG_TYP_MOV = 10002
TAG_TYP_TVS = 10003
TAG_TYP_SRC = 10004
TAG_TYP_FOLDER = 10005
TAG_TYP_PREFILE = 10006
TAG_TYP_FILE = 10007
### Containers ...
TAG_CON_LOCAL = 10071
TAG_CON_EXT = 10072
TAG_CON_VID = 10073
TAG_CON_PVD = 10074
### Condidions ...
TAG_CND_FOUND = 10075
TAG_CND_NOTFOUND = 10076
TAG_CND_LISTEMPTY = 10077
TAG_CND_NEWSRC = 10078
TAG_CND_OLDSRC = 10079
TAG_CND_NOUPD = 10080
TAG_CND_NEWFRC = 10081
TAG_CND_OLDFRC = 10082
TAG_CND_UPDPRC = 10083
TAG_CND_NOUPDPRC = 10084
TAG_CND_NOGL = 10085
TAG_CND_NOACTION = 10086
TAG_CND_PLAY = 10087
TAG_CND_DBXNOAUTH = 10088
TAG_CND_NOTISMOV = 10089
TAG_CND_ISMOV = 10090
### Free actions ...
TAG_ACT_LPRESET = 10200
TAG_ACT_SHADOWUPD = 10201
TAG_ACT_DONOTHING = 10202
TAG_ACT_CHCOLOR = 10203
TAG_ACT_RENAMER = 10204
TAG_ACT_BACKUP = 10205
TAG_ACT_REMBACK = 10206
TAG_ACT_RESTBACK = 10207
TAG_ACT_RESETTBU = 10208
TAG_ACT_AUTOBACKUP = 10209
TAG_ACT_RESKIN = 10210
TAG_ACT_DBXCONNECT = 10211
TAG_ACT_DBXDISCONNECT = 10212
TAG_ACT_SYNC = 10213
TAG_ACT_WATCHSYNC = 10214
TAG_ACT_STOPSRV = 10215
TAG_ACT_STARTSRV = 10216
#### Strings Tags ...
### Language ...
TAG_LNG_ID = 30000
### Menue ...
TAG_MNU_MOV = 30001
TAG_MNU_TVS = 30002
TAG_MNU_TVSU = 30003
TAG_MNU_OPEN = 30004
TAG_MNU_RESCAN = 30005
TAG_MNU_REMSRC = 30006
TAG_MNU_RESTORE = 30007
TAG_MNU_DELETE = 30008
TAG_MNU_VIDLIBU = 30009
TAG_MNU_CHKNEW = 30010
TAG_MNU_JOIN = 30011
TAG_MNU_TVSREN = 30012
TAG_MNU_SRCREN = 30013
TAG_MNU_UPDMAN = 30014
TAG_MNU_ADDEXIST = 30015
TAG_MNU_ADDNEW = 30016
TAG_MNU_SM = 30017
TAG_MNU_SHOWALL = 30018
TAG_MNU_SRCMAN = 30019
TAG_MNU_TVSMAN = 30020
TAG_MNU_QR = 30021
TAG_MNU_QL = 30022
TAG_MNU_NEW = 30023
TAG_MNU_ADDFOL = 30024
TAG_MNU_SRE = 30025
TAG_MNU_UPDFOL = 30026
TAG_MNU_VIDLIBCLN = 30027
TAG_MNU_SHDIR = 30028
TAG_MNU_REBSTL = 30029
TAG_MNU_DEFNMMOV = 30030
TAG_MNU_NEWNMMOV = 30031
TAG_MNU_ATVSNM = 30032
TAG_MNU_ATVSNUMT = 30033
TAG_MNU_ATVSNUM = 30034
TAG_MNU_DEFNM = 30035
TAG_MNU_SEQNUM = 30036
TAG_MNU_SEANUM = 30037
TAG_MNU_STARTADD = 30038
TAG_MNU_ATVS = 30039
TAG_MNU_ATVSSERT = 30040
TAG_MNU_SERDEF = 30041
TAG_MNU_SERTPL = 30042
TAG_MNU_SEASON = 30043
TAG_MNU_RFROM = 30044
TAG_MNU_SFRBEGIN = 30045
TAG_MNU_ADVADD = 30046
TAG_MNU_CHKNEWGL = 30047
TAG_MNU_RESTOREALL = 30048
TAG_MNU_SMM = 30049
TAG_MNU_RAWADD = 30050
TAG_MNU_BRWSREN = 30051
TAG_MNU_CONTUPD = 30052
TAG_MNU_RESCANALLS = 30053
TAG_MNU_RESCANFULL = 30054
TAG_MNU_YES = 30055
TAG_MNU_NO = 30056
TAG_MNU_CLOSEDLG = 30057
TAG_MNU_ADVLSORT = 30058
TAG_MNU_ADVLSORTDOWN = 30059
TAG_MNU_ADVLSORTUP = 30060
TAG_MNU_EPSLISTCORR = 30061
TAG_MNU_NUMBCORR = 30062
TAG_MNU_PBTYPES = 30063
TAG_MNU_DBSYNC = 30064
TAG_MNU_DELMOV = 30065
TAG_MNU_DELTVS = 30066
TAG_MNU_REMARKALL = 30067
TAG_MNU_TVSSTALN = 30068
TAG_MNU_FOLDMODE = 30069
### Static mnu ...
TAG_MNU_MORE = 30090
TAG_MNU_BACKMAIN = 30091
TAG_MNU_OK = 30092
TAG_MNU_HELP = 30096
TAG_MNU_SET = 30097
TAG_MNU_BACK = 30098
TAG_MNU_CANCEL = 30099
### Confirms ...
TAG_CFR_RESCAN = 30071
TAG_CFR_REMSRC = 30072
TAG_CFR_RESTORE = 30073
TAG_CFR_DELETE = 30074
TAG_CFR_TVSREN = 30075
TAG_CFR_JOIN = 30076
TAG_CFR_CLEANVL = 30077
TAG_CFR_DEFNM = 30078
TAG_CFR_RESTOREALL = 30079
TAG_CFR_RESCANALLS = 30080
TAG_CFR_RESCANFULL = 30081
TAG_CFR_RENAMER = 30082
TAG_CFR_UNLOCK = 30083
TAG_CFR_REMBACK = 30084
TAG_CFR_RESTBACK = 30085
TAG_CFR_EXCLPLUG = 30086
### Dialogs messages ...
TAG_DLG_OK = 30100
TAG_DLG_NX = 30101
TAG_DLG_PR = 30102
TAG_DLG_INNM = 30103
TAG_DLG_INSE = 30104
TAG_DLG_NUMSKIP = 30105
TAG_DLG_SUPPRES = 30106
TAG_DLG_PBT1 = 30107
TAG_DLG_PBT2 = 30108
TAG_DLG_PBTAD1 = 30109
TAG_DLG_PBTAD2 = 30110
TAG_DLG_PBTADTIMEO = 30111
TAG_DLG_PBTADTCLAS = 30112
TAG_DLG_PBTADTISP = 30113
TAG_DLG_PBTADTFOLD = 30114
TAG_DLG_PBTT1 = 30115
TAG_DLG_PBTT2 = 30116
TAG_DLG_PBTT3 = 30117
TAG_DLG_PBTT4 = 30118
TAG_DLG_PBTT5 = 30119
TAG_DLG_PBTALT = 30120
TAG_DLG_PBTREM = 30121
TAG_DLG_NPINFO = 30122
TAG_DLG_NPINFRAT = 30123
TAG_DLG_NPINFSRC = 30124
TAG_DLG_NPINFPBT = 30125
TAG_DLG_NPDIRL = 30126
TAG_DLG_PBTTRAN = 30127
TAG_DLG_PBTTRANI = 30128
TAG_DLG_DBXP1 = 30129
TAG_DLG_DBXP2 = 30130
TAG_DLG_DBXP3 = 30131
TAG_DLG_DBXP4 = 30132
TAG_DLG_DBXP5 = 30133
TAG_DLG_DBXPEC = 30134
TAG_DLG_DBXPRGSMSGS = 30135
TAG_DLG_CORR1 = 30136
TAG_DLG_CORR2 = 30137
TAG_DLG_CORR3 = 30138
TAG_DLG_CORR_FORCE = 30139
TAG_DLG_CORR_UNL = 30140
TAG_DLG_MOVIEDEL = 30141
TAG_DLG_TVSDEL = 30142
TAG_DLG_SCLNDB = 30143
TAG_DLG_SREMEF = 30144
TAG_DLG_LOCKSYQ = 30145
TAG_DLG_RENM = 30146
TAG_DLG_CURRTVS = 30147
TAG_DLG_EXCLADDON = 30148
### Titles ...
TAG_TTL_NM = 30150
TAG_TTL_ENTNAME = 30151
TAG_TTL_CHSNAME = 30152
TAG_TTL_ADDTVS = 30153
TAG_TTL_NEWEPS = 30154
TAG_TTL_EXITVS = 30155
TAG_TTL_CHKUPD = 30156
TAG_TTL_ADDMOV = 30157
TAG_TTL_ENTNAMEM = 30158
TAG_TTL_ADVADD = 30159
TAG_TTL_RESTOREALL = 30160
TAG_TTL_CHKUPDGL = 30161
TAG_TTL_POSHLP = 30162
TAG_TTL_CAST = 30163
TAG_TTL_BRWSREN = 30164
TAG_TTL_BRWSRENEP = 30165
TAG_TTL_COLORIZE = 30166
TAG_TTL_SEASON = 30167
TAG_TTL_BACKUP = 30168
TAG_TTL_RESTBACK = 30169
TAG_TTL_RESTLIB = 30170
TAG_TTL_RESTRL = 30171
TAG_TTL_RESTUL = 30172
TAG_TTL_RESTCHK = 30173
TAG_TTL_BCKNM = 30174
TAG_TTL_RESTAT = 30175
TAG_TTL_RESTATC = 30176
TAG_TTL_RESTRTMP = 30177
TAG_TTL_PACK = 30178
TAG_TTL_REMOLDBCK = 30179
TAG_TTL_CLRERRDT = 30180
TAG_TTL_CLRERRD = 30181
TAG_TTL_HELP = 30182
TAG_TTL_MAINMNU = 30183
TAG_TTL_RESKIN = 30184
TAG_TTL_RAWADDEPS = 30185
TAG_TTL_SYNCAUTO = 30186
TAG_TTL_SYNCUP = 30187
TAG_TTL_SYNCDOWN = 30188
TAG_TTL_SYNCUNLOCK = 30189
TAG_TTL_SYNCSENDCH = 30190
TAG_TTL_DBXTTL = 30191
TAG_TTL_DBXOK = 30192
TAG_TTL_DBXCANCEL = 30193
TAG_TTL_DBXCOPY = 30194
TAG_TTL_DBXKEYB = 30195
TAG_TTL_DBXPASTE = 30196
TAG_TTL_DBXOPEN = 30197
TAG_TTL_SVIDDB = 30198
TAG_TTL_SWS = 30199
TAG_TTL_LOCKSY = 30200
### Set ...
TAG_SET_RENAMER = 30436
### Ok messages ...
TAG_ERR_OK = 30301
TAG_ERR_OK_MOVADD = 30302
TAG_ERR_OK_TVSADD = 30303
TAG_ERR_OK_TVSUPD = 30304
TAG_ERR_OK_RESCAN = 30305
TAG_ERR_OK_RESTOR = 30306
TAG_ERR_OK_REMSRC = 30307
TAG_ERR_OK_DELETE = 30308
TAG_ERR_OK_CHKNEW = 30309
TAG_ERR_OK_TVSREN = 30310
TAG_ERR_OK_SRCREN = 30311
TAG_ERR_OK_JOIN = 30312
TAG_ERR_OK_ADDFOL = 30313
TAG_ERR_OK_UPDFOL = 30314
TAG_ERR_OK_SETUPD = 30315
TAG_ERR_OK_VIDLIBU = 30316
TAG_ERR_OK_REBSTL = 30317
TAG_ERR_OK_RESTOREALL = 30318
TAG_ERR_OK_BRWSREN = 30319
TAG_ERR_OK_NEWFRC = 30320
TAG_ERR_OK_RESCANALLS = 30321
TAG_ERR_OK_RESCANFULL = 30322
TAG_ERR_OK_RENAMER = 30323
TAG_ERR_OK_BACKUP = 30324
TAG_ERR_OK_REMBACK = 30325
TAG_ERR_OK_RESTBACK = 30326
TAG_ERR_OK_NOBACK = 30327
TAG_ERR_OK_DBXSMAC = 30328
TAG_ERR_OK_DBXSMDL = 30329
TAG_ERR_OK_DBXSMUP = 30330
TAG_ERR_OK_DBXWSMAC = 30331
TAG_ERR_OK_DBXWSMDL = 30332
TAG_ERR_OK_DBXWSMUP = 30333
TAG_ERR_OK_SYNCUNLOCK = 30334
TAG_ERR_OK_MTVSDEL = 30335
TAG_ERR_OK_SYNCLOCK = 30336
TAG_ERR_OK_EPSREM = 30337
TAG_ERR_OK_EXCLUPLUG = 30338
### Errors ...
TAG_ERR_NOTFILE = 30201
TAG_ERR_INCINPUT = 30202
TAG_ERR_LISTEMPTY = 30203
TAG_ERR_ABORT = 30204
TAG_ERR_NOTOJOIN = 30205
TAG_ERR_DEDLINK = 30206
TAG_ERR_NONAME = 30207
TAG_ERR_NONAME2 = 30208
TAG_ERR_DEFEPS = 30209
TAG_ERR_BROKENLINK = 30210
TAG_ERR_BROKENLINK2 = 30211
TAG_ERR_LIB = 30212
TAG_ERR_LIBACT = 30213
TAG_ERR_LOCK = 30214
TAG_ERR_OL = 30215
TAG_ERR_BADZIP = 30216
TAG_ERR_NOBCKPATH = 30217
TAG_ERR_NOBCKPATHM = 30218
TAG_ERR_INCPBTYPE = 30219
TAG_ERR_NODBXCONNECT = 30220
TAG_ERR_DBXISLOCK = 30221
TAG_ERR_DBXRAISE = 30222
### Other ...
TAG_SET_RUN = 30479
TAG_SET_STOP = 30480
| gpl-3.0 | -7,829,866,278,117,454,000 | 31.262295 | 261 | 0.560658 | false |
Tset-Noitamotua/_learnpython | google-python-class/lesson_123_list_methods.py | 1 | 1755 | # -*- coding: utf-8 -*-
# filename: lesson_123_list_methods.py
# Life is short, use Python!
# LIST METHODS
# usage: LIST.METHOD(ARGUMENTs)
# L.append(ELEMENT) ---> append ELEMENT as is at the end of list L
# L.extend('LIST') ---> add elements of LIST at the end of L
# L.insert(INDEX, 'ELEMENT') ---> insert ELEMENT at INDEX e.g. 0 of L
# L.remove('ELEMENT') ---> search and remove ELEMENT from L
# L.pop() ---> remove and return LAST element from L
# L.pop(INDEX) ---> remove and return given INDEX from L
# L.index('ELEMENT') ---> return the INDEX of ELEMENT in L
# NOTE: Common error - the above methods do not RETURN the modified list,
# they just modify it!!!
L = ['larry', 'curly', 'moe']
print(L)
# appends the given list as is to L
L.append(['Tset', 'Noitamotua'])
print(L)
# appends each string and number from given list to L
L.extend(['TESTER', 'QA', 1, 3, 'PASS'])
print(L)
# inserts string PROWSER at index 0 of L
L.insert(0, 'PROWSER')
print(L)
L.remove('larry')
print(L)
# removes last element of L (index (-1) -> PASS) and returns it
L.pop()
print(L)
# removes first element (PROWSER) and returns it
L.pop(0)
print(L)
# get an element's index
print(L.index('curly'))
print(L.index(['Tset', 'Noitamotua']))
print(L.index('TESTER'))
# Example of common mistake mentioned above
try:
a = L.append(99)
print('a ---> ' + str(a))
    if a is None:
        print('What did I tell you above, BITCH?!')
print('List methods do NOT return the modified list!')
print("They just modify it! But they don't RETURN it!")
print("That's why a is None!!!")
print('But the list was modified, though:')
print(L)
except:
raise
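# NOTE: the same gotcha applies to L.sort() - it sorts L in place and returns
# None, while the built-in sorted(L) RETURNS a new sorted list.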
| gpl-2.0 | 7,685,919,252,727,828,000 | 32.113208 | 76 | 0.619943 | false |
natea/Miro-Community | localtv/inline_edit/urls.py | 1 | 2444 | # Copyright 2009 - Participatory Culture Foundation
#
# This file is part of Miro Community.
#
# Miro Community is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# Miro Community is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Miro Community. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import patterns
from localtv import models
from localtv.playlists.models import Playlist
urlpatterns = patterns(
'localtv.inline_edit',
(r'^video/(?P<id>[0-9]+)/name/$', 'simple.edit_field',
{'model': models.Video, 'field': 'name'},
'localtv_admin_video_edit_name'),
(r'^video/(?P<id>[0-9]+)/when_published/$', 'simple.edit_field',
{'model': models.Video, 'field': 'when_published'},
'localtv_admin_video_edit_when_published'),
(r'^video/(?P<id>[0-9]+)/authors/$', 'simple.edit_field',
{'model': models.Video, 'field': 'authors'},
'localtv_admin_video_edit_authors'),
(r'^video/(?P<id>[0-9]+)/categories/$', 'simple.edit_field',
{'model': models.Video, 'field': 'categories'},
'localtv_admin_video_edit_categories'),
(r'^video/(?P<id>[0-9]+)/tags/$', 'simple.edit_field',
{'model': models.Video, 'field': 'tags'},
'localtv_admin_video_edit_tags'),
(r'^video/(?P<id>[0-9]+)/description/$', 'simple.edit_field',
{'model': models.Video, 'field': 'description'},
'localtv_admin_video_edit_description'),
(r'^video/(?P<id>[0-9]+)/website_url/$', 'simple.edit_field',
{'model': models.Video, 'field': 'website_url'},
'localtv_admin_video_edit_website_url'),
(r'^video/(?P<id>[0-9]+)/editors_comment/$', 'video_views.editors_comment',
{}, 'localtv_admin_video_edit_editors_comment'),
(r'^video/(?P<id>[0-9]+)/thumbnail/$', 'simple.edit_field',
{'model': models.Video, 'field': 'thumbnail'},
'localtv_admin_video_edit_thumbnail'),
(r'^playlist/([0-9]+)/info/$', 'playlist.info',
{}, 'localtv_admin_playlist_edit_info'),
)
| agpl-3.0 | -3,982,084,841,670,686,700 | 45.113208 | 79 | 0.657938 | false |
yingcuhk/LeetCode | Algorithms/#303 Range Sum Query - Immutable/PythonCode.py | 1 | 1082 | """
Given an integer array nums, find the sum of the elements between indices i and j (i ≤ j), inclusive.
Example:
Given nums = [-2, 0, 3, -5, 2, -1]
sumRange(0, 2) -> 1
sumRange(2, 5) -> -1
sumRange(0, 5) -> -3
Note:
You may assume that the array does not change.
There are many calls to sumRange function.
"""
class NumArray(object):
def __init__(self, nums):
"""
initialize your data structure here.
:type nums: List[int]
"""
#self.nums = nums
L = len(nums)
CumSum = [0 for i in xrange(L+1)]
for i in range(1,L+1):
CumSum[i] = CumSum[i-1]+nums[i-1]
#print CumSum
self.CumSum = CumSum
def sumRange(self, i, j):
"""
sum of elements nums[i..j], inclusive.
:type i: int
:type j: int
:rtype: int
"""
return self.CumSum[j+1] - self.CumSum[i]
# Your NumArray object will be instantiated and called as such:
# numArray = NumArray(nums)
# numArray.sumRange(0, 1)
# numArray.sumRange(1, 2) | mit | 6,948,932,555,707,017,000 | 22.042553 | 102 | 0.550832 | false |
vascotenner/holoviews | holoviews/plotting/mpl/annotation.py | 1 | 3913 | import matplotlib
from matplotlib import patches as patches
from ...core.util import match_spec
from ...core.options import abbreviated_exception
from .element import ElementPlot
class AnnotationPlot(ElementPlot):
"""
AnnotationPlot handles the display of all annotation elements.
"""
def __init__(self, annotation, **params):
self._annotation = annotation
super(AnnotationPlot, self).__init__(annotation, **params)
self.handles['annotations'] = []
def initialize_plot(self, ranges=None):
annotation = self.hmap.last
key = self.keys[-1]
ranges = self.compute_ranges(self.hmap, key, ranges)
ranges = match_spec(annotation, ranges)
axis = self.handles['axis']
opts = self.style[self.cyclic_index]
with abbreviated_exception():
handles = self.draw_annotation(axis, annotation.data, opts)
self.handles['annotations'] = handles
return self._finalize_axis(key, ranges=ranges)
def update_handles(self, key, axis, annotation, ranges, style):
# Clear all existing annotations
for element in self.handles['annotations']:
element.remove()
with abbreviated_exception():
self.handles['annotations'] = self.draw_annotation(axis, annotation.data, style)
class VLinePlot(AnnotationPlot):
"Draw a vertical line on the axis"
style_opts = ['alpha', 'color', 'linewidth', 'linestyle', 'visible']
def draw_annotation(self, axis, position, opts):
return [axis.axvline(position, **opts)]
class HLinePlot(AnnotationPlot):
"Draw a horizontal line on the axis"
style_opts = ['alpha', 'color', 'linewidth', 'linestyle', 'visible']
def draw_annotation(self, axis, position, opts):
"Draw a horizontal line on the axis"
return [axis.axhline(position, **opts)]
class TextPlot(AnnotationPlot):
"Draw the Text annotation object"
style_opts = ['alpha', 'color', 'family', 'weight', 'rotation', 'fontsize', 'visible']
def draw_annotation(self, axis, data, opts):
(x,y, text, fontsize,
horizontalalignment, verticalalignment, rotation) = data
opts['fontsize'] = fontsize
return [axis.text(x,y, text,
horizontalalignment = horizontalalignment,
verticalalignment = verticalalignment,
rotation=rotation, **opts)]
class ArrowPlot(AnnotationPlot):
"Draw an arrow using the information supplied to the Arrow annotation"
_arrow_style_opts = ['alpha', 'color', 'lw', 'linewidth', 'visible']
_text_style_opts = TextPlot.style_opts
style_opts = sorted(set(_arrow_style_opts + _text_style_opts))
def draw_annotation(self, axis, data, opts):
direction, text, xy, points, arrowstyle = data
arrowprops = dict({'arrowstyle':arrowstyle},
**{k: opts[k] for k in self._arrow_style_opts if k in opts})
textopts = {k: opts[k] for k in self._text_style_opts if k in opts}
if direction in ['v', '^']:
xytext = (0, points if direction=='v' else -points)
elif direction in ['>', '<']:
xytext = (points if direction=='<' else -points, 0)
return [axis.annotate(text, xy=xy, textcoords='offset points',
xytext=xytext, ha="center", va="center",
arrowprops=arrowprops, **textopts)]
class SplinePlot(AnnotationPlot):
"Draw the supplied Spline annotation (see Spline docstring)"
style_opts = ['alpha', 'edgecolor', 'linewidth', 'linestyle', 'visible']
def draw_annotation(self, axis, data, opts):
verts, codes = data
patch = patches.PathPatch(matplotlib.path.Path(verts, codes),
facecolor='none', **opts)
axis.add_patch(patch)
return [patch]
| bsd-3-clause | -3,434,174,948,459,445,000 | 34.899083 | 92 | 0.620751 | false |
Dacelonid/gerrymander | gerrymander/reports.py | 1 | 49794 | #
# Copyright (C) 2014 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import prettytable
import logging
import time
import re
import json
import sys
import xml.dom.minidom
from gerrymander.operations import OperationQuery
from gerrymander.model import ModelApproval
from gerrymander.format import format_date
from gerrymander.format import format_delta
from gerrymander.format import format_title
from gerrymander.format import format_color
LOG = logging.getLogger(__name__)
class ReportOutputColumn(object):
ALIGN_LEFT = "l"
ALIGN_RIGHT = "r"
ALIGN_CENTER = "c"
def __init__(self, key, label, mapfunc, sortfunc=None, format=None, truncate=0, align=ALIGN_LEFT, visible=True):
self.key = key
self.label = label
self.mapfunc = mapfunc
self.sortfunc = sortfunc
self.format = format
self.truncate = truncate
self.align = align
self.visible = visible
def get_value(self, report, row):
val = self.mapfunc(report, self.key, row)
if self.format is not None:
val = self.format % val
elif val is None:
val = ""
if type(val) != str:
val = val.encode('utf-8')
if self.truncate and len(val) > self.truncate:
val = val[0:self.truncate] + "..."
return val
def get_sort_value(self, report, row):
if self.sortfunc:
return self.sortfunc(report, self.key, row)
else:
return self.mapfunc(report, self.key, row)
class ReportOutput(object):
DISPLAY_MODE_TEXT = "text"
DISPLAY_MODE_CSV = "csv"
DISPLAY_MODE_XML = "xml"
DISPLAY_MODE_JSON = "json"
def __init__(self, usecolor=False):
super(ReportOutput, self).__init__()
self.usecolor = usecolor
def display(self, mode, stream=sys.stdout):
if mode == ReportOutput.DISPLAY_MODE_TEXT:
stream.write(self.to_text())
elif mode == ReportOutput.DISPLAY_MODE_CSV:
stream.write(self.to_csv())
elif mode == ReportOutput.DISPLAY_MODE_XML:
impl = xml.dom.minidom.getDOMImplementation()
doc = impl.createDocument(None, "report", None)
self.to_xml(doc, doc.documentElement)
stream.write(doc.toprettyxml())
elif mode == ReportOutput.DISPLAY_MODE_JSON:
doc = []
self.to_json(doc)
stream.write(json.dumps(doc, indent=2) + "\n")
else:
raise Exception("Unknown display mode '%s'" % mode)
def to_text(self):
raise NotImplementedError("Subclass should implement the 'to_text' method")
def to_csv(self):
raise NotImplementedError("Subclass should implement the 'to_csv' method")
def to_xml(self, root):
raise NotImplementedError("Subclass should implement the 'to_xml' method")
def to_json(self, root):
raise NotImplementedError("Subclass should implement the 'to_json' method")
class ReportOutputCompound(ReportOutput):
def __init__(self):
self.report = []
def add_report(self, report):
self.report.append(report)
def to_text(self):
blocks = []
for report in self.report:
blocks.append(report.to_text())
return "\n".join(blocks)
def to_json(self, root):
for report in self.report:
report.to_json(root)
def to_xml(self, doc, root):
for report in self.report:
report.to_xml(doc, root)
class ReportOutputList(ReportOutput):
def __init__(self, columns, title=None, usecolor=False):
super(ReportOutputList, self).__init__(usecolor)
self.columns = columns
self.row = {}
self.title = title
def set_row(self, row):
self.row = row
def to_xml(self, doc, root):
lst = doc.createElement("list")
root.appendChild(lst)
if self.title is not None:
title = doc.createElement("title")
title.appendChild(doc.createTextNode(self.title))
lst.appendChild(title)
headers = doc.createElement("headers")
content = doc.createElement("content")
lst.appendChild(headers)
lst.appendChild(content)
for col in self.columns:
if col.visible:
xmlcol = doc.createElement(col.key)
xmlcol.appendChild(doc.createTextNode(col.label))
headers.appendChild(xmlcol)
for col in self.columns:
if col.visible:
xmlfield = doc.createElement(col.key)
xmlfield.appendChild(doc.createTextNode(col.get_value(self, self.row)))
content.appendChild(xmlfield)
def to_json(self, root):
headers = {}
for col in self.columns:
if col.visible:
headers[col.key] = col.label
content = {}
for col in self.columns:
if col.visible:
content[col.key] = col.get_value(self, self.row)
node = {
"list": {
"headers": headers,
"content": content
}
}
if self.title is not None:
node["list"]["title"] = self.title
root.append(node)
def to_text(self):
labels = []
width = 1
for col in self.columns:
if col.visible:
if len(col.label) > width:
width = len(col.label)
labels.append(col.label)
fmt = " %" + str(width) + "s: %s"
lines = []
for col in self.columns:
if col.visible:
line = fmt % (col.label, col.get_value(self, self.row))
lines.append(line)
prolog = ""
if self.title is not None:
prolog = format_title(self.title) + "\n"
return prolog + "\n".join(lines) + "\n"
class ReportOutputTable(ReportOutput):
def __init__(self, columns, sortcol, reverse, limit, title=None, usecolor=False):
super(ReportOutputTable, self).__init__(usecolor)
self.columns = list(columns)
self.rows = []
self.sortcol = sortcol
self.reverse = reverse
self.limit = limit
self.title = title
def add_column(self, col):
self.columns.append(col)
def add_row(self, row):
self.rows.append(row)
def sort_rows(self):
sortcol = None
for col in self.columns:
if col.key == self.sortcol:
sortcol = col
if sortcol is not None:
self.rows.sort(key = lambda item: sortcol.get_sort_value(self, item),
reverse=self.reverse)
def to_xml(self, doc, root):
self.sort_rows()
table = doc.createElement("table")
root.appendChild(table)
if self.title is not None:
title = doc.createElement("title")
title.appendChild(doc.createTextNode(self.title))
table.appendChild(title)
headers = doc.createElement("headers")
content = doc.createElement("content")
table.appendChild(headers)
table.appendChild(content)
for col in self.columns:
if col.visible:
xmlcol = doc.createElement(col.key)
xmlcol.appendChild(doc.createTextNode(col.label))
headers.appendChild(xmlcol)
rows = self.rows
if self.limit is not None:
rows = rows[0:self.limit]
for row in rows:
xmlrow = doc.createElement("row")
for col in self.columns:
if col.visible:
xmlfield = doc.createElement(col.key)
xmlfield.appendChild(doc.createTextNode(col.get_value(self, row)))
xmlrow.appendChild(xmlfield)
content.appendChild(xmlrow)
return doc
def to_json(self, root):
self.sort_rows()
headers = {}
for col in self.columns:
if col.visible:
headers[col.key] = col.label
content = []
rows = self.rows
if self.limit is not None:
rows = rows[0:self.limit]
for row in rows:
data = {}
for col in self.columns:
if col.visible:
data[col.key] = col.get_value(self, row)
content.append(data)
node = {
"table": {
"headers": headers,
"content": content
}
}
if self.title is not None:
node["table"]["title"] = self.title
root.append(node)
def to_text(self):
self.sort_rows()
labels = []
for col in self.columns:
if col.visible:
labels.append(col.label)
table = prettytable.PrettyTable(labels)
for col in self.columns:
table.align[col.label] = col.align
table.padding_width = 1
rows = self.rows
if self.limit is not None:
rows = rows[0:self.limit]
for row in rows:
data = []
for col in self.columns:
if col.visible:
data.append(col.get_value(self, row))
table.add_row(data)
prolog = ""
if self.title is not None:
prolog = format_title(self.title) + "\n"
return prolog + str(table) + "\n"
def to_csv(self):
self.sort_rows()
labels = []
for col in self.columns:
if col.visible:
labels.append(col.label)
lines = []
if self.title is not None:
lines.append(self.title)
lines.append(",".join(labels))
rows = self.rows
if self.limit is not None:
rows = rows[0:self.limit]
for row in rows:
data = []
for col in self.columns:
if col.visible:
data.append(col.get_value(self, row))
lines.append(",".join(data))
return "\n".join(lines)
class Report(object):
def __init__(self, client):
self.client = client
def generate(self):
raise NotImplementedError("Subclass must override generate method")
def display(self, mode):
output = self.generate()
output.display(mode)
class ReportTable(Report):
def __init__(self, client, columns, sort=None, reverse=False):
super(ReportTable, self).__init__(client)
self.columns = columns
self.limit = None
self.set_sort_column(sort, reverse)
def get_columns(self):
return self.columns
def get_column(self, key):
for col in self.columns:
if col.key == key:
return col
return None
def has_column(self, key):
col = self.get_column(key)
if col is None:
return False
return True
def set_sort_column(self, key, reverse=False):
got = False
for col in self.columns:
if col.key == key:
got = True
if not got:
raise Exception("Unknown sort column %s" % key)
self.sort = key
self.reverse = reverse
def set_data_limit(self, limit):
self.limit = limit
def new_table(self, title=None):
return ReportOutputTable(self.columns, self.sort,
self.reverse, self.limit,
title, self.usecolor)
class ReportPatchReviewStats(ReportTable):
def user_mapfunc(rep, col, row):
return row[0]
def team_mapfunc(rep, col, row):
return row[2]
def review_mapfunc(rep, col, row):
return row[1]['total']
def ratio_mapfunc(rep, col, row):
plus = float(row[1]['votes']['flag-p2'] + row[1]['votes']['flag-p1'])
minus = float(row[1]['votes']['flag-m2'] + row[1]['votes']['flag-m1'])
ratio = (plus / (plus + minus)) * 100
return ratio
def vote_mapfunc(rep, col, row):
return row[1]['votes'][col]
COLUMNS = [
ReportOutputColumn("user", "User", user_mapfunc, align=ReportOutputColumn.ALIGN_LEFT),
ReportOutputColumn("team", "Team", team_mapfunc, align=ReportOutputColumn.ALIGN_LEFT),
ReportOutputColumn("reviews", "Reviews", review_mapfunc, align=ReportOutputColumn.ALIGN_RIGHT),
ReportOutputColumn("flag-m2", "-2", vote_mapfunc, align=ReportOutputColumn.ALIGN_RIGHT),
ReportOutputColumn("flag-m1", "-1", vote_mapfunc, align=ReportOutputColumn.ALIGN_RIGHT),
ReportOutputColumn("flag-p1", "+1", vote_mapfunc, align=ReportOutputColumn.ALIGN_RIGHT),
ReportOutputColumn("flag-p2", "+2", vote_mapfunc, align=ReportOutputColumn.ALIGN_RIGHT),
ReportOutputColumn("ratio", "+/-", ratio_mapfunc, format="%0.0lf%%", align=ReportOutputColumn.ALIGN_RIGHT),
]
def __init__(self, client, projects, maxagedays=30, teams={}, usecolor=False):
super(ReportPatchReviewStats, self).__init__(client,
ReportPatchReviewStats.COLUMNS,
sort="reviews", reverse=True)
self.projects = projects
self.teams = teams
self.maxagedays = maxagedays
self.usecolor = usecolor
def generate(self):
# We could query all projects at once, but if we do them
# individually it means we get better hit rate against the
# cache if the report is re-run for many different project
# combinations
reviews = []
cutoff = time.time() - (self.maxagedays * 24 * 60 * 60)
for project in self.projects:
query = OperationQuery(self.client,
{
"project": [project],
},
patches=OperationQuery.PATCHES_ALL,
approvals=True)
def querycb(change):
for patch in change.patches:
for approval in patch.approvals:
if approval.is_newer_than(cutoff):
reviews.append(approval)
query.run(querycb)
reviewers = {}
for review in reviews:
if review.action != ModelApproval.ACTION_REVIEWED or review.user is None:
continue
reviewer = review.user.username
if reviewer is None:
reviewer = review.user.name
if reviewer is None:
continue
if reviewer.lower() in ["jenkins", "smokestack"]:
continue
reviewers.setdefault(reviewer,
{
'votes': {'flag-m2': 0, 'flag-m1': 0, 'flag-p1': 0, 'flag-p2': 0},
'total': 0,
})
reviewers[reviewer]['total'] = reviewers[reviewer]['total'] + 1
votes = { "-2" : "flag-m2",
"-1" : "flag-m1",
"1" : "flag-p1",
"2" : "flag-p2" }
cur = reviewers[reviewer]['votes'][votes[str(review.value)]]
reviewers[reviewer]['votes'][votes[str(review.value)]] = cur + 1
compound = ReportOutputCompound()
table = self.new_table("Review statistics")
compound.add_report(table)
for user, votes in reviewers.items():
userteam = ""
for team in self.teams.keys():
if user in self.teams[team]:
userteam = team
table.add_row([user, votes, userteam])
summary = ReportOutputList([
ReportOutputColumn("nreviews", "Total reviews", format="%d",
mapfunc=lambda rep, col, row: row[0]),
ReportOutputColumn("nreviewers", "Total rviewers", format="%d",
mapfunc=lambda rep, col, row: row[1])
], title="Review summary")
summary.set_row([len(reviews), len(reviewers.keys())])
compound.add_report(summary)
return compound
class ReportPatchReviewRate(ReportTable):
def user_mapfunc(rep, col, row):
return row[0]
def team_mapfunc(rep, col, row):
return row[1]
def week_mapfunc(rep, col, row):
if col not in row[2]:
return 0.0
return (row[2][col] / 7.0)
def total_mapfunc(rep, col, row):
if col not in row[2]:
return 0.0
return (row[2][col] / (52.0 * 7.0))
COLUMNS = [
ReportOutputColumn("user", "User", user_mapfunc, align=ReportOutputColumn.ALIGN_LEFT),
ReportOutputColumn("team", "Team", team_mapfunc, align=ReportOutputColumn.ALIGN_LEFT),
ReportOutputColumn("total", "Total", total_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week1", "1 week", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week2", "2 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week3", "3 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week4", "4 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week5", "5 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week6", "6 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week7", "7 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week8", "8 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week9", "9 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week10", "10 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week11", "11 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week12", "12 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week13", "13 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week14", "14 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week15", "15 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week16", "16 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week17", "17 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week18", "18 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week19", "19 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week20", "20 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week21", "21 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week22", "22 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week23", "23 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week24", "24 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week25", "25 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week26", "26 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week27", "27 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week28", "28 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week29", "29 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week30", "30 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week31", "31 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week32", "32 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week33", "33 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week34", "34 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week35", "35 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week36", "36 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week37", "37 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week38", "38 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week39", "39 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week40", "40 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week41", "41 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week42", "42 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week43", "43 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week44", "44 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week45", "45 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week46", "46 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week47", "47 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week48", "48 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week49", "49 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week50", "50 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week51", "51 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
ReportOutputColumn("week52", "52 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"),
]
def __init__(self, client, projects, teams={}, usecolor=False):
super(ReportPatchReviewRate, self).__init__(client,
ReportPatchReviewRate.COLUMNS,
sort="total", reverse=True)
self.projects = projects
self.teams = teams
self.usecolor = usecolor
def generate(self):
# We could query all projects at once, but if we do them
# individually it means we get better hit rate against the
# cache if the report is re-run for many different project
# combinations
reviewers = {}
now = time.time()
for project in self.projects:
query = OperationQuery(self.client,
{
"project": [project],
},
patches=OperationQuery.PATCHES_ALL,
approvals=True)
def querycb(change):
for patch in change.patches:
for approval in patch.approvals:
if approval.action == ModelApproval.ACTION_VERIFIED:
continue
user = approval.user
if user is None or user.username is None:
continue
username = user.username
if username not in reviewers:
reviewers[username] = { "total": 0}
agesecs = approval.get_age(now)
ageweeks = int(agesecs / (60 * 60 * 24 * 7)) + 1
key = "week%d" % ageweeks
if key not in reviewers[username]:
reviewers[username][key] = 0
reviewers[username][key] = reviewers[username][key] + 1
if ageweeks <= 52:
reviewers[username]["total"] = reviewers[username]["total"] + 1
query.run(querycb)
table = self.new_table("Daily review rates per week")
for reviewer in reviewers.keys():
userteam = ""
for team in self.teams.keys():
if reviewer in self.teams[team]:
userteam = team
table.add_row([reviewer, userteam, reviewers[reviewer]])
return table
class ReportBaseChange(ReportTable):
@staticmethod
def get_approval_votes(patch):
        # Yes, the levels are in a slightly odd order:
        # a +2 or -2 is more important than any -1 or +1,
        # so we prefer them as the summary value
levels = ["-2", "2", "-1", "1"]
votes = {
"c": { "total": collections.defaultdict(int),
"list": [],
"summary": "",
"details": "",
},
"v": { "total": collections.defaultdict(int),
"list": [],
"summary": "",
"details": "",
},
"w": { "total": collections.defaultdict(int),
"list": [],
"summary": "",
"details": "",
},
}
for approval in patch.approvals:
got_type = approval.action[0:1].lower()
if got_type not in votes:
continue
vote = str(approval.value)
votes[got_type]["total"][vote] = votes[got_type]["total"][vote] + 1
votes[got_type]["list"].append(vote)
for key in votes.keys():
votes[key]["details"] = ",".join(votes[key]["list"])
vals = []
for level in levels:
if level in votes[key]["total"]:
votes[key]["summary"] = level
break
return votes
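    # Illustrative shape of the dict returned above (added comment; the numbers
    # are made up):
    #   {"c": {"total": {"2": 1, "-1": 2}, "list": ["2", "-1", "-1"],
    #          "summary": "2", "details": "2,-1,-1"},
    #    "v": {...}, "w": {...}}
    # i.e. per category (c=code review, v=verified/tests, w=workflow) it holds
    # the vote counts, the raw vote list, a comma-joined details string and a
    # single summary value that prefers a +2/-2 over any +1/-1.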
def approvals_mapfunc(rep, col, row):
patch = row.get_current_patch()
if patch is None:
LOG.error("No patch")
return ""
votes = ReportBaseChange.get_approval_votes(patch)
keys = list(votes.keys())
keys.sort(reverse=True)
data = " ".join(map(lambda val: "%s=%s" % (val, votes[val]["details"]), keys))
if rep.usecolor:
if votes["w"]["total"]["1"] > 0: # Stuff pending merge
return format_color(data, fg="blue", styles=["bold"])
elif votes["w"]["total"]["-1"] > 0: # Work-in-progress
return format_color(data, fg="magenta", styles=[])
elif votes["c"]["total"]["-2"] > 0: # Hard-nack from core
return format_color(data, fg="red", styles=["bold"])
elif votes["c"]["total"]["-1"] > 0 or votes["v"]["total"]["-1"] > 0: # Nack from any or bots
return format_color(data, fg="red", styles=[])
elif votes["c"]["total"]["2"] > 0: # Approval from core
return format_color(data, fg="green", styles=["bold"])
elif votes["c"]["total"]["1"] > 0: # Approval from any
return format_color(data, fg="green", styles=[])
else:
return data
else:
return data
def votes_mapfunc(rep, col, row):
patch = row.get_current_patch()
if patch is None:
LOG.error("No patch")
return ""
if col == "tests":
coltype = "v"
elif col == "reviews":
coltype = "c"
else:
coltype = "w"
votes = ReportBaseChange.get_approval_votes(patch)
data = "%2s" % votes[coltype]["summary"]
if rep.usecolor:
if votes[coltype]["total"]["-2"] > 0: # Hard-nack from core
return format_color(data, fg="red", styles=["bold"])
elif votes[coltype]["total"]["2"] > 0: # Approval from core
return format_color(data, fg="green", styles=["bold"])
elif votes[coltype]["total"]["-1"] > 0: # Soft-nack from any
return format_color(data, fg="red", styles=[])
elif votes[coltype]["total"]["1"] > 0: # Approval from any
return format_color(data, fg="green", styles=[])
else:
return data
else:
return data
def user_mapfunc(rep, col, row):
if not row.owner or not row.owner.username:
return "<unknown>"
return row.owner.username
def date_mapfunc(rep, col, row):
if col == "lastUpdated":
return format_date(row.lastUpdated)
else:
return format_date(row.createdOn)
def date_sortfunc(rep, col, row):
if col == "lastUpdated":
return row.lastUpdated
else:
return row.createdOn
COLUMNS = [
ReportOutputColumn("status", "Status", lambda rep, col, row: row.status),
ReportOutputColumn("topic", "Topic", lambda rep, col, row: row.topic, visible=False),
ReportOutputColumn("url", "URL", lambda rep, col, row: row.url),
ReportOutputColumn("owner", "Owner", user_mapfunc),
ReportOutputColumn("project", "Project", lambda rep, col, row: row.project, visible=False),
ReportOutputColumn("branch", "Branch", lambda rep, col, row: row.branch, visible=False),
ReportOutputColumn("subject", "Subject", lambda rep, col, row: row.subject, truncate=30),
ReportOutputColumn("createdOn", "Created", date_mapfunc, date_sortfunc),
ReportOutputColumn("lastUpdated", "Updated", date_mapfunc, date_sortfunc),
ReportOutputColumn("approvals", "Approvals", approvals_mapfunc, visible=False),
ReportOutputColumn("tests", "Tests", votes_mapfunc),
ReportOutputColumn("reviews", "Reviews", votes_mapfunc),
ReportOutputColumn("workflow", "Workflow", votes_mapfunc),
]
def __init__(self, client, usecolor=False):
super(ReportBaseChange, self).__init__(client, ReportBaseChange.COLUMNS,
sort="createdOn", reverse=False)
self.usecolor = usecolor
class ReportChanges(ReportBaseChange):
def __init__(self, client, projects=[], owners=[],
status=[], messages=[], branches=[], topics=[], reviewers=[],
approvals=[], files=[], rawquery=None, usecolor=False):
super(ReportChanges, self).__init__(client, usecolor)
self.projects = projects
self.owners = owners
self.status = status
self.messages = messages
self.branches = branches
self.topics = topics
self.reviewers = reviewers
self.approvals = approvals
self.files = files
self.rawquery = rawquery
def generate(self):
needFiles = False
if len(self.files) > 0:
needFiles = True
query = OperationQuery(self.client,
{
"project": self.projects,
"owner": self.owners,
"message": self.messages,
"branch": self.branches,
"topic": self.topics,
"status": self.status,
"reviewer": self.reviewers,
},
rawquery=self.rawquery,
patches=OperationQuery.PATCHES_CURRENT,
approvals=True,
files=needFiles)
def match_files(change):
if len(self.files) == 0:
return True
for filere in self.files:
for file in change.get_current_patch().files:
if re.search(filere, file.path):
return True
return False
table = self.new_table("Changes")
def querycb(change):
if match_files(change):
table.add_row(change)
query.run(querycb)
return table
class ReportToDoList(ReportBaseChange):
def __init__(self, client, projects=[], branches=[],
files=[], topics=[], reviewers=[], usecolor=False):
super(ReportToDoList, self).__init__(client, usecolor)
self.projects = projects
self.branches = branches
self.reviewers = reviewers
self.files = files
self.topics = topics
def filter(self, change):
return True
def generate(self):
needFiles = False
if len(self.files) > 0:
needFiles = True
query = OperationQuery(self.client,
{
"project": self.projects,
"status": [ OperationQuery.STATUS_OPEN ],
"branch": self.branches,
"topic": self.topics,
"reviewer": self.reviewers,
},
patches=OperationQuery.PATCHES_ALL,
approvals=True,
files=needFiles)
def match_files(change):
if len(self.files) == 0:
return True
for filere in self.files:
for patch in change.patches:
for file in patch.files:
if re.search(filere, file.path):
return True
return False
table = self.new_table("Changes To Do List")
def querycb(change):
if self.filter(change) and match_files(change):
table.add_row(change)
query.run(querycb)
return table
class ReportToDoListMine(ReportToDoList):
def __init__(self, client, username, projects=[],
branches=[], files=[], topics=[], usecolor=False):
'''
Report to provide a list of changes 'username' has
reviewed an older version of the patch, and needs
to provide feedback on latest version
'''
super(ReportToDoListMine, self).__init__(client,
projects,
reviewers=[ username ],
branches=branches,
files=files,
topics=topics,
usecolor=usecolor)
self.username = username
def filter(self, change):
if (not change.has_current_reviewers([self.username]) and
not change.has_owner([self.username])):
return True
return False
class ReportToDoListOthers(ReportToDoList):
def __init__(self, client, username, bots=[], projects=[],
branches=[], files=[], topics=[], usecolor=False):
'''
Report to provide a list of changes where 'username' has
never reviewed, but at least one other non-bot user has
provided review
'''
super(ReportToDoListOthers, self).__init__(client,
projects,
reviewers=[ "!", username ],
branches=branches,
files=files,
topics=topics,
usecolor=usecolor)
self.bots = bots
def filter(self, change):
# allchanges contains changes where 'username' has
# not reviewed any version of the patch. We want to
# filter out changes which only have bots, or have
# no reviewers at all.
if change.has_any_other_reviewers(self.bots):
return True
return False
class ReportToDoListAnyones(ReportToDoList):
def __init__(self, client, username, bots=[], projects=[],
branches=[], files=[], topics=[], usecolor=False):
'''
Report to provide a list of changes where at least
one other non-bot user has provided review
'''
super(ReportToDoListAnyones, self).__init__(client,
projects,
branches=branches,
files=files,
topics=topics,
usecolor=usecolor)
self.bots = bots
self.username = username
def filter(self, change):
if change.has_current_reviewers([self.username]):
return False
if change.has_any_other_reviewers(self.bots):
return True
return False
class ReportToDoListNoones(ReportToDoList):
def __init__(self, client, bots=[], projects=[],
branches=[], files=[], topics=[], usecolor=False):
'''
Report to provide a list of changes that no one
has ever reviewed
'''
super(ReportToDoListNoones, self).__init__(client,
projects,
branches=branches,
files=files,
topics=topics,
usecolor=usecolor)
self.bots = bots
def filter(self, change):
if not change.has_any_other_reviewers(self.bots):
return True
return False
class ReportToDoListApprovable(ReportToDoList):
def __init__(self, client, username, strict, projects=[],
branches=[], files=[], topics=[], usecolor=False):
'''
Report to provide a list of changes that no one
has ever reviewed
'''
super(ReportToDoListApprovable, self).__init__(client,
projects,
branches=branches,
files=files,
topics=topics,
usecolor=usecolor)
self.username = username
self.strict = strict
def filter(self, change):
if (change.has_current_approval(ModelApproval.ACTION_REVIEWED, 2) and
not change.has_owner([self.username]) and
not change.has_current_approval(ModelApproval.ACTION_WORKFLOW, -1) and
not change.has_current_approval(ModelApproval.ACTION_WORKFLOW, 1) and
not change.has_current_approval(ModelApproval.ACTION_REVIEWED, -2) and
not change.has_current_reviewers([self.username])):
if (self.strict and
change.has_current_approval(ModelApproval.ACTION_REVIEWED, -1)):
return False
return True
return False
class ReportToDoListExpirable(ReportToDoList):
def __init__(self, client, age=28, projects=[],
branches=[], files=[], topics=[], usecolor=False):
'''
Report to provide a list of changes that are
stale and can potentially be expired
'''
super(ReportToDoListExpirable, self).__init__(client,
projects,
branches=branches,
files=files,
topics=topics,
usecolor=usecolor)
self.age = age
def filter(self, change):
if change.get_current_reviewer_nack_age() > (self.age * 24 * 60 * 60):
return True
return False
class ReportOpenReviewStats(ReportBaseChange):
def __init__(self, client, projects, branch="master", topic="", days=7, usecolor=False):
super(ReportOpenReviewStats, self).__init__(client, usecolor)
self.projects = projects
self.branch = branch
self.topic = topic
self.days = days
@staticmethod
def average_age(changes, ages):
if len(changes) == 0:
return 0
total = 0
for change in changes:
total += ages[change]
return format_delta(total / len(changes))
@staticmethod
def median_age(changes, ages):
if len(changes) == 0:
return 0
total = 0
wantages = []
for change in changes:
wantages.append(ages[change])
wantages.sort()
return format_delta(wantages[int(len(wantages)/2)])
@staticmethod
def older_than(changes, ages, cutoffdays):
cutoff = cutoffdays * 24 * 60 * 60
older = 0
for change in changes:
if ages[change] > cutoff:
older = older + 1
return older
@staticmethod
def get_longest_changes(ids, changes, ages, count):
want = []
for id in sorted(ids, key=lambda x: ages[x]):
want.append(changes[id])
return want
def generate(self):
# We could query all projects at once, but if we do them
# individually it means we get better hit rate against the
# cache if the report is re-run for many different project
# combinations
agecurrent = {}
agefirst = {}
agenonnacked = {}
wait_reviewer = []
wait_submitter = []
changes = {}
for project in self.projects:
query = OperationQuery(self.client,
{
"project": [project],
"status": [OperationQuery.STATUS_OPEN],
"branch": [self.branch],
"topic": [self.topic],
},
patches=OperationQuery.PATCHES_ALL,
approvals=True)
def querycb(change):
if change.status != "NEW":
return
now = time.time()
current = change.get_current_patch()
first = change.get_first_patch()
nonnacked = change.get_reviewer_not_nacked_patch()
changes[change.id] = change
if current.is_nacked():
wait_submitter.append(change.id)
else:
wait_reviewer.append(change.id)
agecurrent[change.id] = current.get_age(now)
agefirst[change.id] = first.get_age(now)
if nonnacked:
agenonnacked[change.id] = nonnacked.get_age(now)
else:
agenonnacked[change.id] = 0
query.run(querycb)
compound = ReportOutputCompound()
summary = ReportOutputList([
ReportOutputColumn("nreviews", "Total open reviews", format="%d",
mapfunc=lambda rep, col, row: row[0] + row [1]),
ReportOutputColumn("waitsubmitter", "Waiting on submitter", format="%d",
mapfunc=lambda rep, col, row: row[0]),
ReportOutputColumn("waitreviewer", "Waiting on reviewer", format="%d",
mapfunc=lambda rep, col, row: row[1]),
], title="Review summary")
summary.set_row([len(wait_submitter), len(wait_reviewer)])
compound.add_report(summary)
lastrev = ReportOutputList([
ReportOutputColumn("average", "Average wait time",
mapfunc=lambda rep, col, row: row[0]),
ReportOutputColumn("median", "Median wait time",
mapfunc=lambda rep, col, row: row[1]),
ReportOutputColumn("stale", "Older than %d days" % self.days, format="%d",
mapfunc=lambda rep, col, row: row[2]),
], title="Summary since current revision")
lastrev.set_row([self.average_age(wait_reviewer, agecurrent),
self.median_age(wait_reviewer, agecurrent),
self.older_than(wait_reviewer, agecurrent, self.days)])
compound.add_report(lastrev)
firstrev = ReportOutputList([
ReportOutputColumn("average", "Average wait time",
mapfunc=lambda rep, col, row: row[0]),
ReportOutputColumn("median", "Median wait time",
mapfunc=lambda rep, col, row: row[1]),
], title="Summary since first revision")
firstrev.set_row([self.average_age(wait_reviewer, agefirst),
self.median_age(wait_reviewer, agefirst)])
compound.add_report(firstrev)
nonnackedrev = ReportOutputList([
ReportOutputColumn("average", "Average wait time",
mapfunc=lambda rep, col, row: row[0]),
ReportOutputColumn("median", "Median wait time",
mapfunc=lambda rep, col, row: row[1]),
], title="Summary since last revision without -1/-2 from reviewer")
nonnackedrev.set_row([self.average_age(wait_reviewer, agenonnacked),
self.median_age(wait_reviewer, agenonnacked)])
compound.add_report(nonnackedrev)
def waitlastmap(rep, col, row):
return format_delta(row.get_current_age())
def waitlastsort(rep, col, row):
return row.get_current_age()
waitlastrev = self.new_table("Longest waiting since current revision")
waitlastrev.add_column(ReportOutputColumn("age", "Age",
sortfunc=waitlastsort,
mapfunc=waitlastmap))
waitlastrev.sortcol = "age"
waitlastrev.reverse = True
for change in self.get_longest_changes(wait_reviewer, changes, agecurrent, 5):
waitlastrev.add_row(change)
compound.add_report(waitlastrev)
def waitfirstmap(rep, col, row):
return format_delta(row.get_first_age())
def waitfirstsort(rep, col, row):
return row.get_first_age()
waitfirstrev = self.new_table("Longest waiting since first revision")
waitfirstrev.add_column(ReportOutputColumn("age", "Age",
sortfunc=waitfirstsort,
mapfunc=waitfirstmap))
waitfirstrev.sortcol = "age"
waitfirstrev.reverse = True
for change in self.get_longest_changes(wait_reviewer, changes, agefirst, 5):
waitfirstrev.add_row(change)
compound.add_report(waitfirstrev)
def waitnonnackedmap(rep, col, row):
return format_delta(row.get_reviewer_not_nacked_age())
def waitnonnackedsort(rep, col, row):
return row.get_reviewer_not_nacked_age()
waitnonnackedrev = self.new_table("Longest waiting since last revision without -1/-2 from reviewer")
waitnonnackedrev.add_column(ReportOutputColumn("age", "Age",
sortfunc=waitnonnackedsort,
mapfunc=waitnonnackedmap))
waitnonnackedrev.sortcol = "age"
waitnonnackedrev.reverse = True
for change in self.get_longest_changes(wait_reviewer, changes, agenonnacked, 5):
waitnonnackedrev.add_row(change)
compound.add_report(waitnonnackedrev)
return compound
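# Minimal, self-contained demo of the output classes above (added example, not
# part of the original module): it builds a small ReportOutputTable by hand, so
# no Gerrit connection is needed and the column names and rows are made up.
if __name__ == "__main__":
    democols = [
        ReportOutputColumn("name", "Name",
                           mapfunc=lambda rep, col, row: row["name"]),
        ReportOutputColumn("count", "Count",
                           mapfunc=lambda rep, col, row: str(row["count"]),
                           align=ReportOutputColumn.ALIGN_RIGHT),
    ]
    demotable = ReportOutputTable(democols, sortcol="count", reverse=True,
                                  limit=None, title="Demo table")
    demotable.add_row({"name": "example", "count": 3})
    demotable.add_row({"name": "sample", "count": 7})
    demotable.display(ReportOutput.DISPLAY_MODE_TEXT)
    demotable.display(ReportOutput.DISPLAY_MODE_JSON)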
| apache-2.0 | 1,719,606,987,100,816,600 | 37.750195 | 116 | 0.540989 | false |
ebrensi/registry-frontend | ff.py | 1 | 1240 | #!/usr/bin/env python
# This script is for testing without having to host the flask app.
import folium
import pandas as pd
import os
from sqlalchemy import create_engine
import geojson
DATABASE_URL = os.environ["DATABASE_URL"]
STATES_GEOJSON_PATH = "static/us-states.json"
engine = create_engine(DATABASE_URL)
with engine.connect() as db:
query = "Select state, count(*) From registry Group By state;"
df = pd.read_sql_query(query, db)
with open(STATES_GEOJSON_PATH, "r") as file:
gj = geojson.load(file)
# Folium choropleth requires a one-to-one correspondence between GeoJSON
# features (state definitions) and shade values, so we will make a new
# GeoJSON object that is a FeatureCollection of only the states that we
# have data for.
relevant_features = [feature for feature in gj["features"]
if ("id" in feature) and
(feature["id"] in df["state"].values)]
gj_relevant = geojson.FeatureCollection(relevant_features)
geo_str = geojson.dumps(gj_relevant)
base_map = folium.Map([43, -100], zoom_start=5)
base_map.choropleth(
geo_str=geo_str,
data=df,
columns=['state', 'count'],
key_on='feature.id',
fill_color='PuBuGn',
)
base_map.save("map.html")
| mit | -3,929,384,207,766,329,000 | 25.956522 | 72 | 0.691935 | false |
cattleio/stampede | docs/do-demo/deploy.py | 1 | 6809 | #!/usr/bin/env python
import cattle
import sys
ZK_NODES = 3
REDIS_NODES = 3
API_SERVER_NODES = 3
PROCESS_SERVER_NODES = 3
AGENT_SERVER_NODES = 3
MYSQL_COMPUTE = 1
# Set if you want to override the cattle.jar in the Docker image with a custom one
URL = ''
TAG = 'latest'
client = cattle.from_env()
def wait(c):
return client.wait_success(c, timeout=120)
deleted = []
for c in client.list_container(removed_null=True):
if c.name != 'Agent':
client.delete(c)
print 'Deleting', c.name
deleted.append(c)
print 'Waiting for deleting'
for c in deleted:
wait(c)
print 'Done'
def set_link(instance, name, target):
instance = wait(instance)
for link in instance.instanceLinks():
if link.linkName == name:
print 'Linking {} to {}'.format(instance.name, target.name)
wait(client.update(link, targetInstanceId=target.id))
def deploy_zk():
# Deploying ZK is complicated....
# Create dummy ZK to link against, then we will create the circle
# We want it to be stopped so that ZooKeeper doesn't actually connect
print 'Creating Dummy ZK node'
zk_dummy = wait(client.create_container(imageUuid='docker:ibuildthecloud/zookeeper',
name='zk_dummy'))
zk_dummy = wait(zk_dummy.stop())
zks = []
for i in range(1, ZK_NODES + 1):
links = {}
for j in range(1, ZK_NODES + 1):
if j != i:
links['zk{}'.format(j)] = zk_dummy.id
zk = client.create_container(imageUuid='docker:ibuildthecloud/zookeeper',
name='zk{}'.format(i),
environment={
'ID': i
},
instanceTriggeredStop='restart',
instanceLinks=links)
print 'Created', zk.name
zks.append(wait(zk))
for zk_target in zks:
for zk in zks:
set_link(zk, zk_target.name, zk_target)
client.delete(zk_dummy)
return zks
def deploy_redis():
print 'Create Redis'
redises = []
for i in range(1, REDIS_NODES + 1):
redis = client.create_container(imageUuid='docker:ibuildthecloud/redis',
instanceTriggeredStop='restart',
name='redis{}'.format(i))
print 'Created', redis.name
redises.append(redis)
return redises
def haproxy(targets, name, listen_port):
links = {}
for i, c in enumerate(targets):
links['TARGET{}'.format(i)] = wait(c).id
return client.create_container(imageUuid='docker:ibuildthecloud/haproxy',
instanceLinks=links,
instanceTriggeredStop='restart',
name=name,
ports=['{}:80'.format(listen_port)])
zookeepers = deploy_zk()
redises = deploy_redis()
mysql = client.create_container(imageUuid='docker:ibuildthecloud/mysql',
compute=MYSQL_COMPUTE,
instanceTriggeredStop='restart',
ports=['9082:80'],
name='mysql')
print 'Created', mysql.name
graphite = client.create_container(imageUuid='docker:ibuildthecloud/graphite',
instanceTriggeredStop='restart',
ports=['9083:80'],
name='graphite')
print 'Created', graphite.name
es = client.create_container(imageUuid='docker:ibuildthecloud/logstash',
instanceTriggeredStop='restart',
ports=['9200:9200'],
name='logstash/elasticache')
print 'Created', es.name
kibana = client.create_container(imageUuid='docker:ibuildthecloud/kibana',
name='Kibana',
instanceTriggeredStop='restart',
ports=['9081:80'],
environment={
'ES_PORT_9200_TCP_ADDR': wait(es).hosts()[0].ipAddresses()[0].address,
'ES_PORT_9200_TCP_PORT': '9200'
})
print 'Created', kibana.name
print 'Create Cattle'
links = {
'gelf': wait(es).id,
'graphite': wait(graphite).id
}
instances = []
instances.extend(zookeepers)
instances.extend(redises)
instances.append(mysql)
for c in instances:
links[c.name] = wait(c).id
api_servers = []
agent_servers = []
for i in range(1, API_SERVER_NODES + 1):
c = client.create_container(imageUuid='docker:cattle/api-server:{}'.format(TAG),
name='API Server {}'.format(i),
environment={
'URL': URL,
'CATTLE_CATTLE_SERVER_ID': 'apiserver{}'.format(i)
},
instanceTriggeredStop='restart',
instanceLinks=links)
print 'Created', c.name
api_servers.append(c)
for i in range(1, PROCESS_SERVER_NODES + 1):
c = client.create_container(imageUuid='docker:cattle/process-server:{}'.format(TAG),
name='Process Server {}'.format(i),
environment={
'URL': URL,
'CATTLE_JAVA_OPTS': '-Xmx1024m',
'CATTLE_CATTLE_SERVER_ID': 'processserver{}'.format(i)
},
instanceTriggeredStop='restart',
instanceLinks=links)
print 'Created', c.name
for i in range(1, AGENT_SERVER_NODES + 1):
c = client.create_container(imageUuid='docker:cattle/agent-server:{}'.format(TAG),
name='Agent Server {}'.format(i),
environment={
'URL': URL,
'CATTLE_JAVA_OPTS': '-Xmx1024m',
'CATTLE_CATTLE_SERVER_ID': 'agentserver{}'.format(i)
},
instanceTriggeredStop='restart',
instanceLinks=links)
print 'Created', c.name
agent_servers.append(c)
h1 = haproxy(api_servers, 'Api Servers Load Balancer', 8080)
print 'Created', h1.name
h2 = haproxy(agent_servers, 'Agent Servers Load Balancer', 8081)
print 'Created', h2.name
wait(h1)
wait(h2)
| apache-2.0 | -6,914,724,294,481,272,000 | 33.21608 | 107 | 0.500661 | false |
Aydarkhan/cca | automata.py | 1 | 5250 | """Copyright 2010 Aydarkhanov Ruslan, Kurochkin Ilya, Rusinov Ivan
This file is part of CCA.
CCA is free software: you can redistribute it and/or modify it under
the terms of the GNU General Public License as published
by the Free Software Foundation, either version 2 of the License,
or (at your option) any later version.
CCA is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty
of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with CCA. If not, see http://www.gnu.org/licenses/.
"""
from state import *
class Automata(object):
def __init__(self, width=150, height=70, states=None):
self.width = width
self.height = height
if states == None:
self.states = [State("Dead", '-', "white", '0', [5]),
State("Alive", '+', "black", '1',
[0, 1, 4, 5, 6, 7, 8])]
else:
self.states = states
self.symbols = {}
self.st_sym = {}
for num, st in enumerate(self.states):
self.symbols[st.symbol] = num
self.st_sym[st.symbol] = st
self.field = []
for row in range(height):
self.field.append([])
for col in range(width):
self.field[row].append(self.states[0].symbol)
def next_step(self):
changed = []
for row in range(1, self.height - 1):
for col in range(1, self.width - 1):
symbol = self.field[row][col]
num = 0
for vert in range(row - 1, row + 2):
for horiz in range(col - 1, col + 2):
if self.field[vert][horiz] == symbol:
num += 1
if self.st_sym[symbol].next_state(num - 1):
changed.append((row, col))
for row in range(1, self.height - 1):
symbol1 = self.field[row][0]
symbol2 = self.field[row][self.width - 1]
num1 = 0
num2 = 0
for vert in range(row - 1, row + 2):
for horiz in [0, 1, self.width - 1]:
if self.field[vert][horiz] == symbol1:
num1 += 1
for horiz in [self.width - 2, self.width - 1, 0]:
if self.field[vert][horiz] == symbol2:
num2 += 1
if self.st_sym[symbol1].next_state(num1 - 1):
changed.append((row, 0))
if self.st_sym[symbol2].next_state(num2 - 1):
changed.append((row, self.width - 1))
for col in range(1, self.width - 1):
symbol1 = self.field[0][col]
symbol2 = self.field[self.height - 1][col]
num1 = 0
num2 = 0
for horiz in range(col - 1, col + 2):
for vert in [0, 1, self.height - 1]:
if self.field[vert][horiz] == symbol1:
num1 += 1
for vert in [self.height - 2, self.height - 1, 0]:
if self.field[vert][horiz] == symbol2:
num2 += 1
if self.st_sym[symbol1].next_state(num1 - 1):
changed.append((0, col))
if self.st_sym[symbol2].next_state(num2 - 1):
changed.append((self.height - 1, col))
for row, col in [(0, 0), (self.height - 1, self.width - 1),
(0, self.width - 1), (self.height - 1, 0)]:
symbol = self.field[row][col]
num = 0
for vert_long in range(row + self.height - 1,
row + self.height + 2):
for horiz_long in range(col + self.width - 1,
col + self.width + 2):
vert = vert_long % self.height
horiz = horiz_long % self.width
if self.field[vert][horiz] == symbol:
num += 1
if self.st_sym[symbol].next_state(num - 1):
changed.append((row, col))
for row, col in changed:
index = (self.symbols[self.field[row][col]] +
1) % len(self.states)
self.field[row][col] = self.states[index].symbol
return changed
def change_size(self, value, side):
"0-up, 1-right, 2-down, 3-left"
new_field = []
if side == 0:
self.height += value
for row in range(value):
new_field.append([])
for col in range(self.width):
new_field[row].append(self.states[0].symbol)
init = value
if value < 0:
init = 0
for row in range(init, self.height):
new_field.append([])
for col in range(self.width):
new_field[row].append(self.field[row - value][col])
if side == 2:
self.height += value
term = value
if value < 0:
term = 0
for row in range(self.height - term):
new_field.append([])
for col in range(self.width):
new_field[row].append(self.field[row][col])
for row in range(self.height - term, self.height):
new_field.append([])
for col in range(self.width):
new_field[row].append(self.states[0].symbol)
if side == 1:
self.width += value
term = value
if value < 0:
term = 0
for row in range(self.height):
new_field.append([])
for col in range(self.width - term):
new_field[row].append(self.field[row][col])
for row in range(self.height):
for col in range(self.width - term, self.width):
new_field[row].append(self.states[0].symbol)
if side == 3:
self.width += value
for row in range(self.height):
new_field.append([])
for col in range(value):
new_field[row].append(self.states[0].symbol)
init = value
if value < 0:
init = 0
for row in range(self.height):
for col in range(init, self.width):
new_field[row].append(self.field[row][col - value])
self.field = new_field
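# Illustrative usage (added example, not part of the original module); it only
# relies on the default dead/alive state set created in __init__ above.
if __name__ == "__main__":
    automaton = Automata(width=20, height=10)
    alive = automaton.states[1].symbol
    # Seed a small block of live cells in the middle of the field.
    for row in range(4, 7):
        for col in range(8, 12):
            automaton.field[row][col] = alive
    for step in range(5):
        changed = automaton.next_step()
        print("step %d: %d cells changed" % (step, len(changed)))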
| gpl-2.0 | 5,469,230,736,711,367,000 | 30.25 | 68 | 0.60781 | false |
LaRiffle/axa_challenge | fonction_py/train.py | 1 | 12400 | from fonction_py.tools import *
from fonction_py.preprocess import *
from sklearn import linear_model
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn import cross_validation
from sklearn.linear_model import LogisticRegression
from sklearn import tree
from sklearn import svm
from sklearn import decomposition
from sklearn.naive_bayes import GaussianNB
from sklearn.ensemble import RandomForestRegressor
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.grid_search import GridSearchCV
from sklearn.grid_search import RandomizedSearchCV
from scipy.stats import uniform as sp_randint
from sklearn import datasets
from sklearn.linear_model import Ridge
from fonction_py.tim import *
import time
def faireTout():
fields = ['DATE', 'DAY_OFF', 'WEEK_END', 'DAY_WE_DS', 'ASS_ASSIGNMENT', 'CSPL_RECEIVED_CALLS' ] # selectionne les colonnes à lire
c = pd.DataFrame()
    listmodel = faireListModel() # recupere le nom et les modeles de chaque ASS_ASSIGNMENT
#'Evenements', 'Gestion Amex'
#setFields = set(pd.read_csv("data/fields.txt", sep=";")['0'].values)
# resultat = pd.read_csv("data/submission.txt", sep="\t")
i=0
# res = []
start_time = time.time()
model = listmodel[24]
data=pd.read_csv("data/trainPure.csv", sep=";", usecols=fields) # LECTURE
resultat = pd.read_csv("data/submission.txt", sep="\t") # LECTURE
res=[]
for model in listmodel:
i = i+1
print(model[0])
x,y = preprocess(data.copy(), model[0]) # rajoute les features
model[1].fit(x, y)
#model.score(xTrain, yTrain)
(xTest, souvenir)=preprocessFINAL(x,model[0])
pred = model[1].predict(xTest)
pred[pred>max(y)*1.05]=max(y)*1.05
pred[pred<0]=0
        pred = np.round(pred).astype(int) # to int
        souvenir['prediction'] = pred
resultat=pd.merge(resultat, souvenir, how='left',on=['DATE', 'ASS_ASSIGNMENT'])
resultat=resultat.fillna(0)
resultat['prediction'] = resultat['prediction_x']+resultat['prediction_y']
del resultat['prediction_x']
del resultat['prediction_y']
x,y = preprocess(data.copy(), 'Téléphonie') # rajoute les features
#model.score(xTrain, yTrain)
(xTest, souvenir)=preprocessFINAL(x,'Téléphonie')
pred=telephoniePred(x,y,xTest)
pred[pred>max(y)*1.05]=max(y)*1.05
pred[pred<0]=0
    pred = np.round(pred).astype(int)
    souvenir['prediction'] = pred
resultat=pd.merge(resultat, souvenir, how='left',on=['DATE', 'ASS_ASSIGNMENT'])
resultat=resultat.fillna(0)
resultat['prediction'] = resultat['prediction_x']+resultat['prediction_y']
del resultat['prediction_x']
del resultat['prediction_y']
    pd.DataFrame(res).to_csv("reslist.csv", sep=";", decimal=",")
    resultat.to_csv("vraipred.txt", sep="\t", index =False)
    resultat['prediction']=resultat['prediction'].astype(int)
    resultat.to_csv("pouranalyse.txt", sep="\t", index =False, encoding='utf-8')
return resultat
def faireListModel():
return [('CAT', linear_model.LinearRegression()),
('CMS', RandomForestRegressor(bootstrap=False, criterion='mse', max_depth=5,
max_features=30, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=10, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Crises',linear_model.LinearRegression()),
('Domicile', RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=30,
max_features=30, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=90, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Gestion',RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=30,
max_features='auto', max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=100, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Gestion - Accueil Telephonique',RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=20,
max_features=30, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=70, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Gestion Assurances',RandomForestRegressor(bootstrap=False, criterion='mse', max_depth=20,
max_features=30, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=20, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Gestion Clients', RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=10,
max_features=90, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=50, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Gestion DZ', RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=5,
max_features=30, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=30, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Gestion Relation Clienteles',RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=10,
max_features=90, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=110, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Gestion Renault', RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=30,
max_features=50, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=30, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Japon',RandomForestRegressor(bootstrap=False, criterion='mse', max_depth=10,
max_features=30, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=30, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Manager',RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=10,
max_features=30, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=30, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Mécanicien',RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=20,
max_features='auto', max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=100, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Médical',RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=30,
max_features='auto', max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=100, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Nuit', RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=20,
max_features='auto', max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=100, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Prestataires',RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=20,
max_features='auto', max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=100, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('RENAULT',RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=80,
max_features='auto', max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=100, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('RTC',RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=20,
max_features='auto', max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=100, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Regulation Medicale',linear_model.LinearRegression()),
('SAP',RandomForestRegressor(bootstrap=False, criterion='mse', max_depth=20,
max_features=30, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=30, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Services',RandomForestRegressor(bootstrap=False, criterion='mse', max_depth=30,
max_features=30, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=30, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Tech. Axa',RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=20,
max_features='auto', max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=100, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Tech. Inter',RandomForestRegressor(bootstrap=False, criterion='mse', max_depth=30,
max_features=30, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=30, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Tech. Total',RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=70,
max_features='auto', max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=100, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Téléphonie',GradientBoostingRegressor(loss='huber', alpha=0.9,n_estimators=100, max_depth=3,learning_rate=.1, min_samples_leaf=9,min_samples_split=9) )] | mit | -4,974,344,228,029,968,000 | 55.322727 | 236 | 0.66021 | false |
nonamenix/yandex-vesna-generator | yandex_vesna_generator/vesna.py | 1 | 2537 | # -*- coding: utf-8 -*-
from lxml import etree
from slugify import slugify
class Entry(object):
def __init__(self, title="", paragraphs=[], themes=[], **kwargs):
self.title = title
self.paragraphs = paragraphs
self.themes = themes
self.header_wrapper = kwargs.get("header_wrapper", "h2")
self.paragraph_wrapper = kwargs.get("paragraph_wrapper", "p")
self.slug = slugify(title, to_lower=True)
self.description = self.paragraphs[0][0:kwargs.get("description_length", 220)]
def render_html(self):
html = self.header
html += self.body
return html
@property
def header(self):
return "<%(wrapper)s>%(title)s</%(wrapper)s> \n" % {
'title': self.title,
'wrapper': self.header_wrapper
}
@property
def body(self):
return "".join(["<%(wrapper)s>%(text)s</$(wrapper)s> \n" % {
"text": p,
"wrapper": self.paragraph_wrapper
} for p in self.paragraphs])
def __repr__(self):
return '<Entry theme="%s" id="%s">' % (", ".join(self.themes), hex(id(self)))
def __getitem__(self, field):
return self.__dict__[field]
class VesnaGenerator(object):
""" Class for generate crazy text on your site """
# Themes
AVAILABLE_THEMES = [
'astronomy', 'geology', 'gyroscope', 'literature', 'marketing', 'mathematics', 'music', 'polit',
'agrobiologia', 'law', 'psychology', 'geography', 'physics', 'philosophy', 'chemistry']
def __init__(self, themes=[], entry_options={}):
self.themes = [theme for theme in themes if theme in self.AVAILABLE_THEMES] or self.AVAILABLE_THEMES
self.entry_options = entry_options
# Generate yandex vesna url
self.base_url = "http://referats.yandex.ru/referats/"
self.url = self.base_url + "?t=" + "+".join(self.themes)
self.entries = []
def generate_entry(self):
self.parser = etree.HTMLParser(recover=True)
self.doc = etree.parse(self.url, self.parser)
title = self.doc.xpath('/html/body/div[2]/div[1]/div[1]/div/div[2]/div[1]/strong')[0].text
title = title.encode('utf-8').replace('Тема: «', '').replace('»', '').decode('utf-8')
paragraps = self.doc.xpath('/html/body/div[2]/div[1]/div[1]/div/div[2]/div[1]/p')
return Entry(
title=title,
paragraphs=[p.text for p in paragraps],
themes=self.themes,
**self.entry_options
) | apache-2.0 | -2,670,042,156,606,405,000 | 33.216216 | 108 | 0.575267 | false |
razorpay/razorpay-python | tests/test_client_utility.py | 1 | 1858 | import responses
from .helpers import mock_file, ClientTestCase
from razorpay.errors import SignatureVerificationError
class TestClientValidator(ClientTestCase):
def setUp(self):
super(TestClientValidator, self).setUp()
@responses.activate
def test_verify_payment_signature(self):
sig = 'b2335e3b0801106b84a7faff035df56ecffde06918c9ddd1f0fafbb37a51cc89'
parameters = {}
parameters['razorpay_order_id'] = 'fake_order_id'
parameters['razorpay_payment_id'] = 'fake_payment_id'
parameters['razorpay_signature'] = sig
self.assertEqual(
self.client.utility.verify_payment_signature(parameters),
True)
@responses.activate
def test_verify_payment_signature_with_exception(self):
parameters = {}
parameters['razorpay_order_id'] = 'fake_order_id'
parameters['razorpay_payment_id'] = 'fake_payment_id'
parameters['razorpay_signature'] = 'test_signature'
self.assertRaises(
SignatureVerificationError,
self.client.utility.verify_payment_signature,
parameters)
@responses.activate
def test_verify_webhook_signature(self):
secret = self.client.auth[1]
sig = 'd60e67fd884556c045e9be7dad57903e33efc7172c17c6e3ef77db42d2b366e9'
body = mock_file('fake_payment_authorized_webhook')
self.assertEqual(
self.client.utility.verify_webhook_signature(body, sig, secret),
True)
@responses.activate
def test_verify_webhook_signature_with_exception(self):
secret = self.client.auth[1]
sig = 'test_signature'
body = ''
self.assertRaises(
SignatureVerificationError,
self.client.utility.verify_webhook_signature,
body,
sig,
secret)
| mit | 4,018,015,657,456,469,500 | 31.596491 | 80 | 0.653929 | false |
nosuchtim/VizBench | src/PyLoopyCam/testit.py | 1 | 5268 | """
Copyright (c) 2015, Tim Thompson
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of Tim Thompson, nosuch.com, nor the names of
any contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import sys
import time
import traceback
import thread
import threading
import copy
import asyncore
import asynchat
import socket
import sys
import re
import xml.dom.minidom as xmldom
import string
import pygame.pypm
import os.path
import os, pygame
import pickle
import random
from os.path import isdir, isfile, isabs, abspath
from urllib import quote, unquote
from threading import *
from ctypes import *
from time import sleep
from Queue import Queue, Empty
from xml.sax import saxutils
from xml.dom import Node
from traceback import format_exc
from dircache import listdir
from pygame.locals import *
from thread import *
from ffff import *
global debug
debug = False
global debugosc
debugosc = False
global debugosc2
debugosc2 = False
class NthEventServer(Thread):
"""
    Provides an event stream that can serve multiple listeners, keeping
    track of what fingers are currently down, smoothing drag motion, etc.
"""
oneServer = None
def __init__(self):
Thread.__init__(self)
self.setDaemon(True)
NthEventServer.oneServer = self
print "NthEventServer.oneServer = ", NthEventServer.oneServer
self.dispenser = PushedEventDispenser()
self.throttle = 0.005
self.throttle = 0.0
self.inputs = {}
self.outputs = {}
self.cv = threading.Condition()
self.events = {}
self.firstevent = 0
self.nextevent = 0
self.osc_recipients = {"music":[], "graphic":[]}
self.start()
self.too_old_seconds = 30.0
self.event_inputs = {}
self.forward_inputs = {}
self.forward_finger = None
self.tm0 = time.time()
self.osc_count = 0
def send_osc(self, o, apptype):
(msg_addr, msg_data) = o
if msg_addr == "":
print "No msg_addr value in send_osc?"
return
now = time.time()
self.osc_count += 1
if now - self.tm0 > 1.0:
print "OSC Per second = ", self.osc_count
self.osc_count = 0
self.tm0 = now
msg_addr = str(msg_addr)
b = createBinaryMsg(msg_addr, msg_data)
# print "createBinary msg_addr=",msg_addr," msg_data=",msg_data
print("SHOULD BE sending %s OSC=%s" % (apptype, o.__str__()))
# r.osc_socket.sendto(b, (r.osc_addr, r.osc_port))
def main():
debug = True
httpaddr = "127.0.0.1"
httpport = 7777
rootdir = None
print "SYS.ARGV len=", len(sys.argv)
argn = len(sys.argv)
if len(sys.argv) == 1:
print "NO arguments..."
else:
argn = 1
if sys.argv[argn] == "-d":
debug = True
print "Debug is True"
argn += 1
else:
debug = False
argn += 1
for i in range(argn, len (sys.argv)):
a = sys.argv[i]
print("a = ", a)
if a.startswith("rootdir:"):
rootdir = abspath(a[8:])
elif a.startswith("httpaddr:"):
httpaddr = a[9:]
elif a.startswith("httpport:"):
httpport = int(a[9:])
try:
import os
position = (-800, 0)
position = (600, 360)
os.environ['SDL_VIDEO_WINDOW_POS'] = str(position[0]) + "," + str(position[1])
pygame.init()
width = 250
height = 500
flags = pygame.SRCALPHA
# from panel import NthControlPanel
# ui = NthControlPanel(width, height, flags)
# time.sleep(1.0)
# pygame.event.set_grab(True)
try:
ffff = Ffff("localhost",80)
except:
print "EXCEPT caught in creating Ffff! Exception=", format_exc()
plugin = ffff.get_ffgl("Twisted")
param = plugin.get_param("Twirl")
# ffff.set_all_params(plugin,1.0)
for nm in plugin.param:
p = plugin.param[nm]
val = random.random() % 1.0
ffff.change_plugin_param_val(plugin,p,val)
except KeyboardInterrupt:
print("KeyboardInterrupt received...\n");
# server.shutdown_quick()
except:
s = format_exc()
if not re.search(".*shutdown_quick.*", s):
print("Exception while running myserver?\n");
print(s)
# server.shutdown_quick()
if __name__ == '__main__':
main()
# import cProfile
# cProfile.run('main()')
| mit | 7,051,323,283,669,740,000 | 24.205742 | 80 | 0.705201 | false |
juntalis/aio-pika | docs/source/rabbitmq-tutorial/examples/3-publish-subscribe/receive_logs.py | 1 | 1064 | import asyncio
from aio_pika import connect, IncomingMessage, ExchangeType
loop = asyncio.get_event_loop()
def on_message(message: IncomingMessage):
with message.process():
print("[x] %r" % message.body)
async def main():
# Perform connection
connection = await connect("amqp://guest:guest@localhost/", loop=loop)
# Creating a channel
channel = await connection.channel()
await channel.set_qos(prefetch_count=1)
logs_exchange = await channel.declare_exchange(
'logs',
ExchangeType.FANOUT
)
# Declaring queue
queue = await channel.declare_queue(exclusive=True)
# Binding the queue to the exchange
await queue.bind(logs_exchange)
# Start listening the queue with name 'task_queue'
queue.consume(on_message)
if __name__ == "__main__":
loop = asyncio.get_event_loop()
loop.create_task(main())
# we enter a never-ending loop that waits for data and runs callbacks whenever necessary.
print(' [*] Waiting for logs. To exit press CTRL+C')
loop.run_forever()
| apache-2.0 | 1,176,001,167,043,564,000 | 24.95122 | 93 | 0.675752 | false |
griddynamics/bunch | lettuce_bunch/dependencies.py | 1 | 2875 | # -*- coding: utf-8 -*-
# <Bunch - BDD test tool for Lettuce scenarios>
# Copyright (c) 2012 Grid Dynamics Consulting Services, Inc, All Rights Reserved
# http://www.griddynamics.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from exceptions import CyclicDependencySpecification
from topsort import topsort_levels,CycleError
from itertools import chain, tee, izip, product
def pairwise(iterable):
a, b = tee(iterable)
next(b)
return izip(a, b)
def dependency_lists_to_pairs(dependency_lists):
return chain(*(pairwise(dep_list) for dep_list in dependency_lists))
def dependency_groups_to_pairs(groups):
return chain(*(product(a,b) for a,b in pairwise(groups)))
def split_solitaries(deps):
solitaries = []
linked = []
for dep in deps:
if len(dep) == 1 and len(dep[0]) > 0:
solitaries.append(dep[0])
else:
linked.append(dep)
return solitaries, linked
def filter_empties(deps):
return filter(None, deps)
def combine_fixture_deps(deps):
solitaries, linked = split_solitaries(filter_empties(deps))
try:
result = [sorted(group) for group in topsort_levels(chain(*map(dependency_groups_to_pairs, linked)))]
for solitary in solitaries:
if solitary not in result:
result.append(solitary)
except CycleError as cycle_details:
raise CyclicDependencySpecification(cycle_details)
return result
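# --- Illustrative usage sketch (not part of the original module) ---
# Assuming one ordered fixture chain ("db" before "schema" before "data") and
# one solitary group ("cache") with no ordering constraints:
#
#     deps = [
#         [("db",), ("schema",), ("data",)],
#         [("cache",)],
#     ]
#     combine_fixture_deps(deps)
#     # -> roughly [['db'], ['schema'], ['data'], ('cache',)]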
| gpl-3.0 | 8,455,682,489,359,907,000 | 39.666667 | 109 | 0.712348 | false |
tonyshardlow/reg_sde | run_pf.py | 1 | 1560 | from __future__ import (absolute_import, division,
print_function, unicode_literals)
exec(open("ground.py").read())
# mine
import hamiltonian
import diffeo
import sde
from utility import *
#
#
# all data defined in utility (exp2,...)
#
def run(dict):
import os.path
if 'fname' in dict:
filename=dict['fname']
else:
print("No filename given")
exit(1)
print("filename: ",filename+dict['ext'])
#
G=hamiltonian.GaussGreen(dict['ell'],0)
no_steps=dict['no_steps']
#
SDE = sde.SDE(G)
SDE.set_no_steps(no_steps)
SDE.set_landmarks(dict['landmarks_n'])
SDE.set_lam_beta(dict['lam'],dict['beta'],True)
# plot a push-forward sample (with current shape)
plot_setup()
plt.axis('equal')
plt.axis('off')
Q0=dict['landmarks'][0,:,:]
D=SDE.sample_push_forward(Q0)
D.plot_qpath_01(0)
D.plot_warped_grid(10)
plt.savefig(filename+dict['ext']+'.pdf',bbox_inches='tight')
print("...finished.")
#
####################################################################
if __name__ == "__main__":
# do this
plt.ion()
noise_var=0.2
dict=exp1(noise_var)
#dict=exp2(noise_var)
#dict=exp4(noise_var)
# dict=exp4(noise_var)
dict['lam']=0.5
scale=1.0e1;betas=np.array([1., 2., 4.0, 8.])*scale
exts=['a_pf', 'b_pf', 'c_pf', 'd_pf']
for i in range(4):
print("=======")
dict['beta']=betas[i]
dict['ext']=exts[i]
run(dict)
| mit | 5,528,959,925,701,617,000 | 25.857143 | 68 | 0.523077 | false |
ge0rgi/cinder | cinder/tests/unit/image/test_cache.py | 1 | 12125 | # Copyright (C) 2015 Pure Storage, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from datetime import timedelta
import ddt
import mock
from oslo_utils import timeutils
from cinder import context as ctxt
from cinder.db.sqlalchemy import models
from cinder.image import cache as image_cache
from cinder import objects
from cinder import test
from cinder.tests.unit import fake_constants as fake
@ddt.ddt
class ImageVolumeCacheTestCase(test.TestCase):
def setUp(self):
super(ImageVolumeCacheTestCase, self).setUp()
self.mock_db = mock.Mock()
self.mock_volume_api = mock.Mock()
self.context = ctxt.get_admin_context()
self.volume = models.Volume()
vol_params = {'id': fake.VOLUME_ID,
'host': 'foo@bar#whatever',
'cluster_name': 'cluster',
'size': 0}
self.volume.update(vol_params)
self.volume_ovo = objects.Volume(self.context, **vol_params)
def _build_cache(self, max_gb=0, max_count=0):
cache = image_cache.ImageVolumeCache(self.mock_db,
self.mock_volume_api,
max_gb,
max_count)
cache.notifier = self.notifier
return cache
def _build_entry(self, size=10):
entry = {
'id': 1,
'host': 'test@foo#bar',
'cluster_name': 'cluster@foo#bar',
'image_id': 'c7a8b8d4-e519-46c7-a0df-ddf1b9b9fff2',
'image_updated_at': timeutils.utcnow(with_timezone=True),
'volume_id': '70a599e0-31e7-49b7-b260-868f441e862b',
'size': size,
'last_used': timeutils.utcnow(with_timezone=True)
}
return entry
def test_get_by_image_volume(self):
cache = self._build_cache()
ret = {'id': 1}
volume_id = '70a599e0-31e7-49b7-b260-868f441e862b'
self.mock_db.image_volume_cache_get_by_volume_id.return_value = ret
entry = cache.get_by_image_volume(self.context, volume_id)
self.assertEqual(ret, entry)
self.mock_db.image_volume_cache_get_by_volume_id.return_value = None
entry = cache.get_by_image_volume(self.context, volume_id)
self.assertIsNone(entry)
def test_evict(self):
cache = self._build_cache()
entry = self._build_entry()
cache.evict(self.context, entry)
self.mock_db.image_volume_cache_delete.assert_called_once_with(
self.context,
entry['volume_id']
)
msg = self.notifier.notifications[0]
self.assertEqual('image_volume_cache.evict', msg['event_type'])
self.assertEqual('INFO', msg['priority'])
self.assertEqual(entry['host'], msg['payload']['host'])
self.assertEqual(entry['image_id'], msg['payload']['image_id'])
self.assertEqual(1, len(self.notifier.notifications))
@ddt.data(True, False)
def test_get_entry(self, clustered):
cache = self._build_cache()
entry = self._build_entry()
image_meta = {
'is_public': True,
'owner': '70a599e0-31e7-49b7-b260-868f441e862b',
'properties': {
'virtual_size': '1.7'
},
'updated_at': entry['image_updated_at']
}
(self.mock_db.
image_volume_cache_get_and_update_last_used.return_value) = entry
if not clustered:
self.volume_ovo.cluster_name = None
expect = {'host': self.volume.host}
else:
expect = {'cluster_name': self.volume.cluster_name}
found_entry = cache.get_entry(self.context,
self.volume_ovo,
entry['image_id'],
image_meta)
self.assertDictEqual(entry, found_entry)
(self.mock_db.
image_volume_cache_get_and_update_last_used.assert_called_once_with)(
self.context,
entry['image_id'],
**expect
)
msg = self.notifier.notifications[0]
self.assertEqual('image_volume_cache.hit', msg['event_type'])
self.assertEqual('INFO', msg['priority'])
self.assertEqual(entry['host'], msg['payload']['host'])
self.assertEqual(entry['image_id'], msg['payload']['image_id'])
self.assertEqual(1, len(self.notifier.notifications))
def test_get_entry_not_exists(self):
cache = self._build_cache()
image_meta = {
'is_public': True,
'owner': '70a599e0-31e7-49b7-b260-868f441e862b',
'properties': {
'virtual_size': '1.7'
},
'updated_at': timeutils.utcnow(with_timezone=True)
}
image_id = 'c7a8b8d4-e519-46c7-a0df-ddf1b9b9fff2'
(self.mock_db.
image_volume_cache_get_and_update_last_used.return_value) = None
found_entry = cache.get_entry(self.context,
self.volume_ovo,
image_id,
image_meta)
self.assertIsNone(found_entry)
msg = self.notifier.notifications[0]
self.assertEqual('image_volume_cache.miss', msg['event_type'])
self.assertEqual('INFO', msg['priority'])
self.assertEqual(self.volume.host, msg['payload']['host'])
self.assertEqual(image_id, msg['payload']['image_id'])
self.assertEqual(1, len(self.notifier.notifications))
@mock.patch('cinder.objects.Volume.get_by_id')
def test_get_entry_needs_update(self, mock_volume_by_id):
cache = self._build_cache()
entry = self._build_entry()
image_meta = {
'is_public': True,
'owner': '70a599e0-31e7-49b7-b260-868f441e862b',
'properties': {
'virtual_size': '1.7'
},
'updated_at': entry['image_updated_at'] + timedelta(hours=2)
}
(self.mock_db.
image_volume_cache_get_and_update_last_used.return_value) = entry
mock_volume = mock.MagicMock()
mock_volume_by_id.return_value = mock_volume
found_entry = cache.get_entry(self.context,
self.volume_ovo,
entry['image_id'],
image_meta)
# Expect that the cache entry is not returned and the image-volume
# for it is deleted.
self.assertIsNone(found_entry)
self.mock_volume_api.delete.assert_called_with(self.context,
mock_volume)
msg = self.notifier.notifications[0]
self.assertEqual('image_volume_cache.miss', msg['event_type'])
self.assertEqual('INFO', msg['priority'])
self.assertEqual(self.volume.host, msg['payload']['host'])
self.assertEqual(entry['image_id'], msg['payload']['image_id'])
self.assertEqual(1, len(self.notifier.notifications))
def test_create_cache_entry(self):
cache = self._build_cache()
entry = self._build_entry()
image_meta = {
'updated_at': entry['image_updated_at']
}
self.mock_db.image_volume_cache_create.return_value = entry
created_entry = cache.create_cache_entry(self.context,
self.volume_ovo,
entry['image_id'],
image_meta)
self.assertEqual(entry, created_entry)
self.mock_db.image_volume_cache_create.assert_called_once_with(
self.context,
self.volume_ovo.host,
self.volume_ovo.cluster_name,
entry['image_id'],
entry['image_updated_at'].replace(tzinfo=None),
self.volume_ovo.id,
self.volume_ovo.size
)
def test_ensure_space_unlimited(self):
cache = self._build_cache(max_gb=0, max_count=0)
has_space = cache.ensure_space(self.context, self.volume)
self.assertTrue(has_space)
self.volume.size = 500
has_space = cache.ensure_space(self.context, self.volume)
self.assertTrue(has_space)
def test_ensure_space_no_entries(self):
cache = self._build_cache(max_gb=100, max_count=10)
self.mock_db.image_volume_cache_get_all.return_value = []
self.volume_ovo.size = 5
has_space = cache.ensure_space(self.context, self.volume_ovo)
self.assertTrue(has_space)
self.volume_ovo.size = 101
has_space = cache.ensure_space(self.context, self.volume_ovo)
self.assertFalse(has_space)
def test_ensure_space_need_gb(self):
cache = self._build_cache(max_gb=30, max_count=10)
mock_delete = mock.patch.object(cache, '_delete_image_volume').start()
entries = []
entry1 = self._build_entry(size=12)
entries.append(entry1)
entry2 = self._build_entry(size=5)
entries.append(entry2)
entry3 = self._build_entry(size=10)
entries.append(entry3)
self.mock_db.image_volume_cache_get_all.return_value = entries
self.volume_ovo.size = 15
has_space = cache.ensure_space(self.context, self.volume_ovo)
self.assertTrue(has_space)
self.assertEqual(2, mock_delete.call_count)
mock_delete.assert_any_call(self.context, entry2)
mock_delete.assert_any_call(self.context, entry3)
def test_ensure_space_need_count(self):
cache = self._build_cache(max_gb=30, max_count=2)
mock_delete = mock.patch.object(cache, '_delete_image_volume').start()
entries = []
entry1 = self._build_entry(size=10)
entries.append(entry1)
entry2 = self._build_entry(size=5)
entries.append(entry2)
self.mock_db.image_volume_cache_get_all.return_value = entries
self.volume_ovo.size = 12
has_space = cache.ensure_space(self.context, self.volume_ovo)
self.assertTrue(has_space)
self.assertEqual(1, mock_delete.call_count)
mock_delete.assert_any_call(self.context, entry2)
def test_ensure_space_need_gb_and_count(self):
cache = self._build_cache(max_gb=30, max_count=3)
mock_delete = mock.patch.object(cache, '_delete_image_volume').start()
entries = []
entry1 = self._build_entry(size=10)
entries.append(entry1)
entry2 = self._build_entry(size=5)
entries.append(entry2)
entry3 = self._build_entry(size=12)
entries.append(entry3)
self.mock_db.image_volume_cache_get_all.return_value = entries
self.volume_ovo.size = 16
has_space = cache.ensure_space(self.context, self.volume_ovo)
self.assertTrue(has_space)
self.assertEqual(2, mock_delete.call_count)
mock_delete.assert_any_call(self.context, entry2)
mock_delete.assert_any_call(self.context, entry3)
def test_ensure_space_cant_free_enough_gb(self):
cache = self._build_cache(max_gb=30, max_count=10)
mock_delete = mock.patch.object(cache, '_delete_image_volume').start()
        entries = [self._build_entry(size=25)]
self.mock_db.image_volume_cache_get_all.return_value = entries
self.volume_ovo.size = 50
has_space = cache.ensure_space(self.context, self.volume_ovo)
self.assertFalse(has_space)
mock_delete.assert_not_called()
| apache-2.0 | 2,970,020,564,799,283,700 | 38.754098 | 78 | 0.583175 | false |
xbed/Mixly_Arduino | mixly_arduino/mpBuild/ESP32_MixGo/lib/mixgo.py | 1 | 5214 | from machine import Pin
from machine import PWM
from machine import ADC
from machine import DAC
from machine import I2C
from machine import Timer
from machine import RTC
from machine import TouchPad
import time
from neopixel import NeoPixel
def get_brightness(pin = 39):
return ADCSensor(pin).read()
def get_soundlevel(pin = 35):
return ADCSensor(pin).read()
# Button
class Button:
def __init__(self, pin):
from machine import Pin
self.pin = Pin(pin, Pin.IN)
def get_presses(self, delay = 1):
last_time, last_state, presses = time.time(), 0, 0
while time.time() < last_time + delay:
time.sleep_ms(50)
if last_state == 0 and self.pin.value() == 1:
last_state = 1
if last_state == 1 and self.pin.value() == 0:
last_state, presses = 0, presses + 1
return presses
def is_pressed(self, flag = 0):
return self.pin.value() == flag
def was_pressed(self, flag = 0):
last_state = self.pin.value()
if flag:
if not last_state:
return False
else:
while self.pin.value():
time.sleep_ms(10)
return True
else:
if last_state:
return False
else:
while not self.pin.value():
time.sleep_ms(10)
return True
def irq(self, handler, trigger):
self.pin.irq(handler = handler, trigger = trigger)
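# Illustrative usage sketch (not part of the original module): poll one of the
# on-board buttons declared at the bottom of this file and toggle a LED.
#   if button_a.was_pressed():
#       led1.setonoff(-1)  # -1 toggles, per the led class below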
# Pin
class MyPin(Pin):
def write_digital(self,val):
self.init(Pin.OUT)
self.value(val)
def read_digital(self):
self.init(Pin.IN)
return self.value()
def write_analog(self,val):
id = int(str(self)[4:-1]) #unsafe!
self = PWM(Pin(id),duty=val)
def dac_write(self,val):
id = int(str(self)[4:-1]) #unsafe!
self = DAC(Pin(id)).write(val)
def read_analog(self):
id = int(str(self)[4:-1]) #unsafe!
self = ADC(Pin(id))
return self.read()
def set_frequency(self,val):
id = int(str(self)[4:-1])
self = PWM(Pin(id),freq=val)
def is_touched(self):
id = int(str(self)[4:-1]) #unsafe!
if id in (0,2,4,12,13,14,15,27,32,33):
# print(TouchPad(Pin(id)).read())
return (TouchPad(Pin(id)).read() - 150 < 0)
else:
self.init(Pin.IN)
return self.value() == 1
class Infrared(MyPin):
def near(self):
id = int(str(self)[4:-1]) #unsafe!
pin15=Pin(15,Pin.OUT)
pin15.value(1)
adc=ADC(Pin(id))
adc.atten(ADC.ATTN_11DB)
approximate =adc.read()
pin15.value(0)
return approximate
# Servo
class Servo:
def __init__(self,pin):
self.pin=pin
def write_angle(self,angle):
id = int(str(self.pin)[4:-1])
PWM(Pin(id),freq=50,duty=int(40 + 75 * angle / 180))
# Sonar
class Sonar:
def __init__(self, trig, echo):
self.trig=Pin(trig, Pin.OUT)
self.echo=Pin(echo, Pin.IN)
def checkdist(self):
self.trig.value(0)
self.echo.value(0)
self.trig.value(1)
time.sleep_us(10)
self.trig.value(0)
while(self.echo.value()==0):
pass
t1 = time.ticks_us()
while(self.echo.value()==1):
pass
t2 = time.ticks_us()
return round(time.ticks_diff(t2, t1) / 10000 * 340 / 2, 2)
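# Illustrative usage sketch (not part of the original module); the trigger and
# echo pin numbers are placeholders that depend on the wiring.
#   sonar = Sonar(trig=13, echo=14)
#   distance_cm = sonar.checkdist()  # approximate distance in centimetres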
class led:
def __init__(self, pin, flag=1):
self.val = flag
self.pin = pin
self.flag = flag
def setbrightness(self,val):
self.val = val
if self.flag:
PWM(Pin(self.pin)).duty(self.val)
else:
PWM(Pin(self.pin)).duty(1023 - self.val)
def setonoff(self,val):
if(val == -1):
Pin(self.pin,Pin.OUT).value(1 - Pin(self.pin).value())
elif(val == 1):
Pin(self.pin,Pin.OUT).value(self.flag)
elif(val == 0):
Pin(self.pin,Pin.OUT).value(1 - self.flag)
def getonoff(self):
if self.flag:
return Pin(self.pin).value()
else:
return 1 - Pin(self.pin).value()
class ADCSensor:
def __init__(self,pin):
self.adc=ADC(Pin(pin))
self.adc.atten(ADC.ATTN_11DB)
def read(self):
return self.adc.read()
class RGB:
    def __init__(self, pin, num):
        # Hold a NeoPixel instance; the original rebinding of ``self`` had no effect.
        self.np = NeoPixel(Pin(pin), num)
    def write(self, n, r, g, b):
        # Set pixel ``n`` to (r, g, b) and push the update to the strip.
        self.np[n] = (r, g, b)
        self.np.write()
i2c = I2C(scl = Pin(22), sda = Pin(21), freq = 100000)
buf = bytearray(1)
rtc = RTC()
tim = Timer(-1)
try:
i2c.readfrom_mem_into(0x68, 0X75, buf)
except:
pass
else:
if buf[0] == 0x71:
from mpu9250 import *
mpu = MPU9250(i2c)
compass = Compass(mpu)
button_a = Button(17)
button_b = Button(16)
led1 = led(pin = 0, flag = 0)
led2 = led(pin = 5, flag = 0)
infrared_left = Infrared(34)
infrared_right = Infrared(36)
touch1 = MyPin(32)
touch2 = MyPin(33)
touch3 = MyPin(25)
touch4 = MyPin(26)
rgb = NeoPixel(Pin(2), 2) | apache-2.0 | -1,362,624,201,661,192,700 | 25.472081 | 69 | 0.528002 | false |
tea321000/django-project | musicsite/music/migrations/0002_auto_20170305_2121.py | 1 | 1364 | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-03-05 13:21
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('music', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='music',
name='singer',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='Musician_singer', to='music.Musician'),
),
migrations.AlterField(
model_name='musician',
name='birthday',
field=models.DateTimeField(verbose_name='\u51fa\u751f\u65e5\u671f'),
),
migrations.AlterField(
model_name='musician',
name='name',
field=models.CharField(max_length=40, verbose_name='\u539f\u540d'),
),
migrations.AlterField(
model_name='musician',
name='sex',
field=models.CharField(choices=[('M', '\u7537'), ('F', '\u5973')], max_length=1, verbose_name='\u6027\u522b'),
),
migrations.AlterField(
model_name='musician',
name='stagename',
field=models.CharField(blank=True, max_length=40, null=True, verbose_name='\u827a\u540d'),
),
]
| mit | -5,270,324,763,746,811,000 | 32.268293 | 145 | 0.579179 | false |
mRokita/DPLib | dplib/server.py | 1 | 47676 | # DPLib - Asynchronous bot framework for Digital Paint: Paintball 2 servers
# Copyright (C) 2017 Michał Rokita
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import re
import select
from collections import OrderedDict
from enum import Enum
from subprocess import Popen
import asyncio
import os
from socket import socket, AF_INET, SOCK_DGRAM
from time import time
from dplib.parse import render_text, decode_ingame_text
class ServerEvent(Enum):
TIMEOUT = 0
CHAT = 1
ELIM = 2
RESPAWN = 3
MAPCHANGE = 4
DATE = 5
NAMECHANGE = 6
ENTRANCE = 7
FLAG_CAPTURED = 8
ELIM_TEAMS_FLAG = 9
ROUND_STARTED = 10
TEAM_SWITCHED = 11
DISCONNECT = 12
FLAG_GRAB = 13
FLAG_DROP = 14
ROUND_END = 15
GAMEMODE = 16
GAME_END = 17
class GameMode(Enum):
CTF = 'CTF'
ONE_FLAG = '1Flag'
ELIMINATION = 'Elim'
DEATHMATCH = 'DM'
SIEGE = 'Siege'
TDM = 'TDM'
KOTH = 'KOTH'
PONG = 'Pong'
class BadRconPasswordError(Exception):
pass
class SecurityCheckError(Exception):
pass
class MapNotFoundError(Exception):
pass
class ListenerType(Enum):
PERMANENT = 0
TRIGGER_ONCE = 1
REGEXPS = OrderedDict([
(re.compile('^\\[\d\d:\d\d:\d\d\\] (?:(?:\\[OBS\\] )|(?:\\[ELIM\\] ))?(.*?): (.*?)\r?\n'), ServerEvent.CHAT),
# [19:54:18] hTml: test
(re.compile(
'^\\[\d\d:\d\d:\d\d\\] \\*(.*?) (?:\\((.*?)\\) eliminated \\*(.*?) \\((.*?)\\)\\.\r?\n|'
'eliminated ((?:himself)|(?:herself)) with a paintgren\\.\r?\n)'), ServerEvent.ELIM),
# [18:54:24] *|ACEBot_1| (Spyder SE) eliminated *|herself| (Spyder SE).
# [12:25:44] *whoa eliminated herself with a paintgren.
# [12:26:09] *whoa eliminated himself with a paintgren.
(re.compile('^\\[\d\d:\d\d:\d\d\\] \\*(.*?)\\\'s (.*?) revived!\r?\n'), ServerEvent.RESPAWN),
# [19:03:57] *Red's ACEBot_6 revived!
(re.compile('^\\[\d\d:\d\d:\d\d\\] (.*?) entered the game \\((.*?)\\) \\[(.*?)\\]\r?\n'), ServerEvent.ENTRANCE),
# [19:03:57] mRokita entered the game (build 41)
(re.compile('^\\[\d\d:\d\d:\d\d\\] \\*(.*?)\\\'s (.*?) returned the(?: \\*(.*?))? flag!\r?\n'), ServerEvent.FLAG_CAPTURED),
# [18:54:24] *Red's hTml returned the *Blue flag!
(re.compile('^\\[\d\d:\d\d:\d\d\\] \\*(.*?)\\\'s (.*?) earned (\d+) points for possesion of eliminated teams flag!\r?\n'),
ServerEvent.ELIM_TEAMS_FLAG),
# [19:30:23] *Blue's mRokita earned 3 points for possesion of eliminated teams flag!
(re.compile('^\\[\d\d:\d\d:\d\d\\] Round started\\.\\.\\.\r?\n'), ServerEvent.ROUND_STARTED),
# [10:20:11] Round started...
(re.compile(
'(?:^\\[\d\d:\d\d:\d\d\\] (.*?) switched from \\*((?:Red)|(?:Purple)|(?:Blue)|(?:Yellow))'
' to \\*((?:Red)|(?:Purple)|(?:Blue)|(?:Yellow))\\.\r?\n)|'
'(?:^\\[\d\d:\d\d:\d\d\\] (.*?) joined the \\*((?:Red)|(?:Purple)|(?:Blue)|(?:Yellow)) team\\.\r?\n)|'
'(?:^\\[\d\d:\d\d:\d\d\\] (.*?) is now (observing)?\\.\r?\n)'), ServerEvent.TEAM_SWITCHED),
# [10:20:11] mRokita switched from Blue to Red.
# [10:20:11] mRokita is now observing.
# [10:20:11] mRokita is now observing.
(re.compile('^\\[\d\d:\d\d:\d\d\\] [\t|-]{2}GameEnd[\t-](.*?)\r?\n'), ServerEvent.GAME_END),
# [22:40:33] GameEnd 441.9 No winner
# [22:40:33] GameEnd 1032.6 Red:23,Blue:22
# [22:40:33] GameEnd 4.9 DPBot01 wins!
# [22:40:33] GameEnd 42.9 Yellow:5,Blue:0,Purple:0,Red:0
# [22:40:33] GameEnd 42.9 Yellow:5,Blue:12,Purple:7
(re.compile('^\\[\d\d:\d\d:\d\d\\] == Map Loaded: (.+) ==\r?\n'), ServerEvent.MAPCHANGE),
# [10:20:11] == Map Loaded: airtime ==
(re.compile('^\\[\d\d:\d\d:\d\d\\] (.*?) changed name to (.*?)\\.\r?\n'), ServerEvent.NAMECHANGE),
# [19:54:54] name1 changed name to name2.
(re.compile('^\\[\d\d:\d\d:\d\d\\] (.*?) disconnected\\.\r?\n'), ServerEvent.DISCONNECT),
# [19:03:57] whoa disconnected.
(re.compile('^\\[\d\d:\d\d:\d\d\\] \\*(.*?) got the(?: \\*(.*?))? flag\\!\r?\n'), ServerEvent.FLAG_GRAB),
# [19:03:57] *whoa got the *Red flag!
(re.compile('^\\[\d\d:\d\d:\d\d\\] \\*(.*?) dropped the flag\\!\r?\n'), ServerEvent.FLAG_DROP),
# [19:03:57] *whoa dropped the flag!
(re.compile('^\\[\d\d:\d\d:\d\d\\] (.*?) team wins the round\\!\r?\n'), ServerEvent.ROUND_END),
# [14:38:50] Blue team wins the round!
(re.compile('^\\[\d\d:\d\d:\d\d\\] === ((?:Deathmatch)|(?:Team Flag CTF)|(?:Single Flag CTF)|(?:Team Siege)|(?:Team Elim)|(?:Team Siege)|(?:Team Deathmatch)|(?:Team KOTH)|(?:Pong)) ===\r?\n'), ServerEvent.GAMEMODE),
# [09:58:11] === Team Flag CTF ===
# [13:16:19] === Team Siege ===
# [21:53:54] === Pong ===
# [12:21:05] === Deathmatch ===
])
class Player(object):
"""
Player info from sv players command
:Attributes:
* dplogin - dplogin.com account id, None when Player has no account
* nick - nickname:
* build - game build
* server - an instance of :class:`Server`
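    :example:
    Illustrative sketch - assumes ``s`` is a connected :class:`Server` instance.
    .. code-block:: python
        :linenos:
        >>> for player in s.get_players():
        ...     print(player.nick, player.dplogin)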
"""
def __init__(self, server, id, dplogin, nick, build):
self.server = server
self.id = id
self.dplogin = dplogin
self.nick = nick
self.build = build
class Server(object):
"""
Represents a DP:PB2 server
:param hostname: Server hostname, for example '127.0.0.1'
:type hostname: str
:param port: Server port, default 27910
:type port: int
:param logfile: Path to logfile
:param rcon_password: rcon password
:param pty_master: Master of the dp2 process (useful only if you want to run the server from your Python script). Go to the getting started section for details.
:type pty_master: int
    :param init_vars: Send some commands used for security
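    :example:
    Illustrative sketch of a pty-based setup; the binary path and command line
    below are placeholders, not part of this module.
    .. code-block:: python
        :linenos:
        >>> import os, subprocess
        >>> master, slave = os.openpty()
        >>> p = subprocess.Popen(['./paintball2', '+set', 'dedicated', '1'],
        ...                      stdout=slave, stderr=subprocess.STDOUT)
        >>> s = Server(hostname='127.0.0.1', port=27910,
        ...            rcon_password='hello', pty_master=master)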
"""
def __init__(self, hostname, port=27910, logfile=None, rcon_password=None, pty_master=None, init_vars=True):
self.__rcon_password = rcon_password
self.__hostname = hostname
self.__init_vars = init_vars
self.__port = port
self.__log_file = None
self.__is_secure = False
self.__alive = False
self.__logfile_name = logfile if not pty_master else None
self.__pty_master = pty_master
self.handlers = {
ServerEvent.CHAT: 'on_chat',
ServerEvent.ELIM: 'on_elim',
ServerEvent.RESPAWN: 'on_respawn',
ServerEvent.ENTRANCE: 'on_entrance',
ServerEvent.FLAG_CAPTURED: 'on_flag_captured',
ServerEvent.ELIM_TEAMS_FLAG: 'on_elim_teams_flag',
ServerEvent.ROUND_STARTED: 'on_round_started',
ServerEvent.TEAM_SWITCHED: 'on_team_switched',
ServerEvent.GAME_END: 'on_game_end',
ServerEvent.MAPCHANGE: 'on_mapchange',
ServerEvent.NAMECHANGE: 'on_namechange',
ServerEvent.DISCONNECT: 'on_disconnect',
ServerEvent.FLAG_GRAB: 'on_flag_grab',
ServerEvent.FLAG_DROP: 'on_flag_drop',
ServerEvent.ROUND_END: 'on_round_end',
ServerEvent.GAMEMODE: 'gamemode',
}
self.__listeners = {
ServerEvent.CHAT: [],
ServerEvent.ELIM: [],
ServerEvent.RESPAWN: [],
ServerEvent.ENTRANCE: [],
ServerEvent.FLAG_CAPTURED: [],
ServerEvent.ELIM_TEAMS_FLAG: [],
ServerEvent.ROUND_STARTED: [],
ServerEvent.TEAM_SWITCHED: [],
ServerEvent.GAME_END: [],
ServerEvent.MAPCHANGE: [],
ServerEvent.NAMECHANGE: [],
ServerEvent.DISCONNECT: [],
ServerEvent.FLAG_GRAB: [],
ServerEvent.FLAG_DROP: [],
ServerEvent.ROUND_END: [],
ServerEvent.GAMEMODE: [],
}
self.loop = asyncio.get_event_loop()
def is_listening(self):
"""
Check if the main loop is running.
:rtype: bool
"""
return self.__alive
@asyncio.coroutine
def on_chat(self, nick, message):
"""
On chat, can be overridden using the :func:`.Server.event` decorator.
:param nick: Player's nick.
:type nick: str
:param message: Message.
:type message: str
"""
pass
@asyncio.coroutine
def on_flag_captured(self, team, nick, flag):
"""
On flag captured, can be overridden using the :func:`.Server.event` decorator.
:param team: Player's team.
:type team: str
:param nick: Player's nick.
:type nick: str
:param flag: Captured flag (Blue|Red|Yellow|Purple|White)
:type flag: str
"""
pass
@asyncio.coroutine
def on_team_switched(self, nick, old_team, new_team):
"""
On team switched, can be overridden using the :func:`.Server.event` decorator.
:param nick: Player's nick
:type nick: str
:param old_team: Old team (Blue|Red|Yellow|Purple|Observer)
:type old_team: str
:param new_team: New team (Blue|Red|Yellow|Purple|Observer)
:type new_team: str
"""
pass
@asyncio.coroutine
def on_round_started(self):
"""
On round started, can be overridden using the :func:`.Server.event` decorator.
"""
pass
@asyncio.coroutine
def on_elim_teams_flag(self, team, nick, points):
"""
On scored points for possession of eliminated teams flag, can be overridden using the :func:`.Server.event` decorator.
:param team: Player's team.
:type team: str
:param nick: Player's nick.
:type nick: str
:param points: Points earned.
:type points: int
"""
pass
@asyncio.coroutine
def on_entrance(self, nick, build, addr):
"""
        On entrance, can be overridden using the :func:`.Server.event` decorator.
:param nick: Player's nick
:type nick: str
:param build: Player's game version ('build 41' for example
:type build: str
:param addr: Player's address, IP:PORT ('127.0.0.1:23414' for example)
:type addr: str
"""
pass
@asyncio.coroutine
def on_game_end(self, score_blue, score_red, score_yellow, score_purple):
"""
        On game end, can be overridden using the :func:`.Server.event` decorator.
:param score_blue: Blue's score - None if there was no Blue team.
:param score_red: Red's score - None if there was no Red team.
:param score_yellow: Yellow's score - None if there was no Yellow team.
:param score_purple: Purple's score - None if there was no Purple team.
"""
pass
@asyncio.coroutine
def on_elim(self, killer_nick, killer_weapon, victim_nick, victim_weapon, suicide):
"""
        On elim, can be overridden using the :func:`.Server.event` decorator.
:param killer_nick: Killer's nick
:type killer_nick: str
:param killer_weapon: Killer's weapon
:type killer_weapon: str
:param victim_nick: Victim's nick
:type victim_nick: str
:param victim_weapon: Victim's weapon
:type victim_weapon: str
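        :param suicide: 'himself' or 'herself' when the elimination was a paintgren suicide, empty string otherwise
        :type suicide: str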
"""
pass
@asyncio.coroutine
def on_respawn(self, team, nick):
"""
On respawn, can be overridden using the :func:`.Server.event` decorator.
:param team: Player's team (Blue|Red|Yellow|Purple)
:type team: str
:param nick: Player's nick
:type nick: str
"""
pass
@asyncio.coroutine
def on_mapchange(self, mapname):
"""
        On mapchange, can be overridden using the :func:`.Server.event` decorator.
:param mapname: Mapname
:type mapname: str
"""
pass
@asyncio.coroutine
def on_namechange(self, old_nick, new_nick):
"""
On name change, can be overridden using the :func:`.Server.event` decorator.
:param old_nick: Old nick
:type old_nick: str
:param new_nick: Old nick
:type new_nick: str
"""
pass
@asyncio.coroutine
def on_disconnect(self, nick):
"""
        On disconnect, can be overridden using the :func:`.Server.event` decorator.
:param nick: Disconnected player's nick
:type nick: str
"""
pass
@asyncio.coroutine
def on_flag_grab(self, nick, flag):
"""
On flag grab, can be overridden using the :func:`.Server.event` decorator.
:param nick: Player's nick
:type nick: str
        :param flag: Flag color (Blue|Red|Yellow|Purple)
        :type flag: str
"""
pass
@asyncio.coroutine
def on_flag_drop(self, nick):
"""
        On flag drop, can be overridden using the :func:`.Server.event` decorator.
        :param nick: Player's nick
        :type nick: str
"""
pass
@asyncio.coroutine
def on_round_end(self):
"""
        On round end, can be overridden using the :func:`.Server.event` decorator.
"""
pass
@asyncio.coroutine
def gamemode(self, gamemode):
"""
        On gamemode announcement, can be overridden using the :func:`.Server.event` decorator.
:param gamemode: map's gamemode
:type gamemode: str
"""
pass
def event(self, func):
"""
Decorator, used for event registration.
:param func: function to register
:rtype: builtin_function_or_method
:example:
.. code-block:: python
:linenos:
>>> from dplib.server import Server
>>> s = Server(hostname='127.0.0.1', port=27910, logfile=r'qconsole27910.log', rcon_password='hello')
>>> @s.event
... def on_chat(nick, message):
... print((nick, message))
...
>>> s.run()
('mRokita', 'Hi')
"""
if func.__name__ in self.handlers.values():
setattr(self, func.__name__, asyncio.coroutine(func))
return func
else:
raise Exception('Event \'%s\' doesn\'t exist' % func.__name__)
def stop_listening(self):
"""
Stop the main loop
"""
self.__alive = False
def __perform_listeners(self, event_type, args, kwargs):
"""
Performs all pending listeners.
:param event_type: Event type, one of members :class:`ServerEvent`
:param args: Event info
:type args: tuple
:param kwargs: Event info
:type kwargs: dict
"""
to_remove = list()
for i, (check, future) in enumerate(self.__listeners[event_type]):
if not future.cancelled() and not future.done():
if check(*args):
future.set_result(kwargs)
else:
to_remove.append(i)
for i in reversed(to_remove):
self.__listeners[event_type].pop(i)
def nicks_valid(self, *nicks):
nicks_ingame = [p.nick for p in self.get_players()]
for nick in nicks:
if nick not in nicks_ingame:
return False
return True
@asyncio.coroutine
def __handle_event(self, event_type, args):
"""
Handles an event.
:param event_type: Event type, one of members :class:`ServerEvent`
:param args: Event info (re.findall() results)
"""
kwargs = dict()
if event_type == ServerEvent.CHAT:
if args[0] not in [p.nick for p in self.get_players()]:
return
kwargs = {
'nick': args[0],
'message': args[1],
}
self.__perform_listeners(ServerEvent.CHAT, args, kwargs)
elif event_type == ServerEvent.ELIM:
kwargs = {
'killer_nick': args[0],
'killer_weapon': args[1],
'victim_nick': args[2],
'victim_weapon': args[3],
'suicide': args[4],
}
self.__perform_listeners(ServerEvent.ELIM, args, kwargs)
elif event_type == ServerEvent.RESPAWN:
kwargs = {
'team': args[0],
'nick': args[1],
}
self.__perform_listeners(ServerEvent.RESPAWN, args, kwargs)
elif event_type == ServerEvent.ENTRANCE:
kwargs = {
'nick': args[0],
'build': args[1],
'addr': args[2],
}
self.__perform_listeners(ServerEvent.ENTRANCE, args, kwargs)
elif event_type == ServerEvent.FLAG_CAPTURED:
kwargs = {
'team': args[0],
'nick': args[1],
'flag': args[2],
}
self.__perform_listeners(ServerEvent.FLAG_CAPTURED, args, kwargs)
elif event_type == ServerEvent.ELIM_TEAMS_FLAG:
kwargs = {
'team': args[0],
'nick': args[1],
'points': int(args[2]),
}
self.__perform_listeners(ServerEvent.ELIM_TEAMS_FLAG, args, kwargs)
elif event_type == ServerEvent.ROUND_STARTED:
kwargs = dict()
self.__perform_listeners(ServerEvent.ROUND_STARTED, args, kwargs)
elif event_type == ServerEvent.TEAM_SWITCHED:
new_args = tuple([arg for arg in args if arg])
kwargs = {
'nick': new_args[0],
'old_team': new_args[1] if len(new_args) > 2 else 'Observer',
'new_team': new_args[2] if len(new_args) > 2 else new_args[1]
}
if kwargs['new_team'] == 'observing':
kwargs['new_team'] = 'Observer'
kwargs['old_team'] = None
self.__perform_listeners(ServerEvent.TEAM_SWITCHED, new_args, kwargs)
elif event_type == ServerEvent.GAME_END:
kwargs = {
'score_blue': None,
'score_red': None,
'score_purple': None,
'score_yellow': None,
}
teams = args.split(',')
for t in teams:
data = t.split(':')
if data[0] == 'Blue':
kwargs['score_blue'] = data[1]
elif data[0] == 'Red':
kwargs['score_red'] = data[1]
elif data[0] == 'Yellow':
kwargs['score_yellow'] = data[1]
elif data[0] == 'Purple':
kwargs['score_purple'] = data[1]
self.__perform_listeners(ServerEvent.GAME_END,
(kwargs['score_blue'],
kwargs['score_red'],
kwargs['score_yellow'],
kwargs['score_purple']), kwargs)
elif event_type == ServerEvent.MAPCHANGE:
kwargs = {
'mapname': args
}
self.__perform_listeners(ServerEvent.MAPCHANGE, (kwargs['mapname'],), kwargs)
elif event_type == ServerEvent.NAMECHANGE:
kwargs = {
'old_nick': args[0],
'new_nick': args[1]
}
self.__perform_listeners(ServerEvent.NAMECHANGE, (kwargs['old_nick'], kwargs['new_nick']), kwargs)
elif event_type == ServerEvent.DISCONNECT:
kwargs = {
'nick': args
}
self.__perform_listeners(ServerEvent.DISCONNECT, (kwargs['nick'],), kwargs)
elif event_type == ServerEvent.FLAG_GRAB:
kwargs = {
'nick': args[0],
'flag': args[1],
}
self.__perform_listeners(ServerEvent.FLAG_GRAB, (kwargs['nick'], kwargs['flag']), kwargs)
elif event_type == ServerEvent.FLAG_DROP:
kwargs = {
'nick': args
}
            self.__perform_listeners(ServerEvent.FLAG_DROP, (kwargs['nick'],), kwargs)
elif event_type == ServerEvent.ROUND_END:
kwargs = dict()
self.__perform_listeners(ServerEvent.ROUND_END, args, kwargs)
elif event_type == ServerEvent.GAMEMODE:
kwargs = {
'gamemode': args
}
self.__perform_listeners(ServerEvent.GAMEMODE, args, kwargs)
asyncio.ensure_future(self.get_event_handler(event_type)(**kwargs))
def get_event_handler(self, event_type):
return getattr(self, self.handlers[event_type])
@asyncio.coroutine
def __parse_line(self, line):
"""
Tries to match line with all event regexps.
:param line: Line from logs
"""
for r in REGEXPS:
results = r.findall(line)
e = REGEXPS[r]
for res in results:
if e == ServerEvent.CHAT: # For security reasons
if self.nicks_valid(res[0]):
yield from self.__handle_event(event_type=e, args=res)
return
else:
continue
yield from self.__handle_event(event_type=e, args=res)
def rcon(self, command, socket_timeout=3):
"""
Execute a console command using RCON.
:param command: Command
:param socket_timeout: Timeout for the UDP socket.
:return: Response from server
:rtype: str
:example:
.. code-block:: python
:linenos:
>>> from dplib.server import Server
>>> s = Server(hostname='127.0.0.1', port=27910, logfile=r'qconsole27910.log', rcon_password='hello')
>>> s.rcon('sv listuserip')
'ÿÿÿÿprint\\n mRokita [127.0.0.1:9419]\\nadmin is listing IP for mRokita [127.0.0.1:9419]\\n'
"""
sock = socket(AF_INET, SOCK_DGRAM)
sock.connect((self.__hostname, self.__port))
sock.settimeout(socket_timeout)
sock.send(bytes('\xFF\xFF\xFF\xFFrcon {} {}\n'.format(self.__rcon_password, command).encode('latin-1')))
ret = sock.recv(2048).decode('latin-1')
return ret
def status(self):
"""
Execute status query.
:return: Status string
:rtype: str
"""
sock = socket(AF_INET, SOCK_DGRAM)
sock.connect((self.__hostname, self.__port))
sock.settimeout(3)
sock.send(b'\xFF\xFF\xFF\xFFstatus\n')
return sock.recv(2048).decode('latin-1')
def new_map(self, map_name, gamemode=None):
"""
Changes the map using sv newmap <mapname> <gamemode>
:param map_name: map name, without .bsp
:param gamemode: Game mode
:type gamemode: GameMode
:return: Rcon response
:raises MapNotFoundError: When map is not found on the server
:rtype: str
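        :example:
        Illustrative sketch - assumes ``s`` is a running :class:`Server` instance and 'airtime' is just an example map name.
        .. code-block:: python
            :linenos:
            >>> try:
            ...     s.new_map('airtime')
            ... except MapNotFoundError:
            ...     s.say('Map not found on the server.')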
"""
command = 'sv newmap {map}'
if gamemode:
command += ' {gamemode}'
res = self.rcon(command.format(map=map_name, gamemode=gamemode))
if 'Cannot find mapfile' in res or 'usage' in res:
raise MapNotFoundError
return res
def permaban(self, ip=None):
"""
Bans IP address or range of adresses and saves ban list to disk.
:param ip: IP address to ban
:return: Rcon response
:rtype: str
"""
if ip:
resp = self.rcon('addip %s' % ip)
resp += '\n' + self.rcon('writeban')
return resp
else:
raise TypeError('IP address is required.')
def remove_permaban(self, ip=None):
"""
Removes ban on IP address and saves ban list to disk.
:param ip: IP address to unban
:return: Rcon response
:rtype: str
"""
if ip:
resp = self.rcon('removeip %s' % ip)
resp += '\n' + self.rcon('writeban')
return resp
else:
raise TypeError('IP address is required.')
def tempoban(self, id=None, nick=None, duration=3):
"""
Temporarily bans a player with specified id using rcon
:param id: Player's id
:param nick: Player's nick
:param duration: Ban duration in minutes (defaults to 3)
:return: Rcon response
:rtype: str
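        :example:
        Illustrative sketch - the nick is a placeholder for a currently connected player.
        .. code-block:: python
            :linenos:
            >>> s.tempoban(nick='mRokita', duration=5)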
"""
if type(duration) != int:
raise TypeError('Ban duration should be an integer, not a ' + str(type(duration)))
if nick:
id = self.get_ingame_info(nick).id
if id:
return self.rcon('tban %s %s' % (id, str(duration)))
else:
raise TypeError('Player id or nick is required.')
def remove_tempobans(self):
"""
Removes all temporary bans
:return: Rcon response
:rtype: str
"""
return self.rcon("removetbans")
def kick(self, id=None, nick=None):
"""
Kicks a player with id using rcon.
:param id: Player's id
:param nick: Player's nick
:return: Rcon response
:rtype: str
"""
if nick:
id = self.get_ingame_info(nick).id
if id:
return self.rcon('kick %s' % id)
else:
raise TypeError('Player id or nick is required.')
def say(self, message):
"""
Say a message
:param message: Text, can contain {C} - color char {U} - underline char {I} italic.
Remember to escape user input using :func:`dplib.parse.escape_braces`.
:rtype: str
:return: Rcon response
:example:
.. code-block:: python
:linenos:
>>> from dplib.server import Server
>>> s = Server(hostname='127.0.0.1', port=27910, logfile=r'qconsole27910.log', rcon_password='hello')
>>> s.say('{C}ARed text')
>>> s.say('{U}Underline{U}')
>>> s.say('{I}Italic{I}')
:ingame result:
.. image:: ..\..\doc\images\say_test.png
"""
return self.rcon('say "%s"' % render_text(message))
def cprint(self, message):
"""
Cprints a message.
:param message: Text, can contain {C} - color char {U} - underline char {I} italic.
        Remember to escape user input using :func:`dplib.parse.escape_braces`.
:return: Rcon response
:rtype: str
"""
return self.rcon('sv cprint "%s"' % render_text(message))
def set_cvar(self, var, value):
"""
Set a server cvar
:param var: cvar name
:param value: value to set
:return: Rcon response
:rtype: str
"""
return self.rcon('set %s "%s"' % (var, value))
def get_cvar(self, var):
"""
Gets cvar value
:param var: Variable name
:type var: str
:return: Cvar value
:rtype: str
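        :example:
        Illustrative sketch - 'hostname' is a standard server cvar; the value is a placeholder.
        .. code-block:: python
            :linenos:
            >>> s.set_cvar('hostname', 'My Server')
            >>> s.get_cvar('hostname')
            'My Server'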
"""
res = self.rcon('"%s"' % var)
if re.match('^....print\\\nUnknown command \\"%s"\\.\\\n' % re.escape(var), res):
raise NameError('Cvar "%s" does not exist' % var)
return re.findall('^....print\\\n\\"%s\\" is \\"(.*?)\\"\\\n' % re.escape(var), res)[0]
@staticmethod
def __get_predicate(margs, check):
"""
Returns a comparator.
:param margs: Args to check
:param check: Check function
:return: Returns a function that compiles the check function and comparision strings
"""
def predicate(*args):
if len(args) != len(margs):
raise TypeError('predicate() takes %d positional arguments but %d were given' % (len(margs), len(args)))
result = True
for i, a in enumerate(margs):
if a:
result = result and a == args[i]
if callable(check):
result = result and check(*args)
return result
return predicate
@asyncio.coroutine
def wait_for_entrance(self, timeout=None, nick=None, build=None, addr=None, check=None):
"""
Waits for entrance.
:param timeout: Time to wait for entrance event, if exceeded, returns None.
:param nick: Player's nick.
:param build: Player's build.
:param addr: Player's address (IP:PORT)
        :return: Returns message info dict keys: ('nick', 'build', 'addr')
"""
future = asyncio.Future(loop=self.loop)
margs = (nick, build, addr)
predicate = self.__get_predicate(margs, check)
self.__listeners[ServerEvent.ENTRANCE].append((predicate, future))
try:
data = yield from asyncio.wait_for(future, timeout,
loop=self.loop)
except asyncio.TimeoutError:
data = None
return data
@asyncio.coroutine
def wait_for_respawn(self, timeout=None, team=None, nick=None, check=None):
"""
Waits for respawn event.
:param timeout: Time to wait for respawn event, if exceeded, returns None.
:param team: Player's team.
:param nick: Player's nick.
:param check: Check function, ignored if none.
:return: Returns message info dict keys: ('team', 'nick').
:rtype: dict
"""
future = asyncio.Future(loop=self.loop)
margs = (team, nick)
predicate = self.__get_predicate(margs, check)
self.__listeners[ServerEvent.RESPAWN].append((predicate, future))
try:
data = yield from asyncio.wait_for(future, timeout,
loop=self.loop)
except asyncio.TimeoutError:
data = None
return data
@asyncio.coroutine
def wait_for_elim_teams_flag(self, timeout=None, team=None, nick=None, points=None, check=None):
"""
Waits for elim teams flag event.
:param timeout: Time to wait for event, if exceeded, returns None.
:param team: Player's team.
:param nick: Player's nick.
:param points: Points scored.
:type points: int
:param check: Check function, ignored if none.
:return: Returns message info dict keys: ('team', 'nick', 'points').
:rtype: dict
"""
future = asyncio.Future(loop=self.loop)
margs = (team, nick, points)
predicate = self.__get_predicate(margs, check)
self.__listeners[ServerEvent.ELIM_TEAMS_FLAG].append((predicate, future))
try:
data = yield from asyncio.wait_for(future, timeout,
loop=self.loop)
except asyncio.TimeoutError:
data = None
return data
@asyncio.coroutine
def wait_for_team_switched(self, timeout=None, nick=None, old_team=None, new_team=None, check=None):
"""
Waits for team switch event.
:param timeout: Time to wait for event, if exceeded, returns None.
:param old_team: Player's old team.
:param new_team: Player's new team.
:param nick: Player's nick.
:param check: Check function, ignored if none.
        :return: Returns message info dict keys: ('nick', 'old_team', 'new_team').
:rtype: dict
"""
future = asyncio.Future(loop=self.loop)
margs = (nick, old_team, new_team)
predicate = self.__get_predicate(margs, check)
self.__listeners[ServerEvent.TEAM_SWITCHED].append((predicate, future))
try:
data = yield from asyncio.wait_for(future, timeout,
loop=self.loop)
except asyncio.TimeoutError:
data = None
return data
@asyncio.coroutine
def wait_for_round_started(self, timeout=None, check=None):
"""
Waits for round start.
:param timeout: Time to wait for event, if exceeded, returns None.
:param check: Check function, ignored if none.
:return: Returns an empty dict.
:rtype: dict
"""
future = asyncio.Future(loop=self.loop)
margs = tuple()
predicate = self.__get_predicate(margs, check)
self.__listeners[ServerEvent.ROUND_STARTED].append((predicate, future))
try:
data = yield from asyncio.wait_for(future, timeout,
loop=self.loop)
except asyncio.TimeoutError:
data = None
return data
@asyncio.coroutine
def wait_for_flag_captured(self, timeout=None, team=None, nick=None, flag=None, check=None):
"""
Waits for flag capture.
:param timeout: Time to wait for event, if exceeded, returns None.
:param team: Player's team.
:param nick: Player's nick.
:param flag: Captured flag.
:param check: Check function, ignored if none.
        :return: Returns message info dict keys: ('team', 'nick', 'flag').
:rtype: dict
"""
future = asyncio.Future(loop=self.loop)
margs = (team, nick, flag)
predicate = self.__get_predicate(margs, check)
self.__listeners[ServerEvent.FLAG_CAPTURED].append((predicate, future))
try:
data = yield from asyncio.wait_for(future, timeout,
loop=self.loop)
except asyncio.TimeoutError:
data = None
return data
@asyncio.coroutine
def wait_for_game_end(self, timeout=None, score_blue=None, score_red=None, score_yellow=None, score_purple=None, check=None):
"""
Waits for game end.
:param timeout: Time to wait for event, if exceeded, returns None.
:param score_blue: Blue score
:param score_red: Red score.
:param score_yellow: Yellow score.
:param score_purple: Purple score.
:param check: Check function, ignored if none.
        :return: Returns message info dict keys: ('score_blue', 'score_red', 'score_yellow', 'score_purple').
:rtype: dict
"""
future = asyncio.Future(loop=self.loop)
margs = (score_blue, score_red, score_yellow, score_purple)
predicate = self.__get_predicate(margs, check)
self.__listeners[ServerEvent.GAME_END].append((predicate, future))
try:
data = yield from asyncio.wait_for(future, timeout,
loop=self.loop)
except asyncio.TimeoutError:
data = None
return data
@asyncio.coroutine
def wait_for_elim(self, timeout=None, killer_nick=None, killer_weapon=None, victim_nick=None, victim_weapon=None,
check=None):
"""
Waits for elimination event.
:param timeout: Time to wait for elimination event, if exceeded, returns None.
:param killer_nick: Killer's nick to match, ignored if None.
:param killer_weapon: Killer's weapon to match, ignored if None.
:param victim_nick: Victim's nick to match, ignored if None.
:param victim_weapon: Victim's weapon to match, ignored if None.
:param check: Check function, ignored if None.
:return: Returns message info dict keys: ('killer_nick', 'killer_weapon', 'victim_nick', 'victim_weapon')
:rtype: dict
"""
future = asyncio.Future(loop=self.loop)
margs = (killer_nick, killer_weapon, victim_nick, victim_weapon)
predicate = self.__get_predicate(margs, check)
self.__listeners[ServerEvent.ELIM].append((predicate, future))
try:
elim_info = yield from asyncio.wait_for(future, timeout, loop=self.loop)
except asyncio.TimeoutError:
elim_info = None
return elim_info
@asyncio.coroutine
def wait_for_mapchange(self, timeout=None, mapname=None, check=None):
"""
Waits for mapchange.
:param timeout: Time to wait for elimination event, if exceeded, returns None.
        :param mapname: Map name to match, ignored if None.
        :param check: Check function, ignored if None.
        :return: Returns message info dict keys: ('mapname',)
:rtype: dict
"""
future = asyncio.Future(loop=self.loop)
margs = (mapname,)
predicate = self.__get_predicate(margs, check)
self.__listeners[ServerEvent.MAPCHANGE].append((predicate, future))
try:
mapchange_info = yield from asyncio.wait_for(future, timeout, loop=self.loop)
except asyncio.TimeoutError:
mapchange_info = None
return mapchange_info
@asyncio.coroutine
def wait_for_namechange(self, timeout=None, old_nick=None, new_nick=None, check=None):
"""
        Waits for a nick change.
        :param timeout: Time to wait for the name change event, if exceeded, returns None.
        :param old_nick: Previous nick to match, ignored if None.
        :param new_nick: New nick to match, ignored if None.
        :param check: Check function, ignored if None.
        :return: Returns message info dict keys: ('old_nick', 'new_nick')
:rtype: dict
"""
future = asyncio.Future(loop=self.loop)
margs = (old_nick, new_nick)
predicate = self.__get_predicate(margs, check)
self.__listeners[ServerEvent.NAMECHANGE].append((predicate, future))
try:
mapchange_info = yield from asyncio.wait_for(future, timeout, loop=self.loop)
except asyncio.TimeoutError:
mapchange_info = None
return mapchange_info
@asyncio.coroutine
def wait_for_message(self, timeout=None, nick=None, message=None, check=None):
"""
Waits for a message.
:param timeout: Time to wait for message, if exceeded, returns None.
:param nick: Player's nick to match, ignored if None
:type nick: str
:param message: Message text to match, ignored if None
:type message: str
:param check: Check function, ignored if None
:return: Returns message info dict keys: ('nick', 'message')
:rtype: dict
:example:
.. code-block:: python
:linenos:
@s.event
def on_chat(nick, message):
if message == '!start' and not elim_active:
msg = yield from s.wait_for_message(check=lambda n, m: m.startswith('!hi '))
s.say('Hi ' + msg['message'].split('!hi ')[1] + '!')
"""
future = asyncio.Future(loop=self.loop)
margs = (nick, message)
predicate = self.__get_predicate(margs, check)
self.__listeners[ServerEvent.CHAT].append((predicate, future))
try:
message = yield from asyncio.wait_for(future, timeout,
loop=self.loop)
except asyncio.TimeoutError:
message = None
return message
@asyncio.coroutine
def wait_for_flag_drop(self, timeout=None, nick=None, check=None):
"""
Waits for flag drop.
:param timeout: Time to wait for event, if exceeded, returns None.
:param nick: Player's nick.
        :param check: Check function, ignored if none.
        :return: Returns message info dict keys: ('nick',).
:rtype: dict
"""
future = asyncio.Future(loop=self.loop)
        margs = (nick,)
predicate = self.__get_predicate(margs, check)
self.__listeners[ServerEvent.FLAG_DROP].append((predicate, future))
try:
data = yield from asyncio.wait_for(future, timeout,
loop=self.loop)
except asyncio.TimeoutError:
data = None
return data
def start(self, scan_old=False, realtime=True, debug=False):
"""
Main loop.
:param scan_old: Scan present logfile data
:type scan_old: bool
:param realtime: Wait for incoming logfile data
        :type realtime: bool
        :param debug: Print every parsed log line to stdout
        :type debug: bool
        """
if not (self.__logfile_name or self.__pty_master):
raise AttributeError("Logfile name or a Popen process is required.")
self.__alive = True
if self.__logfile_name:
self.__log_file = open(self.__logfile_name, 'rb')
if self.__log_file and scan_old:
self.__log_file.readlines()
buf = ''
if realtime:
while self.__alive:
try:
buf += self._read_log()
lines = buf.splitlines(True)
line = ''
for line in lines:
if debug:
print("[DPLib] %s" % line.strip())
yield from self.__parse_line(line)
if not line or line[-1] != '\n':
buf = line
else:
buf = ''
yield from asyncio.sleep(0.05)
except OSError as e:
raise e
if self.__log_file:
self.__log_file.close()
if self.__pty_master:
os.close(self.__pty_master)
def _read_log(self):
if self.__log_file:
return self.__log_file.readline().decode('latin-1')
elif self.__pty_master:
r, w, x = select.select([self.__pty_master], [], [], 0.01)
if r:
return os.read(self.__pty_master, 1024).decode('latin-1')
else:
return ''
def get_players(self):
"""
Gets playerlist.
:return: List of :class:`.Player` instances
:rtype: list
"""
response = self.rcon('sv players')
response = re.findall('(\d+) \\(?(.*?)\\)?\\] \\* (?:OP \d+, )?(.+) \\((b\d+)\\)', response)
players = list()
for p_data in response:
player = Player(nick=p_data[2],
id=p_data[0],
dplogin=p_data[1],
build=p_data[3],
server=self)
players.append(player)
return players
def get_simple_playerlist(self):
"""
Get a list of player names
:return: List of nicks
:rtype: list
"""
status = self.get_status()
players = status['players']
playerlist = []
for p in players:
playerlist.append(p['name'])
return playerlist
def get_status(self):
"""
Gets server status
:example:
.. code-block:: python
:linenos:
>>> s = Server(hostname='127.0.0.1', port=27910, logfile=r'C:\Games\Paintball2\pball\qconsole27910.log', rcon_password='hello')
>>> s.get_status()
{'players': [{'score': '0', 'ping': '13', 'name': 'mRokita'}], 'sv_certificated': '1', 'mapname': 'beta/wobluda_fix', 'TimeLeft': '20:00', '_scores': 'Red:0 Blue:0 ', 'gamename': 'Digital Paint Paintball 2 v1.930(186)', 'gameversion': 'DPPB2 v1.930(186)', 'sv_login': '1', 'needpass': '0', 'gamedate': 'Aug 10 2015', 'protocol': '34', 'version': '2.00 x86 Aug 10 2015 Win32 RELEASE (41)', 'hostname': 'asdfgh', 'elim': 'airtime', 'fraglimit': '50', 'timelimit': '20', 'gamedir': 'pball', 'game': 'pball', 'maxclients': '8'}
:return: status dict
:rtype: dict
"""
dictionary = {}
players = []
response = self.status().split('\n')[1:]
variables = response[0]
players_str = (response[1:])
for i in players_str:
if not i:
continue
temp_dict = {}
cleaned_name = decode_ingame_text(i)
separated = cleaned_name.split(' ')
temp_dict['score'] = separated[0]
temp_dict['ping'] = separated[1]
temp_dict['name'] = cleaned_name.split("%s %s " % (separated[0], separated[1]))[1][1:-1]
players.append(temp_dict)
dictionary['players'] = players
variables = variables.split('\\')[1:]
for i in range(0, len(variables), 2):
dictionary[variables[i]] = variables[i + 1]
return dictionary
def get_ingame_info(self, nick):
"""
Get ingame info about a player with nickname
:param nick: Nick
:return: An instance of :class:`.Player`
"""
players = self.get_players()
for p in players:
if p.nick == nick:
return p
return None
def make_secure(self, timeout=10):
"""
This function fixes some compatibility and security issues on DP server side
- Adds "mapchange" to sv_blockednames
- Sets sl_logging to 1
All variables are set using the rcon protocol, use this function if you want to wait for the server to start.
:param timeout: Timeout in seconds
"""
sl_logging_set = False
sv_blockednames_set = False
self.__is_secure = False
start_time = time()
while not (sl_logging_set and sv_blockednames_set) and time() - start_time < timeout:
try:
if not sl_logging_set:
sl_logging = self.get_cvar('sl_logging')
if sl_logging != '1':
self.set_cvar('sl_logging', '1')
else:
sl_logging_set = True
if not sv_blockednames_set:
blockednames = self.get_cvar('sv_blockednames')
if not 'maploaded' in blockednames:
self.set_cvar('sv_blockednames', ','.join([blockednames, 'maploaded']))
else:
sv_blockednames_set = True
            except (ConnectionError, timeout):
pass
if not (sl_logging_set and sv_blockednames_set):
raise SecurityCheckError(
"Configuring the DP server failed,"
" check if the server is running "
"and the rcon_password is correct.")
else:
self.__is_secure = True
def run(self, scan_old=False, realtime=True, debug=False, make_secure=True):
"""
Runs the main loop using asyncio.
:param scan_old: Scan present logfile data
:type scan_old: bool
:param realtime: Wait for incoming logfile data
        :type realtime: bool
        :param debug: Print every parsed log line to stdout
        :type debug: bool
        :param make_secure: Configure sl_logging/sv_blockednames over rcon before starting
        :type make_secure: bool
        """
if make_secure and not self.__rcon_password:
raise AttributeError(
"Setting the rcon_password is required to secure DPLib."
" You have to either set a rcon_password or add set"
" \"sl_logging 1; set sv_blockednames mapname\" "
"to your DP server config and use Server.run with"
" make_secure=False")
if make_secure:
self.make_secure()
self.loop.run_until_complete(self.start(scan_old, realtime, debug))
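# Example usage sketch: assumes a local DP2 server that writes its console log
# to the path below and uses rcon_password 'hello' (values mirror the
# get_status() docstring example above and are placeholders).
if __name__ == '__main__':
    s = Server(hostname='127.0.0.1', port=27910,
               logfile=r'C:\Games\Paintball2\pball\qconsole27910.log',
               rcon_password='hello')

    @s.event
    def on_chat(nick, message):
        if message == '!mapname':
            s.say('Current map: %s' % s.get_status()['mapname'])

    s.run()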
| agpl-3.0 | -7,791,786,125,953,492,000 | 33.949413 | 535 | 0.543832 | false |
mgeorgehansen/FIFE_Technomage | engine/python/fife/extensions/fife_settings.py | 1 | 15915 | # -*- coding: utf-8 -*-
# ####################################################################
# Copyright (C) 2005-2010 by the FIFE team
# http://www.fifengine.net
# This file is part of FIFE.
#
# FIFE is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# ####################################################################
"""
Settings
==================================
This module provides a nice framework for loading and saving game settings.
It is by no means complete but it does provide a good starting point.
"""
import shutil
import os
from StringIO import StringIO
from fife.extensions import pychan
from fife.extensions.fife_utils import getUserDataDirectory
from fife.extensions.serializers.simplexml import SimpleXMLSerializer
SETTINGS_GUI_XML="""\
<Window name="Settings" title="Settings">
<Label text="Settings menu!" />
<HBox>
<VBox>
<Label text="Resolution:" />
<Label text="Renderer:" />
<Label text="Light Model:" />
</VBox>
<VBox min_size="120,60">
<DropDown name="screen_resolution" min_size="120,0" />
<DropDown name="render_backend" min_size="120,0" />
<DropDown name="lighting_model" min_size="120,0" />
</VBox>
</HBox>
<CheckBox name="enable_fullscreen" text="Use the full screen mode" />
<CheckBox name="enable_sound" text="Enable sound" />
<HBox>
<Spacer />
<Button name="cancelButton" text="Cancel" />
<Button name="okButton" text="Ok" />
<Button name="defaultButton" text="Defaults" />
</HBox>
</Window>
"""
CHANGES_REQUIRE_RESTART="""\
<Window title="Changes require restart">
<Label text="Some of your changes require you to restart." />
<HBox>
<Spacer />
<Button name="closeButton" text="Ok" />
</HBox>
</Window>
"""
FIFE_MODULE = "FIFE"
class Setting(object):
"""
This class manages loading and saving of game settings.
Usage::
from fife.extensions.fife_settings import Setting
settings = Setting(app_name="myapp")
screen_width = settings.get("FIFE", "ScreenWidth", 1024)
screen_height = settings.get("FIFE", "ScreenHeight", 768)
"""
def __init__(self, app_name="", settings_file="", default_settings_file= "settings-dist.xml", settings_gui_xml="", changes_gui_xml="", copy_dist=True, serializer=None):
"""
Initializes the Setting object.
@param app_name: The applications name. If this parameter is provided
alone it will try to read the settings file from the users home directory.
In windows this will be something like: C:\Documents and Settings\user\Application Data\fife
@type app_name: C{string}
@param settings_file: The name of the settings file. If this parameter is
provided it will look for the setting file as you specify it, first looking
in the working directory. It will NOT look in the users home directory.
@type settings_file: C{string}
@param default_settings_file: The name of the default settings file. If the settings_file
does not exist this file will be copied into the place of the settings_file. This file
must exist in the root directory of your project!
@type default_settings_file: C{string}
@param settings_gui_xml: If you specify this parameter you can customize the look
of the settings dialog box.
@param copy_dist: Copies the default settings file to the settings_file location. If
this is False it will create a new empty setting file.
@param serializer: Overrides the default XML serializer
@type serializer: C{SimpleSerializer}
"""
self._app_name = app_name
self._settings_file = settings_file
self._default_settings_file = default_settings_file
self._settings_gui_xml = settings_gui_xml
self._changes_gui_xml = changes_gui_xml
self.OptionsDlg = None
# Holds SettingEntries
self._entries = {}
if self._settings_file == "":
self._settings_file = "settings.xml"
self._appdata = getUserDataDirectory("fife", self._app_name)
else:
self._appdata = os.path.dirname(self._settings_file)
self._settings_file = os.path.basename(self._settings_file)
if self._settings_gui_xml == "":
self._settings_gui_xml = SETTINGS_GUI_XML
if self._changes_gui_xml == "":
self._changes_gui_xml = CHANGES_REQUIRE_RESTART
if not os.path.exists(os.path.join(self._appdata, self._settings_file)):
if os.path.exists(self._default_settings_file) and copy_dist:
shutil.copyfile(self._default_settings_file, os.path.join(self._appdata, self._settings_file))
#default settings
self._resolutions = ['640x480', '800x600', '1024x768', '1280x800', '1440x900']
self._renderbackends = ['OpenGL', 'SDL']
self._lightingmodels = [0, 1, 2]
#Used to stylize the options gui
self._gui_style = "default"
#Initialize the serializer
if serializer:
self._serializer = serializer
else:
self._serializer = SimpleXMLSerializer()
self.initSerializer()
self._initDefaultSettingEntries()
def initSerializer(self):
self._serializer.load(os.path.join(self._appdata, self._settings_file))
def _initDefaultSettingEntries(self):
"""Initializes the default fife setting entries. Not to be called from
outside this class."""
self.createAndAddEntry(FIFE_MODULE, "PlaySounds", "enable_sound",
requiresrestart=True)
self.createAndAddEntry(FIFE_MODULE, "FullScreen", "enable_fullscreen",
requiresrestart=True)
self.createAndAddEntry(FIFE_MODULE, "ScreenResolution", "screen_resolution", initialdata = self._resolutions,
requiresrestart=True)
self.createAndAddEntry(FIFE_MODULE, "RenderBackend", "render_backend", initialdata = self._renderbackends,
requiresrestart=True)
self.createAndAddEntry(FIFE_MODULE, "Lighting", "lighting_model", initialdata = self._lightingmodels,
requiresrestart=True)
def createAndAddEntry(self, module, name, widgetname, applyfunction=None, initialdata=None, requiresrestart=False):
""""
@param module: The Setting module this Entry belongs to
@type module: C{String}
@param name: The Setting's name
@type name: C{String}
@param widgetname: The name of the widget that is used to change this
setting
@type widgetname: C{String}
@param applyfunction: function that makes the changes when the Setting is
saved
@type applyfunction: C{function}
@param initialdata: If the widget supports the setInitialData() function
this can be used to set the initial data
@type initialdata: C{String} or C{Boolean}
@param requiresrestart: Whether or not the changing of this setting
requires a restart
@type requiresrestart: C{Boolean}
"""
entry = SettingEntry(module, name, widgetname, applyfunction, initialdata, requiresrestart)
self.addEntry(entry)
def addEntry(self, entry):
"""Adds a new C{SettingEntry} to the Settting
@param entry: A new SettingEntry that is to be added
@type entry: C{SettingEntry}
"""
if entry.module not in self._entries:
self._entries[entry.module] = {}
self._entries[entry.module][entry.name] = entry
# Make sure the new entry is available
if self.get(entry.module, entry.name) is None:
print "Updating", self._settings_file, "to the default, it is missing the entry:"\
, entry.name ,"for module", entry.module
self.setDefaults()
if self.get(entry.module, entry.name) is None:
print "WARNING:", entry.module, ":", entry.name, "still not found!"
def saveSettings(self, filename=""):
""" Writes the settings to the settings file
@param filename: Specifies the file to save the settings to. If it is not specified
the original settings file is used.
@type filename: C{string}
"""
if self._serializer:
if filename == "":
self._serializer.save(os.path.join(self._appdata, self._settings_file))
else:
self._serializer.save(filename)
def get(self, module, name, defaultValue=None):
""" Gets the value of a specified setting
@param module: Name of the module to get the setting from
@param name: Setting name
@param defaultValue: Specifies the default value to return if the setting is not found
@type defaultValue: C{str} or C{unicode} or C{int} or C{float} or C{bool} or C{list} or C{dict}
"""
if self._serializer:
return self._serializer.get(module, name, defaultValue)
else:
return None
def set(self, module, name, value, extra_attrs={}):
"""
Sets a setting to specified value.
@param module: Module where the setting should be set
@param name: Name of setting
@param value: Value to assign to setting
@type value: C{str} or C{unicode} or C{int} or C{float} or C{bool} or C{list} or C{dict}
@param extra_attrs: Extra attributes to be stored in the XML-file
@type extra_attrs: C{dict}
"""
if self._serializer:
self._serializer.set(module, name, value, extra_attrs)
def setGuiStyle(self, style):
""" Set a custom gui style used for the option dialog.
@param style: Pychan style to be used
@type style: C{string}
"""
self._gui_style = style
def onOptionsPress(self):
"""
Opens the options dialog box. Usually you would bind this to a button.
"""
self.changesRequireRestart = False
self.isSetToDefault = False
if not self.OptionsDlg:
self.loadSettingsDialog()
self.fillWidgets()
self.OptionsDlg.show()
def loadSettingsDialog(self):
"""
Load up the settings xml and return the widget.
"""
self.OptionsDlg = self._loadWidget(self._settings_gui_xml)
self.OptionsDlg.stylize(self._gui_style)
self.OptionsDlg.mapEvents({
'okButton' : self.applySettings,
'cancelButton' : self.OptionsDlg.hide,
'defaultButton' : self.setDefaults
})
return self.OptionsDlg
def _loadWidget(self, dialog):
"""Loads a widget. Can load both files and pure xml strings"""
if os.path.isfile(self._settings_gui_xml):
return pychan.loadXML(dialog)
else:
return pychan.loadXML(StringIO(dialog))
def fillWidgets(self):
for module in self._entries.itervalues():
for entry in module.itervalues():
widget = self.OptionsDlg.findChildByName(entry.settingwidgetname)
value = self.get(entry.module, entry.name)
if type(entry.initialdata) is list:
try:
value = entry.initialdata.index(value)
except ValueError:
raise ValueError("\"" + value + "\" is not a valid value for " + entry.name + ". Valid options: " + str(entry.initialdata))
entry.initializeWidget(widget, value)
def applySettings(self):
"""
Writes the settings file. If a change requires a restart of the engine
it notifies you with a small dialog box.
"""
for module in self._entries.itervalues():
for entry in module.itervalues():
widget = self.OptionsDlg.findChildByName(entry.settingwidgetname)
data = widget.getData()
# If the data is a list we need to get the correct selected data
# from the list. This is needed for e.g. dropdowns or listboxs
if type(entry.initialdata) is list:
data = entry.initialdata[data]
# only take action if something really changed
if data != self.get(entry.module, entry.name):
self.set(entry.module, entry.name, data)
entry.onApply(data)
if entry.requiresrestart:
self.changesRequireRestart = True
self.saveSettings()
self.OptionsDlg.hide()
if self.changesRequireRestart:
self._showChangeRequireRestartDialog()
def _showChangeRequireRestartDialog(self):
"""Shows a dialog that informes the user that a restart is required
to perform the changes."""
RestartDlg = self._loadWidget(self._changes_gui_xml)
RestartDlg.stylize(self._gui_style)
RestartDlg.mapEvents({ 'closeButton' : RestartDlg.hide })
RestartDlg.show()
def setAvailableScreenResolutions(self, reslist):
"""
A list of valid default screen resolutions. This should be called once
right after you instantiate Settings.
Valid screen resolutions must be strings in the form of: WIDTHxHEIGHT
Example:
settings.setAvailableScreenResolutions(["800x600", "1024x768"])
"""
self._resolutions = reslist
def setDefaults(self):
"""
Overwrites the setting file with the default settings file.
"""
shutil.copyfile(self._default_settings_file, os.path.join(self._appdata, self._settings_file))
self.changesRequireRestart = True
self.initSerializer()
#update all widgets with the new data
self.fillWidgets()
def _getEntries(self):
return self._entries
def _setEntries(self, entries):
self._entries = entries
def _getSerializer(self):
return self._serializer
entries = property(_getEntries, _setEntries)
serializer = property(_getSerializer)
class SettingEntry(object):
def __init__(self, module, name, widgetname, applyfunction=None, initialdata=None, requiresrestart=False):
"""
@param module: The Setting module this Entry belongs to
@type module: C{String}
@param name: The Setting's name
@type name: C{String}
@param widgetname: The name of the widget that is used to change this
setting
@type widgetname: C{String}
@param applyfunction: function that makes the changes when the Setting is
saved
@type applyfunction: C{function}
@param initialdata: If the widget supports the setInitialData() function
this can be used to set the initial data
@type initialdata: C{String} or C{Boolean}
@param requiresrestart: Whether or not the changing of this setting
requires a restart
@type requiresrestart: C{Boolean}
"""
self._module = module
self._name = name
self._settingwidgetname = widgetname
self._requiresrestart = requiresrestart
self._initialdata = initialdata
self._applyfunction = applyfunction
def initializeWidget(self, widget, currentValue):
"""Initialize the widget with needed data"""
if self._initialdata is not None:
widget.setInitialData(self._initialdata)
widget.setData(currentValue)
def onApply(self, data):
"""Implement actions that need to be taken when the setting is changed
here.
"""
if self._applyfunction is not None:
self._applyfunction(data)
def _getModule(self):
return self._module
def _setModule(self, module):
self._module = module
def _getName(self):
return self._name
def _setName(self, name):
self._name = name
def _getSettingWidgetName(self):
return self._settingwidgetname
def _setSettingWidgetName(self, settingwidgetname):
self._settingwidgetname = settingwidgetname
def _getRequiresRestart(self):
return self._requiresrestart
def _setRequiresRestart(self, requiresrestart):
self._requiresrestart = requiresrestart
def _getInitialData(self):
return self._initialdata
def _setInitialData(self, initialdata):
self._initialdata = initialdata
def _getApplyFunction(self):
return self._applyfunction
def _setApplyFunction(self, applyfunction):
self._applyfunction = applyfunction
module = property(_getModule, _setModule)
name = property(_getName, _setName)
settingwidgetname = property(_getSettingWidgetName, _setSettingWidgetName)
requiresrestart = property(_getRequiresRestart, _setRequiresRestart)
initialdata = property(_getInitialData, _setInitialData)
applyfunction = property(_getApplyFunction, _setApplyFunction)
def __str__(self):
return "SettingEntry: " + self.name + " Module: " + self.module + " Widget: " + \
self.settingwidgetname + " requiresrestart: " + str(self.requiresrestart) + \
" initialdata: " + str(self.initialdata)
| lgpl-2.1 | 3,252,405,455,924,976,600 | 32.861702 | 169 | 0.711781 | false |
aenon/OnlineJudge | leetcode/5.BitManipulation/477.TotalHammingDistance.py | 1 | 1100 | # 477. Total Hamming Distance
# The Hamming distance between two integers is the number of positions at which the corresponding bits are different.
# Now your job is to find the total Hamming distance between all pairs of the given numbers.
# Example:
# Input: 4, 14, 2
# Output: 6
# Explanation: In binary representation, the 4 is 0100, 14 is 1110, and 2 is 0010 (just
# showing the four bits relevant in this case). So the answer will be:
# HammingDistance(4, 14) + HammingDistance(4, 2) + HammingDistance(14, 2) = 2 + 2 + 2 = 6.
# Note:
# Elements of the given array are in the range of 0 to 10^9
# Length of the array will not exceed 10^4.
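# Quick sanity check based on the example above (hypothetical usage):
#   Solution().totalHammingDistance([4, 14, 2])  # -> 6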
class Solution(object):
def totalHammingDistance(self, nums):
"""
:type nums: List[int]
:rtype: int
loop through all the digits
"""
result = 0
for i in xrange(32):
counts = [0] * 2 # the number of 0's and 1's in the ith digit
for number in nums:
counts[number>>i & 1] += 1
result += counts[0] * counts[1]
return result | mit | -7,870,830,576,520,058,000 | 31.382353 | 117 | 0.626364 | false |
f-prettyland/angr | angr/engines/vex/statements/loadg.py | 1 | 2392 | from .... import sim_options as o
from ....state_plugins.sim_action_object import SimActionObject
from ....state_plugins.sim_action import SimActionData
from . import SimIRStmt, SimStatementError
class SimIRStmt_LoadG(SimIRStmt):
def _execute(self):
addr = self._translate_expr(self.stmt.addr)
alt = self._translate_expr(self.stmt.alt)
guard = self._translate_expr(self.stmt.guard)
read_type, converted_type = self.stmt.cvt_types
read_size = self.size_bytes(read_type)
converted_size = self.size_bytes(converted_type)
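        # The guarded load reads read_size bytes from memory and widens them to
        # converted_size bytes; the cvt string ('S'/'U') selects sign- or
        # zero-extension below, and guard/alt choose between the loaded value
        # and the alternative expression.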
read_expr = self.state.memory.load(addr.expr, read_size, endness=self.stmt.end)
if read_size == converted_size:
converted_expr = read_expr
elif "S" in self.stmt.cvt:
converted_expr = read_expr.sign_extend(converted_size*self.state.arch.byte_width -
read_size*self.state.arch.byte_width)
elif "U" in self.stmt.cvt:
converted_expr = read_expr.zero_extend(converted_size*self.state.arch.byte_width -
read_size*self.state.arch.byte_width)
else:
raise SimStatementError("Unrecognized IRLoadGOp %s!" % self.stmt.cvt)
read_expr = self.state.se.If(guard.expr != 0, converted_expr, alt.expr)
if o.ACTION_DEPS in self.state.options:
reg_deps = addr.reg_deps() | alt.reg_deps() | guard.reg_deps()
tmp_deps = addr.tmp_deps() | alt.tmp_deps() | guard.tmp_deps()
else:
reg_deps = None
tmp_deps = None
self.state.scratch.store_tmp(self.stmt.dst, read_expr, reg_deps, tmp_deps)
if o.TRACK_MEMORY_ACTIONS in self.state.options:
data_ao = SimActionObject(converted_expr)
alt_ao = SimActionObject(alt.expr, reg_deps=alt.reg_deps(), tmp_deps=alt.tmp_deps())
addr_ao = SimActionObject(addr.expr, reg_deps=addr.reg_deps(), tmp_deps=addr.tmp_deps())
guard_ao = SimActionObject(guard.expr, reg_deps=guard.reg_deps(), tmp_deps=guard.tmp_deps())
size_ao = SimActionObject(self.size_bits(converted_type))
r = SimActionData(self.state, self.state.memory.id, SimActionData.READ, addr=addr_ao, data=data_ao, condition=guard_ao, size=size_ao, fallback=alt_ao)
self.actions.append(r)
| bsd-2-clause | -9,205,185,699,880,460,000 | 49.893617 | 162 | 0.621237 | false |
moozilla/dvcticker | dvcticker/main.py | 1 | 12328 | #todo: raise exceptions, then catch them to generate error images
import webapp2
from google.appengine.api import urlfetch
import json
from PIL import Image, ImageDraw, ImageFont
from google.appengine.api import memcache
import StringIO
import jinja2
import os
from decimal import * #used fixed point math for better accuracy
from google.appengine import runtime # for catching DeadlineExceededError
from google.appengine.api import urlfetch_errors # "
JINJA_ENVIRONMENT = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
#imgFont = ImageFont.load('static/font/ncenB12.pil') # for testing locally, can't get truetype to work locally
imgFont = ImageFont.truetype('static/font/tahoma_bold.ttf', 14, encoding='unic')
def urlfetch_cache(url,exchange):
    # fetches a url, using memcache so we don't hammer the exchange's server
data = memcache.get(url)
if data is not None:
return process_json(data, exchange)
else:
try:
result = urlfetch.fetch(url,deadline=30) #timeout after 30 sec
if result.status_code == 200:
value = process_json(result.content, exchange)
memcache.add(url, result.content, 30) #cache for 30 sec
memcache.add('longcache'+url, result.content, 3000) #also cache for 5min in case of timeouts
return value
else:
return 'Error: '+exchange+' status code '+str(result.status_code) #'Error accessing Vircurex API'
except runtime.DeadlineExceededError: #raised if the overall request times out
data = memcache.get('longcache'+url)
if data is not None: return process_json(data, exchange)
else: return 'Error: '+exchange+' timeout'
except runtime.apiproxy_errors.DeadlineExceededError: #raised if an RPC exceeded its deadline (set)
data = memcache.get('longcache'+url)
if data is not None: return process_json(data, exchange)
else: return 'Error: '+exchange+' timeout'
except urlfetch_errors.DeadlineExceededError: #raised if the URLFetch times out
data = memcache.get('longcache'+url)
if data is not None: return process_json(data, exchange)
else: return 'Error: '+exchange+' timeout'
except urlfetch.Error: #catch DownloadError
data = memcache.get('longcache'+url)
if data is not None: return process_json(data, exchange)
else: return 'Error: '+exchange+' timeout'
def process_json(txt, exchange):
#should probably add error handling in case bad json is passed
if exchange == 'vircurex':
if txt == '"Unknown currency"': return 'Error: bad Vircurex API result'
obj = json.loads(txt)
return obj['value']
elif exchange == 'mtgox_bid':
obj = json.loads(txt)
if obj['result'] == 'success':
return obj['return']['buy']['value']
else:
return 'Error: bad MTGox API result'
elif exchange == 'mtgox_ask':
obj = json.loads(txt)
if obj['result'] == 'success':
return obj['return']['sell']['value']
else:
return 'Error: bad MTGox API result'
elif exchange == 'btce_bid':
obj = json.loads(txt)
if not any('error' in s for s in obj):
return str(obj['ticker']['buy'])
else:
return 'Error: bad BTC-E API result'
elif exchange == 'btce_ask':
obj = json.loads(txt)
if not any('error' in s for s in obj):
return str(obj['ticker']['sell'])
else:
return 'Error: bad BTC-E API result'
elif exchange == 'campbx_bid':
obj = json.loads(txt)
# need to check for error
return obj['Best Bid']
elif exchange == 'campbx_ask':
obj = json.loads(txt)
# need to check for error
return obj['Best Ask']
else:
return 'Error: invalid exchange'
def get_campbx_value(base,alt,amount):
url = 'http://campbx.com/api/xticker.php'
reverse = False
if base == 'btc':
if alt != 'usd': return 'Error: only BTC/USD valid on CampBX'
exch = 'campbx_bid'
elif base == 'usd':
if alt != 'btc': return 'Error: only BTC/USD valid on CampBX'
exch = 'campbx_ask'
reverse = True
else:
return 'Error: only BTC/USD valid on CampBX'
value = urlfetch_cache(url,exch)
if value.startswith('Error'): return value
if reverse: return str((Decimal(amount) / Decimal(value)).quantize(Decimal('.00000001'), rounding=ROUND_DOWN)) # need to round to a certain number
else: return str(Decimal(amount) * Decimal(value))
def get_mtgox_value(base,alt,amount):
cur = ['usd', 'aud', 'cad', 'chf', 'cny', 'dkk',
'eur', 'gbp', 'hkd', 'jpy', 'nzd', 'pln', 'rub', 'sek', 'sgd', 'thb']
reverse = False # true if going from cur-> btc
if base == 'btc':
if not any(alt in s for s in cur):
return 'Error: invalid destination currency'
url = 'http://data.mtgox.com/api/1/btc'+alt+'/ticker'
exch = 'mtgox_bid'
elif any(base in s for s in cur):
if alt != 'btc':
return 'Error: destination currency must be BTC'
url = 'http://data.mtgox.com/api/1/btc'+base+'/ticker' #mtgox api always has btc first
exch = 'mtgox_ask'
reverse = True
else:
return 'Error: invalid base currency'
value = urlfetch_cache(url,exch)
if value.startswith('Error'): return value
if reverse: return str((Decimal(amount) / Decimal(value)).quantize(Decimal('.00000001'), rounding=ROUND_DOWN)) # need to round to a certain number
else: return str(Decimal(amount) * Decimal(value))
def get_btce_value(base,alt,amount):
# in BTC-e currencies must be traded in pairs, we also support going in reverse (buying)
cur_fwd = {'btc':['usd','rur','eur'], 'ltc':['btc','usd','rur'], 'nmc':['btc'], 'usd':['rur'], 'eur':['usd'], 'nvc':['btc'], 'trc':['btc'], 'ppc':['btc'], 'ftc':['btc'], 'cnc':['btc']}
cur_rev = {'btc':['ltc','nmc','nvc','trc','ppc','ftc','cnc'], 'usd':['btc','ltc'], 'rur':['btc','usd'], 'eur':['btc']}
reverse = False # if going from cur-> btc
if any(base in s for s in cur_fwd) and any(alt in s for s in cur_fwd[base]):
#if not any(alt in s for s in cur_fwd[base]):
#return 'Error: invalid destination currency' # can't return here because some can be base or alt
url = 'https://btc-e.com/api/2/'+base+'_'+alt+'/ticker' #https://btc-e.com/api/2/nmc_btc/ticker
exch = 'btce_bid'
else:
if any(base in s for s in cur_rev):
if not any(alt in s for s in cur_rev[base]):
return 'Error: invalid currency pair'
url = 'https://btc-e.com/api/2/'+alt+'_'+base+'/ticker'
exch = 'btce_ask'
reverse = True
else:
return 'Error: invalid currency pair'
value = urlfetch_cache(url,exch)
if value.startswith('Error'): return value
if reverse: return str((Decimal(amount) / Decimal(value)).quantize(Decimal('.00000001'), rounding=ROUND_DOWN)) # need to round to a certain number
else: return str(Decimal(amount) * Decimal(value))
def get_vircurex_value(type, base, alt, amount):
# gets json from vircurex about bid/ask prices
# eg. https://vircurex.com/api/get_highest_bid.json?base=BTC&alt=NMC
if type == 'bid':
url = 'https://vircurex.com/api/get_highest_bid.json'
elif type == 'ask':
url = 'https://vircurex.com/api/get_lowest_ask.json'
else:
return 'Error: Type must be either "bid" or "ask"'
cur = ['btc', 'dvc', 'ixc', 'ltc', 'nmc', 'ppc', 'trc', 'usd', 'eur', 'ftc', 'frc', 'cnc']
if not any(base in s for s in cur): return 'Error: invalid currency'
if not any(alt in s for s in cur): return 'Error: invalid currency'
url += '?base=' + base + '&alt=' + alt
value = urlfetch_cache(url,'vircurex')
if value.startswith('Error'): return value
return str(Decimal(amount)*Decimal(value)) # return amount * value
def get_bid(exchange, amount, base, alt):
if exchange == 'vircurex':
return get_vircurex_value('bid',base,alt,amount)
elif exchange == 'mtgox':
return get_mtgox_value(base,alt,amount)
elif exchange == 'btc-e':
return get_btce_value(base,alt,amount)
elif exchange == 'campbx':
return get_campbx_value(base,alt,amount)
else:
return 'Error: bad exchange'
def get_text_width(str):
img = Image.new("RGBA", (1,1)) # just used to calculate the text size, size doesn't matter
draw = ImageDraw.Draw(img)
w, h = draw.textsize(str, imgFont) # calculate width font will take up
return w
# returns text, with optional coin icon, in string encoded form so it can be written to HTTP response
def make_img(str, text_pos, coinimg=None):
img = Image.new("RGBA", (get_text_width(str) + text_pos, 20))
draw = ImageDraw.Draw(img) # set draw to new image
if coinimg != None:
img.paste(coinimg, (0,2)) #paste the coin image into the generated image
draw.text((text_pos,1), str, font=imgFont, fill='#555555')
output = StringIO.StringIO()
img.save(output, format='png')
img_to_serve = output.getvalue()
output.close()
return img_to_serve
class MainHandler(webapp2.RequestHandler):
def get(self):
#base = self.request.get('base','dvc')
#alt = self.request.get('alt','btc')
#value = get_vircurex_value('bid',base,alt)
#template_values = {
# 'value': value
#}
template = JINJA_ENVIRONMENT.get_template('index.html')
self.response.write(template.render())#template_values))
class ImageHandler(webapp2.RequestHandler):
def get(self,exchange,amount,base,alt):
if amount == '': amount = '1' # default amount is 1
exchange = exchange.lower() # make sure everything is lowercase
base = base.lower()
if alt == None:
if base == 'btc': alt = 'usd' # btc.png just shows btc value in usd
else: alt = 'btc' # if no alt specified, default to BTC
alt = alt.lower()
value = get_bid(exchange,amount,base,alt)
#if bid.startswith('Error'): value = bid
#else: value = str(Decimal(amount)*Decimal(bid))
text_pos = 19 # 3 px after coin image (all are 16x16)
if value.startswith('Error'):
text_pos = 0
elif alt == 'usd':
# round down to 2 decimal places
value = '$ '+str(Decimal(value).quantize(Decimal('.01'), rounding=ROUND_DOWN))
text_pos = 2
elif alt == 'eur':
# euro symbol in unicode (only works with truetype fonts)
value = u'\u20AC '+str(Decimal(value).quantize(Decimal('.01'), rounding=ROUND_DOWN))
text_pos = 2 # have to position euro symbol so it doesn't cut off
elif any(alt in s for s in ['aud', 'cad', 'chf', 'cny', 'dkk',
'gbp', 'hkd', 'jpy', 'nzd', 'pln', 'rub', 'sek', 'sgd', 'thb', 'rur', 'nvc']):
value = alt.upper() + ' ' + value
text_pos = 2
#text_pos 0 = error
if text_pos!=0 and any(alt in s for s in ['btc', 'dvc', 'ixc', 'ltc', 'nmc', 'ppc', 'trc', 'ftc', 'frc', 'cnc']):
coinimg = Image.open('static/img/'+alt+'.png')
else: coinimg = None
img_to_serve = make_img(value, text_pos, coinimg)
self.response.headers['Content-Type'] = 'image/png'
self.response.out.write(img_to_serve)
class ErrorHandler(webapp2.RequestHandler):
def get(self):
img_to_serve = make_img('Error: Malformed URL', 0)
self.response.headers['Content-Type'] = 'image/png'
self.response.out.write(img_to_serve)
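# URL scheme handled by ImageHandler below: /<exchange>/<amount><base>[/<alt>][.png]
# e.g. /mtgox/1btc/usd.png or /vircurex/100dvc/btc.png (illustrative examples,
# not an exhaustive list of supported pairs).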
app = webapp2.WSGIApplication([
('/', MainHandler),
('/([^/]+)/(\d*\.?\d*)([A-Za-z]+)(?:/([A-Za-z]+))?(?:\.png)?', ImageHandler),
('/.*', ErrorHandler)
], debug=True)
| mit | 2,237,923,900,706,341,600 | 43.666667 | 188 | 0.589066 | false |
seanjtaylor/out-for-justice | scripts/test_optimize.py | 1 | 1921 |
import random
import pickle
import numpy as np
import networkx as nx
from app.optim import slow_compute_loss, step
def main(input_file, num_police, num_steps, prob_step):
"""
Parameters
----------
    input_file : path to a pickled networkx graph
    num_police : the number of police to use
num_steps : the number of steps to take
prob_step : the probability of taking a step if it doesn't improve loss
"""
with open(input_file) as f:
graph = pickle.load(f)
graph = nx.convert_node_labels_to_integers(graph)
N = graph.number_of_nodes()
# compute random starting places
starting_positions = np.zeros(N)
places = random.sample(xrange(N), num_police)
starting_positions[places] = 1
# one outcome that is uniformly distributed
risks = np.ones(N).reshape((-1, 1))
import time
start = time.time()
# initialize the optimization
positions = [starting_positions]
losses = [slow_compute_loss(graph, positions[-1], risks)]
current = positions[-1]
tried = set()
for i in range(num_steps):
new_position = step(graph, current)
pos_id = tuple(new_position.nonzero()[0])
if pos_id in tried:
continue
tried.add(pos_id)
positions.append(new_position)
losses.append(slow_compute_loss(graph, new_position, risks))
if (losses[-1] < losses[-2]) or (random.random() < prob_step):
current = new_position
print time.time() - start
print sorted(losses)[:10]
if __name__ == '__main__':
from argparse import ArgumentParser
parser = ArgumentParser()
parser.add_argument('input_file')
parser.add_argument('--num_police', type=int, default=1)
parser.add_argument('--num_steps', type=int, default=100)
parser.add_argument('--prob_step', type=float, default=0.25)
args = parser.parse_args()
main(args.input_file, args.num_police, args.num_steps, args.prob_step)
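# Example invocation (graph.pickle is a placeholder for a pickled networkx graph):
#   python scripts/test_optimize.py graph.pickle --num_police 3 --num_steps 1000 --prob_step 0.25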
| mit | 3,145,182,075,389,221,000 | 25.680556 | 75 | 0.63925 | false |
dothiko/mypaint | lib/layer/test.py | 1 | 1433 | # This file is part of MyPaint.
# Copyright (C) 2011-2015 by Andrew Chadwick <[email protected]>
# Copyright (C) 2007-2012 by Martin Renold <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
def make_test_stack():
"""Makes a simple test RootLayerStack (2 branches of 3 leaves each)
:return: The root stack, and a list of its leaves.
:rtype: tuple
"""
import lib.layer.group
import lib.layer.data
import lib.layer.tree
root = lib.layer.tree.RootLayerStack(doc=None)
layer0 = lib.layer.group.LayerStack(name='0')
root.append(layer0)
layer00 = lib.layer.data.PaintingLayer(name='00')
layer0.append(layer00)
layer01 = lib.layer.data.PaintingLayer(name='01')
layer0.append(layer01)
layer02 = lib.layer.data.PaintingLayer(name='02')
layer0.append(layer02)
layer1 = lib.layer.group.LayerStack(name='1')
root.append(layer1)
layer10 = lib.layer.data.PaintingLayer(name='10')
layer1.append(layer10)
layer11 = lib.layer.data.PaintingLayer(name='11')
layer1.append(layer11)
layer12 = lib.layer.data.PaintingLayer(name='12')
layer1.append(layer12)
return (root, [layer00, layer01, layer02, layer10, layer11, layer12])
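# Typical use in tests (sketch):
#   root, leaves = make_test_stack()
#   assert len(leaves) == 6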
| gpl-2.0 | 8,603,394,478,543,778,000 | 35.74359 | 73 | 0.707606 | false |
delimitry/ascii_clock | asciicanvas.py | 1 | 6119 | #-*- coding: utf-8 -*-
#-----------------------------------------------------------------------
# Author: delimitry
#-----------------------------------------------------------------------
class AsciiCanvas(object):
"""
ASCII canvas for drawing in console using ASCII chars
"""
def __init__(self, cols, lines, fill_char=' '):
"""
Initialize ASCII canvas
"""
if cols < 1 or cols > 1000 or lines < 1 or lines > 1000:
raise Exception('Canvas cols/lines must be in range [1..1000]')
self.cols = cols
self.lines = lines
if not fill_char:
fill_char = ' '
elif len(fill_char) > 1:
fill_char = fill_char[0]
self.fill_char = fill_char
self.canvas = [[fill_char] * (cols) for _ in range(lines)]
def clear(self):
"""
Fill canvas with empty chars
"""
self.canvas = [[self.fill_char] * (self.cols) for _ in range(self.lines)]
def print_out(self):
"""
Print out canvas to console
"""
print(self.get_canvas_as_str())
def add_line(self, x0, y0, x1, y1, fill_char='o'):
"""
Add ASCII line (x0, y0 -> x1, y1) to the canvas, fill line with `fill_char`
"""
if not fill_char:
fill_char = 'o'
elif len(fill_char) > 1:
fill_char = fill_char[0]
if x0 > x1:
# swap A and B
x1, x0 = x0, x1
y1, y0 = y0, y1
# get delta x, y
dx = x1 - x0
dy = y1 - y0
        # if the length of the line is zero, just add the point
if dx == 0 and dy == 0:
if self.check_coord_in_range(x0, y0):
self.canvas[y0][x0] = fill_char
return
# when dx >= dy use fill by x-axis, and use fill by y-axis otherwise
if abs(dx) >= abs(dy):
for x in range(x0, x1 + 1):
y = y0 if dx == 0 else y0 + int(round((x - x0) * dy / float((dx))))
if self.check_coord_in_range(x, y):
self.canvas[y][x] = fill_char
else:
if y0 < y1:
for y in range(y0, y1 + 1):
x = x0 if dy == 0 else x0 + int(round((y - y0) * dx / float((dy))))
if self.check_coord_in_range(x, y):
self.canvas[y][x] = fill_char
else:
for y in range(y1, y0 + 1):
x = x0 if dy == 0 else x1 + int(round((y - y1) * dx / float((dy))))
if self.check_coord_in_range(x, y):
self.canvas[y][x] = fill_char
def add_text(self, x, y, text):
"""
Add text to canvas at position (x, y)
"""
for i, c in enumerate(text):
if self.check_coord_in_range(x + i, y):
self.canvas[y][x + i] = c
def add_rect(self, x, y, w, h, fill_char=' ', outline_char='o'):
"""
Add rectangle filled with `fill_char` and outline with `outline_char`
"""
if not fill_char:
fill_char = ' '
elif len(fill_char) > 1:
fill_char = fill_char[0]
if not outline_char:
outline_char = 'o'
elif len(outline_char) > 1:
outline_char = outline_char[0]
for px in range(x, x + w):
for py in range(y, y + h):
if self.check_coord_in_range(px, py):
if px == x or px == x + w - 1 or py == y or py == y + h - 1:
self.canvas[py][px] = outline_char
else:
self.canvas[py][px] = fill_char
def add_nine_patch_rect(self, x, y, w, h, outline_3x3_chars=None):
"""
Add nine-patch rectangle
"""
default_outline_3x3_chars = (
'.', '-', '.',
'|', ' ', '|',
'`', '-', "'"
)
if not outline_3x3_chars:
outline_3x3_chars = default_outline_3x3_chars
# filter chars
filtered_outline_3x3_chars = []
for index, char in enumerate(outline_3x3_chars[0:9]):
if not char:
char = default_outline_3x3_chars[index]
elif len(char) > 1:
char = char[0]
filtered_outline_3x3_chars.append(char)
for px in range(x, x + w):
for py in range(y, y + h):
if self.check_coord_in_range(px, py):
if px == x and py == y:
self.canvas[py][px] = filtered_outline_3x3_chars[0]
elif px == x and y < py < y + h - 1:
self.canvas[py][px] = filtered_outline_3x3_chars[3]
elif px == x and py == y + h - 1:
self.canvas[py][px] = filtered_outline_3x3_chars[6]
elif x < px < x + w - 1 and py == y:
self.canvas[py][px] = filtered_outline_3x3_chars[1]
elif x < px < x + w - 1 and py == y + h - 1:
self.canvas[py][px] = filtered_outline_3x3_chars[7]
elif px == x + w - 1 and py == y:
self.canvas[py][px] = filtered_outline_3x3_chars[2]
elif px == x + w - 1 and y < py < y + h - 1:
self.canvas[py][px] = filtered_outline_3x3_chars[5]
elif px == x + w - 1 and py == y + h - 1:
self.canvas[py][px] = filtered_outline_3x3_chars[8]
else:
self.canvas[py][px] = filtered_outline_3x3_chars[4]
def check_coord_in_range(self, x, y):
"""
Check that coordinate (x, y) is in range, to prevent out of range error
"""
return 0 <= x < self.cols and 0 <= y < self.lines
def get_canvas_as_str(self):
"""
Return canvas as a string
"""
return '\n'.join([''.join(col) for col in self.canvas])
def __str__(self):
"""
Return canvas as a string
"""
return self.get_canvas_as_str()
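# Example usage sketch: draw a nine-patch frame with a diagonal line and a
# short label, then print the canvas to the console.
if __name__ == '__main__':
    canvas = AsciiCanvas(40, 10)
    canvas.add_nine_patch_rect(0, 0, 40, 10)
    canvas.add_line(2, 3, 37, 8, fill_char='*')
    canvas.add_text(3, 0, ' AsciiCanvas demo ')
    canvas.print_out()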
| mit | 5,868,139,019,117,371,000 | 36.771605 | 87 | 0.440758 | false |
m4nh/roars | scripts/nodes/examples/arp_detector_example.py | 1 | 2688 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
from roars.rosutils.rosnode import RosNode
from roars.vision.cameras import CameraRGB
from roars.vision.arucoutils import MarkerDetector
from roars.vision.arp import ARP
import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import json
#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("rosnode_example")
#⬢⬢⬢⬢⬢➤ Sets HZ from parameters
node.setHz(node.setupParameter("hz", 30))
#⬢⬢⬢⬢⬢➤ Creates Camera Proxy
camera_topic = node.setupParameter(
"camera_topic",
"/camera/rgb/image_raw/compressed"
)
camera_file = node.getFileInPackage(
'roars',
'data/camera_calibrations/asus_xtion.yml'
)
camera = CameraRGB(
configuration_file=camera_file,
rgb_topic=camera_topic,
compressed_image="compressed" in camera_topic
)
#⬢⬢⬢⬢⬢➤ ARP
arp_configuration = node.getFileInPackage(
'roars',
'data/arp_configurations/prototype_configuration.json'
)
arp = ARP(configuration_file=arp_configuration, camera_file=camera_file)
#⬢⬢⬢⬢⬢➤ Points storage
points_per_object = node.setupParameter("points_per_object", 6)
collected_points = []
output_file = node.setupParameter("output_file", "/tmp/arp_objects.json")
#⬢⬢⬢⬢⬢➤ Camera Callback
def cameraCallback(frame):
#⬢⬢⬢⬢⬢➤ Grabs image from Frame
img = frame.rgb_image.copy()
arp_pose = arp.detect(img, debug_draw=True)
if arp_pose:
img_points = cvutils.reproject3DPoint(
arp_pose.p.x(),
arp_pose.p.y(),
arp_pose.p.z(),
camera=camera
)
cv2.circle(
img,
(int(img_points[0]), int(img_points[1])),
5,
(0, 0, 255),
-1
)
#⬢⬢⬢⬢⬢➤ Show
cv2.imshow("output", img)
c = cv2.waitKey(1)
if c == 113:
node.close()
if c == 32 and arp_pose != None:
print("New Point Added", arp_pose.p)
collected_points.append([
arp_pose.p.x(), arp_pose.p.y(), arp_pose.p.z()
])
if len(collected_points) % points_per_object == 0:
print("New Object Stored")
camera.registerUserCallabck(cameraCallback)
#⬢⬢⬢⬢⬢➤ Main Loop
while node.isActive():
node.tick()
def chunks(l, n):
"""Yield successive n-sized chunks from l."""
for i in range(0, len(l), n):
yield l[i:i + n]
probable_objects = list(chunks(collected_points, points_per_object))
objects = []
for o in probable_objects:
if len(o) == points_per_object:
objects.append(o)
with open(output_file, 'w') as handle:
handle.write(json.dumps(objects, indent=4))
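# The output file holds a JSON list of objects, each object being a list of
# points_per_object [x, y, z] points captured from the detected ARP pose.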
| gpl-3.0 | 4,086,218,510,693,855,000 | 23.571429 | 73 | 0.625581 | false |