from datetime import timedelta
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db.models import Q
from django.utils import timezone
from normandy.recipes.models import Recipe
class Command(BaseCommand):
"""
Update signatures for enabled Recipes that have no signature or an old signature
"""
help = 'Update Recipe signatures'
def add_arguments(self, parser):
parser.add_argument(
'-f', '--force',
action='store_true',
help='Update signatures for all recipes'
)
def handle(self, *args, force=False, **options):
if force:
recipes_to_update = Recipe.objects.filter(enabled=True)
else:
recipes_to_update = self.get_outdated_recipes()
count = recipes_to_update.count()
if count == 0:
self.stdout.write('No out of date recipes to sign')
else:
self.stdout.write('Signing {} recipes:'.format(count))
for recipe in recipes_to_update:
self.stdout.write(' * ' + recipe.name)
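            # update_signatures() acts on the whole queryset, re-signing
            # each selected recipe in bulk.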
recipes_to_update.update_signatures()
recipes_to_unsign = Recipe.objects.filter(enabled=False).exclude(signature=None)
count = recipes_to_unsign.count()
if count == 0:
self.stdout.write('No disabled recipes to unsign')
else:
self.stdout.write('Unsigning {} disabled recipes:'.format(count))
for recipe in recipes_to_unsign:
self.stdout.write(' * ' + recipe.name)
sig = recipe.signature
recipe.signature = None
recipe.save()
sig.delete()
def get_outdated_recipes(self):
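        # A recipe is outdated when its signature predates the configured
        # maximum age, or when it has never been signed at all.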
outdated_age = timedelta(seconds=settings.AUTOGRAPH_SIGNATURE_MAX_AGE)
outdated_filter = Q(signature__timestamp__lt=timezone.now() - outdated_age)
missing_filter = Q(signature=None)
return Recipe.objects.filter(enabled=True).filter(outdated_filter | missing_filter)
| Osmose/normandy | recipe-server/normandy/recipes/management/commands/update_recipe_signatures.py | Python | mpl-2.0 | 2,045 |
#!/usr/bin/env python
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup
d = generate_distutils_setup(
scripts=['bin/arbotix_gui', 'bin/arbotix_terminal'],
packages=['arbotix_python'],
package_dir={'': 'src'},
)
setup(**d)
| 4ndreas/ROSCoffeButler | src/arbotix_python/setup.py | Python | mit | 285 |
# Copyright 2009 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Interfaces for creating a dependency for a given specification. The
dependency is a second specification, so this is really a way of storing the
order in which specs must be implemented. No attempt is made to prevent
circular dependencies at present."""
__metaclass__ = type
__all__ = [
'ISpecificationDependency',
'ISpecificationDependencyRemoval',
'SpecDependencyIsAlsoRemoval',
]
from zope.interface import (
implements,
Interface,
)
from zope.schema import (
Choice,
Int,
)
from lp import _
class ISpecificationDependency(Interface):
"""A link between a specification and another specification on which it
depends.
"""
specification = Int(title=_('Specification ID'), required=True,
readonly=True)
dependency = Choice(title=_('Depends On'), required=True, readonly=True,
vocabulary='SpecificationDepCandidates')
class ISpecificationDependencyRemoval(Interface):
"""A schema that exists purely to define the text and vocabulary for the
specification dependency removal form.
"""
specification = Int(title=_('Specification ID'), required=True,
readonly=True)
dependency = Choice(title=_('Dependency'), required=True, readonly=True,
description=_("Please select the dependency you would like to "
"remove from the list."),
vocabulary='SpecificationDependencies')
class SpecDependencyIsAlsoRemoval:
implements(ISpecificationDependencyRemoval)
def __init__(self, specdep):
self.specdep = specdep
@property
def specification(self):
return self.specdep.specification
@property
def dependency(self):
return self.specdep.dependency
| abramhindle/UnnaturalCodeFork | python/testdata/launchpad/lib/lp/blueprints/interfaces/specificationdependency.py | Python | agpl-3.0 | 1,862 |
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import sys
import uuid
import eventlet
import mock
from oslo.config import cfg
import testtools
from neutron.agent.common import config
from neutron.agent import dhcp_agent
from neutron.agent.linux import dhcp
from neutron.agent.linux import interface
from neutron.common import config as common_config
from neutron.common import constants as const
from neutron.common import exceptions
from neutron.common import rpc as n_rpc
from neutron.tests import base
HOSTNAME = 'hostname'
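# Dotted-path strings used as mock.patch targets throughout these tests.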
dev_man = dhcp.DeviceManager
rpc_api = dhcp_agent.DhcpPluginApi
DEVICE_MANAGER = '%s.%s' % (dev_man.__module__, dev_man.__name__)
DHCP_PLUGIN = '%s.%s' % (rpc_api.__module__, rpc_api.__name__)
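# Hand-built DictModel/NetModel fixtures standing in for the RPC payloads
# the agent would normally receive from the plugin.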
fake_tenant_id = 'aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa'
fake_subnet1_allocation_pools = dhcp.DictModel(dict(id='', start='172.9.9.2',
end='172.9.9.254'))
fake_subnet1 = dhcp.DictModel(dict(id='bbbbbbbb-bbbb-bbbb-bbbbbbbbbbbb',
network_id='12345678-1234-5678-1234567890ab',
cidr='172.9.9.0/24', enable_dhcp=True, name='',
tenant_id=fake_tenant_id,
gateway_ip='172.9.9.1', host_routes=[],
dns_nameservers=[], ip_version=4,
allocation_pools=fake_subnet1_allocation_pools))
fake_subnet2_allocation_pools = dhcp.DictModel(dict(id='', start='172.9.8.2',
end='172.9.8.254'))
fake_subnet2 = dhcp.DictModel(dict(id='dddddddd-dddd-dddd-dddddddddddd',
network_id='12345678-1234-5678-1234567890ab',
cidr='172.9.8.0/24', enable_dhcp=False, name='',
tenant_id=fake_tenant_id, gateway_ip='172.9.8.1',
host_routes=[], dns_nameservers=[], ip_version=4,
allocation_pools=fake_subnet2_allocation_pools))
fake_subnet3 = dhcp.DictModel(dict(id='bbbbbbbb-1111-2222-bbbbbbbbbbbb',
network_id='12345678-1234-5678-1234567890ab',
cidr='192.168.1.1/24', enable_dhcp=True))
fake_meta_subnet = dhcp.DictModel(dict(id='bbbbbbbb-1111-2222-bbbbbbbbbbbb',
network_id='12345678-1234-5678-1234567890ab',
cidr='169.254.169.252/30',
gateway_ip='169.254.169.253',
enable_dhcp=True))
fake_fixed_ip1 = dhcp.DictModel(dict(id='', subnet_id=fake_subnet1.id,
ip_address='172.9.9.9'))
fake_meta_fixed_ip = dhcp.DictModel(dict(id='', subnet=fake_meta_subnet,
ip_address='169.254.169.254'))
fake_allocation_pool_subnet1 = dhcp.DictModel(dict(id='', start='172.9.9.2',
end='172.9.9.254'))
fake_port1 = dhcp.DictModel(dict(id='12345678-1234-aaaa-1234567890ab',
device_id='dhcp-12345678-1234-aaaa-1234567890ab',
allocation_pools=fake_subnet1_allocation_pools,
mac_address='aa:bb:cc:dd:ee:ff',
network_id='12345678-1234-5678-1234567890ab',
fixed_ips=[fake_fixed_ip1]))
fake_port2 = dhcp.DictModel(dict(id='12345678-1234-aaaa-123456789000',
mac_address='aa:bb:cc:dd:ee:99',
network_id='12345678-1234-5678-1234567890ab',
fixed_ips=[]))
fake_meta_port = dhcp.DictModel(dict(id='12345678-1234-aaaa-1234567890ab',
mac_address='aa:bb:cc:dd:ee:ff',
network_id='12345678-1234-5678-1234567890ab',
device_owner=const.DEVICE_OWNER_ROUTER_INTF,
device_id='forzanapoli',
fixed_ips=[fake_meta_fixed_ip]))
fake_network = dhcp.NetModel(True, dict(id='12345678-1234-5678-1234567890ab',
tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
admin_state_up=True,
subnets=[fake_subnet1, fake_subnet2],
ports=[fake_port1]))
fake_meta_network = dhcp.NetModel(
True, dict(id='12345678-1234-5678-1234567890ab',
tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
admin_state_up=True,
subnets=[fake_meta_subnet],
ports=[fake_meta_port]))
fake_down_network = dhcp.NetModel(
True, dict(id='12345678-dddd-dddd-1234567890ab',
tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
admin_state_up=False,
subnets=[],
ports=[]))
class TestDhcpAgent(base.BaseTestCase):
def setUp(self):
super(TestDhcpAgent, self).setUp()
dhcp_agent.register_options()
cfg.CONF.set_override('interface_driver',
'neutron.agent.linux.interface.NullDriver')
# disable setting up periodic state reporting
cfg.CONF.set_override('report_interval', 0, 'AGENT')
self.driver_cls_p = mock.patch(
'neutron.agent.dhcp_agent.importutils.import_class')
self.driver = mock.Mock(name='driver')
self.driver.existing_dhcp_networks.return_value = []
self.driver_cls = self.driver_cls_p.start()
self.driver_cls.return_value = self.driver
self.mock_makedirs_p = mock.patch("os.makedirs")
self.mock_makedirs = self.mock_makedirs_p.start()
def test_dhcp_agent_manager(self):
state_rpc_str = 'neutron.agent.rpc.PluginReportStateAPI'
# sync_state is needed for this test
cfg.CONF.set_override('report_interval', 1, 'AGENT')
with mock.patch.object(dhcp_agent.DhcpAgentWithStateReport,
'sync_state',
autospec=True) as mock_sync_state:
with mock.patch.object(dhcp_agent.DhcpAgentWithStateReport,
'periodic_resync',
autospec=True) as mock_periodic_resync:
with mock.patch(state_rpc_str) as state_rpc:
with mock.patch.object(sys, 'argv') as sys_argv:
sys_argv.return_value = [
'dhcp', '--config-file',
base.etcdir('neutron.conf.test')]
cfg.CONF.register_opts(dhcp_agent.DhcpAgent.OPTS)
config.register_interface_driver_opts_helper(cfg.CONF)
config.register_agent_state_opts_helper(cfg.CONF)
config.register_root_helper(cfg.CONF)
cfg.CONF.register_opts(dhcp.OPTS)
cfg.CONF.register_opts(interface.OPTS)
common_config.init(sys.argv[1:])
agent_mgr = dhcp_agent.DhcpAgentWithStateReport(
'testhost')
eventlet.greenthread.sleep(1)
agent_mgr.after_start()
mock_sync_state.assert_called_once_with(agent_mgr)
mock_periodic_resync.assert_called_once_with(agent_mgr)
state_rpc.assert_has_calls(
[mock.call(mock.ANY),
mock.call().report_state(mock.ANY, mock.ANY,
mock.ANY)])
def test_dhcp_agent_main_agent_manager(self):
logging_str = 'neutron.agent.common.config.setup_logging'
launcher_str = 'neutron.openstack.common.service.ServiceLauncher'
with mock.patch(logging_str):
with mock.patch.object(sys, 'argv') as sys_argv:
with mock.patch(launcher_str) as launcher:
sys_argv.return_value = ['dhcp', '--config-file',
base.etcdir('neutron.conf.test')]
dhcp_agent.main()
launcher.assert_has_calls(
[mock.call(), mock.call().launch_service(mock.ANY),
mock.call().wait()])
def test_run_completes_single_pass(self):
with mock.patch(DEVICE_MANAGER):
dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
attrs_to_mock = dict(
[(a, mock.DEFAULT) for a in
['sync_state', 'periodic_resync']])
with mock.patch.multiple(dhcp, **attrs_to_mock) as mocks:
dhcp.run()
mocks['sync_state'].assert_called_once_with()
mocks['periodic_resync'].assert_called_once_with()
def test_call_driver(self):
network = mock.Mock()
network.id = '1'
dhcp = dhcp_agent.DhcpAgent(cfg.CONF)
self.assertTrue(dhcp.call_driver('foo', network))
self.driver.assert_called_once_with(cfg.CONF,
mock.ANY,
'sudo',
mock.ANY,
mock.ANY)
def _test_call_driver_failure(self, exc=None,
trace_level='exception', expected_sync=True):
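        # Force call_driver() to fail and verify both the log level used and
        # whether a resync gets scheduled as a result.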
network = mock.Mock()
network.id = '1'
self.driver.return_value.foo.side_effect = exc or Exception
with mock.patch.object(dhcp_agent.LOG, trace_level) as log:
dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
with mock.patch.object(dhcp,
'schedule_resync') as schedule_resync:
self.assertIsNone(dhcp.call_driver('foo', network))
self.driver.assert_called_once_with(cfg.CONF,
mock.ANY,
'sudo',
mock.ANY,
mock.ANY)
self.assertEqual(log.call_count, 1)
self.assertEqual(expected_sync, schedule_resync.called)
def test_call_driver_failure(self):
self._test_call_driver_failure()
def test_call_driver_remote_error_net_not_found(self):
self._test_call_driver_failure(
exc=n_rpc.RemoteError(exc_type='NetworkNotFound'),
trace_level='warning')
def test_call_driver_network_not_found(self):
self._test_call_driver_failure(
exc=exceptions.NetworkNotFound(net_id='1'),
trace_level='warning')
def test_call_driver_conflict(self):
self._test_call_driver_failure(
exc=exceptions.Conflict(),
trace_level='warning',
expected_sync=False)
def _test_sync_state_helper(self, known_networks, active_networks):
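        # known_networks: ids already in the agent's cache;
        # active_networks: networks the plugin currently reports as active.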
with mock.patch(DHCP_PLUGIN) as plug:
mock_plugin = mock.Mock()
mock_plugin.get_active_networks_info.return_value = active_networks
plug.return_value = mock_plugin
dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
attrs_to_mock = dict(
[(a, mock.DEFAULT) for a in
['refresh_dhcp_helper', 'disable_dhcp_helper', 'cache']])
with mock.patch.multiple(dhcp, **attrs_to_mock) as mocks:
mocks['cache'].get_network_ids.return_value = known_networks
dhcp.sync_state()
exp_refresh = [
mock.call(net_id) for net_id in active_networks]
diff = set(known_networks) - set(active_networks)
exp_disable = [mock.call(net_id) for net_id in diff]
mocks['cache'].assert_has_calls([mock.call.get_network_ids()])
                mocks['refresh_dhcp_helper'].assert_has_calls(exp_refresh)
                mocks['disable_dhcp_helper'].assert_has_calls(exp_disable)
def test_sync_state_initial(self):
self._test_sync_state_helper([], ['a'])
def test_sync_state_same(self):
self._test_sync_state_helper(['a'], ['a'])
def test_sync_state_disabled_net(self):
self._test_sync_state_helper(['b'], ['a'])
def test_sync_state_waitall(self):
class mockNetwork():
id = '0'
admin_state_up = True
subnets = []
def __init__(self, id):
self.id = id
with mock.patch.object(dhcp_agent.eventlet.GreenPool, 'waitall') as w:
active_networks = [mockNetwork('1'), mockNetwork('2'),
mockNetwork('3'), mockNetwork('4'),
mockNetwork('5')]
known_networks = ['1', '2', '3', '4', '5']
self._test_sync_state_helper(known_networks, active_networks)
w.assert_called_once_with()
def test_sync_state_plugin_error(self):
with mock.patch(DHCP_PLUGIN) as plug:
mock_plugin = mock.Mock()
mock_plugin.get_active_networks_info.side_effect = Exception
plug.return_value = mock_plugin
with mock.patch.object(dhcp_agent.LOG, 'exception') as log:
dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
with mock.patch.object(dhcp,
'schedule_resync') as schedule_resync:
dhcp.sync_state()
self.assertTrue(log.called)
self.assertTrue(schedule_resync.called)
def test_periodic_resync(self):
dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
with mock.patch.object(dhcp_agent.eventlet, 'spawn') as spawn:
dhcp.periodic_resync()
spawn.assert_called_once_with(dhcp._periodic_resync_helper)
    def test_periodic_resync_helper(self):
with mock.patch.object(dhcp_agent.eventlet, 'sleep') as sleep:
dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
dhcp.needs_resync_reasons = ['reason1', 'reason2']
with mock.patch.object(dhcp, 'sync_state') as sync_state:
sync_state.side_effect = RuntimeError
with testtools.ExpectedException(RuntimeError):
dhcp._periodic_resync_helper()
sync_state.assert_called_once_with()
sleep.assert_called_once_with(dhcp.conf.resync_interval)
self.assertEqual(len(dhcp.needs_resync_reasons), 0)
def test_populate_cache_on_start_without_active_networks_support(self):
        # emulate a dhcp driver that doesn't support retrieving active networks
self.driver.existing_dhcp_networks.side_effect = NotImplementedError
with mock.patch.object(dhcp_agent.LOG, 'debug') as log:
dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
self.driver.existing_dhcp_networks.assert_called_once_with(
dhcp.conf,
cfg.CONF.root_helper
)
self.assertFalse(dhcp.cache.get_network_ids())
self.assertTrue(log.called)
def test_populate_cache_on_start(self):
networks = ['aaa', 'bbb']
self.driver.existing_dhcp_networks.return_value = networks
dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
self.driver.existing_dhcp_networks.assert_called_once_with(
dhcp.conf,
cfg.CONF.root_helper
)
self.assertEqual(set(networks), set(dhcp.cache.get_network_ids()))
def test_none_interface_driver(self):
cfg.CONF.set_override('interface_driver', None)
with mock.patch.object(dhcp, 'LOG') as log:
self.assertRaises(SystemExit, dhcp.DeviceManager,
cfg.CONF, 'sudo', None)
msg = 'An interface driver must be specified'
log.error.assert_called_once_with(msg)
def test_nonexistent_interface_driver(self):
        # Temporarily stop the mock so the real import_class can be used to
        # import the interface driver.
self.driver_cls_p.stop()
self.addCleanup(self.driver_cls_p.start)
cfg.CONF.set_override('interface_driver', 'foo')
with mock.patch.object(dhcp, 'LOG') as log:
self.assertRaises(SystemExit, dhcp.DeviceManager,
cfg.CONF, 'sudo', None)
self.assertEqual(log.error.call_count, 1)
class TestLogArgs(base.BaseTestCase):
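    # Each case feeds a fake conf into config.get_log_args() and checks the
    # command-line flags it would hand to spawned helper processes.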
def test_log_args_without_log_dir_and_file(self):
conf_dict = {'debug': True,
'verbose': False,
'log_dir': None,
'log_file': None,
'use_syslog': True,
'syslog_log_facility': 'LOG_USER'}
conf = dhcp.DictModel(conf_dict)
expected_args = ['--debug',
'--use-syslog',
'--syslog-log-facility=LOG_USER']
args = config.get_log_args(conf, 'log_file_name')
self.assertEqual(expected_args, args)
def test_log_args_without_log_file(self):
conf_dict = {'debug': True,
'verbose': True,
'log_dir': '/etc/tests',
'log_file': None,
'use_syslog': False,
'syslog_log_facility': 'LOG_USER'}
conf = dhcp.DictModel(conf_dict)
expected_args = ['--debug',
'--verbose',
'--log-file=log_file_name',
'--log-dir=/etc/tests']
args = config.get_log_args(conf, 'log_file_name')
self.assertEqual(expected_args, args)
def test_log_args_with_log_dir_and_file(self):
conf_dict = {'debug': True,
'verbose': False,
'log_dir': '/etc/tests',
'log_file': 'tests/filelog',
'use_syslog': False,
'syslog_log_facility': 'LOG_USER'}
conf = dhcp.DictModel(conf_dict)
expected_args = ['--debug',
'--log-file=log_file_name',
'--log-dir=/etc/tests/tests']
args = config.get_log_args(conf, 'log_file_name')
self.assertEqual(expected_args, args)
def test_log_args_without_log_dir(self):
conf_dict = {'debug': True,
'verbose': False,
'log_file': 'tests/filelog',
'log_dir': None,
'use_syslog': False,
'syslog_log_facility': 'LOG_USER'}
conf = dhcp.DictModel(conf_dict)
expected_args = ['--debug',
'--log-file=log_file_name',
'--log-dir=tests']
args = config.get_log_args(conf, 'log_file_name')
self.assertEqual(expected_args, args)
def test_log_args_with_filelog_and_syslog(self):
conf_dict = {'debug': True,
'verbose': True,
'log_file': 'tests/filelog',
'log_dir': '/etc/tests',
'use_syslog': True,
'syslog_log_facility': 'LOG_USER'}
conf = dhcp.DictModel(conf_dict)
expected_args = ['--debug',
'--verbose',
'--log-file=log_file_name',
'--log-dir=/etc/tests/tests']
args = config.get_log_args(conf, 'log_file_name')
self.assertEqual(expected_args, args)
class TestDhcpAgentEventHandler(base.BaseTestCase):
def setUp(self):
super(TestDhcpAgentEventHandler, self).setUp()
config.register_interface_driver_opts_helper(cfg.CONF)
cfg.CONF.register_opts(dhcp.OPTS)
cfg.CONF.set_override('interface_driver',
'neutron.agent.linux.interface.NullDriver')
config.register_root_helper(cfg.CONF)
cfg.CONF.register_opts(dhcp_agent.DhcpAgent.OPTS)
self.plugin_p = mock.patch(DHCP_PLUGIN)
plugin_cls = self.plugin_p.start()
self.plugin = mock.Mock()
plugin_cls.return_value = self.plugin
self.cache_p = mock.patch('neutron.agent.dhcp_agent.NetworkCache')
cache_cls = self.cache_p.start()
self.cache = mock.Mock()
cache_cls.return_value = self.cache
self.mock_makedirs_p = mock.patch("os.makedirs")
self.mock_makedirs = self.mock_makedirs_p.start()
self.mock_init_p = mock.patch('neutron.agent.dhcp_agent.'
'DhcpAgent._populate_networks_cache')
self.mock_init = self.mock_init_p.start()
with mock.patch.object(dhcp.Dnsmasq,
'check_version') as check_v:
check_v.return_value = dhcp.Dnsmasq.MINIMUM_VERSION
self.dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
self.call_driver_p = mock.patch.object(self.dhcp, 'call_driver')
self.call_driver = self.call_driver_p.start()
self.schedule_resync_p = mock.patch.object(self.dhcp,
'schedule_resync')
self.schedule_resync = self.schedule_resync_p.start()
self.external_process_p = mock.patch(
'neutron.agent.linux.external_process.ProcessManager'
)
self.external_process = self.external_process_p.start()
def _enable_dhcp_helper(self, isolated_metadata=False):
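        # Enabling DHCP should also spawn a metadata proxy in the network's
        # namespace when isolated metadata is enabled.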
if isolated_metadata:
cfg.CONF.set_override('enable_isolated_metadata', True)
self.plugin.get_network_info.return_value = fake_network
self.dhcp.enable_dhcp_helper(fake_network.id)
self.plugin.assert_has_calls(
[mock.call.get_network_info(fake_network.id)])
self.call_driver.assert_called_once_with('enable', fake_network)
self.cache.assert_has_calls([mock.call.put(fake_network)])
if isolated_metadata:
self.external_process.assert_has_calls([
mock.call(
cfg.CONF,
'12345678-1234-5678-1234567890ab',
'sudo',
'qdhcp-12345678-1234-5678-1234567890ab'),
mock.call().enable(mock.ANY)
])
else:
self.assertFalse(self.external_process.call_count)
def test_enable_dhcp_helper_enable_isolated_metadata(self):
self._enable_dhcp_helper(isolated_metadata=True)
def test_enable_dhcp_helper(self):
self._enable_dhcp_helper()
def test_enable_dhcp_helper_down_network(self):
self.plugin.get_network_info.return_value = fake_down_network
self.dhcp.enable_dhcp_helper(fake_down_network.id)
self.plugin.assert_has_calls(
[mock.call.get_network_info(fake_down_network.id)])
self.assertFalse(self.call_driver.called)
self.assertFalse(self.cache.called)
self.assertFalse(self.external_process.called)
def test_enable_dhcp_helper_network_none(self):
self.plugin.get_network_info.return_value = None
with mock.patch.object(dhcp_agent.LOG, 'warn') as log:
self.dhcp.enable_dhcp_helper('fake_id')
self.plugin.assert_has_calls(
[mock.call.get_network_info('fake_id')])
self.assertFalse(self.call_driver.called)
self.assertTrue(log.called)
self.assertFalse(self.dhcp.schedule_resync.called)
def test_enable_dhcp_helper_exception_during_rpc(self):
self.plugin.get_network_info.side_effect = Exception
with mock.patch.object(dhcp_agent.LOG, 'exception') as log:
self.dhcp.enable_dhcp_helper(fake_network.id)
self.plugin.assert_has_calls(
[mock.call.get_network_info(fake_network.id)])
self.assertFalse(self.call_driver.called)
self.assertTrue(log.called)
self.assertTrue(self.schedule_resync.called)
self.assertFalse(self.cache.called)
self.assertFalse(self.external_process.called)
def test_enable_dhcp_helper_driver_failure(self):
self.plugin.get_network_info.return_value = fake_network
self.call_driver.return_value = False
self.dhcp.enable_dhcp_helper(fake_network.id)
self.plugin.assert_has_calls(
[mock.call.get_network_info(fake_network.id)])
self.call_driver.assert_called_once_with('enable', fake_network)
self.assertFalse(self.cache.called)
self.assertFalse(self.external_process.called)
def _disable_dhcp_helper_known_network(self, isolated_metadata=False):
if isolated_metadata:
cfg.CONF.set_override('enable_isolated_metadata', True)
self.cache.get_network_by_id.return_value = fake_network
self.dhcp.disable_dhcp_helper(fake_network.id)
self.cache.assert_has_calls(
[mock.call.get_network_by_id(fake_network.id)])
self.call_driver.assert_called_once_with('disable', fake_network)
if isolated_metadata:
self.external_process.assert_has_calls([
mock.call(
cfg.CONF,
'12345678-1234-5678-1234567890ab',
'sudo',
'qdhcp-12345678-1234-5678-1234567890ab'),
mock.call().disable()
])
else:
self.assertFalse(self.external_process.call_count)
def test_disable_dhcp_helper_known_network_isolated_metadata(self):
self._disable_dhcp_helper_known_network(isolated_metadata=True)
def test_disable_dhcp_helper_known_network(self):
self._disable_dhcp_helper_known_network()
def test_disable_dhcp_helper_unknown_network(self):
self.cache.get_network_by_id.return_value = None
self.dhcp.disable_dhcp_helper('abcdef')
self.cache.assert_has_calls(
[mock.call.get_network_by_id('abcdef')])
self.assertEqual(0, self.call_driver.call_count)
self.assertFalse(self.external_process.called)
def _disable_dhcp_helper_driver_failure(self, isolated_metadata=False):
if isolated_metadata:
cfg.CONF.set_override('enable_isolated_metadata', True)
self.cache.get_network_by_id.return_value = fake_network
self.call_driver.return_value = False
self.dhcp.disable_dhcp_helper(fake_network.id)
self.cache.assert_has_calls(
[mock.call.get_network_by_id(fake_network.id)])
self.call_driver.assert_called_once_with('disable', fake_network)
self.cache.assert_has_calls(
[mock.call.get_network_by_id(fake_network.id)])
if isolated_metadata:
self.external_process.assert_has_calls([
mock.call(
cfg.CONF,
'12345678-1234-5678-1234567890ab',
'sudo',
'qdhcp-12345678-1234-5678-1234567890ab'),
mock.call().disable()
])
else:
self.assertFalse(self.external_process.call_count)
def test_disable_dhcp_helper_driver_failure_isolated_metadata(self):
self._disable_dhcp_helper_driver_failure(isolated_metadata=True)
def test_disable_dhcp_helper_driver_failure(self):
self._disable_dhcp_helper_driver_failure()
def test_enable_isolated_metadata_proxy(self):
class_path = 'neutron.agent.linux.external_process.ProcessManager'
with mock.patch(class_path) as ext_process:
self.dhcp.enable_isolated_metadata_proxy(fake_network)
ext_process.assert_has_calls([
mock.call(
cfg.CONF,
'12345678-1234-5678-1234567890ab',
'sudo',
'qdhcp-12345678-1234-5678-1234567890ab'),
mock.call().enable(mock.ANY)
])
def test_disable_isolated_metadata_proxy(self):
class_path = 'neutron.agent.linux.external_process.ProcessManager'
with mock.patch(class_path) as ext_process:
self.dhcp.disable_isolated_metadata_proxy(fake_network)
ext_process.assert_has_calls([
mock.call(
cfg.CONF,
'12345678-1234-5678-1234567890ab',
'sudo',
'qdhcp-12345678-1234-5678-1234567890ab'),
mock.call().disable()
])
def test_enable_isolated_metadata_proxy_with_metadata_network(self):
cfg.CONF.set_override('enable_metadata_network', True)
cfg.CONF.set_override('debug', True)
cfg.CONF.set_override('verbose', False)
cfg.CONF.set_override('log_file', 'test.log')
class_path = 'neutron.agent.linux.ip_lib.IPWrapper'
self.external_process_p.stop()
        # Ensure the mock is restored if this test fails
try:
with mock.patch(class_path) as ip_wrapper:
self.dhcp.enable_isolated_metadata_proxy(fake_meta_network)
ip_wrapper.assert_has_calls([mock.call(
'sudo',
'qdhcp-12345678-1234-5678-1234567890ab'),
mock.call().netns.execute([
'neutron-ns-metadata-proxy',
mock.ANY,
mock.ANY,
'--router_id=forzanapoli',
mock.ANY,
mock.ANY,
'--debug',
('--log-file=neutron-ns-metadata-proxy-%s.log' %
fake_meta_network.id)])
])
finally:
self.external_process_p.start()
def test_network_create_end(self):
payload = dict(network=dict(id=fake_network.id))
with mock.patch.object(self.dhcp, 'enable_dhcp_helper') as enable:
self.dhcp.network_create_end(None, payload)
            enable.assert_called_once_with(fake_network.id)
def test_network_update_end_admin_state_up(self):
payload = dict(network=dict(id=fake_network.id, admin_state_up=True))
with mock.patch.object(self.dhcp, 'enable_dhcp_helper') as enable:
self.dhcp.network_update_end(None, payload)
            enable.assert_called_once_with(fake_network.id)
def test_network_update_end_admin_state_down(self):
payload = dict(network=dict(id=fake_network.id, admin_state_up=False))
with mock.patch.object(self.dhcp, 'disable_dhcp_helper') as disable:
self.dhcp.network_update_end(None, payload)
            disable.assert_called_once_with(fake_network.id)
def test_network_delete_end(self):
payload = dict(network_id=fake_network.id)
with mock.patch.object(self.dhcp, 'disable_dhcp_helper') as disable:
self.dhcp.network_delete_end(None, payload)
            disable.assert_called_once_with(fake_network.id)
def test_refresh_dhcp_helper_no_dhcp_enabled_networks(self):
network = dhcp.NetModel(True, dict(id='net-id',
tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
admin_state_up=True,
subnets=[],
ports=[]))
self.cache.get_network_by_id.return_value = network
self.plugin.get_network_info.return_value = network
with mock.patch.object(self.dhcp, 'disable_dhcp_helper') as disable:
self.dhcp.refresh_dhcp_helper(network.id)
disable.assert_called_once_with(network.id)
self.assertFalse(self.cache.called)
self.assertFalse(self.call_driver.called)
self.cache.assert_has_calls(
[mock.call.get_network_by_id('net-id')])
def test_refresh_dhcp_helper_exception_during_rpc(self):
network = dhcp.NetModel(True, dict(id='net-id',
tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
admin_state_up=True,
subnets=[],
ports=[]))
self.cache.get_network_by_id.return_value = network
self.plugin.get_network_info.side_effect = Exception
with mock.patch.object(dhcp_agent.LOG, 'exception') as log:
self.dhcp.refresh_dhcp_helper(network.id)
self.assertFalse(self.call_driver.called)
self.cache.assert_has_calls(
[mock.call.get_network_by_id('net-id')])
self.assertTrue(log.called)
self.assertTrue(self.dhcp.schedule_resync.called)
def test_subnet_update_end(self):
payload = dict(subnet=dict(network_id=fake_network.id))
self.cache.get_network_by_id.return_value = fake_network
self.plugin.get_network_info.return_value = fake_network
self.dhcp.subnet_update_end(None, payload)
self.cache.assert_has_calls([mock.call.put(fake_network)])
self.call_driver.assert_called_once_with('reload_allocations',
fake_network)
def test_subnet_update_end_restart(self):
new_state = dhcp.NetModel(True, dict(id=fake_network.id,
tenant_id=fake_network.tenant_id,
admin_state_up=True,
subnets=[fake_subnet1, fake_subnet3],
ports=[fake_port1]))
payload = dict(subnet=dict(network_id=fake_network.id))
self.cache.get_network_by_id.return_value = fake_network
self.plugin.get_network_info.return_value = new_state
self.dhcp.subnet_update_end(None, payload)
self.cache.assert_has_calls([mock.call.put(new_state)])
self.call_driver.assert_called_once_with('restart',
new_state)
def test_subnet_update_end_delete_payload(self):
prev_state = dhcp.NetModel(True, dict(id=fake_network.id,
tenant_id=fake_network.tenant_id,
admin_state_up=True,
subnets=[fake_subnet1, fake_subnet3],
ports=[fake_port1]))
payload = dict(subnet_id=fake_subnet1.id)
self.cache.get_network_by_subnet_id.return_value = prev_state
self.cache.get_network_by_id.return_value = prev_state
self.plugin.get_network_info.return_value = fake_network
self.dhcp.subnet_delete_end(None, payload)
self.cache.assert_has_calls([
mock.call.get_network_by_subnet_id(
'bbbbbbbb-bbbb-bbbb-bbbbbbbbbbbb'),
mock.call.get_network_by_id('12345678-1234-5678-1234567890ab'),
mock.call.put(fake_network)])
self.call_driver.assert_called_once_with('restart',
fake_network)
def test_port_update_end(self):
payload = dict(port=fake_port2)
self.cache.get_network_by_id.return_value = fake_network
self.cache.get_port_by_id.return_value = fake_port2
self.dhcp.port_update_end(None, payload)
self.cache.assert_has_calls(
[mock.call.get_network_by_id(fake_port2.network_id),
mock.call.put_port(mock.ANY)])
self.call_driver.assert_called_once_with('reload_allocations',
fake_network)
def test_port_update_change_ip_on_port(self):
payload = dict(port=fake_port1)
self.cache.get_network_by_id.return_value = fake_network
updated_fake_port1 = copy.deepcopy(fake_port1)
updated_fake_port1.fixed_ips[0].ip_address = '172.9.9.99'
self.cache.get_port_by_id.return_value = updated_fake_port1
self.dhcp.port_update_end(None, payload)
self.cache.assert_has_calls(
[mock.call.get_network_by_id(fake_port1.network_id),
mock.call.put_port(mock.ANY)])
        self.call_driver.assert_has_calls(
            [mock.call('reload_allocations', fake_network)])
def test_port_delete_end(self):
payload = dict(port_id=fake_port2.id)
self.cache.get_network_by_id.return_value = fake_network
self.cache.get_port_by_id.return_value = fake_port2
self.dhcp.port_delete_end(None, payload)
self.cache.assert_has_calls(
[mock.call.get_port_by_id(fake_port2.id),
mock.call.get_network_by_id(fake_network.id),
mock.call.remove_port(fake_port2)])
        self.call_driver.assert_has_calls(
            [mock.call('reload_allocations', fake_network)])
def test_port_delete_end_unknown_port(self):
payload = dict(port_id='unknown')
self.cache.get_port_by_id.return_value = None
self.dhcp.port_delete_end(None, payload)
self.cache.assert_has_calls([mock.call.get_port_by_id('unknown')])
self.assertEqual(self.call_driver.call_count, 0)
class TestDhcpPluginApiProxy(base.BaseTestCase):
def setUp(self):
super(TestDhcpPluginApiProxy, self).setUp()
self.proxy = dhcp_agent.DhcpPluginApi('foo', {}, None)
self.proxy.host = 'foo'
self.call_p = mock.patch.object(self.proxy, 'call')
self.call = self.call_p.start()
self.make_msg_p = mock.patch.object(self.proxy, 'make_msg')
self.make_msg = self.make_msg_p.start()
def test_get_network_info(self):
self.call.return_value = dict(a=1)
retval = self.proxy.get_network_info('netid')
self.assertEqual(retval.a, 1)
self.assertTrue(self.call.called)
self.make_msg.assert_called_once_with('get_network_info',
network_id='netid',
host='foo')
def test_get_dhcp_port(self):
self.call.return_value = dict(a=1)
retval = self.proxy.get_dhcp_port('netid', 'devid')
self.assertEqual(retval.a, 1)
self.assertTrue(self.call.called)
self.make_msg.assert_called_once_with('get_dhcp_port',
network_id='netid',
device_id='devid',
host='foo')
def test_get_dhcp_port_none(self):
self.call.return_value = None
self.assertIsNone(self.proxy.get_dhcp_port('netid', 'devid'))
def test_get_active_networks_info(self):
self.proxy.get_active_networks_info()
self.make_msg.assert_called_once_with('get_active_networks_info',
host='foo')
def test_create_dhcp_port(self):
port_body = (
{'port':
{'name': '', 'admin_state_up': True,
'network_id': fake_network.id,
'tenant_id': fake_network.tenant_id,
'fixed_ips': [{'subnet_id': fake_fixed_ip1.subnet_id}],
'device_id': mock.ANY}})
self.proxy.create_dhcp_port(port_body)
self.make_msg.assert_called_once_with('create_dhcp_port',
port=port_body,
host='foo')
def test_create_dhcp_port_none(self):
self.call.return_value = None
port_body = (
{'port':
{'name': '', 'admin_state_up': True,
'network_id': fake_network.id,
'tenant_id': fake_network.tenant_id,
'fixed_ips': [{'subnet_id': fake_fixed_ip1.subnet_id}],
'device_id': mock.ANY}})
self.assertIsNone(self.proxy.create_dhcp_port(port_body))
def test_update_dhcp_port_none(self):
self.call.return_value = None
port_body = {'port': {'fixed_ips':
[{'subnet_id': fake_fixed_ip1.subnet_id}]}}
self.assertIsNone(self.proxy.update_dhcp_port(fake_port1.id,
port_body))
def test_update_dhcp_port(self):
port_body = {'port': {'fixed_ips':
[{'subnet_id': fake_fixed_ip1.subnet_id}]}}
self.proxy.update_dhcp_port(fake_port1.id, port_body)
self.make_msg.assert_called_once_with('update_dhcp_port',
port_id=fake_port1.id,
port=port_body,
host='foo')
def test_release_dhcp_port(self):
self.proxy.release_dhcp_port('netid', 'devid')
self.assertTrue(self.call.called)
self.make_msg.assert_called_once_with('release_dhcp_port',
network_id='netid',
device_id='devid',
host='foo')
def test_release_port_fixed_ip(self):
self.proxy.release_port_fixed_ip('netid', 'devid', 'subid')
self.assertTrue(self.call.called)
self.make_msg.assert_called_once_with('release_port_fixed_ip',
network_id='netid',
subnet_id='subid',
device_id='devid',
host='foo')
class TestNetworkCache(base.BaseTestCase):
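    # NetworkCache keeps three indexes: networks by id, plus subnet-id and
    # port-id lookups back to the owning network.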
def test_put_network(self):
nc = dhcp_agent.NetworkCache()
nc.put(fake_network)
self.assertEqual(nc.cache,
{fake_network.id: fake_network})
self.assertEqual(nc.subnet_lookup,
{fake_subnet1.id: fake_network.id,
fake_subnet2.id: fake_network.id})
self.assertEqual(nc.port_lookup,
{fake_port1.id: fake_network.id})
def test_put_network_existing(self):
prev_network_info = mock.Mock()
nc = dhcp_agent.NetworkCache()
with mock.patch.object(nc, 'remove') as remove:
nc.cache[fake_network.id] = prev_network_info
nc.put(fake_network)
remove.assert_called_once_with(prev_network_info)
self.assertEqual(nc.cache,
{fake_network.id: fake_network})
self.assertEqual(nc.subnet_lookup,
{fake_subnet1.id: fake_network.id,
fake_subnet2.id: fake_network.id})
self.assertEqual(nc.port_lookup,
{fake_port1.id: fake_network.id})
def test_remove_network(self):
nc = dhcp_agent.NetworkCache()
nc.cache = {fake_network.id: fake_network}
nc.subnet_lookup = {fake_subnet1.id: fake_network.id,
fake_subnet2.id: fake_network.id}
nc.port_lookup = {fake_port1.id: fake_network.id}
nc.remove(fake_network)
self.assertEqual(len(nc.cache), 0)
self.assertEqual(len(nc.subnet_lookup), 0)
self.assertEqual(len(nc.port_lookup), 0)
def test_get_network_by_id(self):
nc = dhcp_agent.NetworkCache()
nc.put(fake_network)
self.assertEqual(nc.get_network_by_id(fake_network.id), fake_network)
def test_get_network_ids(self):
nc = dhcp_agent.NetworkCache()
nc.put(fake_network)
self.assertEqual(nc.get_network_ids(), [fake_network.id])
def test_get_network_by_subnet_id(self):
nc = dhcp_agent.NetworkCache()
nc.put(fake_network)
self.assertEqual(nc.get_network_by_subnet_id(fake_subnet1.id),
fake_network)
def test_get_network_by_port_id(self):
nc = dhcp_agent.NetworkCache()
nc.put(fake_network)
self.assertEqual(nc.get_network_by_port_id(fake_port1.id),
fake_network)
def test_put_port(self):
fake_net = dhcp.NetModel(
True, dict(id='12345678-1234-5678-1234567890ab',
tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
subnets=[fake_subnet1],
ports=[fake_port1]))
nc = dhcp_agent.NetworkCache()
nc.put(fake_net)
nc.put_port(fake_port2)
self.assertEqual(len(nc.port_lookup), 2)
self.assertIn(fake_port2, fake_net.ports)
def test_put_port_existing(self):
fake_net = dhcp.NetModel(
True, dict(id='12345678-1234-5678-1234567890ab',
tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
subnets=[fake_subnet1],
ports=[fake_port1, fake_port2]))
nc = dhcp_agent.NetworkCache()
nc.put(fake_net)
nc.put_port(fake_port2)
self.assertEqual(len(nc.port_lookup), 2)
self.assertIn(fake_port2, fake_net.ports)
def test_remove_port_existing(self):
fake_net = dhcp.NetModel(
True, dict(id='12345678-1234-5678-1234567890ab',
tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
subnets=[fake_subnet1],
ports=[fake_port1, fake_port2]))
nc = dhcp_agent.NetworkCache()
nc.put(fake_net)
nc.remove_port(fake_port2)
self.assertEqual(len(nc.port_lookup), 1)
self.assertNotIn(fake_port2, fake_net.ports)
def test_get_port_by_id(self):
nc = dhcp_agent.NetworkCache()
nc.put(fake_network)
self.assertEqual(nc.get_port_by_id(fake_port1.id), fake_port1)
class FakePort1:
id = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
class FakeV4Subnet:
id = 'dddddddd-dddd-dddd-dddd-dddddddddddd'
ip_version = 4
cidr = '192.168.0.0/24'
gateway_ip = '192.168.0.1'
enable_dhcp = True
class FakeV4SubnetNoGateway:
id = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
ip_version = 4
cidr = '192.168.1.0/24'
gateway_ip = None
enable_dhcp = True
class FakeV4Network:
id = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
subnets = [FakeV4Subnet()]
ports = [FakePort1()]
namespace = 'qdhcp-aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
class FakeV4NetworkNoSubnet:
id = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
subnets = []
ports = []
class FakeV4NetworkNoGateway:
id = 'cccccccc-cccc-cccc-cccc-cccccccccccc'
subnets = [FakeV4SubnetNoGateway()]
ports = [FakePort1()]
class TestDeviceManager(base.BaseTestCase):
def setUp(self):
super(TestDeviceManager, self).setUp()
config.register_interface_driver_opts_helper(cfg.CONF)
config.register_use_namespaces_opts_helper(cfg.CONF)
cfg.CONF.register_opts(dhcp_agent.DhcpAgent.OPTS)
cfg.CONF.register_opts(dhcp.OPTS)
cfg.CONF.set_override('interface_driver',
'neutron.agent.linux.interface.NullDriver')
config.register_root_helper(cfg.CONF)
cfg.CONF.set_override('use_namespaces', True)
cfg.CONF.set_override('enable_isolated_metadata', True)
self.ensure_device_is_ready_p = mock.patch(
'neutron.agent.linux.ip_lib.ensure_device_is_ready')
self.ensure_device_is_ready = (self.ensure_device_is_ready_p.start())
self.dvr_cls_p = mock.patch('neutron.agent.linux.interface.NullDriver')
self.iproute_cls_p = mock.patch('neutron.agent.linux.'
'ip_lib.IpRouteCommand')
driver_cls = self.dvr_cls_p.start()
iproute_cls = self.iproute_cls_p.start()
self.mock_driver = mock.MagicMock()
self.mock_driver.DEV_NAME_LEN = (
interface.LinuxInterfaceDriver.DEV_NAME_LEN)
self.mock_iproute = mock.MagicMock()
driver_cls.return_value = self.mock_driver
iproute_cls.return_value = self.mock_iproute
def _test_setup_helper(self, device_is_ready, net=None, port=None):
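        # device_is_ready toggles whether the tap device already exists; when
        # it does not, the driver is additionally expected to plug() it.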
net = net or fake_network
port = port or fake_port1
plugin = mock.Mock()
plugin.create_dhcp_port.return_value = port or fake_port1
plugin.get_dhcp_port.return_value = port or fake_port1
self.ensure_device_is_ready.return_value = device_is_ready
self.mock_driver.get_device_name.return_value = 'tap12345678-12'
dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, plugin)
dh._set_default_route = mock.Mock()
interface_name = dh.setup(net)
self.assertEqual(interface_name, 'tap12345678-12')
plugin.assert_has_calls([
mock.call.create_dhcp_port(
{'port': {'name': '', 'admin_state_up': True,
'network_id': net.id, 'tenant_id': net.tenant_id,
'fixed_ips':
[{'subnet_id': fake_fixed_ip1.subnet_id}],
'device_id': mock.ANY}})])
expected_ips = ['172.9.9.9/24', '169.254.169.254/16']
expected = [
mock.call.get_device_name(port),
mock.call.init_l3(
'tap12345678-12',
expected_ips,
namespace=net.namespace)]
if not device_is_ready:
expected.insert(1,
mock.call.plug(net.id,
port.id,
'tap12345678-12',
'aa:bb:cc:dd:ee:ff',
namespace=net.namespace))
self.mock_driver.assert_has_calls(expected)
dh._set_default_route.assert_called_once_with(net, 'tap12345678-12')
def test_setup(self):
cfg.CONF.set_override('enable_metadata_network', False)
self._test_setup_helper(False)
cfg.CONF.set_override('enable_metadata_network', True)
self._test_setup_helper(False)
def test_setup_device_is_ready(self):
self._test_setup_helper(True)
def test_create_dhcp_port_raise_conflict(self):
plugin = mock.Mock()
dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, plugin)
plugin.create_dhcp_port.return_value = None
self.assertRaises(exceptions.Conflict,
dh.setup_dhcp_port,
fake_network)
def test_create_dhcp_port_create_new(self):
plugin = mock.Mock()
dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, plugin)
plugin.create_dhcp_port.return_value = fake_network.ports[0]
dh.setup_dhcp_port(fake_network)
plugin.assert_has_calls([
mock.call.create_dhcp_port(
{'port': {'name': '', 'admin_state_up': True,
'network_id':
fake_network.id, 'tenant_id': fake_network.tenant_id,
'fixed_ips':
[{'subnet_id': fake_fixed_ip1.subnet_id}],
'device_id': mock.ANY}})])
def test_create_dhcp_port_update_add_subnet(self):
plugin = mock.Mock()
dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, plugin)
fake_network_copy = copy.deepcopy(fake_network)
fake_network_copy.ports[0].device_id = dh.get_device_id(fake_network)
fake_network_copy.subnets[1].enable_dhcp = True
plugin.update_dhcp_port.return_value = fake_network.ports[0]
dh.setup_dhcp_port(fake_network_copy)
port_body = {'port': {
'network_id': fake_network.id,
'fixed_ips': [{'subnet_id': fake_fixed_ip1.subnet_id,
'ip_address': fake_fixed_ip1.ip_address},
{'subnet_id': fake_subnet2.id}]}}
plugin.assert_has_calls([
mock.call.update_dhcp_port(fake_network_copy.ports[0].id,
port_body)])
def test_update_dhcp_port_raises_conflict(self):
plugin = mock.Mock()
dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, plugin)
fake_network_copy = copy.deepcopy(fake_network)
fake_network_copy.ports[0].device_id = dh.get_device_id(fake_network)
fake_network_copy.subnets[1].enable_dhcp = True
plugin.update_dhcp_port.return_value = None
self.assertRaises(exceptions.Conflict,
dh.setup_dhcp_port,
fake_network_copy)
def test_create_dhcp_port_no_update_or_create(self):
plugin = mock.Mock()
dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, plugin)
fake_network_copy = copy.deepcopy(fake_network)
fake_network_copy.ports[0].device_id = dh.get_device_id(fake_network)
dh.setup_dhcp_port(fake_network_copy)
        self.assertFalse(plugin.create_dhcp_port.called)
self.assertFalse(plugin.update_dhcp_port.called)
def test_destroy(self):
fake_net = dhcp.NetModel(
True, dict(id='12345678-1234-5678-1234567890ab',
tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa'))
fake_port = dhcp.DictModel(
dict(id='12345678-1234-aaaa-1234567890ab',
mac_address='aa:bb:cc:dd:ee:ff'))
with mock.patch('neutron.agent.linux.interface.NullDriver') as dvr_cls:
mock_driver = mock.MagicMock()
mock_driver.get_device_name.return_value = 'tap12345678-12'
dvr_cls.return_value = mock_driver
plugin = mock.Mock()
plugin.get_dhcp_port.return_value = fake_port
dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, plugin)
dh.destroy(fake_net, 'tap12345678-12')
dvr_cls.assert_called_once_with(cfg.CONF)
mock_driver.assert_has_calls(
[mock.call.unplug('tap12345678-12',
namespace='qdhcp-' + fake_net.id)])
plugin.assert_has_calls(
[mock.call.release_dhcp_port(fake_net.id, mock.ANY)])
def test_get_interface_name(self):
fake_net = dhcp.NetModel(
True, dict(id='12345678-1234-5678-1234567890ab',
tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa'))
fake_port = dhcp.DictModel(
dict(id='12345678-1234-aaaa-1234567890ab',
mac_address='aa:bb:cc:dd:ee:ff'))
with mock.patch('neutron.agent.linux.interface.NullDriver') as dvr_cls:
mock_driver = mock.MagicMock()
mock_driver.get_device_name.return_value = 'tap12345678-12'
dvr_cls.return_value = mock_driver
plugin = mock.Mock()
plugin.get_dhcp_port.return_value = fake_port
dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, plugin)
dh.get_interface_name(fake_net, fake_port)
dvr_cls.assert_called_once_with(cfg.CONF)
mock_driver.assert_has_calls(
[mock.call.get_device_name(fake_port)])
self.assertEqual(len(plugin.mock_calls), 0)
def test_get_device_id(self):
fake_net = dhcp.NetModel(
True, dict(id='12345678-1234-5678-1234567890ab',
tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa'))
expected = ('dhcp1ae5f96c-c527-5079-82ea-371a01645457-12345678-1234-'
'5678-1234567890ab')
        with mock.patch('uuid.uuid5') as uuid5:
            uuid5.return_value = '1ae5f96c-c527-5079-82ea-371a01645457'
            dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
            self.assertEqual(dh.get_device_id(fake_net), expected)
            uuid5.assert_called_once_with(uuid.NAMESPACE_DNS, cfg.CONF.host)
def test_update(self):
# Try with namespaces and no metadata network
cfg.CONF.set_override('use_namespaces', True)
cfg.CONF.set_override('enable_metadata_network', False)
dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
dh._set_default_route = mock.Mock()
network = mock.Mock()
dh.update(network, 'ns-12345678-12')
dh._set_default_route.assert_called_once_with(network,
'ns-12345678-12')
# No namespaces, shouldn't set default route.
cfg.CONF.set_override('use_namespaces', False)
cfg.CONF.set_override('enable_metadata_network', False)
dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
dh._set_default_route = mock.Mock()
dh.update(FakeV4Network(), 'tap12345678-12')
self.assertFalse(dh._set_default_route.called)
# Meta data network enabled, don't interfere with its gateway.
cfg.CONF.set_override('use_namespaces', True)
cfg.CONF.set_override('enable_metadata_network', True)
dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
dh._set_default_route = mock.Mock()
dh.update(FakeV4Network(), 'ns-12345678-12')
self.assertTrue(dh._set_default_route.called)
# For completeness
cfg.CONF.set_override('use_namespaces', False)
cfg.CONF.set_override('enable_metadata_network', True)
dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
dh._set_default_route = mock.Mock()
dh.update(FakeV4Network(), 'ns-12345678-12')
self.assertFalse(dh._set_default_route.called)
def test_set_default_route(self):
dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
device = mock.Mock()
mock_IPDevice.return_value = device
device.route.get_gateway.return_value = None
# Basic one subnet with gateway.
network = FakeV4Network()
dh._set_default_route(network, 'tap-name')
self.assertEqual(device.route.get_gateway.call_count, 1)
self.assertFalse(device.route.delete_gateway.called)
device.route.add_gateway.assert_called_once_with('192.168.0.1')
def test_set_default_route_no_subnet(self):
dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
device = mock.Mock()
mock_IPDevice.return_value = device
device.route.get_gateway.return_value = None
network = FakeV4NetworkNoSubnet()
network.namespace = 'qdhcp-1234'
dh._set_default_route(network, 'tap-name')
self.assertEqual(device.route.get_gateway.call_count, 1)
self.assertFalse(device.route.delete_gateway.called)
self.assertFalse(device.route.add_gateway.called)
def test_set_default_route_no_subnet_delete_gateway(self):
dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
device = mock.Mock()
mock_IPDevice.return_value = device
device.route.get_gateway.return_value = dict(gateway='192.168.0.1')
network = FakeV4NetworkNoSubnet()
network.namespace = 'qdhcp-1234'
dh._set_default_route(network, 'tap-name')
self.assertEqual(device.route.get_gateway.call_count, 1)
device.route.delete_gateway.assert_called_once_with('192.168.0.1')
self.assertFalse(device.route.add_gateway.called)
def test_set_default_route_no_gateway(self):
dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
device = mock.Mock()
mock_IPDevice.return_value = device
device.route.get_gateway.return_value = dict(gateway='192.168.0.1')
network = FakeV4NetworkNoGateway()
network.namespace = 'qdhcp-1234'
dh._set_default_route(network, 'tap-name')
self.assertEqual(device.route.get_gateway.call_count, 1)
device.route.delete_gateway.assert_called_once_with('192.168.0.1')
self.assertFalse(device.route.add_gateway.called)
def test_set_default_route_do_nothing(self):
dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
device = mock.Mock()
mock_IPDevice.return_value = device
device.route.get_gateway.return_value = dict(gateway='192.168.0.1')
network = FakeV4Network()
dh._set_default_route(network, 'tap-name')
self.assertEqual(device.route.get_gateway.call_count, 1)
self.assertFalse(device.route.delete_gateway.called)
self.assertFalse(device.route.add_gateway.called)
def test_set_default_route_change_gateway(self):
dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
device = mock.Mock()
mock_IPDevice.return_value = device
device.route.get_gateway.return_value = dict(gateway='192.168.0.2')
network = FakeV4Network()
dh._set_default_route(network, 'tap-name')
self.assertEqual(device.route.get_gateway.call_count, 1)
self.assertFalse(device.route.delete_gateway.called)
device.route.add_gateway.assert_called_once_with('192.168.0.1')
def test_set_default_route_two_subnets(self):
# Try two subnets. Should set gateway from the first.
dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
device = mock.Mock()
mock_IPDevice.return_value = device
device.route.get_gateway.return_value = None
network = FakeV4Network()
subnet2 = FakeV4Subnet()
subnet2.gateway_ip = '192.168.1.1'
network.subnets = [subnet2, FakeV4Subnet()]
dh._set_default_route(network, 'tap-name')
self.assertEqual(device.route.get_gateway.call_count, 1)
self.assertFalse(device.route.delete_gateway.called)
device.route.add_gateway.assert_called_once_with('192.168.1.1')
class TestDictModel(base.BaseTestCase):
def test_basic_dict(self):
d = dict(a=1, b=2)
m = dhcp.DictModel(d)
self.assertEqual(m.a, 1)
self.assertEqual(m.b, 2)
def test_dict_has_sub_dict(self):
d = dict(a=dict(b=2))
m = dhcp.DictModel(d)
self.assertEqual(m.a.b, 2)
def test_dict_contains_list(self):
d = dict(a=[1, 2])
m = dhcp.DictModel(d)
self.assertEqual(m.a, [1, 2])
def test_dict_contains_list_of_dicts(self):
d = dict(a=[dict(b=2), dict(c=3)])
m = dhcp.DictModel(d)
self.assertEqual(m.a[0].b, 2)
self.assertEqual(m.a[1].c, 3)
class TestNetModel(base.BaseTestCase):
def test_ns_name(self):
network = dhcp.NetModel(True, {'id': 'foo'})
self.assertEqual(network.namespace, 'qdhcp-foo')
def test_ns_name_false_namespace(self):
network = dhcp.NetModel(False, {'id': 'foo'})
self.assertIsNone(network.namespace)
def test_ns_name_none_namespace(self):
network = dhcp.NetModel(None, {'id': 'foo'})
self.assertIsNone(network.namespace)
| shakamunyi/neutron-vrrp | neutron/tests/unit/test_dhcp_agent.py | Python | apache-2.0 | 63,987 |
#! /usr/bin/python
# -*- coding: utf-8 -*-
from Bio import SeqIO
from shutil import copyfile
import os, argparse, sys, re, subprocess
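# Example invocation (file names are illustrative):
#   python VarCaller.py -1 reads_R1.fastq -2 reads_R2.fastq -r ref.fasta -o results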
def main():
parser = argparse.ArgumentParser(description="The Program will call variants given Illumina Data and a reference")
parser.add_argument('-1', action="store", help="The path to a set of reads", dest='r1')
parser.add_argument('-2', action="store", help="The path to a second set of reads if one exists.", dest='r2')
parser.add_argument('-r', action="store", help="The path to your reference genome", dest='ref')
parser.add_argument('-o', action="store", help="The path to where you would like an output file generated", dest="out")
args = parser.parse_args()
r1 = args.r1
r2 = args.r2
ref = args.ref
out = args.out
print "\n"
if ref == None:
print "No reference file found, try -h for help!\n"
exit(1)
    if not os.path.isfile(ref):
        print "The reference file was not found\n"
        exit(1)
    if r1 is None and r2 is None:
        print "No reads were found!\n"
        exit(1)
IsR1=False
IsR2=False
if r1 is not None:
IsR1 = os.path.isfile(r1.encode('utf-8'))
if r2 is not None:
IsR2 = os.path.isfile(r2.encode('utf-8'))
if IsR1 == False and IsR2 == False:
print "The path to your read(s) does not exist!"
exit(1)
    if out is None:
        print "No output path specified\n"
        exit(1)
    if os.path.isdir(out):
        print "Could not create the output directory, it already exists!\n"
        exit(1)
try:
os.mkdir(out)
except:
print "The output path could not be created\n"
exit(1)
print "Reference Found\n"
print "Read(s) Found\n"
print "Output Folder Sucessfully Created\n"
ReferenceCreator(r1, r2, out, ref, IsR1, IsR2)
def ReferenceCreator(r1, r2, out, ref, IsR1, IsR2):
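    # Normalise the reference to FASTA: GenBank input is converted, FASTA
    # input is copied, and the samtools/picard indexes are built alongside it.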
    is_fasta = ref.endswith('.fasta') or ref.endswith('.fa')
    if not is_fasta:
        print "Attempting to convert reference to a fasta format\n"
        with open(ref, "rU") as input_handle, open(out+"/reference.fasta", 'w') as output_handle:
            sequences = SeqIO.parse(input_handle, "genbank")
            SeqIO.write(sequences, output_handle, "fasta")
    else:
        print "A copy of your file has been brought to your output directory for manipulation\n"
        copyfile(ref, out+"/reference.fasta")
    reference = out+"/reference.fasta"
    print "Creating the reference sequence index\n"
subprocess.check_call("samtools faidx "+reference, shell=True)
print "Creating a reference sequence dictionary\n"
subprocess.check_call("picard-tools CreateSequenceDictionary REFERENCE="+reference+" OUTPUT="+out+"/reference.dict", shell=True)
    BWA(r1, r2, out, ref, IsR1, IsR2, reference)
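# Pipeline: bwa index/mem -> samtools view/sort/index -> samtools mpileup
# piped into bcftools call -> bcftools filter on QUAL>20.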
def BWA(r1, r2, out, ref, IsR1, IsR2, reference):
print "Giving BWA the Index\n"
bwa_index= out+"/bwa_index"
subprocess.check_call("bwa index -p "+ bwa_index + " -a is "+ reference, shell=True)
print "Mapping Reads to Reference with BWA mem\n"
if IsR1 == True and IsR2 == True:
subprocess.check_call("bwa mem -t 4 " +bwa_index+" "+r1+" "+r2+" > "+out+"/aln_reads.sam", shell=True)
if IsR1 == True and IsR2 == False:
subprocess.check_call("bwa mem -t 4 " +bwa_index+" "+r1+" > "+out+"/aln_reads.sam", shell=True)
if IsR1 == False and IsR2 == True:
subprocess.check_call("bwa mem -t 4 " +bwa_index+" "+r2+" > "+out+"/aln_reads.sam", shell=True)
print "Converting sam output to bam output\n"
subprocess.check_call("samtools view -S "+out+"/aln_reads.sam -b -o "+out+"/aln_reads.bam -@ 8", shell=True)
print "Sorting bam output\n"
subprocess.check_call("samtools sort -@ 8 "+out+"/aln_reads.bam -o "+out+"/aln_sorted_reads.bam", shell=True)
print "Indexing the bam file\n"
subprocess.check_call("samtools index "+out+"/aln_sorted_reads.bam", shell=True)
print "Creating a pileup using samtools mpileup, which then pipes stdout to bcftools to call variants\n"
subprocess.check_call("samtools mpileup -ugf "+reference+" "+out+"/aln_sorted_reads.bam | bcftools call -vmO z -o "+out+"/raw_snps.vcf.gz", shell=True)
subprocess.check_call("samtools mpileup -f "+reference+" -s "+out+"/aln_sorted_reads.bam > "+out+"/mpileup.tsv", shell=True)
print "Filtering variant calls. They will appear in filtered_snps.vcf\n"
subprocess.check_call("bcftools filter -O z -o "+out+"/filtered_snps.vcf.gz -s LOWQUAL -i'%QUAL>20' " +out+"/raw_snps.vcf.gz", shell=True)
print "Opening your output with gunzip\n"
subprocess.check_call("gunzip "+out+"/raw_snps.vcf.gz", shell=True)
subprocess.check_call("gunzip "+out+"/filtered_snps.vcf", shell=True)
print "Program Complete! Thanks for using!\n"
if __name__ == "__main__":
main()
| nate422/VarCaller | VarCaller.py | Python | mit | 4,904 |
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import json
import unittest
import doctest
import mock
import multiprocessing
import os
import re
import signal
import sqlalchemy
import subprocess
import tempfile
import warnings
from datetime import timedelta
from dateutil.relativedelta import relativedelta
from email.mime.application import MIMEApplication
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from numpy.testing import assert_array_almost_equal
from tempfile import NamedTemporaryFile
from time import sleep
from airflow import configuration
from airflow.executors import SequentialExecutor
from airflow.models import Variable, TaskInstance
from airflow import jobs, models, DAG, utils, macros, settings, exceptions
from airflow.models import BaseOperator
from airflow.models.connection import Connection
from airflow.models.taskfail import TaskFail
from airflow.operators.bash_operator import BashOperator
from airflow.operators.check_operator import CheckOperator, ValueCheckOperator
from airflow.operators.dagrun_operator import TriggerDagRunOperator
from airflow.operators.python_operator import PythonOperator
from airflow.operators.dummy_operator import DummyOperator
from airflow.hooks.base_hook import BaseHook
from airflow.hooks.sqlite_hook import SqliteHook
from airflow.bin import cli
from airflow.settings import Session
from airflow.utils import timezone
from airflow.utils.timezone import datetime
from airflow.utils.state import State
from airflow.utils.dates import days_ago, infer_time_unit, round_time, scale_time_units
from airflow.exceptions import AirflowException
from airflow.configuration import AirflowConfigException, run_command
from pendulum import utcnow
import six
NUM_EXAMPLE_DAGS = 18
DEV_NULL = '/dev/null'
TEST_DAG_FOLDER = os.path.join(
os.path.dirname(os.path.realpath(__file__)), 'dags')
DEFAULT_DATE = datetime(2015, 1, 1)
DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat()
DEFAULT_DATE_DS = DEFAULT_DATE_ISO[:10]
TEST_DAG_ID = 'unit_tests'
EXAMPLE_DAG_DEFAULT_DATE = days_ago(2)
try:
import cPickle as pickle
except ImportError:
# Python 3
import pickle
class OperatorSubclass(BaseOperator):
"""
An operator to test template substitution
"""
template_fields = ['some_templated_field']
def __init__(self, some_templated_field, *args, **kwargs):
super(OperatorSubclass, self).__init__(*args, **kwargs)
self.some_templated_field = some_templated_field
    def execute(self, *args, **kwargs):
        pass
class CoreTest(unittest.TestCase):
default_scheduler_args = {"num_runs": 1}
def setUp(self):
configuration.conf.load_test_config()
self.dagbag = models.DagBag(
dag_folder=DEV_NULL, include_examples=True)
self.args = {'owner': 'airflow', 'start_date': DEFAULT_DATE}
self.dag = DAG(TEST_DAG_ID, default_args=self.args)
self.dag_bash = self.dagbag.dags['example_bash_operator']
self.runme_0 = self.dag_bash.get_task('runme_0')
self.run_after_loop = self.dag_bash.get_task('run_after_loop')
self.run_this_last = self.dag_bash.get_task('run_this_last')
def tearDown(self):
if os.environ.get('KUBERNETES_VERSION') is None:
session = Session()
session.query(models.TaskInstance).filter_by(
dag_id=TEST_DAG_ID).delete()
session.query(TaskFail).filter_by(
dag_id=TEST_DAG_ID).delete()
session.commit()
session.close()
def test_schedule_dag_no_previous_runs(self):
"""
Tests scheduling a dag with no previous runs
"""
dag = DAG(TEST_DAG_ID + 'test_schedule_dag_no_previous_runs')
dag.add_task(models.BaseOperator(
task_id="faketastic",
owner='Also fake',
start_date=datetime(2015, 1, 2, 0, 0)))
dag_run = jobs.SchedulerJob(**self.default_scheduler_args).create_dag_run(dag)
self.assertIsNotNone(dag_run)
self.assertEqual(dag.dag_id, dag_run.dag_id)
self.assertIsNotNone(dag_run.run_id)
self.assertNotEqual('', dag_run.run_id)
self.assertEqual(
datetime(2015, 1, 2, 0, 0),
dag_run.execution_date,
msg='dag_run.execution_date did not match expectation: {0}'
.format(dag_run.execution_date)
)
self.assertEqual(State.RUNNING, dag_run.state)
self.assertFalse(dag_run.external_trigger)
dag.clear()
def test_schedule_dag_relativedelta(self):
"""
Tests scheduling a dag with a relativedelta schedule_interval
"""
delta = relativedelta(hours=+1)
dag = DAG(TEST_DAG_ID + 'test_schedule_dag_relativedelta',
schedule_interval=delta)
dag.add_task(models.BaseOperator(
task_id="faketastic",
owner='Also fake',
start_date=datetime(2015, 1, 2, 0, 0)))
dag_run = jobs.SchedulerJob(**self.default_scheduler_args).create_dag_run(dag)
self.assertIsNotNone(dag_run)
self.assertEqual(dag.dag_id, dag_run.dag_id)
self.assertIsNotNone(dag_run.run_id)
self.assertNotEqual('', dag_run.run_id)
self.assertEqual(
datetime(2015, 1, 2, 0, 0),
dag_run.execution_date,
msg='dag_run.execution_date did not match expectation: {0}'
.format(dag_run.execution_date)
)
self.assertEqual(State.RUNNING, dag_run.state)
self.assertFalse(dag_run.external_trigger)
dag_run2 = jobs.SchedulerJob(**self.default_scheduler_args).create_dag_run(dag)
self.assertIsNotNone(dag_run2)
self.assertEqual(dag.dag_id, dag_run2.dag_id)
self.assertIsNotNone(dag_run2.run_id)
self.assertNotEqual('', dag_run2.run_id)
self.assertEqual(
datetime(2015, 1, 2, 0, 0) + delta,
dag_run2.execution_date,
msg='dag_run2.execution_date did not match expectation: {0}'
.format(dag_run2.execution_date)
)
self.assertEqual(State.RUNNING, dag_run2.state)
self.assertFalse(dag_run2.external_trigger)
dag.clear()
    def test_schedule_dag_fake_scheduled_previous(self):
        """
        Test scheduling a dag when a prior DagRun already exists with the same
        run_id that the next run would be assigned
        """
delta = timedelta(hours=1)
dag = DAG(TEST_DAG_ID + 'test_schedule_dag_fake_scheduled_previous',
schedule_interval=delta,
start_date=DEFAULT_DATE)
dag.add_task(models.BaseOperator(
task_id="faketastic",
owner='Also fake',
start_date=DEFAULT_DATE))
scheduler = jobs.SchedulerJob(**self.default_scheduler_args)
dag.create_dagrun(run_id=models.DagRun.id_for_date(DEFAULT_DATE),
execution_date=DEFAULT_DATE,
state=State.SUCCESS,
external_trigger=True)
dag_run = scheduler.create_dag_run(dag)
self.assertIsNotNone(dag_run)
self.assertEqual(dag.dag_id, dag_run.dag_id)
self.assertIsNotNone(dag_run.run_id)
self.assertNotEqual('', dag_run.run_id)
self.assertEqual(
DEFAULT_DATE + delta,
dag_run.execution_date,
msg='dag_run.execution_date did not match expectation: {0}'
.format(dag_run.execution_date)
)
self.assertEqual(State.RUNNING, dag_run.state)
self.assertFalse(dag_run.external_trigger)
def test_schedule_dag_once(self):
"""
Tests scheduling a dag scheduled for @once - should be scheduled the first time
it is called, and not scheduled the second.
"""
dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once')
dag.schedule_interval = '@once'
dag.add_task(models.BaseOperator(
task_id="faketastic",
owner='Also fake',
start_date=datetime(2015, 1, 2, 0, 0)))
dag_run = jobs.SchedulerJob(**self.default_scheduler_args).create_dag_run(dag)
dag_run2 = jobs.SchedulerJob(**self.default_scheduler_args).create_dag_run(dag)
self.assertIsNotNone(dag_run)
self.assertIsNone(dag_run2)
dag.clear()
def test_fractional_seconds(self):
"""
Tests if fractional seconds are stored in the database
"""
dag = DAG(TEST_DAG_ID + 'test_fractional_seconds')
dag.schedule_interval = '@once'
dag.add_task(models.BaseOperator(
task_id="faketastic",
owner='Also fake',
start_date=datetime(2015, 1, 2, 0, 0)))
start_date = timezone.utcnow()
run = dag.create_dagrun(
run_id='test_' + start_date.isoformat(),
execution_date=start_date,
start_date=start_date,
state=State.RUNNING,
external_trigger=False
)
run.refresh_from_db()
self.assertEqual(start_date, run.execution_date,
"dag run execution_date loses precision")
        self.assertEqual(start_date, run.start_date,
                         "dag run start_date loses precision")
def test_schedule_dag_start_end_dates(self):
"""
Tests that an attempt to schedule a task after the Dag's end_date
does not succeed.
"""
delta = timedelta(hours=1)
runs = 3
start_date = DEFAULT_DATE
end_date = start_date + (runs - 1) * delta
dag = DAG(TEST_DAG_ID + 'test_schedule_dag_start_end_dates',
start_date=start_date,
end_date=end_date,
schedule_interval=delta)
dag.add_task(models.BaseOperator(task_id='faketastic',
owner='Also fake'))
# Create and schedule the dag runs
dag_runs = []
scheduler = jobs.SchedulerJob(**self.default_scheduler_args)
for i in range(runs):
dag_runs.append(scheduler.create_dag_run(dag))
additional_dag_run = scheduler.create_dag_run(dag)
for dag_run in dag_runs:
self.assertIsNotNone(dag_run)
self.assertIsNone(additional_dag_run)
def test_schedule_dag_no_end_date_up_to_today_only(self):
"""
Tests that a Dag created without an end_date can only be scheduled up
to and including the current datetime.
For example, if today is 2016-01-01 and we are scheduling from a
start_date of 2015-01-01, only jobs up to, but not including
2016-01-01 should be scheduled.
"""
session = settings.Session()
delta = timedelta(days=1)
now = utcnow()
start_date = now.subtract(weeks=1)
runs = (now - start_date).days
dag = DAG(TEST_DAG_ID + 'test_schedule_dag_no_end_date_up_to_today_only',
start_date=start_date,
schedule_interval=delta)
dag.add_task(models.BaseOperator(task_id='faketastic',
owner='Also fake'))
dag_runs = []
scheduler = jobs.SchedulerJob(**self.default_scheduler_args)
for i in range(runs):
dag_run = scheduler.create_dag_run(dag)
dag_runs.append(dag_run)
# Mark the DagRun as complete
dag_run.state = State.SUCCESS
session.merge(dag_run)
session.commit()
# Attempt to schedule an additional dag run (for 2016-01-01)
additional_dag_run = scheduler.create_dag_run(dag)
for dag_run in dag_runs:
self.assertIsNotNone(dag_run)
self.assertIsNone(additional_dag_run)
def test_confirm_unittest_mod(self):
self.assertTrue(configuration.conf.get('core', 'unit_test_mode'))
def test_pickling(self):
dp = self.dag.pickle()
self.assertEqual(dp.pickle.dag_id, self.dag.dag_id)
def test_rich_comparison_ops(self):
class DAGsubclass(DAG):
pass
dag_eq = DAG(TEST_DAG_ID, default_args=self.args)
dag_diff_load_time = DAG(TEST_DAG_ID, default_args=self.args)
dag_diff_name = DAG(TEST_DAG_ID + '_neq', default_args=self.args)
dag_subclass = DAGsubclass(TEST_DAG_ID, default_args=self.args)
dag_subclass_diff_name = DAGsubclass(
TEST_DAG_ID + '2', default_args=self.args)
for d in [dag_eq, dag_diff_name, dag_subclass, dag_subclass_diff_name]:
d.last_loaded = self.dag.last_loaded
# test identity equality
self.assertEqual(self.dag, self.dag)
# test dag (in)equality based on _comps
self.assertEqual(dag_eq, self.dag)
self.assertNotEqual(dag_diff_name, self.dag)
self.assertNotEqual(dag_diff_load_time, self.dag)
# test dag inequality based on type even if _comps happen to match
self.assertNotEqual(dag_subclass, self.dag)
# a dag should equal an unpickled version of itself
d = pickle.dumps(self.dag)
self.assertEqual(pickle.loads(d), self.dag)
# dags are ordered based on dag_id no matter what the type is
self.assertLess(self.dag, dag_diff_name)
self.assertGreater(self.dag, dag_diff_load_time)
self.assertLess(self.dag, dag_subclass_diff_name)
# greater than should have been created automatically by functools
self.assertGreater(dag_diff_name, self.dag)
# hashes are non-random and match equality
self.assertEqual(hash(self.dag), hash(self.dag))
self.assertEqual(hash(dag_eq), hash(self.dag))
self.assertNotEqual(hash(dag_diff_name), hash(self.dag))
self.assertNotEqual(hash(dag_subclass), hash(self.dag))
def test_check_operators(self):
conn_id = "sqlite_default"
captainHook = BaseHook.get_hook(conn_id=conn_id)
captainHook.run("CREATE TABLE operator_test_table (a, b)")
captainHook.run("insert into operator_test_table values (1,2)")
t = CheckOperator(
task_id='check',
sql="select count(*) from operator_test_table",
conn_id=conn_id,
dag=self.dag)
t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
t = ValueCheckOperator(
task_id='value_check',
pass_value=95,
tolerance=0.1,
conn_id=conn_id,
sql="SELECT 100",
dag=self.dag)
t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
captainHook.run("drop table operator_test_table")
def test_clear_api(self):
task = self.dag_bash.tasks[0]
task.clear(
start_date=DEFAULT_DATE, end_date=DEFAULT_DATE,
upstream=True, downstream=True)
ti = models.TaskInstance(task=task, execution_date=DEFAULT_DATE)
ti.are_dependents_done()
def test_illegal_args(self):
"""
Tests that Operators reject illegal arguments
"""
with warnings.catch_warnings(record=True) as w:
BashOperator(
task_id='test_illegal_args',
bash_command='echo success',
dag=self.dag,
illegal_argument_1234='hello?')
self.assertTrue(
issubclass(w[0].category, PendingDeprecationWarning))
self.assertIn(
('Invalid arguments were passed to BashOperator '
'(task_id: test_illegal_args).'),
w[0].message.args[0])
def test_bash_operator(self):
t = BashOperator(
task_id='test_bash_operator',
bash_command="echo success",
dag=self.dag)
t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_bash_operator_multi_byte_output(self):
t = BashOperator(
task_id='test_multi_byte_bash_operator',
bash_command=u"echo \u2600",
dag=self.dag,
output_encoding='utf-8')
t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_bash_operator_kill(self):
import psutil
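        # Build a sleep duration unique to this run (suffixed with our pid) so
        # the child process can be located unambiguously by its command line.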
sleep_time = "100%d" % os.getpid()
t = BashOperator(
task_id='test_bash_operator_kill',
execution_timeout=timedelta(seconds=1),
bash_command="/bin/bash -c 'sleep %s'" % sleep_time,
dag=self.dag)
self.assertRaises(
exceptions.AirflowTaskTimeout,
t.run,
start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
sleep(2)
pid = -1
for proc in psutil.process_iter():
if proc.cmdline() == ['sleep', sleep_time]:
pid = proc.pid
if pid != -1:
os.kill(pid, signal.SIGTERM)
self.fail("BashOperator's subprocess still running after stopping on timeout!")
def test_on_failure_callback(self):
# Annoying workaround for nonlocal not existing in python 2
data = {'called': False}
def check_failure(context, test_case=self):
data['called'] = True
error = context.get('exception')
test_case.assertIsInstance(error, AirflowException)
t = BashOperator(
task_id='check_on_failure_callback',
bash_command="exit 1",
dag=self.dag,
on_failure_callback=check_failure)
self.assertRaises(
exceptions.AirflowException,
t.run,
start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
self.assertTrue(data['called'])
def test_trigger_dagrun(self):
        def trigga(context, obj):
            return obj
t = TriggerDagRunOperator(
task_id='test_trigger_dagrun',
trigger_dag_id='example_bash_operator',
python_callable=trigga,
dag=self.dag)
t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_dryrun(self):
t = BashOperator(
task_id='test_dryrun',
bash_command="echo success",
dag=self.dag)
t.dry_run()
def test_sqlite(self):
import airflow.operators.sqlite_operator
t = airflow.operators.sqlite_operator.SqliteOperator(
task_id='time_sqlite',
sql="CREATE TABLE IF NOT EXISTS unitest (dummy VARCHAR(20))",
dag=self.dag)
t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_timeout(self):
t = PythonOperator(
task_id='test_timeout',
execution_timeout=timedelta(seconds=1),
python_callable=lambda: sleep(5),
dag=self.dag)
self.assertRaises(
exceptions.AirflowTaskTimeout,
t.run,
start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_python_op(self):
def test_py_op(templates_dict, ds, **kwargs):
if not templates_dict['ds'] == ds:
raise Exception("failure")
t = PythonOperator(
task_id='test_py_op',
provide_context=True,
python_callable=test_py_op,
templates_dict={'ds': "{{ ds }}"},
dag=self.dag)
t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_complex_template(self):
def verify_templated_field(context):
self.assertEqual(context['ti'].task.some_templated_field['bar'][1],
context['ds'])
t = OperatorSubclass(
task_id='test_complex_template',
some_templated_field={
'foo': '123',
'bar': ['baz', '{{ ds }}']
},
dag=self.dag)
t.execute = verify_templated_field
t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_template_with_variable(self):
"""
Test the availability of variables in templates
"""
val = {
'test_value': 'a test value'
}
Variable.set("a_variable", val['test_value'])
def verify_templated_field(context):
self.assertEqual(context['ti'].task.some_templated_field,
val['test_value'])
t = OperatorSubclass(
task_id='test_complex_template',
some_templated_field='{{ var.value.a_variable }}',
dag=self.dag)
t.execute = verify_templated_field
t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_template_with_json_variable(self):
"""
Test the availability of variables (serialized as JSON) in templates
"""
val = {
'test_value': {'foo': 'bar', 'obj': {'v1': 'yes', 'v2': 'no'}}
}
Variable.set("a_variable", val['test_value'], serialize_json=True)
def verify_templated_field(context):
self.assertEqual(context['ti'].task.some_templated_field,
val['test_value']['obj']['v2'])
t = OperatorSubclass(
task_id='test_complex_template',
some_templated_field='{{ var.json.a_variable.obj.v2 }}',
dag=self.dag)
t.execute = verify_templated_field
t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_template_with_json_variable_as_value(self):
"""
Test the availability of variables (serialized as JSON) in templates, but
accessed as a value
"""
val = {
'test_value': {'foo': 'bar'}
}
Variable.set("a_variable", val['test_value'], serialize_json=True)
def verify_templated_field(context):
self.assertEqual(context['ti'].task.some_templated_field,
u'{"foo": "bar"}')
t = OperatorSubclass(
task_id='test_complex_template',
some_templated_field='{{ var.value.a_variable }}',
dag=self.dag)
t.execute = verify_templated_field
t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_template_non_bool(self):
"""
Test templates can handle objects with no sense of truthiness
"""
class NonBoolObject(object):
def __len__(self):
return NotImplemented
def __bool__(self):
return NotImplemented
t = OperatorSubclass(
task_id='test_bad_template_obj',
some_templated_field=NonBoolObject(),
dag=self.dag)
t.resolve_template_files()
def test_task_get_template(self):
TI = models.TaskInstance
ti = TI(
task=self.runme_0, execution_date=DEFAULT_DATE)
ti.dag = self.dag_bash
ti.run(ignore_ti_state=True)
context = ti.get_template_context()
# DEFAULT DATE is 2015-01-01
self.assertEqual(context['ds'], '2015-01-01')
self.assertEqual(context['ds_nodash'], '20150101')
# next_ds is 2015-01-02 as the dag interval is daily
self.assertEqual(context['next_ds'], '2015-01-02')
self.assertEqual(context['next_ds_nodash'], '20150102')
# prev_ds is 2014-12-31 as the dag interval is daily
self.assertEqual(context['prev_ds'], '2014-12-31')
self.assertEqual(context['prev_ds_nodash'], '20141231')
self.assertEqual(context['ts'], '2015-01-01T00:00:00+00:00')
self.assertEqual(context['ts_nodash'], '20150101T000000')
self.assertEqual(context['ts_nodash_with_tz'], '20150101T000000+0000')
self.assertEqual(context['yesterday_ds'], '2014-12-31')
self.assertEqual(context['yesterday_ds_nodash'], '20141231')
self.assertEqual(context['tomorrow_ds'], '2015-01-02')
self.assertEqual(context['tomorrow_ds_nodash'], '20150102')
def test_import_examples(self):
self.assertEqual(len(self.dagbag.dags), NUM_EXAMPLE_DAGS)
def test_local_task_job(self):
TI = models.TaskInstance
ti = TI(
task=self.runme_0, execution_date=DEFAULT_DATE)
job = jobs.LocalTaskJob(task_instance=ti, ignore_ti_state=True)
job.run()
def test_raw_job(self):
TI = models.TaskInstance
ti = TI(
task=self.runme_0, execution_date=DEFAULT_DATE)
ti.dag = self.dag_bash
ti.run(ignore_ti_state=True)
def test_doctests(self):
modules = [utils, macros]
for mod in modules:
failed, tests = doctest.testmod(mod)
if failed:
raise Exception("Failed a doctest")
def test_variable_set_get_round_trip(self):
Variable.set("tested_var_set_id", "Monday morning breakfast")
self.assertEqual("Monday morning breakfast", Variable.get("tested_var_set_id"))
def test_variable_set_get_round_trip_json(self):
value = {"a": 17, "b": 47}
Variable.set("tested_var_set_id", value, serialize_json=True)
self.assertEqual(value, Variable.get("tested_var_set_id", deserialize_json=True))
def test_get_non_existing_var_should_return_default(self):
default_value = "some default val"
self.assertEqual(default_value, Variable.get("thisIdDoesNotExist",
default_var=default_value))
def test_get_non_existing_var_should_raise_key_error(self):
with self.assertRaises(KeyError):
Variable.get("thisIdDoesNotExist")
def test_get_non_existing_var_with_none_default_should_return_none(self):
self.assertIsNone(Variable.get("thisIdDoesNotExist", default_var=None))
def test_get_non_existing_var_should_not_deserialize_json_default(self):
default_value = "}{ this is a non JSON default }{"
self.assertEqual(default_value, Variable.get("thisIdDoesNotExist",
default_var=default_value,
deserialize_json=True))
def test_variable_setdefault_round_trip(self):
key = "tested_var_setdefault_1_id"
value = "Monday morning breakfast in Paris"
Variable.setdefault(key, value)
self.assertEqual(value, Variable.get(key))
def test_variable_setdefault_round_trip_json(self):
key = "tested_var_setdefault_2_id"
        value = {"city": 'Paris', "Happiness": True}
Variable.setdefault(key, value, deserialize_json=True)
self.assertEqual(value, Variable.get(key, deserialize_json=True))
def test_variable_setdefault_existing_json(self):
key = "tested_var_setdefault_2_id"
        value = {"city": 'Paris', "Happiness": True}
Variable.set(key, value, serialize_json=True)
val = Variable.setdefault(key, value, deserialize_json=True)
# Check the returned value, and the stored value are handled correctly.
self.assertEqual(value, val)
self.assertEqual(value, Variable.get(key, deserialize_json=True))
def test_parameterized_config_gen(self):
cfg = configuration.parameterized_config(configuration.DEFAULT_CONFIG)
# making sure some basic building blocks are present:
self.assertIn("[core]", cfg)
self.assertIn("dags_folder", cfg)
self.assertIn("sql_alchemy_conn", cfg)
self.assertIn("fernet_key", cfg)
# making sure replacement actually happened
self.assertNotIn("{AIRFLOW_HOME}", cfg)
self.assertNotIn("{FERNET_KEY}", cfg)
def test_config_use_original_when_original_and_fallback_are_present(self):
self.assertTrue(configuration.conf.has_option("core", "FERNET_KEY"))
self.assertFalse(configuration.conf.has_option("core", "FERNET_KEY_CMD"))
FERNET_KEY = configuration.conf.get('core', 'FERNET_KEY')
configuration.conf.set("core", "FERNET_KEY_CMD", "printf HELLO")
FALLBACK_FERNET_KEY = configuration.conf.get(
"core",
"FERNET_KEY"
)
self.assertEqual(FERNET_KEY, FALLBACK_FERNET_KEY)
# restore the conf back to the original state
configuration.conf.remove_option("core", "FERNET_KEY_CMD")
def test_config_throw_error_when_original_and_fallback_is_absent(self):
self.assertTrue(configuration.conf.has_option("core", "FERNET_KEY"))
self.assertFalse(configuration.conf.has_option("core", "FERNET_KEY_CMD"))
FERNET_KEY = configuration.conf.get("core", "FERNET_KEY")
configuration.conf.remove_option("core", "FERNET_KEY")
with self.assertRaises(AirflowConfigException) as cm:
configuration.conf.get("core", "FERNET_KEY")
exception = str(cm.exception)
message = "section/key [core/fernet_key] not found in config"
self.assertEqual(message, exception)
# restore the conf back to the original state
configuration.conf.set("core", "FERNET_KEY", FERNET_KEY)
self.assertTrue(configuration.conf.has_option("core", "FERNET_KEY"))
def test_config_override_original_when_non_empty_envvar_is_provided(self):
key = "AIRFLOW__CORE__FERNET_KEY"
value = "some value"
self.assertNotIn(key, os.environ)
os.environ[key] = value
FERNET_KEY = configuration.conf.get('core', 'FERNET_KEY')
self.assertEqual(value, FERNET_KEY)
# restore the envvar back to the original state
del os.environ[key]
def test_config_override_original_when_empty_envvar_is_provided(self):
key = "AIRFLOW__CORE__FERNET_KEY"
value = ""
self.assertNotIn(key, os.environ)
os.environ[key] = value
FERNET_KEY = configuration.conf.get('core', 'FERNET_KEY')
self.assertEqual(value, FERNET_KEY)
# restore the envvar back to the original state
del os.environ[key]
def test_round_time(self):
rt1 = round_time(datetime(2015, 1, 1, 6), timedelta(days=1))
self.assertEqual(datetime(2015, 1, 1, 0, 0), rt1)
rt2 = round_time(datetime(2015, 1, 2), relativedelta(months=1))
self.assertEqual(datetime(2015, 1, 1, 0, 0), rt2)
rt3 = round_time(datetime(2015, 9, 16, 0, 0), timedelta(1), datetime(
2015, 9, 14, 0, 0))
self.assertEqual(datetime(2015, 9, 16, 0, 0), rt3)
rt4 = round_time(datetime(2015, 9, 15, 0, 0), timedelta(1), datetime(
2015, 9, 14, 0, 0))
self.assertEqual(datetime(2015, 9, 15, 0, 0), rt4)
rt5 = round_time(datetime(2015, 9, 14, 0, 0), timedelta(1), datetime(
2015, 9, 14, 0, 0))
self.assertEqual(datetime(2015, 9, 14, 0, 0), rt5)
rt6 = round_time(datetime(2015, 9, 13, 0, 0), timedelta(1), datetime(
2015, 9, 14, 0, 0))
self.assertEqual(datetime(2015, 9, 14, 0, 0), rt6)
def test_infer_time_unit(self):
self.assertEqual('minutes', infer_time_unit([130, 5400, 10]))
self.assertEqual('seconds', infer_time_unit([110, 50, 10, 100]))
self.assertEqual('hours', infer_time_unit([100000, 50000, 10000, 20000]))
self.assertEqual('days', infer_time_unit([200000, 100000]))
def test_scale_time_units(self):
        # use assert_array_almost_equal from numpy.testing since we are
        # comparing floating point arrays
arr1 = scale_time_units([130, 5400, 10], 'minutes')
assert_array_almost_equal(arr1, [2.167, 90.0, 0.167], decimal=3)
arr2 = scale_time_units([110, 50, 10, 100], 'seconds')
assert_array_almost_equal(arr2, [110.0, 50.0, 10.0, 100.0], decimal=3)
arr3 = scale_time_units([100000, 50000, 10000, 20000], 'hours')
assert_array_almost_equal(arr3, [27.778, 13.889, 2.778, 5.556],
decimal=3)
arr4 = scale_time_units([200000, 100000], 'days')
assert_array_almost_equal(arr4, [2.315, 1.157], decimal=3)
def test_duplicate_dependencies(self):
regexp = "Dependency (.*)runme_0(.*)run_after_loop(.*) " \
"already registered"
with self.assertRaisesRegexp(AirflowException, regexp):
self.runme_0.set_downstream(self.run_after_loop)
with self.assertRaisesRegexp(AirflowException, regexp):
self.run_after_loop.set_upstream(self.runme_0)
def test_bad_trigger_rule(self):
with self.assertRaises(AirflowException):
DummyOperator(
task_id='test_bad_trigger',
trigger_rule="non_existent",
dag=self.dag)
    def test_terminate_task(self):
        """If a task instance's db state gets deleted, it should fail"""
TI = models.TaskInstance
dag = self.dagbag.dags.get('test_utils')
task = dag.task_dict.get('sleeps_forever')
ti = TI(task=task, execution_date=DEFAULT_DATE)
job = jobs.LocalTaskJob(
task_instance=ti, ignore_ti_state=True, executor=SequentialExecutor())
# Running task instance asynchronously
p = multiprocessing.Process(target=job.run)
p.start()
sleep(5)
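        # Dispose of the engine's pooled connections, which the forked child
        # inherited; the parent transparently re-connects for the queries below.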
settings.engine.dispose()
session = settings.Session()
ti.refresh_from_db(session=session)
# making sure it's actually running
self.assertEqual(State.RUNNING, ti.state)
ti = session.query(TI).filter_by(
dag_id=task.dag_id,
task_id=task.task_id,
execution_date=DEFAULT_DATE
).one()
# deleting the instance should result in a failure
session.delete(ti)
session.commit()
# waiting for the async task to finish
p.join()
# making sure that the task ended up as failed
ti.refresh_from_db(session=session)
self.assertEqual(State.FAILED, ti.state)
session.close()
def test_task_fail_duration(self):
"""If a task fails, the duration should be recorded in TaskFail"""
p = BashOperator(
task_id='pass_sleepy',
bash_command='sleep 3',
dag=self.dag)
f = BashOperator(
task_id='fail_sleepy',
bash_command='sleep 5',
execution_timeout=timedelta(seconds=3),
retry_delay=timedelta(seconds=0),
dag=self.dag)
session = settings.Session()
try:
p.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
except Exception:
pass
try:
f.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
except Exception:
pass
p_fails = session.query(TaskFail).filter_by(
task_id='pass_sleepy',
dag_id=self.dag.dag_id,
execution_date=DEFAULT_DATE).all()
f_fails = session.query(TaskFail).filter_by(
task_id='fail_sleepy',
dag_id=self.dag.dag_id,
execution_date=DEFAULT_DATE).all()
self.assertEqual(0, len(p_fails))
self.assertEqual(1, len(f_fails))
self.assertGreaterEqual(sum([f.duration for f in f_fails]), 3)
def test_run_command(self):
if six.PY3:
write = r'sys.stdout.buffer.write("\u1000foo".encode("utf8"))'
else:
write = r'sys.stdout.write(u"\u1000foo".encode("utf8"))'
cmd = 'import sys; {0}; sys.stdout.flush()'.format(write)
self.assertEqual(run_command("python -c '{0}'".format(cmd)),
u'\u1000foo' if six.PY3 else 'foo')
self.assertEqual(run_command('echo "foo bar"'), u'foo bar\n')
self.assertRaises(AirflowConfigException, run_command, 'bash -c "exit 1"')
def test_trigger_dagrun_with_execution_date(self):
utc_now = timezone.utcnow()
run_id = 'trig__' + utc_now.isoformat()
def payload_generator(context, object):
object.run_id = run_id
return object
task = TriggerDagRunOperator(task_id='test_trigger_dagrun_with_execution_date',
trigger_dag_id='example_bash_operator',
python_callable=payload_generator,
execution_date=utc_now,
dag=self.dag)
task.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
dag_runs = models.DagRun.find(dag_id='example_bash_operator',
run_id=run_id)
self.assertEqual(len(dag_runs), 1)
dag_run = dag_runs[0]
self.assertEqual(dag_run.execution_date, utc_now)
def test_trigger_dagrun_with_str_execution_date(self):
utc_now_str = timezone.utcnow().isoformat()
self.assertIsInstance(utc_now_str, six.string_types)
run_id = 'trig__' + utc_now_str
def payload_generator(context, object):
object.run_id = run_id
return object
task = TriggerDagRunOperator(
task_id='test_trigger_dagrun_with_str_execution_date',
trigger_dag_id='example_bash_operator',
python_callable=payload_generator,
execution_date=utc_now_str,
dag=self.dag)
task.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
dag_runs = models.DagRun.find(dag_id='example_bash_operator',
run_id=run_id)
self.assertEqual(len(dag_runs), 1)
dag_run = dag_runs[0]
self.assertEqual(dag_run.execution_date.isoformat(), utc_now_str)
def test_trigger_dagrun_with_templated_execution_date(self):
task = TriggerDagRunOperator(
            task_id='test_trigger_dagrun_with_templated_execution_date',
trigger_dag_id='example_bash_operator',
execution_date='{{ execution_date }}',
dag=self.dag)
self.assertTrue(isinstance(task.execution_date, six.string_types))
self.assertEqual(task.execution_date, '{{ execution_date }}')
ti = TaskInstance(task=task, execution_date=DEFAULT_DATE)
ti.render_templates()
self.assertEqual(timezone.parse(task.execution_date), DEFAULT_DATE)
def test_externally_triggered_dagrun(self):
TI = models.TaskInstance
# Create the dagrun between two "scheduled" execution dates of the DAG
EXECUTION_DATE = DEFAULT_DATE + timedelta(days=2)
EXECUTION_DS = EXECUTION_DATE.strftime('%Y-%m-%d')
EXECUTION_DS_NODASH = EXECUTION_DS.replace('-', '')
dag = DAG(
TEST_DAG_ID,
default_args=self.args,
schedule_interval=timedelta(weeks=1),
start_date=DEFAULT_DATE)
task = DummyOperator(task_id='test_externally_triggered_dag_context',
dag=dag)
dag.create_dagrun(run_id=models.DagRun.id_for_date(EXECUTION_DATE),
execution_date=EXECUTION_DATE,
state=State.RUNNING,
external_trigger=True)
task.run(
start_date=EXECUTION_DATE, end_date=EXECUTION_DATE)
ti = TI(task=task, execution_date=EXECUTION_DATE)
context = ti.get_template_context()
# next_ds/prev_ds should be the execution date for manually triggered runs
self.assertEqual(context['next_ds'], EXECUTION_DS)
self.assertEqual(context['next_ds_nodash'], EXECUTION_DS_NODASH)
self.assertEqual(context['prev_ds'], EXECUTION_DS)
self.assertEqual(context['prev_ds_nodash'], EXECUTION_DS_NODASH)
class CliTests(unittest.TestCase):
TEST_USER1_EMAIL = '[email protected]'
TEST_USER2_EMAIL = '[email protected]'
@classmethod
def setUpClass(cls):
super(CliTests, cls).setUpClass()
cls._cleanup()
def setUp(self):
super(CliTests, self).setUp()
configuration.load_test_config()
from airflow.www import app as application
self.app, self.appbuilder = application.create_app(session=Session, testing=True)
self.app.config['TESTING'] = True
self.parser = cli.CLIFactory.get_parser()
self.dagbag = models.DagBag(dag_folder=DEV_NULL, include_examples=True)
settings.configure_orm()
self.session = Session
def tearDown(self):
self._cleanup(session=self.session)
for email in [self.TEST_USER1_EMAIL, self.TEST_USER2_EMAIL]:
test_user = self.appbuilder.sm.find_user(email=email)
if test_user:
self.appbuilder.sm.del_register_user(test_user)
for role_name in ['FakeTeamA', 'FakeTeamB']:
if self.appbuilder.sm.find_role(role_name):
self.appbuilder.sm.delete_role(role_name)
super(CliTests, self).tearDown()
@staticmethod
def _cleanup(session=None):
if session is None:
session = Session()
session.query(models.Pool).delete()
session.query(models.Variable).delete()
session.commit()
session.close()
def test_cli_list_dags(self):
args = self.parser.parse_args(['list_dags', '--report'])
cli.list_dags(args)
def test_cli_list_dag_runs(self):
cli.trigger_dag(self.parser.parse_args([
'trigger_dag', 'example_bash_operator', ]))
args = self.parser.parse_args(['list_dag_runs',
'example_bash_operator',
'--no_backfill'])
cli.list_dag_runs(args)
def test_cli_create_user_random_password(self):
args = self.parser.parse_args([
'users', '-c', '--username', 'test1', '--lastname', 'doe',
'--firstname', 'jon',
'--email', '[email protected]', '--role', 'Viewer', '--use_random_password'
])
cli.users(args)
def test_cli_create_user_supplied_password(self):
args = self.parser.parse_args([
'users', '-c', '--username', 'test2', '--lastname', 'doe',
'--firstname', 'jon',
'--email', '[email protected]', '--role', 'Viewer', '--password', 'test'
])
cli.users(args)
def test_cli_delete_user(self):
args = self.parser.parse_args([
'users', '-c', '--username', 'test3', '--lastname', 'doe',
'--firstname', 'jon',
'--email', '[email protected]', '--role', 'Viewer', '--use_random_password'
])
cli.users(args)
args = self.parser.parse_args([
'users', '-d', '--username', 'test3',
])
cli.users(args)
def test_cli_list_users(self):
for i in range(0, 3):
args = self.parser.parse_args([
'users', '-c', '--username', 'user{}'.format(i), '--lastname',
'doe', '--firstname', 'jon',
'--email', 'jdoe+{}@gmail.com'.format(i), '--role', 'Viewer',
'--use_random_password'
])
cli.users(args)
with mock.patch('sys.stdout',
new_callable=six.StringIO) as mock_stdout:
cli.users(self.parser.parse_args(['users', '-l']))
stdout = mock_stdout.getvalue()
for i in range(0, 3):
self.assertIn('user{}'.format(i), stdout)
def test_cli_import_users(self):
def assertUserInRoles(email, roles):
for role in roles:
self.assertTrue(self._does_user_belong_to_role(email, role))
def assertUserNotInRoles(email, roles):
for role in roles:
self.assertFalse(self._does_user_belong_to_role(email, role))
assertUserNotInRoles(self.TEST_USER1_EMAIL, ['Admin', 'Op'])
assertUserNotInRoles(self.TEST_USER2_EMAIL, ['Public'])
users = [
{
"username": "imported_user1", "lastname": "doe1",
"firstname": "jon", "email": self.TEST_USER1_EMAIL,
"roles": ["Admin", "Op"]
},
{
"username": "imported_user2", "lastname": "doe2",
"firstname": "jon", "email": self.TEST_USER2_EMAIL,
"roles": ["Public"]
}
]
self._import_users_from_file(users)
assertUserInRoles(self.TEST_USER1_EMAIL, ['Admin', 'Op'])
assertUserInRoles(self.TEST_USER2_EMAIL, ['Public'])
users = [
{
"username": "imported_user1", "lastname": "doe1",
"firstname": "jon", "email": self.TEST_USER1_EMAIL,
"roles": ["Public"]
},
{
"username": "imported_user2", "lastname": "doe2",
"firstname": "jon", "email": self.TEST_USER2_EMAIL,
"roles": ["Admin"]
}
]
self._import_users_from_file(users)
assertUserNotInRoles(self.TEST_USER1_EMAIL, ['Admin', 'Op'])
assertUserInRoles(self.TEST_USER1_EMAIL, ['Public'])
assertUserNotInRoles(self.TEST_USER2_EMAIL, ['Public'])
assertUserInRoles(self.TEST_USER2_EMAIL, ['Admin'])
def test_cli_export_users(self):
user1 = {"username": "imported_user1", "lastname": "doe1",
"firstname": "jon", "email": self.TEST_USER1_EMAIL,
"roles": ["Public"]}
user2 = {"username": "imported_user2", "lastname": "doe2",
"firstname": "jon", "email": self.TEST_USER2_EMAIL,
"roles": ["Admin"]}
self._import_users_from_file([user1, user2])
users_filename = self._export_users_to_file()
with open(users_filename, mode='r') as f:
retrieved_users = json.loads(f.read())
os.remove(users_filename)
# ensure that an export can be imported
self._import_users_from_file(retrieved_users)
def find_by_username(username):
matches = [u for u in retrieved_users
if u['username'] == username]
if not matches:
self.fail("Couldn't find user with username {}".format(username))
else:
matches[0].pop('id') # this key not required for import
return matches[0]
self.assertEqual(find_by_username('imported_user1'), user1)
self.assertEqual(find_by_username('imported_user2'), user2)
def _import_users_from_file(self, user_list):
json_file_content = json.dumps(user_list)
f = NamedTemporaryFile(delete=False)
try:
f.write(json_file_content.encode())
f.flush()
args = self.parser.parse_args([
'users', '-i', f.name
])
cli.users(args)
finally:
os.remove(f.name)
def _export_users_to_file(self):
f = NamedTemporaryFile(delete=False)
args = self.parser.parse_args([
'users', '-e', f.name
])
cli.users(args)
return f.name
def _does_user_belong_to_role(self, email, rolename):
user = self.appbuilder.sm.find_user(email=email)
role = self.appbuilder.sm.find_role(rolename)
if user and role:
return role in user.roles
return False
def test_cli_add_user_role(self):
args = self.parser.parse_args([
'users', '-c', '--username', 'test4', '--lastname', 'doe',
'--firstname', 'jon',
'--email', self.TEST_USER1_EMAIL, '--role', 'Viewer', '--use_random_password'
])
cli.users(args)
self.assertFalse(
self._does_user_belong_to_role(email=self.TEST_USER1_EMAIL,
rolename='Op'),
"User should not yet be a member of role 'Op'"
)
args = self.parser.parse_args([
'users', '--add-role', '--username', 'test4', '--role', 'Op'
])
cli.users(args)
self.assertTrue(
self._does_user_belong_to_role(email=self.TEST_USER1_EMAIL,
rolename='Op'),
"User should have been added to role 'Op'"
)
def test_cli_remove_user_role(self):
args = self.parser.parse_args([
'users', '-c', '--username', 'test4', '--lastname', 'doe',
'--firstname', 'jon',
'--email', self.TEST_USER1_EMAIL, '--role', 'Viewer', '--use_random_password'
])
cli.users(args)
self.assertTrue(
self._does_user_belong_to_role(email=self.TEST_USER1_EMAIL,
rolename='Viewer'),
"User should have been created with role 'Viewer'"
)
args = self.parser.parse_args([
'users', '--remove-role', '--username', 'test4', '--role', 'Viewer'
])
cli.users(args)
self.assertFalse(
self._does_user_belong_to_role(email=self.TEST_USER1_EMAIL,
rolename='Viewer'),
"User should have been removed from role 'Viewer'"
)
@mock.patch("airflow.bin.cli.DagBag")
def test_cli_sync_perm(self, dagbag_mock):
self.expect_dagbag_contains([
DAG('has_access_control',
access_control={
'Public': {'can_dag_read'}
}),
DAG('no_access_control')
], dagbag_mock)
self.appbuilder.sm = mock.Mock()
args = self.parser.parse_args([
'sync_perm'
])
cli.sync_perm(args)
self.appbuilder.sm.sync_roles.assert_called_once()
self.assertEqual(2,
len(self.appbuilder.sm.sync_perm_for_dag.mock_calls))
self.appbuilder.sm.sync_perm_for_dag.assert_any_call(
'has_access_control',
{'Public': {'can_dag_read'}}
)
self.appbuilder.sm.sync_perm_for_dag.assert_any_call(
'no_access_control',
None,
)
def expect_dagbag_contains(self, dags, dagbag_mock):
dagbag = mock.Mock()
dagbag.dags = {dag.dag_id: dag for dag in dags}
dagbag_mock.return_value = dagbag
def test_cli_create_roles(self):
self.assertIsNone(self.appbuilder.sm.find_role('FakeTeamA'))
self.assertIsNone(self.appbuilder.sm.find_role('FakeTeamB'))
args = self.parser.parse_args([
'roles', '--create', 'FakeTeamA', 'FakeTeamB'
])
cli.roles(args)
self.assertIsNotNone(self.appbuilder.sm.find_role('FakeTeamA'))
self.assertIsNotNone(self.appbuilder.sm.find_role('FakeTeamB'))
def test_cli_create_roles_is_reentrant(self):
self.assertIsNone(self.appbuilder.sm.find_role('FakeTeamA'))
self.assertIsNone(self.appbuilder.sm.find_role('FakeTeamB'))
args = self.parser.parse_args([
'roles', '--create', 'FakeTeamA', 'FakeTeamB'
])
cli.roles(args)
cli.roles(args)
self.assertIsNotNone(self.appbuilder.sm.find_role('FakeTeamA'))
self.assertIsNotNone(self.appbuilder.sm.find_role('FakeTeamB'))
def test_cli_list_roles(self):
self.appbuilder.sm.add_role('FakeTeamA')
self.appbuilder.sm.add_role('FakeTeamB')
with mock.patch('sys.stdout',
new_callable=six.StringIO) as mock_stdout:
cli.roles(self.parser.parse_args(['roles', '-l']))
stdout = mock_stdout.getvalue()
self.assertIn('FakeTeamA', stdout)
self.assertIn('FakeTeamB', stdout)
def test_cli_list_tasks(self):
for dag_id in self.dagbag.dags.keys():
args = self.parser.parse_args(['list_tasks', dag_id])
cli.list_tasks(args)
args = self.parser.parse_args([
'list_tasks', 'example_bash_operator', '--tree'])
cli.list_tasks(args)
def test_cli_list_jobs(self):
args = self.parser.parse_args(['list_jobs'])
cli.list_jobs(args)
def test_cli_list_jobs_with_args(self):
args = self.parser.parse_args(['list_jobs', '--dag_id',
'example_bash_operator',
'--state', 'success',
'--limit', '100'])
cli.list_jobs(args)
@mock.patch("airflow.bin.cli.db.initdb")
def test_cli_initdb(self, initdb_mock):
cli.initdb(self.parser.parse_args(['initdb']))
initdb_mock.assert_called_once_with()
@mock.patch("airflow.bin.cli.db.resetdb")
def test_cli_resetdb(self, resetdb_mock):
cli.resetdb(self.parser.parse_args(['resetdb', '--yes']))
resetdb_mock.assert_called_once_with()
def test_cli_connections_list(self):
with mock.patch('sys.stdout',
new_callable=six.StringIO) as mock_stdout:
cli.connections(self.parser.parse_args(['connections', '--list']))
stdout = mock_stdout.getvalue()
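        # The CLI prints connections as an ASCII table; pull the first two
        # quoted fields (conn_id, conn_type) from every other line, skipping
        # the border rows.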
conns = [[x.strip("'") for x in re.findall(r"'\w+'", line)[:2]]
for ii, line in enumerate(stdout.split('\n'))
if ii % 2 == 1]
conns = [conn for conn in conns if len(conn) > 0]
# Assert that some of the connections are present in the output as
# expected:
self.assertIn(['aws_default', 'aws'], conns)
self.assertIn(['beeline_default', 'beeline'], conns)
self.assertIn(['emr_default', 'emr'], conns)
self.assertIn(['mssql_default', 'mssql'], conns)
self.assertIn(['mysql_default', 'mysql'], conns)
self.assertIn(['postgres_default', 'postgres'], conns)
self.assertIn(['wasb_default', 'wasb'], conns)
self.assertIn(['segment_default', 'segment'], conns)
# Attempt to list connections with invalid cli args
with mock.patch('sys.stdout',
new_callable=six.StringIO) as mock_stdout:
cli.connections(self.parser.parse_args(
['connections', '--list', '--conn_id=fake', '--conn_uri=fake-uri',
'--conn_type=fake-type', '--conn_host=fake_host',
'--conn_login=fake_login', '--conn_password=fake_password',
'--conn_schema=fake_schema', '--conn_port=fake_port', '--conn_extra=fake_extra']))
stdout = mock_stdout.getvalue()
# Check list attempt stdout
lines = [l for l in stdout.split('\n') if len(l) > 0]
self.assertListEqual(lines, [
("\tThe following args are not compatible with the " +
"--list flag: ['conn_id', 'conn_uri', 'conn_extra', " +
"'conn_type', 'conn_host', 'conn_login', " +
"'conn_password', 'conn_schema', 'conn_port']"),
])
def test_cli_connections_list_redirect(self):
cmd = ['airflow', 'connections', '--list']
with tempfile.TemporaryFile() as fp:
p = subprocess.Popen(cmd, stdout=fp)
p.wait()
self.assertEqual(0, p.returncode)
def test_cli_connections_add_delete(self):
# Add connections:
uri = 'postgresql://airflow:airflow@host:5432/airflow'
with mock.patch('sys.stdout',
new_callable=six.StringIO) as mock_stdout:
cli.connections(self.parser.parse_args(
['connections', '--add', '--conn_id=new1',
'--conn_uri=%s' % uri]))
cli.connections(self.parser.parse_args(
['connections', '-a', '--conn_id=new2',
'--conn_uri=%s' % uri]))
cli.connections(self.parser.parse_args(
['connections', '--add', '--conn_id=new3',
'--conn_uri=%s' % uri, '--conn_extra', "{'extra': 'yes'}"]))
cli.connections(self.parser.parse_args(
['connections', '-a', '--conn_id=new4',
'--conn_uri=%s' % uri, '--conn_extra', "{'extra': 'yes'}"]))
cli.connections(self.parser.parse_args(
['connections', '--add', '--conn_id=new5',
'--conn_type=hive_metastore', '--conn_login=airflow',
'--conn_password=airflow', '--conn_host=host',
'--conn_port=9083', '--conn_schema=airflow']))
cli.connections(self.parser.parse_args(
['connections', '-a', '--conn_id=new6',
'--conn_uri', "", '--conn_type=google_cloud_platform', '--conn_extra', "{'extra': 'yes'}"]))
stdout = mock_stdout.getvalue()
# Check addition stdout
lines = [l for l in stdout.split('\n') if len(l) > 0]
self.assertListEqual(lines, [
("\tSuccessfully added `conn_id`=new1 : " +
"postgresql://airflow:airflow@host:5432/airflow"),
("\tSuccessfully added `conn_id`=new2 : " +
"postgresql://airflow:airflow@host:5432/airflow"),
("\tSuccessfully added `conn_id`=new3 : " +
"postgresql://airflow:airflow@host:5432/airflow"),
("\tSuccessfully added `conn_id`=new4 : " +
"postgresql://airflow:airflow@host:5432/airflow"),
("\tSuccessfully added `conn_id`=new5 : " +
"hive_metastore://airflow:airflow@host:9083/airflow"),
("\tSuccessfully added `conn_id`=new6 : " +
"google_cloud_platform://:@:")
])
# Attempt to add duplicate
with mock.patch('sys.stdout',
new_callable=six.StringIO) as mock_stdout:
cli.connections(self.parser.parse_args(
['connections', '--add', '--conn_id=new1',
'--conn_uri=%s' % uri]))
stdout = mock_stdout.getvalue()
# Check stdout for addition attempt
lines = [l for l in stdout.split('\n') if len(l) > 0]
self.assertListEqual(lines, [
"\tA connection with `conn_id`=new1 already exists",
])
# Attempt to add without providing conn_id
with mock.patch('sys.stdout',
new_callable=six.StringIO) as mock_stdout:
cli.connections(self.parser.parse_args(
['connections', '--add', '--conn_uri=%s' % uri]))
stdout = mock_stdout.getvalue()
# Check stdout for addition attempt
lines = [l for l in stdout.split('\n') if len(l) > 0]
self.assertListEqual(lines, [
("\tThe following args are required to add a connection:" +
" ['conn_id']"),
])
# Attempt to add without providing conn_uri
with mock.patch('sys.stdout',
new_callable=six.StringIO) as mock_stdout:
cli.connections(self.parser.parse_args(
['connections', '--add', '--conn_id=new']))
stdout = mock_stdout.getvalue()
# Check stdout for addition attempt
lines = [l for l in stdout.split('\n') if len(l) > 0]
self.assertListEqual(lines, [
("\tThe following args are required to add a connection:" +
" ['conn_uri or conn_type']"),
])
        # Prepare expected values for verifying the stored connections
session = settings.Session()
extra = {'new1': None,
'new2': None,
'new3': "{'extra': 'yes'}",
'new4': "{'extra': 'yes'}"}
        # Verify each stored connection
        for index in range(1, 7):
conn_id = 'new%s' % index
result = (session
.query(Connection)
.filter(Connection.conn_id == conn_id)
.first())
result = (result.conn_id, result.conn_type, result.host,
result.port, result.get_extra())
if conn_id in ['new1', 'new2', 'new3', 'new4']:
self.assertEqual(result, (conn_id, 'postgres', 'host', 5432,
extra[conn_id]))
elif conn_id == 'new5':
self.assertEqual(result, (conn_id, 'hive_metastore', 'host',
9083, None))
elif conn_id == 'new6':
self.assertEqual(result, (conn_id, 'google_cloud_platform',
None, None, "{'extra': 'yes'}"))
# Delete connections
with mock.patch('sys.stdout',
new_callable=six.StringIO) as mock_stdout:
cli.connections(self.parser.parse_args(
['connections', '--delete', '--conn_id=new1']))
cli.connections(self.parser.parse_args(
['connections', '--delete', '--conn_id=new2']))
cli.connections(self.parser.parse_args(
['connections', '--delete', '--conn_id=new3']))
cli.connections(self.parser.parse_args(
['connections', '--delete', '--conn_id=new4']))
cli.connections(self.parser.parse_args(
['connections', '--delete', '--conn_id=new5']))
cli.connections(self.parser.parse_args(
['connections', '--delete', '--conn_id=new6']))
stdout = mock_stdout.getvalue()
# Check deletion stdout
lines = [l for l in stdout.split('\n') if len(l) > 0]
self.assertListEqual(lines, [
"\tSuccessfully deleted `conn_id`=new1",
"\tSuccessfully deleted `conn_id`=new2",
"\tSuccessfully deleted `conn_id`=new3",
"\tSuccessfully deleted `conn_id`=new4",
"\tSuccessfully deleted `conn_id`=new5",
"\tSuccessfully deleted `conn_id`=new6"
])
# Check deletions
for index in range(1, 7):
conn_id = 'new%s' % index
result = (session.query(Connection)
.filter(Connection.conn_id == conn_id)
.first())
self.assertTrue(result is None)
        # Attempt to delete a non-existing connection
with mock.patch('sys.stdout',
new_callable=six.StringIO) as mock_stdout:
cli.connections(self.parser.parse_args(
['connections', '--delete', '--conn_id=fake']))
stdout = mock_stdout.getvalue()
# Check deletion attempt stdout
lines = [l for l in stdout.split('\n') if len(l) > 0]
self.assertListEqual(lines, [
"\tDid not find a connection with `conn_id`=fake",
])
# Attempt to delete with invalid cli args
with mock.patch('sys.stdout',
new_callable=six.StringIO) as mock_stdout:
cli.connections(self.parser.parse_args(
['connections', '--delete', '--conn_id=fake',
'--conn_uri=%s' % uri, '--conn_type=fake-type']))
stdout = mock_stdout.getvalue()
# Check deletion attempt stdout
lines = [l for l in stdout.split('\n') if len(l) > 0]
self.assertListEqual(lines, [
("\tThe following args are not compatible with the " +
"--delete flag: ['conn_uri', 'conn_type']"),
])
session.close()
def test_cli_test(self):
cli.test(self.parser.parse_args([
'test', 'example_bash_operator', 'runme_0',
DEFAULT_DATE.isoformat()]))
cli.test(self.parser.parse_args([
'test', 'example_bash_operator', 'runme_0', '--dry_run',
DEFAULT_DATE.isoformat()]))
def test_cli_test_with_params(self):
cli.test(self.parser.parse_args([
'test', 'example_passing_params_via_test_command', 'run_this',
'-tp', '{"foo":"bar"}', DEFAULT_DATE.isoformat()]))
cli.test(self.parser.parse_args([
'test', 'example_passing_params_via_test_command', 'also_run_this',
'-tp', '{"foo":"bar"}', DEFAULT_DATE.isoformat()]))
def test_cli_run(self):
cli.run(self.parser.parse_args([
'run', 'example_bash_operator', 'runme_0', '-l',
DEFAULT_DATE.isoformat()]))
def test_task_state(self):
cli.task_state(self.parser.parse_args([
'task_state', 'example_bash_operator', 'runme_0',
DEFAULT_DATE.isoformat()]))
def test_dag_state(self):
self.assertEqual(None, cli.dag_state(self.parser.parse_args([
'dag_state', 'example_bash_operator', DEFAULT_DATE.isoformat()])))
def test_pause(self):
args = self.parser.parse_args([
'pause', 'example_bash_operator'])
cli.pause(args)
self.assertIn(self.dagbag.dags['example_bash_operator'].is_paused, [True, 1])
args = self.parser.parse_args([
'unpause', 'example_bash_operator'])
cli.unpause(args)
self.assertIn(self.dagbag.dags['example_bash_operator'].is_paused, [False, 0])
def test_subdag_clear(self):
args = self.parser.parse_args([
'clear', 'example_subdag_operator', '--no_confirm'])
cli.clear(args)
args = self.parser.parse_args([
'clear', 'example_subdag_operator', '--no_confirm', '--exclude_subdags'])
cli.clear(args)
def test_parentdag_downstream_clear(self):
args = self.parser.parse_args([
'clear', 'example_subdag_operator.section-1', '--no_confirm'])
cli.clear(args)
args = self.parser.parse_args([
'clear', 'example_subdag_operator.section-1', '--no_confirm',
'--exclude_parentdag'])
cli.clear(args)
def test_get_dags(self):
dags = cli.get_dags(self.parser.parse_args(['clear', 'example_subdag_operator',
'-c']))
self.assertEqual(len(dags), 1)
dags = cli.get_dags(self.parser.parse_args(['clear', 'subdag', '-dx', '-c']))
self.assertGreater(len(dags), 1)
with self.assertRaises(AirflowException):
cli.get_dags(self.parser.parse_args(['clear', 'foobar', '-dx', '-c']))
def test_backfill(self):
cli.backfill(self.parser.parse_args([
'backfill', 'example_bash_operator',
'-s', DEFAULT_DATE.isoformat()]))
cli.backfill(self.parser.parse_args([
'backfill', 'example_bash_operator', '-t', 'runme_0', '--dry_run',
'-s', DEFAULT_DATE.isoformat()]))
cli.backfill(self.parser.parse_args([
'backfill', 'example_bash_operator', '--dry_run',
'-s', DEFAULT_DATE.isoformat()]))
cli.backfill(self.parser.parse_args([
'backfill', 'example_bash_operator', '-l',
'-s', DEFAULT_DATE.isoformat()]))
def test_process_subdir_path_with_placeholder(self):
self.assertEqual(os.path.join(settings.DAGS_FOLDER, 'abc'), cli.process_subdir('DAGS_FOLDER/abc'))
def test_trigger_dag(self):
cli.trigger_dag(self.parser.parse_args([
'trigger_dag', 'example_bash_operator',
'-c', '{"foo": "bar"}']))
self.assertRaises(
ValueError,
cli.trigger_dag,
self.parser.parse_args([
'trigger_dag', 'example_bash_operator',
'--run_id', 'trigger_dag_xxx',
'-c', 'NOT JSON'])
)
def test_delete_dag(self):
DM = models.DagModel
key = "my_dag_id"
session = settings.Session()
session.add(DM(dag_id=key))
session.commit()
cli.delete_dag(self.parser.parse_args([
'delete_dag', key, '--yes']))
self.assertEqual(session.query(DM).filter_by(dag_id=key).count(), 0)
self.assertRaises(
AirflowException,
cli.delete_dag,
self.parser.parse_args([
'delete_dag',
'does_not_exist_dag',
'--yes'])
)
def test_pool_create(self):
cli.pool(self.parser.parse_args(['pool', '-s', 'foo', '1', 'test']))
self.assertEqual(self.session.query(models.Pool).count(), 1)
def test_pool_get(self):
cli.pool(self.parser.parse_args(['pool', '-s', 'foo', '1', 'test']))
try:
cli.pool(self.parser.parse_args(['pool', '-g', 'foo']))
except Exception as e:
self.fail("The 'pool -g foo' command raised unexpectedly: %s" % e)
def test_pool_delete(self):
cli.pool(self.parser.parse_args(['pool', '-s', 'foo', '1', 'test']))
cli.pool(self.parser.parse_args(['pool', '-x', 'foo']))
self.assertEqual(self.session.query(models.Pool).count(), 0)
def test_pool_no_args(self):
try:
cli.pool(self.parser.parse_args(['pool']))
except Exception as e:
self.fail("The 'pool' command raised unexpectedly: %s" % e)
def test_pool_import_export(self):
# Create two pools first
pool_config_input = {
"foo": {
"description": "foo_test",
"slots": 1
},
"baz": {
"description": "baz_test",
"slots": 2
}
}
with open('pools_import.json', mode='w') as f:
json.dump(pool_config_input, f)
# Import json
try:
cli.pool(self.parser.parse_args(['pool', '-i', 'pools_import.json']))
except Exception as e:
self.fail("The 'pool -i pools_import.json' failed: %s" % e)
# Export json
try:
cli.pool(self.parser.parse_args(['pool', '-e', 'pools_export.json']))
except Exception as e:
self.fail("The 'pool -e pools_export.json' failed: %s" % e)
with open('pools_export.json', mode='r') as f:
pool_config_output = json.load(f)
        self.assertEqual(
            pool_config_input,
            pool_config_output,
            "Input and output pool files are not the same")
os.remove('pools_import.json')
os.remove('pools_export.json')
def test_variables(self):
# Checks if all subcommands are properly received
cli.variables(self.parser.parse_args([
'variables', '-s', 'foo', '{"foo":"bar"}']))
cli.variables(self.parser.parse_args([
'variables', '-g', 'foo']))
cli.variables(self.parser.parse_args([
'variables', '-g', 'baz', '-d', 'bar']))
cli.variables(self.parser.parse_args([
'variables']))
cli.variables(self.parser.parse_args([
'variables', '-x', 'bar']))
cli.variables(self.parser.parse_args([
'variables', '-i', DEV_NULL]))
cli.variables(self.parser.parse_args([
'variables', '-e', DEV_NULL]))
cli.variables(self.parser.parse_args([
'variables', '-s', 'bar', 'original']))
# First export
cli.variables(self.parser.parse_args([
'variables', '-e', 'variables1.json']))
first_exp = open('variables1.json', 'r')
cli.variables(self.parser.parse_args([
'variables', '-s', 'bar', 'updated']))
cli.variables(self.parser.parse_args([
'variables', '-s', 'foo', '{"foo":"oops"}']))
cli.variables(self.parser.parse_args([
'variables', '-x', 'foo']))
# First import
cli.variables(self.parser.parse_args([
'variables', '-i', 'variables1.json']))
self.assertEqual('original', models.Variable.get('bar'))
self.assertEqual('{"foo": "bar"}', models.Variable.get('foo'))
# Second export
cli.variables(self.parser.parse_args([
'variables', '-e', 'variables2.json']))
second_exp = open('variables2.json', 'r')
self.assertEqual(first_exp.read(), second_exp.read())
second_exp.close()
first_exp.close()
# Second import
cli.variables(self.parser.parse_args([
'variables', '-i', 'variables2.json']))
self.assertEqual('original', models.Variable.get('bar'))
self.assertEqual('{"foo": "bar"}', models.Variable.get('foo'))
os.remove('variables1.json')
os.remove('variables2.json')
    def _wait_pidfile(self, pidfile):
        """Poll until ``pidfile`` exists and contains a pid, then return it."""
        while True:
            try:
                with open(pidfile) as f:
                    return int(f.read())
            except Exception:
                sleep(1)
def test_cli_webserver_foreground(self):
# Confirm that webserver hasn't been launched.
# pgrep returns exit status 1 if no process matched.
self.assertEqual(1, subprocess.Popen(["pgrep", "-c", "airflow"]).wait())
self.assertEqual(1, subprocess.Popen(["pgrep", "-c", "gunicorn"]).wait())
# Run webserver in foreground and terminate it.
p = subprocess.Popen(["airflow", "webserver"])
p.terminate()
p.wait()
# Assert that no process remains.
self.assertEqual(1, subprocess.Popen(["pgrep", "-c", "airflow"]).wait())
self.assertEqual(1, subprocess.Popen(["pgrep", "-c", "gunicorn"]).wait())
@unittest.skipIf("TRAVIS" in os.environ and bool(os.environ["TRAVIS"]),
"Skipping test due to lack of required file permission")
def test_cli_webserver_foreground_with_pid(self):
# Run webserver in foreground with --pid option
pidfile = tempfile.mkstemp()[1]
p = subprocess.Popen(["airflow", "webserver", "--pid", pidfile])
# Check the file specified by --pid option exists
self._wait_pidfile(pidfile)
# Terminate webserver
p.terminate()
p.wait()
@unittest.skipIf("TRAVIS" in os.environ and bool(os.environ["TRAVIS"]),
"Skipping test due to lack of required file permission")
def test_cli_webserver_background(self):
import psutil
# Confirm that webserver hasn't been launched.
self.assertEqual(1, subprocess.Popen(["pgrep", "-c", "airflow"]).wait())
self.assertEqual(1, subprocess.Popen(["pgrep", "-c", "gunicorn"]).wait())
# Run webserver in background.
subprocess.Popen(["airflow", "webserver", "-D"])
pidfile = cli.setup_locations("webserver")[0]
self._wait_pidfile(pidfile)
# Assert that gunicorn and its monitor are launched.
self.assertEqual(0, subprocess.Popen(["pgrep", "-c", "airflow"]).wait())
self.assertEqual(0, subprocess.Popen(["pgrep", "-c", "gunicorn"]).wait())
# Terminate monitor process.
pidfile = cli.setup_locations("webserver-monitor")[0]
pid = self._wait_pidfile(pidfile)
p = psutil.Process(pid)
p.terminate()
p.wait()
# Assert that no process remains.
self.assertEqual(1, subprocess.Popen(["pgrep", "-c", "airflow"]).wait())
self.assertEqual(1, subprocess.Popen(["pgrep", "-c", "gunicorn"]).wait())
# Patch for causing webserver timeout
@mock.patch("airflow.bin.cli.get_num_workers_running", return_value=0)
def test_cli_webserver_shutdown_when_gunicorn_master_is_killed(self, _):
        # Shorten the timeout so that this test doesn't take too long
configuration.conf.set("webserver", "web_server_master_timeout", "10")
args = self.parser.parse_args(['webserver'])
with self.assertRaises(SystemExit) as e:
cli.webserver(args)
self.assertEqual(e.exception.code, 1)
class FakeWebHDFSHook(object):
def __init__(self, conn_id):
self.conn_id = conn_id
def get_conn(self):
return self.conn_id
def check_for_path(self, hdfs_path):
return hdfs_path
class FakeSnakeBiteClientException(Exception):
pass
class FakeSnakeBiteClient(object):
def __init__(self):
self.started = True
def ls(self, path, include_toplevel=False):
"""
the fake snakebite client
:param path: the array of path to test
:param include_toplevel: to return the toplevel directory info
:return: a list for path for the matching queries
"""
if path[0] == '/datadirectory/empty_directory' and not include_toplevel:
return []
elif path[0] == '/datadirectory/datafile':
return [{
'group': u'supergroup',
'permission': 420,
'file_type': 'f',
'access_time': 1481122343796,
'block_replication': 3,
'modification_time': 1481122343862,
'length': 0,
'blocksize': 134217728,
'owner': u'hdfs',
'path': '/datadirectory/datafile'
}]
elif path[0] == '/datadirectory/empty_directory' and include_toplevel:
return [{
'group': u'supergroup',
'permission': 493,
'file_type': 'd',
'access_time': 0,
'block_replication': 0,
'modification_time': 1481132141540,
'length': 0,
'blocksize': 0,
'owner': u'hdfs',
'path': '/datadirectory/empty_directory'
}]
elif path[0] == '/datadirectory/not_empty_directory' and include_toplevel:
return [{
'group': u'supergroup',
'permission': 493,
'file_type': 'd',
'access_time': 0,
'block_replication': 0,
'modification_time': 1481132141540,
'length': 0,
'blocksize': 0,
'owner': u'hdfs',
                'path': '/datadirectory/not_empty_directory'
}, {
'group': u'supergroup',
'permission': 420,
'file_type': 'f',
'access_time': 1481122343796,
'block_replication': 3,
'modification_time': 1481122343862,
'length': 0,
'blocksize': 134217728,
'owner': u'hdfs',
'path': '/datadirectory/not_empty_directory/test_file'
}]
elif path[0] == '/datadirectory/not_empty_directory':
return [{
'group': u'supergroup',
'permission': 420,
'file_type': 'f',
'access_time': 1481122343796,
'block_replication': 3,
'modification_time': 1481122343862,
'length': 0,
'blocksize': 134217728,
'owner': u'hdfs',
'path': '/datadirectory/not_empty_directory/test_file'
}]
elif path[0] == '/datadirectory/not_existing_file_or_directory':
raise FakeSnakeBiteClientException
elif path[0] == '/datadirectory/regex_dir':
return [{
'group': u'supergroup',
'permission': 420,
'file_type': 'f',
'access_time': 1481122343796,
'block_replication': 3,
                'modification_time': 1481122343862,
                'length': 12582912,
'blocksize': 134217728,
'owner': u'hdfs',
'path': '/datadirectory/regex_dir/test1file'
}, {
'group': u'supergroup',
'permission': 420,
'file_type': 'f',
'access_time': 1481122343796,
'block_replication': 3,
'modification_time': 1481122343862,
'length': 12582912,
'blocksize': 134217728,
'owner': u'hdfs',
'path': '/datadirectory/regex_dir/test2file'
}, {
'group': u'supergroup',
'permission': 420,
'file_type': 'f',
'access_time': 1481122343796,
'block_replication': 3,
'modification_time': 1481122343862,
'length': 12582912,
'blocksize': 134217728,
'owner': u'hdfs',
'path': '/datadirectory/regex_dir/test3file'
}, {
'group': u'supergroup',
'permission': 420,
'file_type': 'f',
'access_time': 1481122343796,
'block_replication': 3,
'modification_time': 1481122343862,
'length': 12582912,
'blocksize': 134217728,
'owner': u'hdfs',
'path': '/datadirectory/regex_dir/copying_file_1.txt._COPYING_'
}, {
'group': u'supergroup',
'permission': 420,
'file_type': 'f',
'access_time': 1481122343796,
'block_replication': 3,
'modification_time': 1481122343862,
'length': 12582912,
'blocksize': 134217728,
'owner': u'hdfs',
'path': '/datadirectory/regex_dir/copying_file_3.txt.sftp'
}]
else:
raise FakeSnakeBiteClientException
class FakeHDFSHook(object):
def __init__(self, conn_id=None):
self.conn_id = conn_id
def get_conn(self):
client = FakeSnakeBiteClient()
return client
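# Illustrative wiring for the fakes above (a sketch; the sensor tests that
# consume them live elsewhere, and the exact HdfsSensor keywords are
# assumptions):
#
#   sensor = HdfsSensor(task_id='check_file',
#                       filepath='/datadirectory/datafile',
#                       hook=FakeHDFSHook)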
class ConnectionTest(unittest.TestCase):
def setUp(self):
configuration.load_test_config()
utils.db.initdb()
os.environ['AIRFLOW_CONN_TEST_URI'] = (
'postgres://username:[email protected]:5432/the_database')
os.environ['AIRFLOW_CONN_TEST_URI_NO_CREDS'] = (
'postgres://ec2.compute.com/the_database')
def tearDown(self):
env_vars = ['AIRFLOW_CONN_TEST_URI', 'AIRFLOW_CONN_AIRFLOW_DB']
for ev in env_vars:
if ev in os.environ:
del os.environ[ev]
def test_using_env_var(self):
c = SqliteHook.get_connection(conn_id='test_uri')
self.assertEqual('ec2.compute.com', c.host)
self.assertEqual('the_database', c.schema)
self.assertEqual('username', c.login)
self.assertEqual('password', c.password)
self.assertEqual(5432, c.port)
def test_using_unix_socket_env_var(self):
c = SqliteHook.get_connection(conn_id='test_uri_no_creds')
self.assertEqual('ec2.compute.com', c.host)
self.assertEqual('the_database', c.schema)
self.assertIsNone(c.login)
self.assertIsNone(c.password)
self.assertIsNone(c.port)
def test_param_setup(self):
c = Connection(conn_id='local_mysql', conn_type='mysql',
host='localhost', login='airflow',
password='airflow', schema='airflow')
self.assertEqual('localhost', c.host)
self.assertEqual('airflow', c.schema)
self.assertEqual('airflow', c.login)
self.assertEqual('airflow', c.password)
self.assertIsNone(c.port)
def test_env_var_priority(self):
c = SqliteHook.get_connection(conn_id='airflow_db')
self.assertNotEqual('ec2.compute.com', c.host)
os.environ['AIRFLOW_CONN_AIRFLOW_DB'] = \
'postgres://username:[email protected]:5432/the_database'
c = SqliteHook.get_connection(conn_id='airflow_db')
self.assertEqual('ec2.compute.com', c.host)
self.assertEqual('the_database', c.schema)
self.assertEqual('username', c.login)
self.assertEqual('password', c.password)
self.assertEqual(5432, c.port)
del os.environ['AIRFLOW_CONN_AIRFLOW_DB']
def test_dbapi_get_uri(self):
conn = BaseHook.get_connection(conn_id='test_uri')
hook = conn.get_hook()
self.assertEqual('postgres://username:[email protected]:5432/the_database', hook.get_uri())
conn2 = BaseHook.get_connection(conn_id='test_uri_no_creds')
hook2 = conn2.get_hook()
self.assertEqual('postgres://ec2.compute.com/the_database', hook2.get_uri())
def test_dbapi_get_sqlalchemy_engine(self):
conn = BaseHook.get_connection(conn_id='test_uri')
hook = conn.get_hook()
engine = hook.get_sqlalchemy_engine()
self.assertIsInstance(engine, sqlalchemy.engine.Engine)
self.assertEqual('postgres://username:[email protected]:5432/the_database', str(engine.url))
def test_get_connections_env_var(self):
conns = SqliteHook.get_connections(conn_id='test_uri')
assert len(conns) == 1
assert conns[0].host == 'ec2.compute.com'
assert conns[0].schema == 'the_database'
assert conns[0].login == 'username'
assert conns[0].password == 'password'
assert conns[0].port == 5432
class WebHDFSHookTest(unittest.TestCase):
def setUp(self):
configuration.load_test_config()
def test_simple_init(self):
from airflow.hooks.webhdfs_hook import WebHDFSHook
c = WebHDFSHook()
self.assertIsNone(c.proxy_user)
def test_init_proxy_user(self):
from airflow.hooks.webhdfs_hook import WebHDFSHook
c = WebHDFSHook(proxy_user='someone')
self.assertEqual('someone', c.proxy_user)
HDFSHook = None
if six.PY2:
from airflow.hooks.hdfs_hook import HDFSHook
import snakebite
@unittest.skipIf(HDFSHook is None,
"Skipping test because HDFSHook is not installed")
class HDFSHookTest(unittest.TestCase):
def setUp(self):
configuration.load_test_config()
os.environ['AIRFLOW_CONN_HDFS_DEFAULT'] = 'hdfs://localhost:8020'
def test_get_client(self):
client = HDFSHook(proxy_user='foo').get_conn()
self.assertIsInstance(client, snakebite.client.Client)
self.assertEqual('localhost', client.host)
self.assertEqual(8020, client.port)
self.assertEqual('foo', client.service.channel.effective_user)
@mock.patch('airflow.hooks.hdfs_hook.AutoConfigClient')
@mock.patch('airflow.hooks.hdfs_hook.HDFSHook.get_connections')
def test_get_autoconfig_client(self, mock_get_connections,
MockAutoConfigClient):
c = Connection(conn_id='hdfs', conn_type='hdfs',
host='localhost', port=8020, login='foo',
extra=json.dumps({'autoconfig': True}))
mock_get_connections.return_value = [c]
HDFSHook(hdfs_conn_id='hdfs').get_conn()
MockAutoConfigClient.assert_called_once_with(effective_user='foo',
use_sasl=False)
@mock.patch('airflow.hooks.hdfs_hook.AutoConfigClient')
def test_get_autoconfig_client_no_conn(self, MockAutoConfigClient):
HDFSHook(hdfs_conn_id='hdfs_missing', autoconfig=True).get_conn()
MockAutoConfigClient.assert_called_once_with(effective_user=None,
use_sasl=False)
@mock.patch('airflow.hooks.hdfs_hook.HDFSHook.get_connections')
def test_get_ha_client(self, mock_get_connections):
c1 = Connection(conn_id='hdfs_default', conn_type='hdfs',
host='localhost', port=8020)
c2 = Connection(conn_id='hdfs_default', conn_type='hdfs',
host='localhost2', port=8020)
mock_get_connections.return_value = [c1, c2]
client = HDFSHook().get_conn()
self.assertIsInstance(client, snakebite.client.HAClient)
# Module-level mock used as a pluggable email backend by
# EmailTest.test_custom_backend via the EMAIL_BACKEND setting.
send_email_test = mock.Mock()
class EmailTest(unittest.TestCase):
def setUp(self):
configuration.conf.remove_option('email', 'EMAIL_BACKEND')
@mock.patch('airflow.utils.email.send_email')
def test_default_backend(self, mock_send_email):
res = utils.email.send_email('to', 'subject', 'content')
mock_send_email.assert_called_with('to', 'subject', 'content')
self.assertEqual(mock_send_email.return_value, res)
@mock.patch('airflow.utils.email.send_email_smtp')
def test_custom_backend(self, mock_send_email):
configuration.conf.set('email', 'EMAIL_BACKEND', 'tests.core.send_email_test')
utils.email.send_email('to', 'subject', 'content')
send_email_test.assert_called_with(
'to', 'subject', 'content', files=None, dryrun=False,
cc=None, bcc=None, mime_charset='utf-8', mime_subtype='mixed')
self.assertFalse(mock_send_email.called)
class EmailSmtpTest(unittest.TestCase):
def setUp(self):
configuration.conf.set('smtp', 'SMTP_SSL', 'False')
@mock.patch('airflow.utils.email.send_MIME_email')
def test_send_smtp(self, mock_send_mime):
attachment = tempfile.NamedTemporaryFile()
attachment.write(b'attachment')
attachment.seek(0)
utils.email.send_email_smtp('to', 'subject', 'content', files=[attachment.name])
self.assertTrue(mock_send_mime.called)
call_args = mock_send_mime.call_args[0]
self.assertEqual(configuration.conf.get('smtp', 'SMTP_MAIL_FROM'), call_args[0])
self.assertEqual(['to'], call_args[1])
msg = call_args[2]
self.assertEqual('subject', msg['Subject'])
self.assertEqual(configuration.conf.get('smtp', 'SMTP_MAIL_FROM'), msg['From'])
self.assertEqual(2, len(msg.get_payload()))
filename = u'attachment; filename="' + os.path.basename(attachment.name) + '"'
self.assertEqual(filename, msg.get_payload()[-1].get(u'Content-Disposition'))
mimeapp = MIMEApplication('attachment')
self.assertEqual(mimeapp.get_payload(), msg.get_payload()[-1].get_payload())
@mock.patch('airflow.utils.email.send_MIME_email')
def test_send_smtp_with_multibyte_content(self, mock_send_mime):
utils.email.send_email_smtp('to', 'subject', '🔥', mime_charset='utf-8')
self.assertTrue(mock_send_mime.called)
call_args = mock_send_mime.call_args[0]
msg = call_args[2]
mimetext = MIMEText('🔥', 'mixed', 'utf-8')
self.assertEqual(mimetext.get_payload(), msg.get_payload()[0].get_payload())
@mock.patch('airflow.utils.email.send_MIME_email')
def test_send_bcc_smtp(self, mock_send_mime):
attachment = tempfile.NamedTemporaryFile()
attachment.write(b'attachment')
attachment.seek(0)
utils.email.send_email_smtp('to', 'subject', 'content', files=[attachment.name], cc='cc', bcc='bcc')
self.assertTrue(mock_send_mime.called)
call_args = mock_send_mime.call_args[0]
self.assertEqual(configuration.conf.get('smtp', 'SMTP_MAIL_FROM'), call_args[0])
self.assertEqual(['to', 'cc', 'bcc'], call_args[1])
msg = call_args[2]
self.assertEqual('subject', msg['Subject'])
self.assertEqual(configuration.conf.get('smtp', 'SMTP_MAIL_FROM'), msg['From'])
self.assertEqual(2, len(msg.get_payload()))
self.assertEqual(u'attachment; filename="' + os.path.basename(attachment.name) + '"',
msg.get_payload()[-1].get(u'Content-Disposition'))
mimeapp = MIMEApplication('attachment')
self.assertEqual(mimeapp.get_payload(), msg.get_payload()[-1].get_payload())
@mock.patch('smtplib.SMTP_SSL')
@mock.patch('smtplib.SMTP')
def test_send_mime(self, mock_smtp, mock_smtp_ssl):
mock_smtp.return_value = mock.Mock()
mock_smtp_ssl.return_value = mock.Mock()
msg = MIMEMultipart()
utils.email.send_MIME_email('from', 'to', msg, dryrun=False)
mock_smtp.assert_called_with(
configuration.conf.get('smtp', 'SMTP_HOST'),
configuration.conf.getint('smtp', 'SMTP_PORT'),
)
self.assertTrue(mock_smtp.return_value.starttls.called)
mock_smtp.return_value.login.assert_called_with(
configuration.conf.get('smtp', 'SMTP_USER'),
configuration.conf.get('smtp', 'SMTP_PASSWORD'),
)
mock_smtp.return_value.sendmail.assert_called_with('from', 'to', msg.as_string())
self.assertTrue(mock_smtp.return_value.quit.called)
@mock.patch('smtplib.SMTP_SSL')
@mock.patch('smtplib.SMTP')
def test_send_mime_ssl(self, mock_smtp, mock_smtp_ssl):
configuration.conf.set('smtp', 'SMTP_SSL', 'True')
mock_smtp.return_value = mock.Mock()
mock_smtp_ssl.return_value = mock.Mock()
utils.email.send_MIME_email('from', 'to', MIMEMultipart(), dryrun=False)
self.assertFalse(mock_smtp.called)
mock_smtp_ssl.assert_called_with(
configuration.conf.get('smtp', 'SMTP_HOST'),
configuration.conf.getint('smtp', 'SMTP_PORT'),
)
@mock.patch('smtplib.SMTP_SSL')
@mock.patch('smtplib.SMTP')
def test_send_mime_noauth(self, mock_smtp, mock_smtp_ssl):
configuration.conf.remove_option('smtp', 'SMTP_USER')
configuration.conf.remove_option('smtp', 'SMTP_PASSWORD')
mock_smtp.return_value = mock.Mock()
mock_smtp_ssl.return_value = mock.Mock()
utils.email.send_MIME_email('from', 'to', MIMEMultipart(), dryrun=False)
self.assertFalse(mock_smtp_ssl.called)
mock_smtp.assert_called_with(
configuration.conf.get('smtp', 'SMTP_HOST'),
configuration.conf.getint('smtp', 'SMTP_PORT'),
)
        self.assertFalse(mock_smtp.return_value.login.called)
@mock.patch('smtplib.SMTP_SSL')
@mock.patch('smtplib.SMTP')
def test_send_mime_dryrun(self, mock_smtp, mock_smtp_ssl):
utils.email.send_MIME_email('from', 'to', MIMEMultipart(), dryrun=True)
self.assertFalse(mock_smtp.called)
self.assertFalse(mock_smtp_ssl.called)
if __name__ == '__main__':
unittest.main()
| r39132/airflow | tests/core.py | Python | apache-2.0 | 94,499 |
from django.views.generic.edit import FormView
from django.views.generic import ListView
from .forms import ImageForm
from .models import Image
class UploadView(FormView):
template_name = 'imageview/upload.html'
form_class = ImageForm
success_url = '/'
def form_valid(self, form):
form.save()
return super(UploadView, self).form_valid(form)
class MainView(ListView):
template_name = 'imageview/list.html'
model = Image
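# Illustrative URL wiring for these views (a sketch; this app's real urls.py
# is not shown here, so route names and patterns are assumptions):
#
#   from django.conf.urls import url
#   from .views import MainView, UploadView
#
#   urlpatterns = [
#       url(r'^$', MainView.as_view(), name='list'),
#       url(r'^upload/$', UploadView.as_view(), name='upload'),
#   ]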
| w00tw00tw00tw00t/imageview | imageview/views.py | Python | gpl-2.0 | 465 |
from __future__ import absolute_import
import urwid
import urwid.util
import os
from netlib.http import CONTENT_MISSING
import netlib.utils
from .. import utils
from ..models import decoded
from . import signals
try:
import pyperclip
except:
pyperclip = False
VIEW_FLOW_REQUEST = 0
VIEW_FLOW_RESPONSE = 1
METHOD_OPTIONS = [
("get", "g"),
("post", "p"),
("put", "u"),
("head", "h"),
("trace", "t"),
("delete", "d"),
("options", "o"),
("edit raw", "e"),
]
def is_keypress(k):
"""
Is this input event a keypress?
"""
if isinstance(k, basestring):
return True
def highlight_key(s, key, textattr="text", keyattr="key"):
    l = []
    parts = s.split(key, 1)
    if parts[0]:
        l.append((textattr, parts[0]))
    l.append((keyattr, key))
    if parts[1]:
        l.append((textattr, parts[1]))
    return l
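# For example, with the default attribute names:
#
#   highlight_key("quit", "q") -> [("key", "q"), ("text", "uit")]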
KEY_MAX = 30
def format_keyvals(lst, key="key", val="text", indent=0):
"""
Format a list of (key, value) tuples.
If key is None, it's treated specially:
- We assume a sub-value, and add an extra indent.
- The value is treated as a pre-formatted list of directives.
"""
ret = []
if lst:
maxk = min(max(len(i[0]) for i in lst if i and i[0]), KEY_MAX)
for i, kv in enumerate(lst):
if kv is None:
ret.append(urwid.Text(""))
else:
if isinstance(kv[1], urwid.Widget):
v = kv[1]
elif kv[1] is None:
v = urwid.Text("")
else:
v = urwid.Text([(val, kv[1])])
ret.append(
urwid.Columns(
[
("fixed", indent, urwid.Text("")),
(
"fixed",
maxk,
urwid.Text([(key, kv[0] or "")])
),
v
],
dividechars = 2
)
)
return ret
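# Illustrative use (a sketch; the attribute names must exist in the active
# urwid palette):
#
#   widgets = format_keyvals(
#       [("Host", "example.com"), None, ("Accept", "*/*")],
#       key="header", val="text", indent=4)
#   urwid.ListBox(urwid.SimpleListWalker(widgets))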
def shortcuts(k):
if k == " ":
k = "page down"
elif k == "j":
k = "down"
elif k == "k":
k = "up"
return k
def fcol(s, attr):
s = unicode(s)
return (
"fixed",
len(s),
urwid.Text(
[
(attr, s)
]
)
)
if urwid.util.detected_encoding:
SYMBOL_REPLAY = u"\u21ba"
SYMBOL_RETURN = u"\u2190"
SYMBOL_MARK = u"\u25cf"
else:
SYMBOL_REPLAY = u"[r]"
SYMBOL_RETURN = u"<-"
SYMBOL_MARK = "[m]"
def raw_format_flow(f, focus, extended, padding):
f = dict(f)
pile = []
req = []
if extended:
req.append(
fcol(
utils.format_timestamp(f["req_timestamp"]),
"highlight"
)
)
else:
req.append(fcol(">>" if focus else " ", "focus"))
if f["marked"]:
req.append(fcol(SYMBOL_MARK, "mark"))
if f["req_is_replay"]:
req.append(fcol(SYMBOL_REPLAY, "replay"))
req.append(fcol(f["req_method"], "method"))
preamble = sum(i[1] for i in req) + len(req) - 1
if f["intercepted"] and not f["acked"]:
uc = "intercept"
elif f["resp_code"] or f["err_msg"]:
uc = "text"
else:
uc = "title"
req.append(
urwid.Text([(uc, f["req_url"])])
)
pile.append(urwid.Columns(req, dividechars=1))
resp = []
resp.append(
("fixed", preamble, urwid.Text(""))
)
if f["resp_code"]:
codes = {
2: "code_200",
3: "code_300",
4: "code_400",
5: "code_500",
}
ccol = codes.get(f["resp_code"] / 100, "code_other")
resp.append(fcol(SYMBOL_RETURN, ccol))
if f["resp_is_replay"]:
resp.append(fcol(SYMBOL_REPLAY, "replay"))
resp.append(fcol(f["resp_code"], ccol))
if f["intercepted"] and f["resp_code"] and not f["acked"]:
rc = "intercept"
else:
rc = "text"
if f["resp_ctype"]:
resp.append(fcol(f["resp_ctype"], rc))
resp.append(fcol(f["resp_clen"], rc))
resp.append(fcol(f["roundtrip"], rc))
elif f["err_msg"]:
resp.append(fcol(SYMBOL_RETURN, "error"))
resp.append(
urwid.Text([
(
"error",
f["err_msg"]
)
])
)
pile.append(urwid.Columns(resp, dividechars=1))
return urwid.Pile(pile)
# Save file to disk
def save_data(path, data, master, state):
if not path:
return
try:
with file(path, "wb") as f:
f.write(data)
except IOError as v:
signals.status_message.send(message=v.strerror)
def ask_save_overwrite(path, data, master, state):
    if not path:
        return
    path = os.path.expanduser(path)
    if os.path.exists(path):
        def save_overwrite(k):
            if k == "y":
                save_data(path, data, master, state)
        signals.status_prompt_onekey.send(
            prompt = "'" + path + "' already exists. Overwrite?",
            keys = (
                ("yes", "y"),
                ("no", "n"),
            ),
            callback = save_overwrite
        )
    else:
        save_data(path, data, master, state)
def ask_save_path(prompt, data, master, state):
signals.status_prompt_path.send(
prompt = prompt,
        callback = ask_save_overwrite,
args = (data, master, state)
)
def copy_flow_format_data(part, scope, flow):
if part == "u":
data = flow.request.url
else:
data = ""
if scope in ("q", "a"):
if flow.request.content is None or flow.request.content == CONTENT_MISSING:
return None, "Request content is missing"
with decoded(flow.request):
if part == "h":
data += flow.client_conn.protocol.assemble(flow.request)
elif part == "c":
data += flow.request.content
else:
raise ValueError("Unknown part: {}".format(part))
if scope == "a" and flow.request.content and flow.response:
# Add padding between request and response
data += "\r\n" * 2
if scope in ("s", "a") and flow.response:
if flow.response.content is None or flow.response.content == CONTENT_MISSING:
return None, "Response content is missing"
with decoded(flow.response):
if part == "h":
data += flow.client_conn.protocol.assemble(flow.response)
elif part == "c":
data += flow.response.content
else:
raise ValueError("Unknown part: {}".format(part))
return data, False
def copy_flow(part, scope, flow, master, state):
"""
part: _c_ontent, _h_eaders+content, _u_rl
scope: _a_ll, re_q_uest, re_s_ponse
"""
data, err = copy_flow_format_data(part, scope, flow)
if err:
signals.status_message.send(message=err)
return
if not data:
if scope == "q":
signals.status_message.send(message="No request content to copy.")
elif scope == "s":
signals.status_message.send(message="No response content to copy.")
else:
signals.status_message.send(message="No contents to copy.")
return
    # pyperclip calls encode('utf-8') on the data to be copied without
    # checking; if the data is already encoded that way, a
    # UnicodeDecodeError is thrown.
    toclip = ""
    try:
        toclip = data.decode('utf-8')
    except UnicodeDecodeError:
        toclip = data
try:
pyperclip.copy(toclip)
except (RuntimeError, UnicodeDecodeError, AttributeError):
def save(k):
if k == "y":
ask_save_path("Save data", data, master, state)
signals.status_prompt_onekey.send(
prompt = "Cannot copy data to clipboard. Save as file?",
keys = (
("yes", "y"),
("no", "n"),
),
callback = save
)
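# Illustrative call (a sketch): copy the headers and content of a flow's
# request from a keybinding handler.
#
#   copy_flow("h", "q", flow, self.master, self.state)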
def ask_copy_part(scope, flow, master, state):
choices = [
("content", "c"),
("headers+content", "h")
]
if scope != "s":
choices.append(("url", "u"))
signals.status_prompt_onekey.send(
prompt = "Copy",
keys = choices,
callback = copy_flow,
args = (scope, flow, master, state)
)
def ask_save_body(part, master, state, flow):
"""
Save either the request or the response body to disk. part can either be
"q" (request), "s" (response) or None (ask user if necessary).
"""
request_has_content = flow.request and flow.request.content
response_has_content = flow.response and flow.response.content
if part is None:
# We first need to determine whether we want to save the request or the
# response content.
if request_has_content and response_has_content:
signals.status_prompt_onekey.send(
prompt = "Save",
keys = (
("request", "q"),
("response", "s"),
),
callback = ask_save_body,
args = (master, state, flow)
)
elif response_has_content:
ask_save_body("s", master, state, flow)
else:
ask_save_body("q", master, state, flow)
elif part == "q" and request_has_content:
ask_save_path(
"Save request content",
flow.request.get_decoded_content(),
master,
state
)
elif part == "s" and response_has_content:
ask_save_path(
"Save response content",
flow.response.get_decoded_content(),
master,
state
)
else:
signals.status_message.send(message="No content to save.")
flowcache = utils.LRUCache(800)
def format_flow(f, focus, extended=False, hostheader=False, padding=2,
marked=False):
d = dict(
intercepted = f.intercepted,
acked = f.reply.acked,
req_timestamp = f.request.timestamp_start,
req_is_replay = f.request.is_replay,
req_method = f.request.method,
req_url = f.request.pretty_url if hostheader else f.request.url,
err_msg = f.error.msg if f.error else None,
resp_code = f.response.status_code if f.response else None,
marked = marked,
)
if f.response:
if f.response.content:
contentdesc = netlib.utils.pretty_size(len(f.response.content))
elif f.response.content == CONTENT_MISSING:
contentdesc = "[content missing]"
else:
contentdesc = "[no content]"
duration = 0
if f.response.timestamp_end and f.request.timestamp_start:
duration = f.response.timestamp_end - f.request.timestamp_start
roundtrip = utils.pretty_duration(duration)
d.update(dict(
resp_code = f.response.status_code,
resp_is_replay = f.response.is_replay,
resp_clen = contentdesc,
roundtrip = roundtrip,
))
t = f.response.headers.get("content-type")
if t:
d["resp_ctype"] = t.split(";")[0]
else:
d["resp_ctype"] = ""
return flowcache.get(
raw_format_flow,
tuple(sorted(d.items())), focus, extended, padding
)
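# Illustrative call (a sketch): a flow-list walker would render one row via
#
#   widget = format_flow(flow, focus=True, extended=False)
#
# Note that raw_format_flow is routed through the LRU cache above, so all of
# its arguments must be hashable.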
| tekii/mitmproxy | libmproxy/console/common.py | Python | mit | 11,726 |
import os
import shutil
import tempfile
from django.test import TestCase
from django.urls import reverse
from ..models import RegistrationCenter, Office, Constituency, SubConstituency
from .. import utils
from .factories import OfficeFactory, ConstituencyFactory, SubConstituencyFactory, \
RegistrationCenterFactory
from libya_elections.constants import NO_NAMEDTHING
from staff.tests.base import StaffUserMixin
def get_copy_center_base_csv():
"""Return the base CSV for copy centers as a lists of lists (rows & columns)"""
current_dir = os.path.dirname(os.path.realpath(__file__))
file_path = os.path.join(current_dir, 'uploads', 'copy_center_base.csv')
with open(file_path, 'rb') as f:
lines = f.read().decode('utf-8').split('\n')
return [line.split(',') for line in lines if line]
class CSVColumnConstants(object):
"""Constants mapping CSV columns to ints"""
CENTER_ID = 0
NAME = 1
COPY_OF_ID = 2
CENTER_TYPE = 12
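# Illustrative use of the helpers above (a sketch; '70000' mirrors the
# center_id used by the copy-centre tests below):
#
#   rows = get_copy_center_base_csv()
#   rows[1][CSVColumnConstants.COPY_OF_ID] = '70000'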
class CenterFileTestMixin(object):
def setUp(self):
super(CenterFileTestMixin, self).setUp()
self.url = reverse('upload-centers-csv')
RegistrationCenterFactory(name="Deleted center", deleted=True)
def tearDown(self):
if hasattr(self, 'file'):
self.file.close()
def get_csv_file(self, filename):
# generates a simple csv we can use for tests
current_dir = os.path.dirname(os.path.realpath(__file__))
file_path = os.path.join(current_dir, 'uploads', filename)
self.file = open(file_path, 'rb')
return self.file
@staticmethod
def get_messages(response):
messages = response.context['messages']
return [str(msg) for msg in messages]
def upload_csv(self, filename='valid_ecc.csv', follow=True):
csv_file = self.get_csv_file(filename)
response = self.client.post(self.url, data={'csv': csv_file}, follow=follow)
return response
class CenterFileUpload(CenterFileTestMixin, StaffUserMixin, TestCase):
# tests for the ecc file upload functionality
permissions = ['add_registrationcenter']
model = RegistrationCenter
@classmethod
def setUpClass(klass): # Files only
# Create a temp dir for CSV files created on the fly.
klass.temp_dir = tempfile.mkdtemp()
@classmethod
def tearDownClass(klass): # Files only
# Clean up temp CSV files.
shutil.rmtree(klass.temp_dir)
def setUp(self):
super(CenterFileUpload, self).setUp()
# Create some things
for id in [1, NO_NAMEDTHING]:
# create one test instance and one special 'no-named-thing' instance (999)
if not Office.objects.filter(id=id).exists():
OfficeFactory(id=id)
if not Constituency.objects.filter(id=id).exists():
ConstituencyFactory(id=id)
if not SubConstituency.objects.filter(id=id).exists():
SubConstituencyFactory(id=id)
def write_csv(self, rows):
"""Given a list of lists, write them as a CSV to a temp file and return the filename.
The list of lists should be rows and columns as returned by get_copy_center_base_csv().
"""
fh, filename = tempfile.mkstemp(suffix='.csv', dir=self.temp_dir)
os.close(fh)
with open(filename, 'wb') as f:
f.write('\n'.join([','.join(row) for row in rows]).encode('utf-8'))
return filename
def test_upload_page_works(self):
# requesting the upload page works and the right template it's used
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'register/upload_centers_csv.html')
def test_empty_upload(self):
        # The form does not validate if an empty form is submitted.
        # The same template we landed on is used, and the form has an
        # error.
response = self.client.post(self.url, data={})
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'register/upload_centers_csv.html')
self.assertFormError(response, 'form', 'csv', 'This field is required.')
def test_success_upload_page(self):
# after successfully uploading a file we are presented with a
# success template.
response = self.upload_csv()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'register/upload_centers_csv.html')
def test_upload_new_centers(self):
# Uploading a csv file with new center information creates new entries
# in the database.
response = self.upload_csv()
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
self.assertEqual(centers.count(), 3)
messages = self.get_messages(response)
self.assertIn(
utils.STATUS_MESSAGE.format(created=3, updated=0, dupes=0, blank=0),
messages
)
def test_upload_dupes(self):
# Upload does not create or update records if they did not change.
response = self.upload_csv()
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
self.assertEqual(centers.count(), 3)
response = self.upload_csv()
messages = self.get_messages(response)
self.assertIn(
utils.STATUS_MESSAGE.format(created=0, updated=0, dupes=3, blank=0),
messages
)
def test_upload_after_delete(self):
# Upload, mark records deleted, upload again
response = self.upload_csv()
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
self.assertEqual(centers.count(), 3)
RegistrationCenter.objects.all().update(deleted=True)
response = self.upload_csv()
centers = RegistrationCenter.objects.all()
self.assertEqual(centers.count(), 3)
messages = self.get_messages(response)
self.assertIn(
utils.STATUS_MESSAGE.format(created=3, updated=0, dupes=0, blank=0),
messages
)
def test_upload_update(self):
# CSV updates a record if its attributes differ from those in the db.
RegistrationCenter.objects.create(center_id=11001, name="Center 3")
RegistrationCenter.objects.create(center_id=11001, name="Center 3", deleted=True)
response = self.upload_csv()
self.assertEqual(response.status_code, 200)
reg_center = RegistrationCenter.objects.get(center_id=11001)
self.assertNotEqual(reg_center.name, "Center 3")
messages = self.get_messages(response)
self.assertIn(
utils.STATUS_MESSAGE.format(created=2, updated=1, dupes=0, blank=0),
messages
)
def test_non_csv(self):
        # A non-CSV file should generate a specific error.
response = self.upload_csv(filename='icon_clock.gif')
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
self.assertEqual(centers.count(), 0)
messages = self.get_messages(response)
self.assertIn(utils.COULD_NOT_PARSE_ERROR, messages)
def test_bad_formatted_csv(self):
# CSV files that contain rows with the wrong number of columns are not accepted.
# Even compliant rows are not imported.
response = self.upload_csv(filename='too_many_columns.csv')
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
# file contained one valid center but it should not have been imported
self.assertEqual(centers.count(), 0)
messages = self.get_messages(response)
self.assertIn(
utils.PARSING_ERROR.format(line_number=2, columns=", ".join(utils.CSV_FIELDS)),
messages[0]
)
def test_too_many_headers(self):
# If the number of headers exceeds the number of columns expected,
# fail gracefully and inform the user that their file has the wrong format
response = self.upload_csv(filename='too_many_headers.csv')
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
# Too many headers ==> entire file is rejected
self.assertEqual(centers.count(), 0)
messages = self.get_messages(response)
self.assertIn(
utils.PARSING_ERROR.format(line_number=1, columns=", ".join(utils.CSV_FIELDS)),
messages[0]
)
def test_too_few_headers(self):
# If the number of headers less than the number of columns expected,
# fail gracefully and inform the user that their file has the wrong format
response = self.upload_csv(filename='too_few_headers.csv')
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
# Too few headers ==> entire file is rejected
self.assertEqual(centers.count(), 0)
messages = self.get_messages(response)
self.assertIn(
utils.PARSING_ERROR.format(line_number=1, columns=", ".join(utils.CSV_FIELDS)),
messages[0]
)
def test_wrong_file_headers(self):
# Uploading a csv file with columns in the wrong order should fail
response = self.upload_csv(filename='wrong_headers.csv')
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
# no centers were created because we encountered an error on line 1.
self.assertEqual(centers.count(), 0)
messages = self.get_messages(response)
self.assertIn(
utils.PARSING_ERROR.format(line_number=1, columns=", ".join(utils.CSV_FIELDS)),
messages
)
def test_blank_csv(self):
# Uploading a blank csv file should not create any centers
response = self.upload_csv(filename='blank.csv')
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
# No records were created
self.assertEqual(centers.count(), 0)
def test_blank_inbetween_csv(self):
# Blank lines are valid in between two rows
response = self.upload_csv(filename='blank_inbetween.csv')
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
self.assertEqual(centers.count(), 5)
messages = self.get_messages(response)
self.assertIn(
utils.STATUS_MESSAGE.format(created=5, updated=0, dupes=0, blank=3),
messages
)
def test_noninteger_center_id_csv(self):
# center id should be able to be cast into an integer otherwise a
# parsing error will occur and a message indicating the line number
# where the error occurred will be presented to the user.
response = self.upload_csv(filename='noninteger_center_id.csv')
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
# Due to error, no centers were created
self.assertEqual(centers.count(), 0)
messages = self.get_messages(response)
self.assertIn(
utils.FORM_FIELD_ERROR.format(field_name="center_id", value="110A1", line_number=2,
error='Enter a whole number.'),
messages[0]
)
def test_wrong_length_center_id_csv(self):
response = self.upload_csv(filename='wrong_length_center_id.csv')
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
# Due to error, no centers were created
self.assertEqual(centers.count(), 0)
messages = self.get_messages(response)
self.assertIn(
utils.FORM_FIELD_ERROR.format(field_name="center_id", value="110001", line_number=2,
error='Ensure this value is less than or equal to'),
messages[0]
)
def test_bad_office_id_csv(self):
# office id should be able to be cast into an integer otherwise a
# parsing error will occur and a message indicating the line number
# where the error occurred will be presented to the user.
response = self.upload_csv(filename='bad_office_id.csv')
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
# Due to error, no centers were created
self.assertEqual(centers.count(), 0)
messages = self.get_messages(response)
self.assertIn(
utils.FORM_FIELD_ERROR.format(field_name="office_id", value="", line_number=2,
error='This field is required.'),
messages[0]
)
def test_centers_not_associated_with_office_con_subcon_csv(self):
# Some Centers are not associated with offices, cons or subcons. For this purpose,
# each of these NamedThing models has a special instance with an ID of NO_NAMEDTHING
# (999) to represent the 'Absence of an associated NamedThing'.
# https://github.com/hnec-vr/libya-elections/issues/949
response = self.upload_csv(filename='no_associated_namedthings.csv')
self.assertEqual(response.status_code, 200)
# 1 center was created
ecc = RegistrationCenter.objects.get()
self.assertEqual(NO_NAMEDTHING, ecc.office.id)
self.assertEqual(NO_NAMEDTHING, ecc.constituency.id)
self.assertEqual(NO_NAMEDTHING, ecc.subconstituency.id)
def test_bad_constituency_id_csv(self):
# constituency id should be able to be cast into an integer otherwise a
# parsing error will occur and a message indicating the line number
# where the error occurred will be presented to the user.
response = self.upload_csv(filename='bad_constituency_id.csv')
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
# Due to error, no centers were created
self.assertEqual(centers.count(), 0)
messages = self.get_messages(response)
self.assertIn(
utils.FORM_FIELD_ERROR.format(field_name="constituency_id", value="x", line_number=2,
error='Enter a whole number.'),
messages[0]
)
def test_bad_subconstituency_id_csv(self):
# subconstituency id should be able to be cast into an integer otherwise a
# parsing error will occur and a message indicating the line number
# where the error occurred will be presented to the user.
response = self.upload_csv(filename='bad_subconstituency_id.csv')
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
# Due to error, no centers were created
self.assertEqual(centers.count(), 0)
messages = self.get_messages(response)
self.assertIn(
utils.FORM_FIELD_ERROR.format(field_name="subconstituency_id", value="x", line_number=2,
error='Enter a whole number.'),
messages[0]
)
def test_just_one_latlong(self):
# Providing just one of lat, long is an error
response = self.upload_csv(filename='just_one_latlong.csv')
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
# Due to error, no centers were created
self.assertEqual(centers.count(), 0)
messages = self.get_messages(response)
self.assertIn(
utils.FORM_ERROR.format(line_number=2,
error='Either set both latitude and longitude or neither'),
messages[0]
)
def test_invalid_lat(self):
# Invalid latitude
response = self.upload_csv(filename='invalid_lat.csv')
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
# Due to error, no centers were created
self.assertEqual(centers.count(), 0)
messages = self.get_messages(response)
self.assertIn(
utils.FORM_FIELD_ERROR.format(
line_number=2, field_name='center_lat', value="1234",
error='Ensure that there are no more than 3 digits before the decimal'),
messages[0]
)
def test_nonexistent_office(self):
response = self.upload_csv(filename='nonexistent_office.csv')
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
# Due to error, no centers were created
self.assertEqual(centers.count(), 0)
messages = self.get_messages(response)
self.assertIn(
utils.FORM_FIELD_ERROR.format(
line_number=2, field_name='office_id', value='22',
error='Office does not exist.'),
messages[0]
)
def test_nonexistent_constituency(self):
response = self.upload_csv(filename='nonexistent_constituency.csv')
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
# Due to error, no centers were created
self.assertEqual(centers.count(), 0)
messages = self.get_messages(response)
self.assertIn(
utils.FORM_FIELD_ERROR.format(
line_number=2, field_name='constituency_id', value='22',
error='Constituency does not exist.'),
messages[0]
)
def test_nonexistent_subconstituency(self):
response = self.upload_csv(filename='nonexistent_subconstituency.csv')
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
# Due to error, no centers were created
self.assertEqual(centers.count(), 0)
messages = self.get_messages(response)
self.assertIn(
utils.FORM_FIELD_ERROR.format(
line_number=2, field_name='subconstituency_id', value='22',
error='Subconstituency does not exist.'),
messages[0]
)
def test_blank_center_name(self):
response = self.upload_csv(filename='blank_center_name.csv')
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
# Due to error, no centers were created
self.assertEqual(centers.count(), 0)
messages = self.get_messages(response)
self.assertIn(
utils.FORM_FIELD_ERROR.format(
line_number=2, field_name='name', value='',
error='This field is required.'),
messages[0]
)
def test_newline_in_center_name(self):
response = self.upload_csv(filename='newline_center_name.csv')
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
# Due to error, no centers were created
self.assertEqual(centers.count(), 0)
messages = self.get_messages(response)
self.assertIn(
utils.FORM_FIELD_ERROR.format(
line_number=2, field_name='name', value='new\nline',
error='Newlines are not allowed.'),
messages[0]
)
def test_reg_open_field_set_to_true(self):
# The 'reg_open' field is not included in the CSV file.
# We should ensure that it is set to True (the model default)
response = self.upload_csv()
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
self.assertEqual(centers.count(), 3)
for ecc in centers:
self.assertEqual(ecc.reg_open, True)
def test_simple_copy_center_ok(self):
# test that simple copy center creation works
RegistrationCenterFactory(center_id=70001)
csv = get_copy_center_base_csv()
csv[1][CSVColumnConstants.COPY_OF_ID] = '70001'
csv[1][CSVColumnConstants.CENTER_TYPE] = \
RegistrationCenter.Types.NAMES['ar'][RegistrationCenter.Types.COPY]
response = self.upload_csv(filename=self.write_csv(csv))
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
self.assertEqual(len(centers), 2)
self.assertEqual(centers[0].copy_of, centers[1])
self.assertEqual(list(centers[1].copied_by.all()), [centers[0]])
def test_copy_center_same_file_reference_ok(self):
# test that a copy center can reference an original created in the same file
csv = get_copy_center_base_csv()
# Duplicate the data row and make row the 2nd data row refer to the first.
csv.append(csv[1][::])
csv[2][CSVColumnConstants.CENTER_ID] = '70002'
csv[2][CSVColumnConstants.COPY_OF_ID] = '70000'
csv[2][CSVColumnConstants.CENTER_TYPE] = \
RegistrationCenter.Types.NAMES['ar'][RegistrationCenter.Types.COPY]
response = self.upload_csv(filename=self.write_csv(csv))
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
self.assertEqual(len(centers), 2)
self.assertEqual(centers[1].copy_of, centers[0])
self.assertEqual(list(centers[0].copied_by.all()), [centers[1]])
def test_copy_center_failed_reference(self):
# test that one can't create a copy center that refers to a non-existent center.
csv = get_copy_center_base_csv()
csv[1][CSVColumnConstants.COPY_OF_ID] = '70001'
response = self.upload_csv(filename=self.write_csv(csv))
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
# Due to error, no centers were created
self.assertEqual(centers.count(), 0)
messages = self.get_messages(response)
self.assertIn(
utils.FORM_FIELD_ERROR.format(
line_number=2, field_name='copy_of_id', value='70001',
error='Copy centre does not exist.'),
messages[0]
)
def test_copy_center_read_only(self):
# test that copy centers are read only
original_center = RegistrationCenterFactory(center_id=70000)
copy_center = RegistrationCenterFactory(center_id=70001)
copy_center.copy_of = original_center
copy_center.save()
csv = get_copy_center_base_csv()
csv[1][CSVColumnConstants.CENTER_ID] = '70001'
csv[1][CSVColumnConstants.NAME] = 'different_name_to_trigger_an_attempt_to_edit'
csv[1][CSVColumnConstants.COPY_OF_ID] = '70000'
response = self.upload_csv(filename=self.write_csv(csv))
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
self.assertListEqual([center.center_id for center in centers], [70000, 70001])
messages = self.get_messages(response)
self.assertIn(
utils.FORM_ERROR.format(
line_number=2, error='Copy centres are read-only.'),
messages[0]
)
def test_existing_center_cant_become_copy_center(self):
# test that an existing center can't be turned into a copy center.
RegistrationCenterFactory(center_id=70000)
RegistrationCenterFactory(center_id=70001)
csv = get_copy_center_base_csv()
csv[1][CSVColumnConstants.COPY_OF_ID] = '70001'
csv[1][CSVColumnConstants.CENTER_TYPE] = \
RegistrationCenter.Types.NAMES['en'][RegistrationCenter.Types.COPY]
response = self.upload_csv(filename=self.write_csv(csv))
self.assertEqual(response.status_code, 200)
# No new centers should have been created
centers = RegistrationCenter.objects.all()
self.assertListEqual([center.center_id for center in centers], [70000, 70001])
messages = self.get_messages(response)
self.assertIn(
utils.FORM_ERROR.format(
line_number=2, error='A centre may not be changed to a copy centre.'),
messages[0]
)
def test_existing_center_must_remain_copy_center(self):
# test that an existing copy center can't become a non-copy center.
original_center = RegistrationCenterFactory(center_id=70000)
copy_center = RegistrationCenterFactory(center_id=70001)
copy_center.copy_of = original_center
copy_center.save()
csv = get_copy_center_base_csv()
csv[1][CSVColumnConstants.CENTER_ID] = '70001'
csv[1][CSVColumnConstants.COPY_OF_ID] = ''
csv[1][CSVColumnConstants.CENTER_TYPE] = \
RegistrationCenter.Types.NAMES['en'][RegistrationCenter.Types.GENERAL]
response = self.upload_csv(filename=self.write_csv(csv))
self.assertEqual(response.status_code, 200)
# No new centers should have been created
centers = RegistrationCenter.objects.all()
self.assertListEqual([center.center_id for center in centers], [70000, 70001])
messages = self.get_messages(response)
self.assertIn(
utils.FORM_ERROR.format(
line_number=2, error='Copy centres are read-only.'),
messages[0]
)
def test_center_type_valid(self):
# In the CSV file, 'center_type' is an arabic string field. We should
# parse it and convert to a corresponding integer from RegistrationCenter.Types.CHOICES.
response = self.upload_csv(filename='valid_center_types.csv')
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
self.assertEqual(centers.count(), 13)
# The first 6 centers in the test CSV have Arabic names. (At present we don't have have
# an Arabic translation for "Split" so there's no point in testing it.)
for i, center in enumerate(centers[:6]):
self.assertEqual(center.center_type, RegistrationCenter.Types.CHOICES[i][0])
# The last 7 centers in the test CSV have English names.
for i, center in enumerate(centers[6:]):
self.assertEqual(center.center_type, RegistrationCenter.Types.CHOICES[i][0])
def test_center_type_invalid(self):
# If we don't recognize the value in the 'center_type' field, then return an error.
response = self.upload_csv(filename='invalid_center_types.csv')
self.assertEqual(response.status_code, 200)
centers = RegistrationCenter.objects.all()
self.assertEqual(centers.count(), 0)
messages = self.get_messages(response)
self.assertIn(
utils.FORM_FIELD_ERROR.format(
line_number=2, field_name='center_type', value='invalid_center_type',
error='That is not a valid center_type'),
messages[0]
)
def test_center_type_copy_required_for_copy_centers(self):
# Copy centers must have the copy center type
RegistrationCenterFactory(center_id=70000)
csv = get_copy_center_base_csv()
csv[1][CSVColumnConstants.CENTER_ID] = '70001'
csv[1][CSVColumnConstants.COPY_OF_ID] = '70000'
csv[1][CSVColumnConstants.CENTER_TYPE] = \
RegistrationCenter.Types.NAMES['ar'][RegistrationCenter.Types.OIL]
response = self.upload_csv(filename=self.write_csv(csv))
self.assertEqual(response.status_code, 200)
# No new centers should have been created
centers = RegistrationCenter.objects.all()
self.assertListEqual([center.center_id for center in centers], [70000])
messages = self.get_messages(response)
self.assertIn(
utils.FORM_ERROR.format(
line_number=2, error='Copy centre type must be "copy".'),
messages[0]
)
def test_center_type_copy_rejected_for_noncopy_centers(self):
# Non-copy centers may not have the copy center type
csv = get_copy_center_base_csv()
csv[1][CSVColumnConstants.CENTER_TYPE] = \
RegistrationCenter.Types.NAMES['ar'][RegistrationCenter.Types.COPY]
response = self.upload_csv(filename=self.write_csv(csv))
self.assertEqual(response.status_code, 200)
# No new centers should have been created
centers = RegistrationCenter.objects.all()
self.assertEqual(len(centers), 0)
messages = self.get_messages(response)
self.assertIn(
utils.FORM_ERROR.format(
line_number=2, error='Centre type "copy" requires copy centre information.'),
messages[0]
)
| SmartElect/SmartElect | register/tests/test_center_csv.py | Python | apache-2.0 | 28,774 |
from mock import patch
from django.test import TestCase
from colab.management.commands.celery import Command
class CeleryCommandTest(TestCase):
@patch.object(Command, 'validate')
@patch('colab.management.commands.celery.base.execute_from_commandline')
def test_run_from_argv(self, execute_from_commandline_mock, validate_mock):
cmd = Command()
cmd.requires_system_checks = True
cmd.run_from_argv(["arg1", "arg2", "arg3"])
self.assertTrue(validate_mock.called)
self.assertTrue(execute_from_commandline_mock.called)
| colab/colab | colab/management/tests/test_celery_command.py | Python | gpl-2.0 | 573 |
# © 2018 Savoir-faire Linux
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, fields, models, _
class Project(models.Model):
_name = 'project.project'
_inherit = ['project.project', 'mail.thread']
code = fields.Char(
string='Number',
track_visibility='onchange',
)
responsible_id = fields.Many2one(
'res.partner',
string='Event Responsible',
track_visibility='onchange',
)
partner_id = fields.Many2one(
'res.partner',
string='Client',
track_visibility='onchange',
)
client_type = fields.Many2one(
'res.partner.category.type',
string='Client Type',
track_visibility='onchange',
)
sector_id = fields.Many2one(
'res.partner.sector',
string='Faculty Sector',
track_visibility='onchange',
)
project_type = fields.Selection(
[
('event', 'Event'),
('project', 'Project'),
],
string='Type',
default='project',
)
description = fields.Html(
string='Description'
)
notes = fields.Html(
string='Notes',
track_visibility='onchange',
)
state = fields.Selection(
[
('draft', 'Draft'),
('option', 'Option'),
('approved', 'Approved'),
('postponed', 'Postponed'),
('canceled', 'Canceled')
],
string='State',
default='draft',
track_visibility='onchange',
)
event_log_count = fields.Integer(
string='Event Logs',
compute='_compute_event_log_count',
)
@api.onchange('partner_id')
def _onchange_partner_id(self):
if self.partner_id:
self.client_type = self.partner_id.tag_id.client_type
def _compute_event_log_count(self):
for rec in self:
rec.event_log_count = self.env['auditlog.log'].search_count([
('model_id', '=', self.env.ref(
'project.model_project_project').id),
('res_id', '=', rec.id)
])
    @api.multi
    def write(self, vals):
        res = super(Project, self).write(vals)
        if self.project_type == 'event':
            self.write_activity(vals)
        return res
@api.multi
def write_activity(self, vals):
activity_vals = {}
if 'responsible_id' in vals:
activity_vals['responsible_id'] = vals['responsible_id']
if 'partner_id' in vals:
activity_vals['partner_id'] = vals['partner_id']
if 'client_type' in vals:
activity_vals['client_type'] = vals['client_type']
if 'sector_id' in vals:
activity_vals['sector_id'] = vals['sector_id']
for activity in self.task_ids:
if activity_vals:
activity.write(activity_vals)
@api.multi
def action_cancel(self):
if self.state == 'approved':
self.send_message('canceled')
for activity in self.task_ids:
activity.action_cancel()
self.write({'state': 'canceled'})
@api.multi
def action_accept(self):
return self.get_confirmation_wizard('accept')
@api.multi
def action_option(self):
return self.get_confirmation_wizard('option')
@api.multi
def action_postpone(self):
if self.state == 'approved':
self.send_message('postponed')
for activity in self.task_ids:
activity.action_postpone()
self.write({'state': 'postponed'})
@api.model
def create(self, vals):
if 'project_type' in vals:
if vals['project_type'] == 'event':
vals['code'] = self.env['ir.sequence'] \
.next_by_code('project.project')
return super(Project, self).create(vals)
@api.model
def name_search(self, name='', args=None, operator='ilike', limit=100):
args = args or []
domain = []
if name:
domain = ['|', ('name', operator, name),
('code', operator, name)]
return super(Project, self).search(
domain + args, limit=limit).name_get()
@api.multi
def confirm_accept_reservation(self):
for activity in self.task_ids:
if activity.task_state in [
'draft', 'option', 'postponed', 'canceled']:
for child in activity.child_ids:
self.child_reservation(child)
activity.send_message('requested')
activity.open_resources_reservation()
activity.write({'task_state': 'approved'})
if self.state in ['draft', 'option', 'postponed', 'canceled']:
self.send_message('approved')
self.write({'state': 'approved'})
@staticmethod
def child_reservation(child):
child.draft_resources_reservation()
if child.task_state in ['draft', 'option', 'postponed',
'canceled']:
child.send_message('requested')
child.open_resources_reservation()
child.write({'task_state': 'requested'})
@staticmethod
def get_message_body(action):
        switcher = {
            'draft': ' ',
            'option': _('The following is optional and appears '
                        'as crosshatched on your calendar'),
            'approved': _('The following is approved'),
            'postponed': _('The following is postponed '
                           'and no longer appears on your calendars'),
            'canceled': _('The following is canceled '
                          'and no longer on your calendars')
        }
return switcher.get(action)
def get_message(self, action):
mail_channel = 'project.mail_channel_project_task_event'
message = _('<br>Event: <br>') + self.name + '<br>'
for activity in self.task_ids:
message += _('Activity: ') + activity.name + _('<br> Tasks: ')
for index_task, task in enumerate(activity.child_ids):
message += task.name
if index_task < len(activity.child_ids) - 1:
message += ', '
else:
message += '<br>'
return {
'body': self.get_message_body(action) + message,
'channel_ids': [(6, 0, [self.env.ref
(mail_channel).id])],
'email_from': 'Administrator <[email protected]>',
'message_type': 'notification',
'model': 'project.project',
'partner_ids': [(6, 0, [self.responsible_id.id])],
'record_name': self.name,
'reply_to': 'Administrator <[email protected]>',
'res_id': self.id,
'subject': self.code
}
def send_message(self, action):
self.env['mail.message'].create(self.get_message(action))
def get_confirmation_wizard(self, action):
res = ''
for activity in self.task_ids:
res += activity.get_booked_resources()
if res != '':
res = _('The following resources are already booked:<br>') + res
message = _('Please confirm your reservation.<br>') + res + _(
'Do you want to continue?')
new_wizard = self.env['reservation.validation.wiz'].create(
{
'event_id': self.id,
'message': message,
'action': action,
}
)
return {
'name': 'Confirm reservation',
'type': 'ir.actions.act_window',
'view_type': 'form',
'view_mode': 'form',
'res_model': 'reservation.validation.wiz',
'target': 'new',
'res_id': new_wizard.id,
}
@api.multi
def map_tasks(self, new_project_id):
""" copy and map tasks from old to new project """
tasks = self.env['project.task']
task_ids = self.env['project.task'].with_context(
active_test=False).search([('project_id', '=', self.id)]).ids
for task in self.env['project.task'].browse(task_ids):
defaults = {
'stage_id': task.stage_id.id,
'name': _("%s (copy)") % task.name,
'project_id': new_project_id}
tasks += task.copy(defaults)
return self.browse(new_project_id)
@api.multi
def _message_track(self, tracked_fields, initial):
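        # Reorder the tracked values so event fields appear in a fixed order.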
mail_track = super()._message_track(tracked_fields, initial)
changes = mail_track[0]
tracking_value_ids = mail_track[1]
order_fields = self.order_event_fields(tracking_value_ids)
return changes, order_fields
@staticmethod
def order_event_fields(tracking_values):
event_fields_list = [
'state',
'name',
'code',
'responsible_id',
'partner_id',
'notes',
'user_id'
]
        event_tracking_values = []
        for event in tracking_values:
            if event[2].get('field') in event_fields_list:
                event_tracking_values.append(event)
        return event_tracking_values
| savoirfairelinux/project-addons | project_event/models/project_project.py | Python | lgpl-3.0 | 9,412 |
# -*- coding: utf-8 -*-
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Integration tests for the acl command."""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import re
from gslib.command import CreateOrGetGsutilLogger
from gslib.cs_api_map import ApiSelector
from gslib.storage_url import StorageUrlFromString
import gslib.tests.testcase as testcase
from gslib.tests.testcase.integration_testcase import SkipForGS
from gslib.tests.testcase.integration_testcase import SkipForS3
from gslib.tests.util import GenerationFromURI as urigen
from gslib.tests.util import ObjectToURI as suri
from gslib.tests.util import SetBotoConfigForTest
from gslib.tests.util import unittest
from gslib.utils import acl_helper
from gslib.utils.constants import UTF8
from gslib.utils.retry_util import Retry
from gslib.utils.translation_helper import AclTranslation
PUBLIC_READ_JSON_ACL_TEXT = '"entity":"allUsers","role":"READER"'
class TestAclBase(testcase.GsUtilIntegrationTestCase):
"""Integration test case base class for acl command."""
_set_acl_prefix = ['acl', 'set']
_get_acl_prefix = ['acl', 'get']
_set_defacl_prefix = ['defacl', 'set']
_ch_acl_prefix = ['acl', 'ch']
_project_team = 'viewers'
@SkipForS3('Tests use GS ACL model.')
class TestAcl(TestAclBase):
"""Integration tests for acl command."""
def setUp(self):
super(TestAcl, self).setUp()
self.sample_uri = self.CreateBucket()
self.sample_url = StorageUrlFromString(str(self.sample_uri))
self.logger = CreateOrGetGsutilLogger('acl')
# Argument to acl ch -p must be the project number, not a name; create a
# bucket to perform translation.
self._project_number = self.json_api.GetBucket(
self.CreateBucket().bucket_name, fields=['projectNumber']).projectNumber
self._project_test_acl = '%s-%s' % (self._project_team,
self._project_number)
def test_set_invalid_acl_object(self):
"""Ensures that invalid content returns a bad request error."""
obj_uri = suri(self.CreateObject(contents=b'foo'))
inpath = self.CreateTempFile(contents=b'badAcl')
stderr = self.RunGsUtil(self._set_acl_prefix + [inpath, obj_uri],
return_stderr=True,
expected_status=1)
self.assertIn('ArgumentException', stderr)
def test_set_invalid_acl_bucket(self):
"""Ensures that invalid content returns a bad request error."""
bucket_uri = suri(self.CreateBucket())
inpath = self.CreateTempFile(contents=b'badAcl')
stderr = self.RunGsUtil(self._set_acl_prefix + [inpath, bucket_uri],
return_stderr=True,
expected_status=1)
self.assertIn('ArgumentException', stderr)
def test_set_xml_acl_json_api_object(self):
"""Ensures XML content returns a bad request error and migration warning."""
obj_uri = suri(self.CreateObject(contents=b'foo'))
inpath = self.CreateTempFile(contents=b'<ValidXml></ValidXml>')
stderr = self.RunGsUtil(self._set_acl_prefix + [inpath, obj_uri],
return_stderr=True,
expected_status=1)
self.assertIn('ArgumentException', stderr)
self.assertIn('XML ACL data provided', stderr)
def test_set_xml_acl_json_api_bucket(self):
"""Ensures XML content returns a bad request error and migration warning."""
bucket_uri = suri(self.CreateBucket())
inpath = self.CreateTempFile(contents=b'<ValidXml></ValidXml>')
stderr = self.RunGsUtil(self._set_acl_prefix + [inpath, bucket_uri],
return_stderr=True,
expected_status=1)
self.assertIn('ArgumentException', stderr)
self.assertIn('XML ACL data provided', stderr)
def test_set_valid_acl_object(self):
"""Tests setting a valid ACL on an object."""
obj_uri = suri(self.CreateObject(contents=b'foo'))
acl_string = self.RunGsUtil(self._get_acl_prefix + [obj_uri],
return_stdout=True)
inpath = self.CreateTempFile(contents=acl_string.encode(UTF8))
self.RunGsUtil(self._set_acl_prefix + ['public-read', obj_uri])
acl_string2 = self.RunGsUtil(self._get_acl_prefix + [obj_uri],
return_stdout=True)
self.RunGsUtil(self._set_acl_prefix + [inpath, obj_uri])
acl_string3 = self.RunGsUtil(self._get_acl_prefix + [obj_uri],
return_stdout=True)
self.assertNotEqual(acl_string, acl_string2)
self.assertEqual(acl_string, acl_string3)
def test_set_valid_permission_whitespace_object(self):
"""Ensures that whitespace is allowed in role and entity elements."""
obj_uri = suri(self.CreateObject(contents=b'foo'))
acl_string = self.RunGsUtil(self._get_acl_prefix + [obj_uri],
return_stdout=True)
acl_string = re.sub(r'"role"', r'"role" \n', acl_string)
acl_string = re.sub(r'"entity"', r'\n "entity"', acl_string)
inpath = self.CreateTempFile(contents=acl_string.encode(UTF8))
self.RunGsUtil(self._set_acl_prefix + [inpath, obj_uri])
def test_set_valid_acl_bucket(self):
"""Ensures that valid canned and XML ACLs work with get/set."""
if self._ServiceAccountCredentialsPresent():
# See comments in _ServiceAccountCredentialsPresent
return unittest.skip('Canned ACLs orphan service account permissions.')
bucket_uri = suri(self.CreateBucket())
acl_string = self.RunGsUtil(self._get_acl_prefix + [bucket_uri],
return_stdout=True)
inpath = self.CreateTempFile(contents=acl_string.encode(UTF8))
self.RunGsUtil(self._set_acl_prefix + ['public-read', bucket_uri])
acl_string2 = self.RunGsUtil(self._get_acl_prefix + [bucket_uri],
return_stdout=True)
self.RunGsUtil(self._set_acl_prefix + [inpath, bucket_uri])
acl_string3 = self.RunGsUtil(self._get_acl_prefix + [bucket_uri],
return_stdout=True)
self.assertNotEqual(acl_string, acl_string2)
self.assertEqual(acl_string, acl_string3)
def test_invalid_canned_acl_object(self):
"""Ensures that an invalid canned ACL returns a CommandException."""
obj_uri = suri(self.CreateObject(contents=b'foo'))
stderr = self.RunGsUtil(self._set_acl_prefix +
['not-a-canned-acl', obj_uri],
return_stderr=True,
expected_status=1)
self.assertIn('CommandException', stderr)
self.assertIn('Invalid canned ACL', stderr)
def test_set_valid_def_acl_bucket(self):
"""Ensures that valid default canned and XML ACLs works with get/set."""
bucket_uri = self.CreateBucket()
# Default ACL is project private.
obj_uri1 = suri(self.CreateObject(bucket_uri=bucket_uri, contents=b'foo'))
acl_string = self.RunGsUtil(self._get_acl_prefix + [obj_uri1],
return_stdout=True)
# Change it to authenticated-read.
self.RunGsUtil(
self._set_defacl_prefix +
['authenticated-read', suri(bucket_uri)])
# Default object ACL may take some time to propagate.
@Retry(AssertionError, tries=5, timeout_secs=1)
def _Check1():
obj_uri2 = suri(self.CreateObject(bucket_uri=bucket_uri,
contents=b'foo2'))
acl_string2 = self.RunGsUtil(self._get_acl_prefix + [obj_uri2],
return_stdout=True)
self.assertNotEqual(acl_string, acl_string2)
self.assertIn('allAuthenticatedUsers', acl_string2)
_Check1()
# Now change it back to the default via XML.
inpath = self.CreateTempFile(contents=acl_string.encode(UTF8))
self.RunGsUtil(self._set_defacl_prefix + [inpath, suri(bucket_uri)])
# Default object ACL may take some time to propagate.
@Retry(AssertionError, tries=5, timeout_secs=1)
def _Check2():
obj_uri3 = suri(self.CreateObject(bucket_uri=bucket_uri,
contents=b'foo3'))
acl_string3 = self.RunGsUtil(self._get_acl_prefix + [obj_uri3],
return_stdout=True)
self.assertEqual(acl_string, acl_string3)
_Check2()
def test_acl_set_version_specific_uri(self):
"""Tests setting an ACL on a specific version of an object."""
bucket_uri = self.CreateVersionedBucket()
# Create initial object version.
uri = self.CreateObject(bucket_uri=bucket_uri, contents=b'data')
# Create a second object version.
inpath = self.CreateTempFile(contents=b'def')
self.RunGsUtil(['cp', inpath, uri.uri])
# Find out the two object version IDs.
lines = self.AssertNObjectsInBucket(bucket_uri, 2, versioned=True)
v0_uri_str, v1_uri_str = lines[0], lines[1]
# Check that neither version currently has public-read permission
# (default ACL is project-private).
orig_acls = []
for uri_str in (v0_uri_str, v1_uri_str):
acl = self.RunGsUtil(self._get_acl_prefix + [uri_str], return_stdout=True)
self.assertNotIn(PUBLIC_READ_JSON_ACL_TEXT,
self._strip_json_whitespace(acl))
orig_acls.append(acl)
# Set the ACL for the older version of the object to public-read.
self.RunGsUtil(self._set_acl_prefix + ['public-read', v0_uri_str])
# Check that the older version's ACL is public-read, but newer version
# is not.
acl = self.RunGsUtil(self._get_acl_prefix + [v0_uri_str],
return_stdout=True)
self.assertIn(PUBLIC_READ_JSON_ACL_TEXT, self._strip_json_whitespace(acl))
acl = self.RunGsUtil(self._get_acl_prefix + [v1_uri_str],
return_stdout=True)
self.assertNotIn(PUBLIC_READ_JSON_ACL_TEXT,
self._strip_json_whitespace(acl))
# Check that reading the ACL with the version-less URI returns the
# original ACL (since the version-less URI means the current version).
acl = self.RunGsUtil(self._get_acl_prefix + [uri.uri], return_stdout=True)
self.assertEqual(acl, orig_acls[0])
def _strip_json_whitespace(self, json_text):
return re.sub(r'\s*', '', json_text)
def testAclChangeWithUserId(self):
change = acl_helper.AclChange(self.USER_TEST_ID + ':r',
scope_type=acl_helper.ChangeType.USER)
acl = list(AclTranslation.BotoBucketAclToMessage(self.sample_uri.get_acl()))
change.Execute(self.sample_url, acl, 'acl', self.logger)
self._AssertHas(acl, 'READER', 'UserById', self.USER_TEST_ID)
def testAclChangeWithGroupId(self):
change = acl_helper.AclChange(self.GROUP_TEST_ID + ':r',
scope_type=acl_helper.ChangeType.GROUP)
acl = list(AclTranslation.BotoBucketAclToMessage(self.sample_uri.get_acl()))
change.Execute(self.sample_url, acl, 'acl', self.logger)
self._AssertHas(acl, 'READER', 'GroupById', self.GROUP_TEST_ID)
def testAclChangeWithUserEmail(self):
change = acl_helper.AclChange(self.USER_TEST_ADDRESS + ':r',
scope_type=acl_helper.ChangeType.USER)
acl = list(AclTranslation.BotoBucketAclToMessage(self.sample_uri.get_acl()))
change.Execute(self.sample_url, acl, 'acl', self.logger)
self._AssertHas(acl, 'READER', 'UserByEmail', self.USER_TEST_ADDRESS)
def testAclChangeWithGroupEmail(self):
change = acl_helper.AclChange(self.GROUP_TEST_ADDRESS + ':fc',
scope_type=acl_helper.ChangeType.GROUP)
acl = list(AclTranslation.BotoBucketAclToMessage(self.sample_uri.get_acl()))
change.Execute(self.sample_url, acl, 'acl', self.logger)
self._AssertHas(acl, 'OWNER', 'GroupByEmail', self.GROUP_TEST_ADDRESS)
def testAclChangeWithDomain(self):
change = acl_helper.AclChange(self.DOMAIN_TEST + ':READ',
scope_type=acl_helper.ChangeType.GROUP)
acl = list(AclTranslation.BotoBucketAclToMessage(self.sample_uri.get_acl()))
change.Execute(self.sample_url, acl, 'acl', self.logger)
self._AssertHas(acl, 'READER', 'GroupByDomain', self.DOMAIN_TEST)
def testAclChangeWithProjectOwners(self):
change = acl_helper.AclChange(self._project_test_acl + ':READ',
scope_type=acl_helper.ChangeType.PROJECT)
acl = list(AclTranslation.BotoBucketAclToMessage(self.sample_uri.get_acl()))
change.Execute(self.sample_url, acl, 'acl', self.logger)
self._AssertHas(acl, 'READER', 'Project', self._project_test_acl)
def testAclChangeWithAllUsers(self):
change = acl_helper.AclChange('AllUsers:WRITE',
scope_type=acl_helper.ChangeType.GROUP)
acl = list(AclTranslation.BotoBucketAclToMessage(self.sample_uri.get_acl()))
change.Execute(self.sample_url, acl, 'acl', self.logger)
self._AssertHas(acl, 'WRITER', 'AllUsers')
def testAclChangeWithAllAuthUsers(self):
change = acl_helper.AclChange('AllAuthenticatedUsers:READ',
scope_type=acl_helper.ChangeType.GROUP)
acl = list(AclTranslation.BotoBucketAclToMessage(self.sample_uri.get_acl()))
change.Execute(self.sample_url, acl, 'acl', self.logger)
self._AssertHas(acl, 'READER', 'AllAuthenticatedUsers')
remove = acl_helper.AclDel('AllAuthenticatedUsers')
remove.Execute(self.sample_url, acl, 'acl', self.logger)
self._AssertHasNo(acl, 'READER', 'AllAuthenticatedUsers')
def testAclDelWithUser(self):
add = acl_helper.AclChange(self.USER_TEST_ADDRESS + ':READ',
scope_type=acl_helper.ChangeType.USER)
acl = list(AclTranslation.BotoBucketAclToMessage(self.sample_uri.get_acl()))
add.Execute(self.sample_url, acl, 'acl', self.logger)
self._AssertHas(acl, 'READER', 'UserByEmail', self.USER_TEST_ADDRESS)
remove = acl_helper.AclDel(self.USER_TEST_ADDRESS)
remove.Execute(self.sample_url, acl, 'acl', self.logger)
self._AssertHasNo(acl, 'READ', 'UserByEmail', self.USER_TEST_ADDRESS)
def testAclDelWithProjectOwners(self):
add = acl_helper.AclChange(self._project_test_acl + ':READ',
scope_type=acl_helper.ChangeType.PROJECT)
acl = list(AclTranslation.BotoBucketAclToMessage(self.sample_uri.get_acl()))
add.Execute(self.sample_url, acl, 'acl', self.logger)
self._AssertHas(acl, 'READER', 'Project', self._project_test_acl)
remove = acl_helper.AclDel(self._project_test_acl)
remove.Execute(self.sample_url, acl, 'acl', self.logger)
self._AssertHasNo(acl, 'READ', 'Project', self._project_test_acl)
def testAclDelWithGroup(self):
add = acl_helper.AclChange(self.USER_TEST_ADDRESS + ':READ',
scope_type=acl_helper.ChangeType.GROUP)
acl = list(AclTranslation.BotoBucketAclToMessage(self.sample_uri.get_acl()))
add.Execute(self.sample_url, acl, 'acl', self.logger)
self._AssertHas(acl, 'READER', 'GroupByEmail', self.USER_TEST_ADDRESS)
remove = acl_helper.AclDel(self.USER_TEST_ADDRESS)
remove.Execute(self.sample_url, acl, 'acl', self.logger)
self._AssertHasNo(acl, 'READER', 'GroupByEmail', self.GROUP_TEST_ADDRESS)
#
# Here are a whole lot of verbose asserts
#
def _AssertHas(self, current_acl, perm, scope, value=None):
matches = list(
self._YieldMatchingEntriesJson(current_acl, perm, scope, value))
self.assertEqual(1, len(matches))
def _AssertHasNo(self, current_acl, perm, scope, value=None):
matches = list(
self._YieldMatchingEntriesJson(current_acl, perm, scope, value))
self.assertEqual(0, len(matches))
def _YieldMatchingEntriesJson(self, current_acl, perm, scope, value=None):
"""Generator that yields entries that match the change descriptor.
Args:
current_acl: A list of apitools_messages.BucketAccessControls or
ObjectAccessControls which will be searched for matching
entries.
perm: Role (permission) to match.
scope: Scope type to match.
value: Value to match (against the scope type).
Yields:
An apitools_messages.BucketAccessControl or ObjectAccessControl.
"""
for entry in current_acl:
if (scope in ['UserById', 'GroupById'] and entry.entityId and
value == entry.entityId and entry.role == perm):
yield entry
elif (scope in ['UserByEmail', 'GroupByEmail'] and entry.email and
value == entry.email and entry.role == perm):
yield entry
elif (scope == 'GroupByDomain' and entry.domain and
value == entry.domain and entry.role == perm):
yield entry
elif (scope == 'Project' and entry.role == perm and
value == entry.entityId):
yield entry
elif (scope in ['AllUsers', 'AllAuthenticatedUsers'] and
entry.entity.lower() == scope.lower() and entry.role == perm):
yield entry
def _MakeScopeRegex(self, role, entity_type, email_address):
template_regex = (r'\{.*"entity":\s*"%s-%s".*"role":\s*"%s".*\}' %
(entity_type, email_address, role))
return re.compile(template_regex, flags=re.DOTALL)
def _MakeProjectScopeRegex(self, role, project_team, project_number):
template_regex = (
r'\{.*"entity":\s*"project-%s-%s",\s*"projectTeam":\s*\{\s*"'
r'projectNumber":\s*"%s",\s*"team":\s*"%s"\s*\},\s*"role":\s*"%s".*\}' %
(project_team, project_number, project_number, project_team, role))
return re.compile(template_regex, flags=re.DOTALL)
def testBucketAclChange(self):
"""Tests acl change on a bucket."""
test_regex = self._MakeScopeRegex('OWNER', 'user', self.USER_TEST_ADDRESS)
json_text = self.RunGsUtil(self._get_acl_prefix + [suri(self.sample_uri)],
return_stdout=True)
self.assertNotRegex(json_text, test_regex)
self.RunGsUtil(
self._ch_acl_prefix +
['-u', self.USER_TEST_ADDRESS +
':fc', suri(self.sample_uri)])
json_text = self.RunGsUtil(self._get_acl_prefix + [suri(self.sample_uri)],
return_stdout=True)
self.assertRegex(json_text, test_regex)
test_regex2 = self._MakeScopeRegex('WRITER', 'user', self.USER_TEST_ADDRESS)
self.RunGsUtil(self._ch_acl_prefix +
['-u', self.USER_TEST_ADDRESS + ':w',
suri(self.sample_uri)])
json_text2 = self.RunGsUtil(self._get_acl_prefix + [suri(self.sample_uri)],
return_stdout=True)
self.assertRegex(json_text2, test_regex2)
self.RunGsUtil(self._ch_acl_prefix +
['-d', self.USER_TEST_ADDRESS,
suri(self.sample_uri)])
json_text3 = self.RunGsUtil(self._get_acl_prefix + [suri(self.sample_uri)],
return_stdout=True)
self.assertNotRegex(json_text3, test_regex)
def testProjectAclChangesOnBucket(self):
"""Tests project entity acl changes on a bucket."""
if self.test_api == ApiSelector.XML:
stderr = self.RunGsUtil(
self._ch_acl_prefix +
['-p', self._project_test_acl + ':w',
suri(self.sample_uri)],
expected_status=1,
return_stderr=True)
self.assertIn(('CommandException: XML API does not support project'
' scopes, cannot translate ACL.'), stderr)
else:
test_regex = self._MakeProjectScopeRegex('WRITER', self._project_team,
self._project_number)
self.RunGsUtil(
self._ch_acl_prefix +
['-p', self._project_test_acl +
':w', suri(self.sample_uri)])
json_text = self.RunGsUtil(self._get_acl_prefix + [suri(self.sample_uri)],
return_stdout=True)
self.assertRegex(json_text, test_regex)
self.RunGsUtil(self._ch_acl_prefix +
['-d', self._project_test_acl,
suri(self.sample_uri)])
json_text2 = self.RunGsUtil(self._get_acl_prefix +
[suri(self.sample_uri)],
return_stdout=True)
self.assertNotRegex(json_text2, test_regex)
def testObjectAclChange(self):
"""Tests acl change on an object."""
obj = self.CreateObject(bucket_uri=self.sample_uri, contents=b'something')
self.AssertNObjectsInBucket(self.sample_uri, 1)
test_regex = self._MakeScopeRegex('READER', 'group',
self.GROUP_TEST_ADDRESS)
json_text = self.RunGsUtil(self._get_acl_prefix + [suri(obj)],
return_stdout=True)
self.assertNotRegex(json_text, test_regex)
self.RunGsUtil(self._ch_acl_prefix +
['-g', self.GROUP_TEST_ADDRESS +
':READ', suri(obj)])
json_text = self.RunGsUtil(self._get_acl_prefix + [suri(obj)],
return_stdout=True)
self.assertRegex(json_text, test_regex)
test_regex2 = self._MakeScopeRegex('OWNER', 'group',
self.GROUP_TEST_ADDRESS)
self.RunGsUtil(self._ch_acl_prefix +
['-g', self.GROUP_TEST_ADDRESS + ':OWNER',
suri(obj)])
json_text2 = self.RunGsUtil(self._get_acl_prefix + [suri(obj)],
return_stdout=True)
self.assertRegex(json_text2, test_regex2)
self.RunGsUtil(self._ch_acl_prefix +
['-d', self.GROUP_TEST_ADDRESS,
suri(obj)])
json_text3 = self.RunGsUtil(self._get_acl_prefix + [suri(obj)],
return_stdout=True)
self.assertNotRegex(json_text3, test_regex2)
all_auth_regex = re.compile(
r'\{.*"entity":\s*"allAuthenticatedUsers".*"role":\s*"OWNER".*\}',
flags=re.DOTALL)
self.RunGsUtil(self._ch_acl_prefix + ['-g', 'AllAuth:O', suri(obj)])
json_text4 = self.RunGsUtil(self._get_acl_prefix + [suri(obj)],
return_stdout=True)
self.assertRegex(json_text4, all_auth_regex)
def testObjectAclChangeAllUsers(self):
"""Tests acl ch AllUsers:R on an object."""
obj = self.CreateObject(bucket_uri=self.sample_uri, contents=b'something')
self.AssertNObjectsInBucket(self.sample_uri, 1)
all_users_regex = re.compile(
r'\{.*"entity":\s*"allUsers".*"role":\s*"READER".*\}', flags=re.DOTALL)
json_text = self.RunGsUtil(self._get_acl_prefix + [suri(obj)],
return_stdout=True)
self.assertNotRegex(json_text, all_users_regex)
self.RunGsUtil(self._ch_acl_prefix + ['-g', 'AllUsers:R', suri(obj)])
json_text = self.RunGsUtil(self._get_acl_prefix + [suri(obj)],
return_stdout=True)
self.assertRegex(json_text, all_users_regex)
def testSeekAheadAcl(self):
"""Tests seek-ahead iterator with ACL sub-commands."""
object_uri = self.CreateObject(contents=b'foo')
# Get the object's current ACL for application via set.
current_acl = self.RunGsUtil(['acl', 'get', suri(object_uri)],
return_stdout=True)
current_acl_file = self.CreateTempFile(contents=current_acl.encode(UTF8))
with SetBotoConfigForTest([('GSUtil', 'task_estimation_threshold', '1'),
('GSUtil', 'task_estimation_force', 'True')]):
stderr = self.RunGsUtil(
['-m', 'acl', 'ch', '-u', 'AllUsers:R',
suri(object_uri)],
return_stderr=True)
self.assertIn('Estimated work for this command: objects: 1\n', stderr)
stderr = self.RunGsUtil(
['-m', 'acl', 'set', current_acl_file,
suri(object_uri)],
return_stderr=True)
self.assertIn('Estimated work for this command: objects: 1\n', stderr)
with SetBotoConfigForTest([('GSUtil', 'task_estimation_threshold', '0'),
('GSUtil', 'task_estimation_force', 'True')]):
stderr = self.RunGsUtil(
['-m', 'acl', 'ch', '-u', 'AllUsers:R',
suri(object_uri)],
return_stderr=True)
self.assertNotIn('Estimated work', stderr)
def testMultithreadedAclChange(self, count=10):
"""Tests multi-threaded acl changing on several objects."""
objects = []
for i in range(count):
objects.append(
self.CreateObject(bucket_uri=self.sample_uri,
contents='something {0}'.format(i).encode('ascii')))
self.AssertNObjectsInBucket(self.sample_uri, count)
test_regex = self._MakeScopeRegex('READER', 'group',
self.GROUP_TEST_ADDRESS)
json_texts = []
for obj in objects:
json_texts.append(
self.RunGsUtil(self._get_acl_prefix + [suri(obj)],
return_stdout=True))
for json_text in json_texts:
self.assertNotRegex(json_text, test_regex)
uris = [suri(obj) for obj in objects]
self.RunGsUtil(['-m', '-DD'] + self._ch_acl_prefix +
['-g', self.GROUP_TEST_ADDRESS + ':READ'] + uris)
json_texts = []
for obj in objects:
json_texts.append(
self.RunGsUtil(self._get_acl_prefix + [suri(obj)],
return_stdout=True))
for json_text in json_texts:
self.assertRegex(json_text, test_regex)
def testRecursiveChangeAcl(self):
"""Tests recursively changing ACLs on nested objects."""
obj = self.CreateObject(bucket_uri=self.sample_uri,
object_name='foo/bar',
contents=b'something')
self.AssertNObjectsInBucket(self.sample_uri, 1)
test_regex = self._MakeScopeRegex('READER', 'group',
self.GROUP_TEST_ADDRESS)
json_text = self.RunGsUtil(self._get_acl_prefix + [suri(obj)],
return_stdout=True)
self.assertNotRegex(json_text, test_regex)
@Retry(AssertionError, tries=5, timeout_secs=1)
def _AddAcl():
self.RunGsUtil(
self._ch_acl_prefix +
['-R', '-g', self.GROUP_TEST_ADDRESS + ':READ',
suri(obj)[:-3]])
json_text = self.RunGsUtil(self._get_acl_prefix + [suri(obj)],
return_stdout=True)
self.assertRegex(json_text, test_regex)
_AddAcl()
@Retry(AssertionError, tries=5, timeout_secs=1)
def _DeleteAcl():
# Make sure we treat grant addresses case insensitively.
delete_grant = self.GROUP_TEST_ADDRESS.upper()
self.RunGsUtil(self._ch_acl_prefix + ['-d', delete_grant, suri(obj)])
json_text = self.RunGsUtil(self._get_acl_prefix + [suri(obj)],
return_stdout=True)
self.assertNotRegex(json_text, test_regex)
_DeleteAcl()
def testMultiVersionSupport(self):
"""Tests changing ACLs on multiple object versions."""
bucket = self.CreateVersionedBucket()
object_name = self.MakeTempName('obj')
obj1_uri = self.CreateObject(bucket_uri=bucket,
object_name=object_name,
contents=b'One thing')
# Create another on the same URI, giving us a second version.
self.CreateObject(bucket_uri=bucket,
object_name=object_name,
contents=b'Another thing',
gs_idempotent_generation=urigen(obj1_uri))
lines = self.AssertNObjectsInBucket(bucket, 2, versioned=True)
obj_v1, obj_v2 = lines[0], lines[1]
test_regex = self._MakeScopeRegex('READER', 'group',
self.GROUP_TEST_ADDRESS)
json_text = self.RunGsUtil(self._get_acl_prefix + [obj_v1],
return_stdout=True)
self.assertNotRegex(json_text, test_regex)
self.RunGsUtil(self._ch_acl_prefix +
['-g', self.GROUP_TEST_ADDRESS + ':READ', obj_v1])
json_text = self.RunGsUtil(self._get_acl_prefix + [obj_v1],
return_stdout=True)
self.assertRegex(json_text, test_regex)
json_text = self.RunGsUtil(self._get_acl_prefix + [obj_v2],
return_stdout=True)
self.assertNotRegex(json_text, test_regex)
def testBadRequestAclChange(self):
stdout, stderr = self.RunGsUtil(
self._ch_acl_prefix +
['-u', '[email protected]:R',
suri(self.sample_uri)],
return_stdout=True,
return_stderr=True,
expected_status=1)
self.assertIn('BadRequestException', stderr)
self.assertNotIn('Retrying', stdout)
self.assertNotIn('Retrying', stderr)
def testAclGetWithoutFullControl(self):
object_uri = self.CreateObject(contents=b'foo')
expected_error_regex = r'Anonymous \S+ do(es)? not have'
with self.SetAnonymousBotoCreds():
stderr = self.RunGsUtil(self._get_acl_prefix + [suri(object_uri)],
return_stderr=True,
expected_status=1)
self.assertRegex(stderr, expected_error_regex)
def testTooFewArgumentsFails(self):
"""Tests calling ACL commands with insufficient number of arguments."""
# No arguments for get, but valid subcommand.
stderr = self.RunGsUtil(self._get_acl_prefix,
return_stderr=True,
expected_status=1)
self.assertIn('command requires at least', stderr)
# No arguments for set, but valid subcommand.
stderr = self.RunGsUtil(self._set_acl_prefix,
return_stderr=True,
expected_status=1)
self.assertIn('command requires at least', stderr)
# No arguments for ch, but valid subcommand.
stderr = self.RunGsUtil(self._ch_acl_prefix,
return_stderr=True,
expected_status=1)
self.assertIn('command requires at least', stderr)
# Neither arguments nor subcommand.
stderr = self.RunGsUtil(['acl'], return_stderr=True, expected_status=1)
self.assertIn('command requires at least', stderr)
def testMinusF(self):
"""Tests -f option to continue after failure."""
bucket_uri = self.CreateBucket()
obj_uri = suri(
self.CreateObject(bucket_uri=bucket_uri,
object_name='foo',
contents=b'foo'))
acl_string = self.RunGsUtil(self._get_acl_prefix + [obj_uri],
return_stdout=True)
self.RunGsUtil(self._set_acl_prefix +
['-f', 'public-read',
suri(bucket_uri) + 'foo2', obj_uri],
expected_status=1)
acl_string2 = self.RunGsUtil(self._get_acl_prefix + [obj_uri],
return_stdout=True)
self.assertNotEqual(acl_string, acl_string2)
class TestS3CompatibleAcl(TestAclBase):
"""ACL integration tests that work for s3 and gs URLs."""
def testAclObjectGetSet(self):
bucket_uri = self.CreateBucket()
obj_uri = self.CreateObject(bucket_uri=bucket_uri, contents=b'foo')
self.AssertNObjectsInBucket(bucket_uri, 1)
stdout = self.RunGsUtil(self._get_acl_prefix + [suri(obj_uri)],
return_stdout=True)
set_contents = self.CreateTempFile(contents=stdout.encode(UTF8))
self.RunGsUtil(self._set_acl_prefix + [set_contents, suri(obj_uri)])
def testAclBucketGetSet(self):
bucket_uri = self.CreateBucket()
stdout = self.RunGsUtil(self._get_acl_prefix + [suri(bucket_uri)],
return_stdout=True)
set_contents = self.CreateTempFile(contents=stdout.encode(UTF8))
self.RunGsUtil(self._set_acl_prefix + [set_contents, suri(bucket_uri)])
@SkipForGS('S3 ACLs accept XML and should not cause an XML warning.')
class TestS3OnlyAcl(TestAclBase):
"""ACL integration tests that work only for s3 URLs."""
# TODO: Format all test case names consistently.
def test_set_xml_acl(self):
"""Ensures XML content does not return an XML warning for S3."""
obj_uri = suri(self.CreateObject(contents=b'foo'))
inpath = self.CreateTempFile(contents=b'<ValidXml></ValidXml>')
stderr = self.RunGsUtil(self._set_acl_prefix + [inpath, obj_uri],
return_stderr=True,
expected_status=1)
self.assertIn('BadRequestException', stderr)
self.assertNotIn('XML ACL data provided', stderr)
def test_set_xml_acl_bucket(self):
"""Ensures XML content does not return an XML warning for S3."""
bucket_uri = suri(self.CreateBucket())
inpath = self.CreateTempFile(contents=b'<ValidXml></ValidXml>')
stderr = self.RunGsUtil(self._set_acl_prefix + [inpath, bucket_uri],
return_stderr=True,
expected_status=1)
self.assertIn('BadRequestException', stderr)
self.assertNotIn('XML ACL data provided', stderr)
class TestAclOldAlias(TestAcl):
_set_acl_prefix = ['setacl']
_get_acl_prefix = ['getacl']
_set_defacl_prefix = ['setdefacl']
_ch_acl_prefix = ['chacl']
| catapult-project/catapult | third_party/gsutil/gslib/tests/test_acl.py | Python | bsd-3-clause | 33,748 |
import numpy as np
from bokeh.layouts import layout
from bokeh.models import CustomJS, Slider, ColumnDataSource, WidgetBox
from bokeh.plotting import figure, output_file, show
output_file('dashboard.html')
tools = 'pan'
def bollinger():
# Define Bollinger Bands.
    upperband = np.random.randint(100, 151, size=100)  # random_integers is deprecated; randint's high bound is exclusive
lowerband = upperband - 100
x_data = np.arange(1, 101)
# Bollinger shading glyph:
band_x = np.append(x_data, x_data[::-1])
band_y = np.append(lowerband, upperband[::-1])
p = figure(x_axis_type='datetime', tools=tools)
p.patch(band_x, band_y, color='#7570B3', fill_alpha=0.2)
p.title.text = 'Bollinger Bands'
p.title_location = 'left'
p.title.align = 'left'
p.plot_height = 600
p.plot_width = 800
p.grid.grid_line_alpha = 0.4
return [p]
def slider():
x = np.linspace(0, 10, 100)
y = np.sin(x)
source = ColumnDataSource(data=dict(x=x, y=y))
plot = figure(
y_range=(-10, 10), tools='', toolbar_location=None,
title="Sliders example")
plot.line('x', 'y', source=source, line_width=3, line_alpha=0.6)
callback = CustomJS(args=dict(source=source), code="""
var data = source.data;
var A = amp.value;
var k = freq.value;
var phi = phase.value;
var B = offset.value;
x = data['x']
y = data['y']
        for (var i = 0; i < x.length; i++) {
y[i] = B + A*Math.sin(k*x[i]+phi);
}
source.change.emit();
""")
amp_slider = Slider(start=0.1, end=10, value=1, step=.1, title="Amplitude", callback=callback, callback_policy='mouseup')
callback.args["amp"] = amp_slider
freq_slider = Slider(start=0.1, end=10, value=1, step=.1, title="Frequency", callback=callback)
callback.args["freq"] = freq_slider
phase_slider = Slider(start=0, end=6.4, value=0, step=.1, title="Phase", callback=callback)
callback.args["phase"] = phase_slider
offset_slider = Slider(start=-5, end=5, value=0, step=.1, title="Offset", callback=callback)
callback.args["offset"] = offset_slider
widgets = WidgetBox(amp_slider, freq_slider, phase_slider, offset_slider)
return [widgets, plot]
def linked_panning():
N = 100
x = np.linspace(0, 4 * np.pi, N)
y1 = np.sin(x)
y2 = np.cos(x)
y3 = np.sin(x) + np.cos(x)
s1 = figure(tools=tools)
s1.circle(x, y1, color="navy", size=8, alpha=0.5)
s2 = figure(tools=tools, x_range=s1.x_range, y_range=s1.y_range)
s2.circle(x, y2, color="firebrick", size=8, alpha=0.5)
s3 = figure(tools='pan, box_select', x_range=s1.x_range)
s3.circle(x, y3, color="olive", size=8, alpha=0.5)
return [s1, s2, s3]
l = layout([
bollinger(),
slider(),
linked_panning(),
], sizing_mode='stretch_both')
show(l)
| percyfal/bokeh | examples/howto/layouts/dashboard.py | Python | bsd-3-clause | 2,812 |
#!/usr/bin/env python
from __future__ import unicode_literals
import warnings
from django.core.management.base import BaseCommand, CommandError
from djangoseo.base import registry, populate_metadata
class Command(BaseCommand):
help = "Populate the database with metadata instances for all models " \
"listed in seo_models."
@staticmethod
def populate_all_metadata():
"""
Create metadata instances for all models in seo_models if empty.
Once you have created a single metadata instance, this will not run.
This is because it is a potentially slow operation that need only be
done once. If you want to ensure that everything is populated, run the
populate_metadata management command.
"""
for Metadata in registry.values():
InstanceMetadata = Metadata._meta.get_model('modelinstance')
if InstanceMetadata is not None:
for model in Metadata._meta.seo_models:
populate_metadata(model, InstanceMetadata)
def handle(self, *args, **options):
warnings.warn("This is deprecated command. It's not necessary yet and "
"potentially slow.", DeprecationWarning, stacklevel=2)
if len(args) > 0:
raise CommandError("This command currently takes no arguments")
self.populate_all_metadata()
| romansalin/django-seo | djangoseo/management/commands/populate_metadata.py | Python | mit | 1,392 |
import rospkg
from python_qt_binding.QtCore import Qt, QMetaType, QDataStream, QVariant, pyqtSignal, QLineF, QPointF
from python_qt_binding import loadUi
from rqt_gui_py.plugin import Plugin
from python_qt_binding.QtWidgets import QWidget, QTreeWidget, QTreeWidgetItem,QListWidgetItem, \
QSlider, QGroupBox, QVBoxLayout, QLabel, QLineEdit, QListWidget, QAbstractItemView, QFileDialog, QDoubleSpinBox, QMessageBox, \
QInputDialog, QShortcut
from python_qt_binding.QtGui import QDoubleValidator, QKeySequence, QPixmap, QPainter, QPen, QColor, QPolygonF, QTransform, QBrush
import os
import math
class BallPool:
rp = rospkg.RosPack()
def _getBallFile(self, colorStr):
"""
gets the path for the ball file
:param colorStr: color of the ball as string
:return:
"""
#return os.path.join(BallPool.rp.get_path('bitbots_live_tool_rqt'), 'resource', 'ui_images', "ball-" + colorStr + "_big66x66.png")
return os.path.join(BallPool.rp.get_path('bitbots_live_tool_rqt'), 'resource', 'ui_images', "new_icons", "ball_" + colorStr + ".png")
def __init__(self, frame, size=64):
"""
:param frame: the frame of the ball
:param size: size of the ball
"""
self.pool = {"blue": [], "green": [], "red": [], "yellow": [], "cyan": [], "magenta": []}
self.frame = frame
self.size = size
def getBallLabel(self, colorStr):
"""
sets the ball label
:param colorStr: the color fo the ball as string
:return:
"""
ls = self.pool.get(colorStr)
if ls == []:
pxmap = QPixmap(self._getBallFile(colorStr))
ball = QLabel(parent=self.frame)
ball.setPixmap(pxmap)
ball.setScaledContents(True)
ball.setFixedSize(self.size, self.size)
ball.show()
return ball
else:
ball = ls.pop()
ball.show()
return ball
def returnBallLabel(self, ball, colorStr):
"""
returns the finished ball label
:param ball: the ball
:param colorStr: color as string
:return:
"""
self.pool[colorStr].append(ball)
ball.hide()
class RobotWidget(QWidget):
def __init__(self, parent):
"""
:param parent: parent
"""
super(RobotWidget, self).__init__(parent)
self.angle = 0
self._pixmap = None
def setPixmap(self, pixmap):
"""
sets the pixmap
:param pixmap: the pixmap
:return:
"""
self._pixmap = pixmap
    def setScaledContents(self, scaled):
        # Intentional no-op: paintEvent always scales the pixmap to the widget.
        pass
def pixmap(self):
"""
returns pixmap
:return:
"""
return self._pixmap
def paintEvent(self, event):
painter = QPainter(self)
#painter.setPen(QtCore.Qt.black)
#painter.translate(self.x(), self.y())
painter.rotate(self.angle)
painter.drawPixmap(0, 0, self.width(), self.height(), self._pixmap)
painter.end()
class RobotPool:
rp = rospkg.RosPack()
def _getRobFile(self, colorStr):
"""
sets the path for the robot file
:param colorStr: color as string
:return:
"""
#return os.path.join(BallPool.rp.get_path('bitbots_live_tool_rqt'), 'resource', 'ui_images', "Rob_" + colorStr + "-big.png")
return os.path.join(BallPool.rp.get_path('bitbots_live_tool_rqt'), 'resource', 'ui_images', "new_icons", "player_" + colorStr + ".png")
def __init__(self, frame, size=46):
"""
:param frame: frame of robot
:param size: size of robot
"""
self.pool = {"blue": [], "green": [], "red": [], "yellow": []}
self.frame = frame
self.size = size
def getRobotLabel(self, colorStr):
"""
initiates the robot label
:param colorStr: color as string
:return:
"""
ls = self.pool.get(colorStr)
if ls == []:
pxmap = QPixmap(self._getRobFile(colorStr))
rob = QLabel(parent=self.frame)
rob.setPixmap(pxmap)
rob.originalPixmap = QPixmap(pxmap)
rob.setScaledContents(True)
rob.setFixedSize(self.size, self.size)
rob.show()
return rob
else:
rob = ls.pop()
rob.show()
return rob
def returnRobotLabel(self, rob, colorStr):
"""
returns the robot
:param rob: the robot
:param colorStr: color as string
:return:
"""
self.pool[colorStr].append(rob)
rob.hide()
class CrossPool:
rp = rospkg.RosPack()
def _getCrossFile(self, colorStr):
"""
gets the path of the cross file
:param colorStr: color as string
:return:
"""
return os.path.join(BallPool.rp.get_path('bitbots_live_tool_rqt'), 'resource', 'ui_images', "cross-" + colorStr + "-big66x66.png")
def __init__(self, frame, size=32):
"""
:param frame: the frame
:param size: the size
"""
self.pool = {"blue": [], "green": [], "red": [], "yellow": []}
self.frame = frame
self.size = size
def getCrossLabel(self, colorStr):
"""
initiates the cross label
:param colorStr: color as string
:return:
"""
ls = self.pool.get(colorStr)
if ls == []:
pxmap = QPixmap(self._getCrossFile(colorStr))
crs = QLabel(parent=self.frame)
crs.setPixmap(pxmap)
crs.setScaledContents(True)
crs.setFixedSize(self.size, self.size)
crs.show()
return crs
else:
crs = ls.pop()
crs.show()
return crs
def returnCrossLabel(self, crs, colorStr):
"""
returns the cross label
:param crs: the cross
:param colorStr: color as string
:return:
"""
self.pool[colorStr].append(crs)
crs.hide()
class OpponentPool:
rp = rospkg.RosPack()
def _getOpponentFile(self, colorStr):
"""
gets the path of the file
:param colorStr: color as string
:return:
"""
#return os.path.join(BallPool.rp.get_path('bitbots_live_tool_rqt'), 'resource', 'ui_images', "opponent_" + colorStr + "-big.png")
return os.path.join(BallPool.rp.get_path('bitbots_live_tool_rqt'), 'resource', 'ui_images', "new_icons", "opponent_" + colorStr + ".png")
def __init__(self, frame, size=32):
"""
:param frame: the frame
:param size: the size
"""
self.pool = {"blue": [], "green": [], "red": [], "yellow": [], "magenta": [], "cyan": []}
self.frame = frame
self.size = size
def getOpponentLabel(self, colorStr):
"""
initiates the opponent label
:param colorStr: color as string
:return:
"""
ls = self.pool.get(colorStr)
if ls == []:
pxmap = QPixmap(self._getOpponentFile(colorStr))
opp = QLabel(parent=self.frame)
opp.setPixmap(pxmap)
opp.setScaledContents(True)
opp.setFixedSize(self.size, self.size)
opp.show()
return opp
else:
opp = ls.pop()
opp.show()
return opp
def returnOpponentLabel(self, opp, colorStr):
"""
returns the opponent label
:param opp: the opponent
:param colorStr: color as string
:return:
"""
self.pool[colorStr].append(opp)
opp.hide()
class TeammatePool:
rp = rospkg.RosPack()
def _getTeammateFile(self, colorStr):
"""
gets the path of the file
:param colorStr: color as string
:return:
"""
#return os.path.join(BallPool.rp.get_path('bitbots_live_tool_rqt'), 'resource', 'ui_images', "Rob1_" + colorStr + "-big.png")
return os.path.join(BallPool.rp.get_path('bitbots_live_tool_rqt'), 'resource', 'ui_images', "new_icons", "teammate_" + colorStr + ".png")
def __init__(self, frame, size=32):
"""
:param frame: frame
:param size: size
"""
self.pool ={"blue": [], "green": [], "red": [], "yellow": [], "cyan": [], "magenta": []}
self.frame = frame
self.size = size
def getTeammateLabel(self, colorStr):
"""
initiates the teammate label for the single field
:param colorStr: color as string
:return:
"""
ls = self.pool.get(colorStr)
if ls == []:
pxmap = QPixmap(self._getTeammateFile(colorStr))
mate = QLabel(parent=self.frame)
mate.setPixmap(pxmap)
mate.setScaledContents(True)
mate.setFixedSize(self.size, self.size)
mate.show()
return mate
else:
mate = ls.pop()
mate.show()
return mate
def returnTeammateLabel(self, mate, colorStr):
"""
returns the teammate label
:param mate: the teammate
:param colorStr: color as string
:return:
"""
self.pool[colorStr].append(mate)
mate.hide()
class CyanMagentaPool:
rp = rospkg.RosPack()
def _getTeammateFile(self, colorStr):
"""
gets the teammate file
:param colorStr: color as string
:return:
"""
return os.path.join(BallPool.rp.get_path('bitbots_live_tool_rqt'), 'resource', 'ui_images', "Rob1_" + colorStr + "-big.png")
def __init__(self, frame, size=32):
"""
:param frame: frame
:param size: size
"""
self.pool ={"cyan": [], "magenta": []}
self.frame = frame
self.size = size
def getTeammateLabel(self, colorStr):
"""
initaties the teammate label for the quarter field
:param colorStr: color as string
:return:
"""
ls = self.pool.get(colorStr)
if ls == []:
pxmap = QPixmap(self._getTeammateFile(colorStr))
mate = QLabel(parent=self.frame)
mate.setPixmap(pxmap)
mate.setScaledContents(True)
mate.setFixedSize(self.size, self.size)
mate.show()
return mate
else:
mate = ls.pop()
mate.show()
return mate
def returnTeammateLabel(self, mate, colorStr):
"""
returns the teammate label
:param mate: the teammate
:param colorStr: color as string
:return:
"""
self.pool[colorStr].append(mate)
mate.hide()
class UndefinedPool:
rp = rospkg.RosPack()
def _getUndefFile(self, colorStr=""):
"""
gets the path for the file
:param colorStr: color as string
:return:
"""
#return os.path.join(BallPool.rp.get_path('bitbots_live_tool_rqt'), 'resource', 'ui_images', "Undef_grey-big33x67.png")
return os.path.join(BallPool.rp.get_path('bitbots_live_tool_rqt'), 'resource', 'ui_images', "new_icons", "undefined_" + colorStr + ".png")
def __init__(self, frame, size=32):
"""
:param frame: frame
:param size: size
"""
self.pool = {"blue": [], "green": [], "red": [], "yellow": [], "grey": []}
self.frame = frame
self.size = size
def getUndefLabel(self, colorStr):
"""
initiates the undefined label
:param colorStr: color as string
:return:
"""
ls = self.pool.get(colorStr)
if ls == []:
pxmap = QPixmap(self._getUndefFile(colorStr))
undf = QLabel(parent=self.frame)
undf.setPixmap(pxmap)
undf.setScaledContents(True)
undf.setFixedSize(self.size, self.size)
undf.show()
return undf
else:
undf = ls.pop()
undf.show()
return undf
def returnUndefLabel(self, undf, colorStr):
"""
returns the undefined label
:param undf: undefined obstacle
:param colorStr: color as string
:return:
"""
self.pool[colorStr].append(undf)
undf.hide()
class Arrowlabel(QWidget):
def __init__(self, parent):
super(Arrowlabel, self).__init__(parent)
self.setFixedSize(80, 80)
self.angleRobo = 0
self.angleVel = 0
self.color = QColor(111, 111, 111)
def setLinearAngle(self, x, y):
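        """
        points the arrow along the velocity vector (x, y) and scales it with the speed
        :param x: x component of the velocity
        :param y: y component of the velocity
        :return:
        """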
        self.angleVel = self.radToDeg(self.legendAngle((x, y, 0), (1, 0, 0)))  # legendAngle returns radians; convert to degrees
        size = int(self.len((x, y, 0)) * 2)  # Qt expects integer pixel sizes
        self.setFixedSize(size, size)
self.update()
def setRoboAngle(self, angle):
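        """
        aligns the arrow with the robot's heading
        :param angle: heading in degrees
        :return:
        """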
self.angleRobo = angle
self.update()
def cross(self, a, b):
return (a[1]*b[2]-a[2]*b[1], a[2]*b[0]-a[0]*b[2], a[0]*b[1]-a[1]*b[0])
def dot(self, a, b):
return (a[0]*b[0]+ a[1]*b[1]+ a[2]*b[2])
def len(self, a):
return math.sqrt(a[0]*a[0] + a[1]*a[1] + a[2]*a[2])
def legendAngle(self, a, b):
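        """
        signed angle between vectors a and b in radians
        """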
angle = 0
        sign = self.sign(self.cross(a, b)[2])  # z component of the cross product
if sign == 0:
            sign = 1  # so the vector can also point straight backwards
if (self.len(a) * self.len(b)) != 0:
angle = math.acos(self.dot(a,b) / (self.len(a) * self.len(b))) # immer positiv
return angle * sign
def sign(self, x):
return math.copysign(1, x)
def radToDeg(self, rads):
return rads * 57.29578
def paintEvent(self, event):
painter = QPainter(self)
#painter.begin(self)
# puts the arrow in the middle
painter.translate(self.width()/2, self.height()/2)
painter.rotate(self.angleRobo + self.angleVel)
line = QLineF(0, 0, self.width() / 2 - 3, 0)
headSize = min(self.width() / 20, 4)
points = QPolygonF()
points.append(QPointF(self.width() / 2 - headSize * 2 , headSize))
points.append(QPointF(self.width() / 2 - headSize * 2, - headSize))
points.append(QPointF(self.width() / 2 -3, 0))
pen = QPen(self.color, 2)
painter.setPen(pen)
brush = QBrush(self.color)
painter.setBrush(brush)
painter.drawLine(line)
painter.drawConvexPolygon(points)
#painter.end()
class AngularLabel(QWidget):
def __init__(self, parent):
super(AngularLabel, self).__init__(parent)
self.colorGreen = QColor(66, 255, 100, 128)
self.colorRed = QColor(255, 66, 100, 128)
self.brushGreen = QBrush(self.colorGreen)
self.brushRed = QBrush(self.colorRed)
self.penGreen = QPen(self.brushGreen, 1)
self.penRed = QPen(self.brushRed, 1)
self.setFixedSize(100, 100)
self.angle = 0
self.velocity = 0
def setAngles(self, startAbs, velocity=None):
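        # QPainter.drawPie expects angles in 1/16ths of a degree, hence the factor 16.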
self.angle = startAbs * 16
if velocity != None:
self.velocity = velocity * 16
self.update()
def paintEvent(self, event):
p = QPainter()
p.begin(self)
if self.velocity < 0:
p.setBrush(self.brushGreen)
p.setPen(self.penGreen)
else:
p.setBrush(self.brushRed)
p.setPen(self.penRed)
p.drawPie(0, 0, self.width(), self.height(), self.angle, self.velocity)
        p.end()
| bit-bots/bitbots_misc | bitbots_live_tool_rqt/scripts/label_pool.py | Python | mit | 15,588 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""SSDFeatureExtractor for InceptionV2 features."""
import tensorflow as tf
from object_detection.meta_architectures import ssd_meta_arch
from object_detection.models import feature_map_generators
from object_detection.utils import ops
from object_detection.utils import shape_utils
from nets import inception_v2
slim = tf.contrib.slim
class SSDInceptionV2FeatureExtractor(ssd_meta_arch.SSDFeatureExtractor):
"""SSD Feature Extractor using InceptionV2 features."""
def __init__(self,
is_training,
depth_multiplier,
min_depth,
pad_to_multiple,
conv_hyperparams,
batch_norm_trainable=True,
reuse_weights=None,
use_explicit_padding=False,
use_depthwise=False):
"""InceptionV2 Feature Extractor for SSD Models.
Args:
is_training: whether the network is in training mode.
depth_multiplier: float depth multiplier for feature extractor.
min_depth: minimum feature extractor depth.
pad_to_multiple: the nearest multiple to zero pad the input height and
width dimensions to.
conv_hyperparams: tf slim arg_scope for conv2d and separable_conv2d ops.
batch_norm_trainable: Whether to update batch norm parameters during
training or not. When training with a small batch size
(e.g. 1), it is desirable to disable batch norm update and use
pretrained batch norm params.
reuse_weights: Whether to reuse variables. Default is None.
use_explicit_padding: Whether to use explicit padding when extracting
features. Default is False.
use_depthwise: Whether to use depthwise convolutions. Default is False.
"""
super(SSDInceptionV2FeatureExtractor, self).__init__(
is_training, depth_multiplier, min_depth, pad_to_multiple,
conv_hyperparams, batch_norm_trainable, reuse_weights,
use_explicit_padding, use_depthwise)
def preprocess(self, resized_inputs):
"""SSD preprocessing.
Maps pixel values to the range [-1, 1].
Args:
resized_inputs: a [batch, height, width, channels] float tensor
representing a batch of images.
Returns:
preprocessed_inputs: a [batch, height, width, channels] float tensor
representing a batch of images.
"""
return (2.0 / 255.0) * resized_inputs - 1.0
def extract_features(self, preprocessed_inputs):
"""Extract features from preprocessed inputs.
Args:
preprocessed_inputs: a [batch, height, width, channels] float tensor
representing a batch of images.
Returns:
feature_maps: a list of tensors where the ith tensor has shape
[batch, height_i, width_i, depth_i]
"""
preprocessed_inputs = shape_utils.check_min_image_dim(
33, preprocessed_inputs)
feature_map_layout = {
'from_layer': ['Mixed_4c', 'Mixed_5c', '', '', '', ''],
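        # Empty 'from_layer' entries mean new feature maps are generated on top of Mixed_5c.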
'layer_depth': [-1, -1, 512, 256, 256, 128],
'use_explicit_padding': self._use_explicit_padding,
'use_depthwise': self._use_depthwise,
}
with slim.arg_scope(self._conv_hyperparams):
with tf.variable_scope('InceptionV2',
reuse=self._reuse_weights) as scope:
_, image_features = inception_v2.inception_v2_base(
ops.pad_to_multiple(preprocessed_inputs, self._pad_to_multiple),
final_endpoint='Mixed_5c',
min_depth=self._min_depth,
depth_multiplier=self._depth_multiplier,
scope=scope)
feature_maps = feature_map_generators.multi_resolution_feature_maps(
feature_map_layout=feature_map_layout,
depth_multiplier=self._depth_multiplier,
min_depth=self._min_depth,
insert_1x1_conv=True,
image_features=image_features)
return feature_maps.values()
| jiaphuan/models | research/object_detection/models/ssd_inception_v2_feature_extractor.py | Python | apache-2.0 | 4,579 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import tensorflow as tf
import numpy as np
X = tf.random_normal(shape=[3, 5, 6], dtype=tf.float32)
X = tf.reshape(X, [-1, 5, 6])
cell = tf.nn.rnn_cell.BasicLSTMCell(10)  # can also be swapped for other cells, e.g. GRUCell, BasicRNNCell, etc.
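# Note: reusing the same cell object for both layers can cause variable-sharing
# errors in newer TF 1.x releases; creating one cell per layer is safer.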
lstm_multi = tf.nn.rnn_cell.MultiRNNCell([cell] * 2, state_is_tuple=True)
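# zero_state builds the all-zeros initial state for a batch of 3 sequences.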
state = lstm_multi.zero_state(3, tf.float32)
output, state = tf.nn.dynamic_rnn(lstm_multi, X, initial_state=state, time_major=False)
'''
with tf.Session() as sess:
sess.run(tf.initialize_all_variables())
print output.get_shape()
print sess.run(state)
'''
| iABC2XYZ/abc | Epics/rnn/testDynamicRNN5.py | Python | gpl-3.0 | 624 |
# Driver to talk to the Teensy and read all of the packets it sends into a queue.
# Packet streaming is done in a separate thread.
# Written for Python 2.7, but may be possible to update to a newer version.
import gnc_packet
import serial
import threading
import Queue
class MemCardDriver:
def __init__(self, port = ''):
self.queue = Queue.Queue()
# Connect to serial.
self.ser = None
try:
self.ser = serial.Serial(
port=port,
baudrate=921600,
parity=serial.PARITY_NONE,
stopbits=serial.STOPBITS_ONE,
bytesize=serial.EIGHTBITS,
timeout=None
)
print "Serial port opened"
except serial.SerialException as e:
print str(e), self.ser
return
# Start streaming thread.
self.thread = threading.Thread(target = MemCardDriver.StreamingThread, args = (self,))
self.thread.start()
# Thread to stream packets from the serial port to a queue.
# The start of a packet is indicated by two bytes of 0xAA. This should hopefully
# let the driver connect after the Teensy is already returning data, but this
# hasn't been tested. The 0xAA bytes are discarded and are not passed to the Packet
# class for parsing.
def StreamingThread(self):
while 1:
# Read byte, compare to 0xAA
b = self.ser.read(1)
if not b == '\xAA':
print 1, hex(ord(b))
continue
# Read byte, compare to 0xAA
b = self.ser.read(1)
if not b == '\xAA':
print 2, hex(ord(b))
continue
# Read size and type
header = self.ser.read(gnc_packet.Packet.HEADER_SIZE)
(size, checksum, type) = gnc_packet.Packet.UnpackHeader(header)
if not gnc_packet.Packet.VerifyPacketType(type, size):
continue
# Read data, parse into packet
data = self.ser.read(size)
packet = gnc_packet.Packet(type, size, checksum, data)
# Put in queue for higher level interface
if not packet.type == gnc_packet.PacketType.INVALID:
self.queue.put(packet)
else:
print "Got an invalid packet"
def Pop(self):
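        """Block until the next parsed packet is available and return it."""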
return self.queue.get(block=True)
if __name__ == "__main__":
print "run main.py" | aalberg/hem-teensy | python/memcard_driver.py | Python | gpl-2.0 | 2,219 |
import logging
import signal
import sys
import unittest
from os.path import abspath, dirname, join
from robot.running import TestSuite, TestSuiteBuilder
from robot.utils import StringIO
from robot.utils.asserts import assert_equals
from resources.runningtestcase import RunningTestCase
from resources.Listener import Listener
CURDIR = dirname(abspath(__file__))
ROOTDIR = dirname(dirname(CURDIR))
DATADIR = join(ROOTDIR, 'atest', 'testdata', 'misc')
def run(suite, **kwargs):
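    # Run with output files disabled and std streams captured, returning the result suite.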
config = dict(output=None, log=None, report=None,
stdout=StringIO(), stderr=StringIO())
config.update(kwargs)
result = suite.run(**config)
return result.suite
def build(path):
return TestSuiteBuilder().build(join(DATADIR, path))
def assert_suite(suite, name, status, message='', tests=1):
assert_equals(suite.name, name)
assert_equals(suite.status, status)
assert_equals(suite.message, message)
assert_equals(len(suite.tests), tests)
def assert_test(test, name, status, tags=(), msg=''):
assert_equals(test.name, name)
assert_equals(test.status, status)
assert_equals(test.message, msg)
assert_equals(tuple(test.tags), tags)
class TestRunning(unittest.TestCase):
def test_one_library_keyword(self):
suite = TestSuite(name='Suite')
suite.tests.create(name='Test').keywords.create('Log',
args=['Hello, world!'])
result = run(suite)
assert_suite(result, 'Suite', 'PASS')
assert_test(result.tests[0], 'Test', 'PASS')
def test_failing_library_keyword(self):
suite = TestSuite(name='Suite')
test = suite.tests.create(name='Test')
test.keywords.create('Log', args=['Dont fail yet.'])
test.keywords.create('Fail', args=['Hello, world!'])
result = run(suite)
assert_suite(result, 'Suite', 'FAIL')
assert_test(result.tests[0], 'Test', 'FAIL', msg='Hello, world!')
def test_assign(self):
suite = TestSuite(name='Suite')
test = suite.tests.create(name='Test')
test.keywords.create(assign=['${var}'], name='Set Variable', args=['value in variable'])
test.keywords.create('Fail', args=['${var}'])
result = run(suite)
assert_suite(result, 'Suite', 'FAIL')
assert_test(result.tests[0], 'Test', 'FAIL', msg='value in variable')
def test_suites_in_suites(self):
root = TestSuite(name='Root')
root.suites.create(name='Child')\
.tests.create(name='Test')\
.keywords.create('Log', args=['Hello, world!'])
result = run(root)
assert_suite(result, 'Root', 'PASS', tests=0)
assert_suite(result.suites[0], 'Child', 'PASS')
assert_test(result.suites[0].tests[0], 'Test', 'PASS')
def test_user_keywords(self):
suite = TestSuite(name='Suite')
suite.tests.create(name='Test').keywords.create('User keyword', args=['From uk'])
uk = suite.resource.keywords.create(name='User keyword', args=['${msg}'])
uk.keywords.create(name='Fail', args=['${msg}'])
result = run(suite)
assert_suite(result, 'Suite', 'FAIL')
assert_test(result.tests[0], 'Test', 'FAIL', msg='From uk')
def test_variables(self):
suite = TestSuite(name='Suite')
suite.resource.variables.create('${ERROR}', 'Error message')
suite.resource.variables.create('@{LIST}', ['Error', 'added tag'])
suite.tests.create(name='T1').keywords.create('Fail', args=['${ERROR}'])
suite.tests.create(name='T2').keywords.create('Fail', args=['@{LIST}'])
result = run(suite)
assert_suite(result, 'Suite', 'FAIL', tests=2)
assert_test(result.tests[0], 'T1', 'FAIL', msg='Error message')
assert_test(result.tests[1], 'T2', 'FAIL', ('added tag',), 'Error')
def test_modifiers_are_not_used(self):
# These options are valid but not used. Modifiers can be passed to
# suite.visit() explicitly if needed.
suite = TestSuite(name='Suite')
suite.tests.create(name='Test').keywords.create('No Operation')
result = run(suite, prerunmodifier='not used', prerebotmodifier=42)
assert_suite(result, 'Suite', 'PASS', tests=1)
class TestTestSetupAndTeardown(unittest.TestCase):
def setUp(self):
self.tests = run(build('setups_and_teardowns.robot')).tests
def test_passing_setup_and_teardown(self):
assert_test(self.tests[0], 'Test with setup and teardown', 'PASS')
def test_failing_setup(self):
assert_test(self.tests[1], 'Test with failing setup', 'FAIL',
msg='Setup failed:\nTest Setup')
def test_failing_teardown(self):
assert_test(self.tests[2], 'Test with failing teardown', 'FAIL',
msg='Teardown failed:\nTest Teardown')
def test_failing_test_with_failing_teardown(self):
assert_test(self.tests[3], 'Failing test with failing teardown', 'FAIL',
msg='Keyword\n\nAlso teardown failed:\nTest Teardown')
class TestSuiteSetupAndTeardown(unittest.TestCase):
def setUp(self):
self.suite = build('setups_and_teardowns.robot')
def test_passing_setup_and_teardown(self):
suite = run(self.suite)
assert_suite(suite, 'Setups And Teardowns', 'FAIL', tests=4)
assert_test(suite.tests[0], 'Test with setup and teardown', 'PASS')
def test_failing_setup(self):
suite = run(self.suite, variable='SUITE SETUP:Fail')
assert_suite(suite, 'Setups And Teardowns', 'FAIL',
'Suite setup failed:\nAssertionError', 4)
assert_test(suite.tests[0], 'Test with setup and teardown', 'FAIL',
msg='Parent suite setup failed:\nAssertionError')
def test_failing_teardown(self):
suite = run(self.suite, variable='SUITE TEARDOWN:Fail')
assert_suite(suite, 'Setups And Teardowns', 'FAIL',
'Suite teardown failed:\nAssertionError', 4)
assert_test(suite.tests[0], 'Test with setup and teardown', 'FAIL',
msg='Parent suite teardown failed:\nAssertionError')
def test_failing_test_with_failing_teardown(self):
suite = run(self.suite, variable=['SUITE SETUP:Fail', 'SUITE TEARDOWN:Fail'])
assert_suite(suite, 'Setups And Teardowns', 'FAIL',
'Suite setup failed:\nAssertionError\n\n'
'Also suite teardown failed:\nAssertionError', 4)
assert_test(suite.tests[0], 'Test with setup and teardown', 'FAIL',
msg='Parent suite setup failed:\nAssertionError\n\n'
'Also parent suite teardown failed:\nAssertionError')
def test_nested_setups_and_teardowns(self):
root = TestSuite(name='Root')
root.keywords.create('Fail', args=['Top level'], type='teardown')
root.suites.append(self.suite)
suite = run(root, variable=['SUITE SETUP:Fail', 'SUITE TEARDOWN:Fail'])
assert_suite(suite, 'Root', 'FAIL',
'Suite teardown failed:\nTop level', 0)
assert_suite(suite.suites[0], 'Setups And Teardowns', 'FAIL',
'Suite setup failed:\nAssertionError\n\n'
'Also suite teardown failed:\nAssertionError', 4)
assert_test(suite.suites[0].tests[0], 'Test with setup and teardown', 'FAIL',
msg='Parent suite setup failed:\nAssertionError\n\n'
'Also parent suite teardown failed:\nAssertionError\n\n'
'Also parent suite teardown failed:\nTop level')
class TestCustomStreams(RunningTestCase):
def test_stdout_and_stderr(self):
self._run()
self._assert_output(sys.__stdout__,
[('My Suite', 2), ('My Test', 1),
('1 critical test, 1 passed, 0 failed', 1)])
self._assert_output(sys.__stderr__, [('Hello, world!', 1)])
def test_custom_stdout_and_stderr(self):
stdout, stderr = StringIO(), StringIO()
self._run(stdout, stderr)
self._assert_normal_stdout_stderr_are_empty()
self._assert_output(stdout, [('My Suite', 2), ('My Test', 1)])
self._assert_output(stderr, [('Hello, world!', 1)])
def test_same_custom_stdout_and_stderr(self):
output = StringIO()
self._run(output, output)
self._assert_normal_stdout_stderr_are_empty()
self._assert_output(output, [('My Suite', 2), ('My Test', 1),
('Hello, world!', 1)])
def test_run_multiple_times_with_different_stdout_and_stderr(self):
stdout, stderr = StringIO(), StringIO()
self._run(stdout, stderr)
self._assert_normal_stdout_stderr_are_empty()
self._assert_output(stdout, [('My Suite', 2), ('My Test', 1)])
self._assert_output(stderr, [('Hello, world!', 1)])
        stdout.close()
        stderr.close()
output = StringIO()
self._run(output, output, variable='MESSAGE:Hi, again!')
self._assert_normal_stdout_stderr_are_empty()
self._assert_output(output, [('My Suite', 2), ('My Test', 1),
('Hi, again!', 1), ('Hello, world!', 0)])
output.close()
self._run(variable='MESSAGE:Last hi!')
self._assert_output(sys.__stdout__, [('My Suite', 2), ('My Test', 1)])
self._assert_output(sys.__stderr__, [('Last hi!', 1), ('Hello, world!', 0)])
def _run(self, stdout=None, stderr=None, **options):
suite = TestSuite(name='My Suite')
suite.resource.variables.create('${MESSAGE}', 'Hello, world!')
suite.tests.create(name='My Test').keywords.create('Log', args=['${MESSAGE}', 'WARN'])
run(suite, stdout=stdout, stderr=stderr, **options)
def _assert_normal_stdout_stderr_are_empty(self):
self._assert_outputs()
class TestPreservingSignalHandlers(unittest.TestCase):
def setUp(self):
self.orig_sigint = signal.getsignal(signal.SIGINT)
self.orig_sigterm = signal.getsignal(signal.SIGTERM)
def tearDown(self):
signal.signal(signal.SIGINT, self.orig_sigint)
signal.signal(signal.SIGTERM, self.orig_sigterm)
def test_original_signal_handlers_are_restored(self):
my_sigterm = lambda signum, frame: None
signal.signal(signal.SIGTERM, my_sigterm)
suite = TestSuite(name='My Suite')
suite.tests.create(name='My Test').keywords.create('Log', args=['Hi!'])
run(suite)
assert_equals(signal.getsignal(signal.SIGINT), self.orig_sigint)
assert_equals(signal.getsignal(signal.SIGTERM), my_sigterm)
class TestStateBetweenTestRuns(unittest.TestCase):
def test_reset_logging_conf(self):
assert_equals(logging.getLogger().handlers, [])
assert_equals(logging.raiseExceptions, 1)
suite = TestSuite(name='My Suite')
suite.tests.create(name='My Test').keywords.create('Log', args=['Hi!'])
run(suite)
assert_equals(logging.getLogger().handlers, [])
assert_equals(logging.raiseExceptions, 1)
class TestListeners(RunningTestCase):
def test_listeners(self):
module_file = join(ROOTDIR, 'utest', 'resources', 'Listener.py')
suite = build('setups_and_teardowns.robot')
suite.run(output=None, log=None, report=None, listener=[module_file+":1", Listener(2)])
self._assert_outputs([("[from listener 1]", 1), ("[from listener 2]", 1)])
def test_listeners_unregistration(self):
module_file = join(ROOTDIR, 'utest', 'resources', 'Listener.py')
suite = build('setups_and_teardowns.robot')
suite.run(output=None, log=None, report=None, listener=module_file+":1")
self._assert_outputs([("[from listener 1]", 1), ("[listener close]", 1)])
self._clear_outputs()
suite.run(output=None, log=None, report=None)
self._assert_outputs([("[from listener 1]", 0), ("[listener close]", 0)])
if __name__ == '__main__':
unittest.main()
| moto-timo/robotframework | utest/running/test_running.py | Python | apache-2.0 | 12,074 |
"""
Django settings for Mantenimiento project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
TEMPLATE_DIRS = [os.path.join(BASE_DIR, 'eventos/templates')]
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '9gx&hqdkiboq!-nt9*l5(cix6_k8r38t_tbj3o@u4nlp02*j7#'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'eventos',
'south',
'ajax_select',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'Mantenimiento.urls'
WSGI_APPLICATION = 'Mantenimiento.wsgi.application'
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.tz',
'django.contrib.messages.context_processors.messages',
'django.core.context_processors.request',
)
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'mantenimiento',
'USER': 'postgres',
'PASSWORD': 'ascent',
'HOST': '127.0.0.1'
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'es'
TIME_ZONE = 'America/Mexico_City'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
#SESSION_EXPIRE_AT_BROWSER_CLOSE = True
#SESSION_COOKIE_AGE = 60 * 10  # 10 minutes, in seconds
AJAX_LOOKUP_CHANNELS = {
# define a custom lookup channel
'supervisor' : ('eventos.lookups', 'SupervisorLookup'),
'personal' : ('eventos.lookups', 'PersonalLookup'),
'partederepuesto' : ('eventos.lookups', 'ParteDeRepuestoLookup'),
'materialocupado' : ('eventos.lookups','MaterialOcupadoLookup')
}
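# Illustrative use of one of these channels in a form (assumes the standard
# django-ajax-selects field API):
#   from ajax_select.fields import AutoCompleteSelectField
#   supervisor = AutoCompleteSelectField('supervisor', required=True)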
LOGIN_URL = '/accesar/' | timh52280/GestorDeMantenimiento | Mantenimiento/settings.py | Python | gpl-2.0 | 3,134 |
from pook import interceptors
class CustomInterceptor(interceptors.BaseInterceptor):
pass
def test_add_custom_interceptor():
interceptors.add(CustomInterceptor)
assert CustomInterceptor in interceptors.interceptors
| h2non/pook | tests/unit/interceptors/module_test.py | Python | mit | 232 |
# Copyright (C) 2011-2012 CRS4.
#
# This file is part of Seal.
#
# Seal is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Seal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Seal. If not, see <http://www.gnu.org/licenses/>.
from seal.lib.mr.hit_processor_chain_link import HitProcessorChainLink
class FilterLink(HitProcessorChainLink):
def __init__(self, monitor, next_link = None):
super(FilterLink, self).__init__(next_link)
self.min_hit_quality = 0
self.remove_unmapped = False # if true, all unmapped are removed regardless of hit quality
self.event_monitor = monitor
@staticmethod
def _remove_i(pair, i):
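        # Drop hit i from the pair; i^1 flips 0<->1 to reach the mate, whose
        # mate reference is cleared so it no longer points at the removed hit.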
pair[i] = None
other_hit = pair[i^1]
if other_hit:
other_hit.remove_mate()
return pair
def process(self, pair):
if len(pair) != 2:
raise ValueError("pair length != 2 (it's %d)" % len(pair))
pair = list(pair) # tuples can't be modified
for i in 0, 1:
if self.remove_unmapped and pair[i].is_unmapped():
pair = self._remove_i(pair, i)
self.event_monitor.count("reads filtered: unmapped")
elif pair[i].qual < self.min_hit_quality:
pair = self._remove_i(pair, i)
self.event_monitor.count("reads filtered: low quality")
if self.next_link and any(pair):
self.next_link.process(tuple(pair)) # forward pair to next element in chain
class RapiFilterLink(HitProcessorChainLink):
def __init__(self, monitor, next_link = None):
super(RapiFilterLink, self).__init__(next_link)
self.min_hit_quality = 0
self.remove_unmapped = False # if true, all unmapped are removed regardless of hit quality
self.event_monitor = monitor
@staticmethod
def _remove_i(pair, i):
pair[i] = None
other_hit = pair[i^1]
if other_hit:
other_hit.remove_mate()
return pair
def process(self, pair):
if len(pair) != 2:
raise ValueError("pair length != 2 (it's %d)" % len(pair))
pair = list(pair) # tuples can't be modified
for i in 0, 1:
if self.remove_unmapped and not pair[i].mapped:
pair = self._remove_i(pair, i)
self.event_monitor.count("reads filtered: unmapped")
elif pair[i].mapq < self.min_hit_quality:
pair = self._remove_i(pair, i)
self.event_monitor.count("reads filtered: low quality")
super(RapiFilterLink, self).process(pair) # forward pair to next element in chain
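# Illustrative usage sketch (comments only; the monitor and hit objects are
# minimal stand-ins for the real Seal/Hadoop types, not part of its API):
#   class _CountingMonitor(object):
#       def count(self, event):
#           print(event)
#   link = FilterLink(_CountingMonitor())
#   link.min_hit_quality = 30
#   link.remove_unmapped = True
#   # link.process((hit1, hit2)) drops unmapped or low-quality mates and
#   # forwards whatever survives to link.next_link, if one is set.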
| ilveroluca/seal | seal/lib/mr/filter_link.py | Python | gpl-3.0 | 3,069 |
from xii import definition, command
class ResumeCommand(command.Command):
name = "resume"
help = "resume all paused domains"
def run(self):
self.each_component("resume")
| xii/xii | src/xii/builtin/commands/resume/command_resume.py | Python | apache-2.0 | 193 |
#!/usr/bin/env python
'''
object tracking
==================
Example of using tracking to decide if a moving object passed a line
Usage
-----
main_tracker.py [<video source>]
Keys:
p - pause video
s - toggle single step mode
'''
# Python 2/3 compatibility
from __future__ import print_function
import sys
PY3 = sys.version_info[0] == 3
if PY3:
xrange = range
import numpy as np
import cv2
from time import sleep,clock,time
from video import VideoStream
from video import videoSequence
from tracker import Background
from tracker import SimpleTracker
from tracker import Blobber
import configuration
from opencv import draw_str,RectSelector
from threading import Thread,Event
simulate = False
#simulate = True
parser = configuration.Config()
parser.readConfig()
contrast = parser.getint('Camera','contrast')
saturation = parser.getint('Camera','saturation')
maxd = parser.getint('Object','maxsize')
mind = parser.getint('Object','minsize')
hthres = 400.0
min_speed = 5
#imageSizeX = 300
#imageSizeY = 400
#imageSizeX = 320
#imageSizeY = 240
imageSizeX = 640
imageSizeY = 480
new_time = 0.0
old_time = 0.0
dt = 0.04
# Setup SimpleBlobDetector parameters.
params = cv2.SimpleBlobDetector_Params()
# Change thresholds
params.minThreshold = 10
params.maxThreshold = 255
#params.thresholdStep = 10
# distance
params.minDistBetweenBlobs = 40.0
#color
params.filterByColor = False
params.blobColor = 255
# Filter by Area.
params.filterByArea = True
params.minArea = 100
params.maxArea = 50000
# Filter by Circularity
params.filterByCircularity = False
params.minCircularity = 0.1
# Filter by Convexity
params.filterByConvexity = False
params.minConvexity = 0.87
#params.maxConvexity = 0.87
# Filter by Inertia
params.filterByInertia = False
#params.minInertiaRatio = 0.01
params.maxInertiaRatio = 0.3
class container:
index = 0
thresHold = 80 * np.ones((imageSizeX,imageSizeY,3), np.uint8)
vis = 80 * np.ones((imageSizeX,imageSizeY,3), np.uint8)
trackList = None
#pts = []
#szs = []
times = [0., 0., 0., 0.]
keypoints = None
boxes = None
#------------------------------------------------------------------------
class App:
def __init__(self,videoSrc=0, usePiCamera=False):
global params
self.paused = False
self.wtime = 0x00
self.fcnt = 0
self.frame = None
self.camera = None
self.name = 'tracker'
self.threaded = True
#self.tracker = lktracker.LKTracker()
cv2.namedWindow(self.name)
self.recSel = RectSelector(self.name, self.onRect)
self.reducedRect = (0,0,0,0)
self.reducedArea = False
self.usePiCamera = usePiCamera
#self.cap = VideoStream(src=videoSrc, usePiCamera=usePiCamera,resolution=(480,368),framerate=32)
self.cap = VideoStream(src=videoSrc, usePiCamera=usePiCamera,resolution=(imageSizeX,imageSizeY),framerate=55)
#self.cap = VideoStream(src=videoSrc, usePiCamera=usePiCamera,resolution=(320,240),framerate=32)
self.cap.start()
if usePiCamera:
global contrast
global saturation
self.camera = self.cap.stream.camera
cv2.createTrackbar('contrast', self.name, 0, 100, self.setContrast)
cv2.setTrackbarPos('contrast', self.name, contrast)
cv2.createTrackbar('saturation', self.name, 0, 100, self.setSaturation)
cv2.setTrackbarPos('saturation', self.name, saturation)
sleep(2.0) ## wait for camera to warm up
global min_speed
cv2.createTrackbar('max size', self.name, 10, 200, self.setMaxd)
cv2.setTrackbarPos('max size', self.name, maxd)
cv2.createTrackbar('min size', self.name, 5, 100, self.setMind)
cv2.setTrackbarPos('min size', self.name, mind)
cv2.createTrackbar('blobspeed', self.name, 3, 500, self.setSpeed)
cv2.setTrackbarPos('blobspeed', self.name, min_speed)
cv2.createTrackbar('hist thres', self.name, 300, 500, self.setThres)
cv2.setTrackbarPos('hist thres', self.name, int(hthres))
global imageSizeX,imageSizeY
global old_time
old_time,img = self.cap.read()
print("init time: %d" % (old_time))
imageSizeY,imageSizeX = img.shape[:2]
print(imageSizeX, imageSizeY)
self.tracker = SimpleTracker.SimpleTracker(imageSizeX = imageSizeX, imageSizeY = imageSizeY)
#self.tracker = SimpleTrackerIMM.SimpleTracker(imageSizeX = imageSizeX, imageSizeY = imageSizeY)
#self.bgSeperator = Background.SeperatorMOG2(hist=8)
#self.bgSeperator = Background.SeperatorMOG2_OCL(hist=8)
#self.bgSeperator = Background.SeperatorGMG(hist=8, shadows=False)
#self.bgSeperator = Background.SeperatorKNN(shadows=False)
self.bgSeperator = Background.simpleBackground()
#self.detector = cv2.SimpleBlobDetector_create(params)
self.ddd = Blobber.blobDetector(params)
if self.threaded:
self.setupThreading()
    def __del__(self):
        global parser
        if self.threaded:
            # self.threads only exists when setupThreading() has run
            for t in self.threads:
                t.join()
        cv2.destroyAllWindows()
        self.cap.stop()
        parser.writeConfig()
def setSpeed( self, value ):
pass
def setThres( self, value ):
pass
def setContrast( self, value ):
global contrast
contrast = value
self.camera.contrast = contrast
def setSaturation( self, value ):
global saturation
saturation = value
self.camera.saturation = saturation
def setMaxd( self, val ):
global maxd
maxd = val
def setMind( self, val ):
global mind
mind = val
def onRect(self, rect):
x,y,x1,y1 = rect
w = x1 - x; h = y1 - y
self.reducedRect = (x,y,w,h)
if w + h > 100:
self.reducedArea = True
else:
self.reducedArea = False
def startThreading(self):
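        # Prime the hand-off events back to front: downstream stages report
        # "ready" before stage 1 is released, so no frame enters the pipeline
        # before its consumers can accept it.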
for t in self.threads:
t.start()
self.T4Ready.set()
self.T3Ready.set()
self.T2Ready.set()
self.RunT1.set() #
def setupThreading(self):
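        # Four-stage pipeline with one result container per slot:
        #   t1: capture a frame and separate foreground from background
        #   t2: detect blobs/boxes in the threshold image
        #   t3: feed the detected boxes to the tracker
        #   main thread (run): draw and display the results
        # The RunT*/T*Ready events implement the stage-to-stage handshake.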
self.threads = []
self.results = []
self.index = [0,0,0,0]
for i in range(4):
self.results += [container()]
self.RunT1 = Event()
self.RunT2 = Event()
self.RunT3 = Event()
self.RunT4 = Event()
self.T2Ready = Event()
self.T3Ready = Event()
self.T4Ready = Event()
t1 = Thread(target=self.t1Run, args=(1,))
t1.setDaemon(True)
self.threads += [t1]
t2 = Thread(target=self.t2Run, args=(2,))
t2.setDaemon(True)
self.threads += [t2]
t3 = Thread(target=self.t3Run, args=(3,))
t3.setDaemon(True)
self.threads += [t3]
def t1Run(self, arg=0):
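        # Stage 1: grab the next frame and compute the background threshold.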
global new_time
global old_time
global dt
while True:
self.RunT1.wait()
self.RunT1.clear()
t0 = clock()
i = self.index[0]
#--- run some stuff
##print("run t1 %d" % (i))
new_time,vis = self.cap.read()
if vis is None:
vis = self.vis
dt = new_time - old_time
old_time = new_time
            # background separation
thrOk, self.results[i].thresHold = self.bgSeperator.seperate(vis)
self.results[i].vis = vis
self.results[i].index += 1
#self.results[i].thresHold = self.thresHold
#--- stuff ready
self.results[i].times[0] = 1000.0 * (clock() - t0)
i += 1; i %= 4
self.index[0] = i
self.T2Ready.wait()
self.RunT2.set() # i
def t2Run(self, arg=0):
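        # Stage 2: run blob/box detection on the stage 1 threshold image.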
while True:
self.RunT2.wait()
self.RunT2.clear()
self.T2Ready.clear()
##print("t2 start t1")
self.RunT1.set()
t0 = clock()
i = self.index[1]
#-- stop processing
#while self.wtime == 0:
# sleep(1.0)
#--- run some stuff
##print("run t2 %d" % (i))
vis = self.results[i].vis
thresHold = self.results[i].thresHold
#self.results[i].pts = np.array([pt.pt for pt in keypoints]).reshape(-1, 2)
#self.results[i].szs = np.array([pt.size for pt in keypoints]).reshape(-1, 1)
#self.results[i].keypoints = self.detector.detect(thresHold)
self.results[i].boxes = self.ddd.detect(thresHold)
#--- stuff ready
self.results[i].times[1] = 1000.0 * (clock() - t0)
i += 1; i %= 4
self.index[1] = i
self.T3Ready.wait()
self.T2Ready.set()
self.RunT3.set()
def t3Run(self, arg=0):
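        # Stage 3: update the tracker with the boxes detected in stage 2.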
global dt
while True:
self.RunT3.wait()
self.RunT3.clear()
self.T3Ready.clear()
t0 = clock()
i = self.index[2]
#--- run some stuff
##print("run t3 %d" % (dt))
vis = self.results[i].vis
#keypoints = self.results[i].keypoints
#self.results[i].trackList = self.tracker.trackKeypoints(vis, keypoints, dt)
boxes = self.results[i].boxes
self.results[i].trackList = self.tracker.trackBoxes(vis, boxes, dt)
#tracks = self.tracker.trackContours(vis, pts, szs)
#--- stuff ready
self.results[i].times[2] = 1000 * (clock() - t0)
i += 1; i %= 4
self.index[2] = i
self.T4Ready.wait()
self.T3Ready.set()
self.RunT4.set() # i
def run(self):
global dt
global new_time
global old_time
old_time = time()
if self.threaded:
self.startThreading()
while True:
if self.wtime == 0:
deltaT = 0.04
if self.threaded:
#- read the next frame (in thread 1)
index = self.index[3]
self.RunT4.wait()
self.RunT4.clear()
self.T4Ready.clear()
vis = self.results[index].vis
trackList = self.results[index].trackList
thresHold = self.results[index].thresHold
keypoints = self.results[index].keypoints
boxes = self.results[index].boxes
if abs(dt) < 1e-10:
dt=0.04
frate = '%4.2f' % (1.0/dt)
print("frate: %s" % (frate))
str_frate = '%6s %5.2f, %5.2f, %5.2f, %5.2f %d' % (frate, \
self.results[index].times[0], \
self.results[index].times[1], \
self.results[index].times[2], \
self.results[index].times[3], \
self.results[0].index )
else:
#-- t1 ---------------------------------------
new_time,frame = self.cap.read()
if frame is None:
break
                if self.usePiCamera:
vis = frame
else:
vis = frame.copy()
#- determine delay
dt = new_time - old_time
old_time = new_time
frate = '%4.2f' % (1.0/dt)
thresOk,thresHold = self.bgSeperator.seperate(vis)
if not thresOk:
continue
#-- t2 ---------------------------------------
#self.tracker.track(vis, deltaT)
keypoints = self.ddd.detect(thresHold)
# coords: pts[:,:2] size: pts[:,2:]
#pts = np.array([[pt.pt[0],pt.pt[1],pt.size] for pt in keypoints]).reshape(-1, 3)
#vis = cv2.drawKeypoints(vis, keypoints, np.array([]), (20,220,20), cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)
#_, contours,hierarchy = cv2.findContours(thresHold, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
#_, contours,hierarchy = cv2.findContours(thresHold, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
boxes = self.ddd.detect(thresHold)
#lst = []; szs = []
for box in boxes:
cv2.rectangle(vis,(box[0],box[1]),(box[2],box[3]),(0,255,0),1)
# strng = "%d/%d" % (xc,yc)
# cv2.putText(vis, strng, (xc, yc), cv2.FONT_HERSHEY_SIMPLEX, 0.3, (20,150,20), 1)
#pts = np.array(lst).reshape(-1,2)
#szs = np.array(szs).reshape(-1,4)
#-- t3 ---------------------------------------
#trackList = self.tracker.trackKeypoints(vis, keypoints, dt)
trackList = self.tracker.trackBoxes(vis, boxes, dt)
#debImg = cv2.resize(self.tracker.image,None,fx=5,fy=5)
#cv2.imshow("Debug", debImg)
#tracks = self.tracker.trackContours(vis, pts, szs)
str_frate = '%6s' % (frate)
#-- t4 ---------------------------------------
#vis = cv2.drawKeypoints(vis, keypoints, np.array([]), (20,220,20), cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)
#pts = np.array([pt.pt for pt in keypoints]).reshape(-1, 2)
#for (x,y) in pts:
# cv2.drawMarker(vis, (int(x),int(y)), (20,220,220), cv2.MARKER_DIAMOND,10)
for x0,y0,x1,y1 in boxes:
xm = x0 + (x1 - x0) / 2
ym = y0 + (y1 - y0) / 2
cv2.rectangle(vis,(x0,y0),(x1,y1),(0,255,0),1)
cv2.drawMarker(vis, (int(xm),int(ym)), (20,220,220), cv2.MARKER_DIAMOND,10)
for (ix,iy),ttrack in trackList.items():
ttrack.showTrack(vis, (0,255,0))
cv2.putText(vis, str_frate, (3, 14), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (20,150,20), 2)
#print(str_frate)
#--
#self.recSel.draw(vis)
cv2.imshow(self.name, vis)
cv2.imshow("Threshold", thresHold)
self.fcnt += 1
#self.results[index].times[3] = 1000.0 * (clock() - t0)
#print(self.results[index].times)
ch = cv2.waitKey(self.wtime) & 0xFF
if ch == ord('s'):
self.wtime ^= 1
if ch == 27:
break
if self.threaded:
index += 1; index %= 4
self.index[3] = index
self.T4Ready.set()
if __name__ == '__main__':
print(__doc__)
import sys
try:
video_src = sys.argv[1]
usePiCamera = False
except:
video_src = 0
usePiCamera = True
App(videoSrc=video_src,usePiCamera=usePiCamera).run()
| barney-NG/pyCAMTracker | src/main_tracker.py | Python | mit | 14,947 |
import liclient
def show_methods():
obj = liclient.client
methods = [x for x in dir(obj) if x[0:1] != '_' if callable(getattr(obj, x))]
print 'Load Impact client methods:'
print '\n'.join(methods)
def run_method(name, args):
obj = liclient.client
method = getattr(obj, name)
output = method(*args)
print(output)
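# Illustrative calls (method names depend on the Load Impact client object;
# 'get_tests' below is hypothetical):
#   show_methods()
#   run_method('get_tests', [])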
| schibsted/overloadimpact | overloadimpact/cli_tool/api_method.py | Python | mit | 346 |
# -*- coding: utf-8 -*-
import pytest
from distutils.version import LooseVersion
from freezegun import freeze_time
MARKER_NAME = 'freeze_time'
FIXTURE_NAME = 'freezer'
def get_closest_marker(node, name):
"""
Get our marker, regardless of pytest version
"""
    if LooseVersion(pytest.__version__) < LooseVersion('3.6.0'):
        return node.get_marker(name)
    else:
        return node.get_closest_marker(name)
@pytest.fixture(name=FIXTURE_NAME)
def freezer_fixture(request):
"""
Freeze time and make it available to the test
"""
args = []
kwargs = {}
ignore = []
# If we've got a marker, use the arguments provided there
marker = get_closest_marker(request.node, MARKER_NAME)
if marker:
ignore = marker.kwargs.pop('ignore', [])
args = marker.args
kwargs = marker.kwargs
# Always want to ignore _pytest
ignore.append('_pytest.terminal')
ignore.append('_pytest.runner')
# Freeze time around the test
freezer = freeze_time(*args, ignore=ignore, **kwargs)
frozen_time = freezer.start()
yield frozen_time
freezer.stop()
def pytest_collection_modifyitems(items):
"""
Inject our fixture into any tests with our marker
"""
for item in items:
if get_closest_marker(item, MARKER_NAME):
item.fixturenames.insert(0, FIXTURE_NAME)
def pytest_configure(config):
"""
Register our marker
"""
config.addinivalue_line(
"markers", "{}(...): use freezegun to freeze time".format(MARKER_NAME)
)
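# Illustrative usage in a test module (hypothetical test, shown only to
# document the fixture/marker pair defined above):
#   import datetime
#   import pytest
#   @pytest.mark.freeze_time('2017-05-21')
#   def test_current_date(freezer):
#       assert datetime.date.today() == datetime.date(2017, 5, 21)
#       freezer.move_to('2017-05-22')
#       assert datetime.date.today() == datetime.date(2017, 5, 22)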
| ktosiek/pytest-freezegun | pytest_freezegun.py | Python | mit | 1,579 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
==========================
Metabolic Components Tests
==========================
:Authors:
Moritz Emanuel Beber
Alexandra Mirela Grigore
Nikolaus Sonnenschein
:Date:
2011-04-08
:Copyright:
Copyright(c) 2011 Jacobs University of Bremen. All rights reserved.
:File:
test_metabolism.py
"""
import inspect
import itertools
import nose.tools as nt
from .. import metabolism as pymet
def test_components():
for (name, cls) in inspect.getmembers(pymet, predicate=inspect.isclass):
if name.startswith("Basic"):
yield (check_basic_cls, cls)
elif name.startswith("SBML"):
yield (check_sbml_cls, cls)
elif name == "MetabolicSystem":
yield (check_system, cls)
def check_basic_cls(cls):
names = ["foo", "bar", None]
instances = [cls(name) for name in names]
# test __str__
check__str__(instances, names)
# test __repr__
check__repr__(instances)
def check__str__(instances, names):
for (instance, name) in itertools.izip(instances, names):
        if name is None:
nt.assert_equal(str(instance),
"%s_%d" % (instance.__class__.__name__, instance._index))
else:
nt.assert_equal(str(instance), name)
def check__repr__(instances):
for instance in instances:
cls = instance.__class__
nt.assert_equal(repr(instance), "<%s.%s, %d>" % (cls.__module__,
cls.__name__, id(instance)))
def check_sbml_cls(cls):
if cls.__name__ == "SBMLCompartment":
pass
elif cls.__name__ == "SBMLCompound":
pass
elif cls.__name__ == "SBMLCompartmentCompound":
pass
elif cls.__name__ == "SBMLReaction":
pass
def check_system(cls):
pass
#class TestSBMLCompartment:
#
# def setUp(self):
# self.comp_1 = pymet.SBMLCompartment("foo")
# self.comp_2 = pymet.SBMLCompartment("bar", True)
#
# def test__str__(self):
# nt.assert_equal(str(self.comp_1), "foo")
# nt.assert_equal(str(self.comp_2), "bar")
#
# def test__repr__(self):
# nt.assert_equal(repr(self.comp_1),
# "<pymetabolism.metabolism.metabolism.SBMLCompartment, %d>" %
# self.comp_1._index)
# nt.assert_equal(repr(self.comp_2),
# "<pymetabolism.metabolism.metabolism.SBMLCompartment, %d>" %
# self.comp_2._index)
#
# def test__lt__(self):
# nt.assert_raises(NotImplementedError, self.comp_1.__lt__, (self.comp_2,))
#
# def test__le__(self):
# nt.assert_raises(NotImplementedError, self.comp_1.__le__, (self.comp_2,))
#
# def test__eq__(self):
# nt.assert_equal(self.comp_1, pymet.SBMLCompartment("foo"))
# nt.assert_equal(self.comp_2, pymet.SBMLCompartment("bar"))
#
# def test__ne__(self):
# for (x, y) in itertools.combinations([self.comp_1, self.comp_2], 2):
# nt.assert_not_equal(x, y)
#
# def test__gt__(self):
# nt.assert_raises(NotImplementedError, self.comp_1.__gt__, (self.comp_2,))
#
# def test__ge__(self):
# nt.assert_raises(NotImplementedError, self.comp_1.__ge__, (self.comp_2,))
#
# def test__cmp__(self):
# nt.assert_raises(NotImplementedError, self.comp_1.__cmp__, (self.comp_2,))
#
#class TestSBMLCompound:
#
# def setUp(self):
# self.comp_1 = pymet.SBMLCompound("foo")
# self.comp_2 = pymet.SBMLCompound("bar")
#
# def test__str__(self):
# nt.assert_equal(str(self.comp_1), "foo")
# nt.assert_equal(str(self.comp_2), "bar")
#
# def test__repr__(self):
# nt.assert_equal(repr(self.comp_1),
# "<pymetabolism.metabolism.metabolism.SBMLCompound, %d>" %
# self.comp_1._index)
# nt.assert_equal(repr(self.comp_2),
# "<pymetabolism.metabolism.metabolism.SBMLCompound, %d>" %
# self.comp_2._index)
#
# def test__lt__(self):
# nt.assert_raises(NotImplementedError, self.comp_1.__lt__, (self.comp_2,))
#
# def test__le__(self):
# nt.assert_raises(NotImplementedError, self.comp_1.__le__, (self.comp_2,))
#
# def test__eq__(self):
# nt.assert_equal(self.comp_1, pymet.SBMLCompound("foo"))
# nt.assert_equal(self.comp_2, pymet.SBMLCompound("bar"))
#
# def test__ne__(self):
# for (x, y) in itertools.combinations([self.comp_1, self.comp_2], 2):
# nt.assert_not_equal(x, y)
#
# def test__gt__(self):
# nt.assert_raises(NotImplementedError, self.comp_1.__gt__, (self.comp_2,))
#
# def test__ge__(self):
# nt.assert_raises(NotImplementedError, self.comp_1.__ge__, (self.comp_2,))
#
# def test__cmp__(self):
# nt.assert_raises(NotImplementedError, self.comp_1.__cmp__, (self.comp_2,))
#
# def test__contains__(self):
# nt.assert_raises(NotImplementedError, self.comp_1.__contains__, ("C",))
#
#
#class TestSBMLCompartmentCompound:
#
# def setUp(self):
# self.comp_1 = pymet.SBMLCompartmentCompound(pymet.SBMLCompound("foo"),
# pymet.SBMLCompartment("bar"))
# self.comp_2 = pymet.SBMLCompartmentCompound(pymet.SBMLCompound("crow"),
# pymet.SBMLCompartment("bar"))
#
# def test__str__(self):
# nt.assert_equal(str(self.comp_1), "foo(bar)")
# nt.assert_equal(str(self.comp_2), "crow(bar)")
#
# def test__repr__(self):
# nt.assert_equal(repr(self.comp_1),
# "<pymetabolism.metabolism.metabolism.SBMLCompartmentCompound, %d>" %
# self.comp_1._index)
# nt.assert_equal(repr(self.comp_2),
# "<pymetabolism.metabolism.metabolism.SBMLCompartmentCompound, %d>" %
# self.comp_2._index)
#
# def test__lt__(self):
# nt.assert_raises(NotImplementedError, self.comp_1.__lt__, (self.comp_2,))
#
# def test__le__(self):
# nt.assert_raises(NotImplementedError, self.comp_1.__le__, (self.comp_2,))
#
# def test__eq__(self):
# nt.assert_equal(self.comp_1, pymet.SBMLCompartmentCompound(
# pymet.SBMLCompound("foo"), pymet.SBMLCompartment("bar")))
# nt.assert_equal(self.comp_2, pymet.SBMLCompartmentCompound(
# pymet.SBMLCompound("crow"), pymet.SBMLCompartment("bar")))
#
# def test__ne__(self):
# for (x, y) in itertools.combinations([self.comp_1, self.comp_2], 2):
# nt.assert_not_equal(x, y)
#
# def test__gt__(self):
# nt.assert_raises(NotImplementedError, self.comp_1.__gt__, (self.comp_2,))
#
# def test__ge__(self):
# nt.assert_raises(NotImplementedError, self.comp_1.__ge__, (self.comp_2,))
#
# def test__cmp__(self):
# nt.assert_raises(NotImplementedError, self.comp_1.__cmp__, (self.comp_2,))
#
# def test__contains__(self):
# nt.assert_raises(NotImplementedError, self.comp_1.__contains__, ("C",))
#
#
#class TestSBMLReaction:
#
# def setUp(self):
# self.comp_1 = pymet.SBMLReaction("foo", {pymet.SBMLCompound("A"): 2,
# pymet.SBMLCompound("B"): 1}, {pymet.SBMLCompound("C"): 1})
# self.comp_2 = pymet.SBMLReaction("bar", {pymet.SBMLCompound("D"): 2,
# pymet.SBMLCompound("E"): 1}, {pymet.SBMLCompound("F"): 1}, True)
# self.comp_3 = pymet.SBMLReaction("snafu",
# {pymet.SBMLCompartmentCompound(pymet.SBMLCompound("X"),
# pymet.SBMLCompartment("cyt")): 2,
# pymet.SBMLCompartmentCompound(pymet.SBMLCompound("Y"),
# pymet.SBMLCompartment("cyt")): 1},
# {pymet.SBMLCompartmentCompound(pymet.SBMLCompound("Z"),
# pymet.SBMLCompartment("cyt")): 1})
#
## def test__str__(self):
## nt.assert_equal(str(self.comp_1), "foo")
## nt.assert_equal(str(self.comp_2), "bar")
## nt.assert_equal(str(self.comp_3), "SBMLReaction_%d" %
## self.comp_3._index)
#
# def test__repr__(self):
# nt.assert_equal(repr(self.comp_1),
# "<pymetabolism.metabolism.metabolism.SBMLReaction, %d>" %
# self.comp_1._index)
# nt.assert_equal(repr(self.comp_2),
# "<pymetabolism.metabolism.metabolism.SBMLReaction, %d>" %
# self.comp_2._index)
# nt.assert_equal(repr(self.comp_3),
# "<pymetabolism.metabolism.metabolism.SBMLReaction, %d>" %
# self.comp_3._index)
#
# def test__lt__(self):
# nt.assert_raises(NotImplementedError, self.comp_1.__lt__, (self.comp_2,))
#
# def test__le__(self):
# nt.assert_raises(NotImplementedError, self.comp_1.__le__, (self.comp_2,))
#
# def test__eq__(self):
# nt.assert_equal(self.comp_1, pymet.SBMLReaction("foo",
# {pymet.SBMLCompound("A"): 2, pymet.SBMLCompound("B"): 1},
# {pymet.SBMLCompound("C"): 1}))
# nt.assert_equal(self.comp_2, pymet.SBMLReaction("bar",
# {pymet.SBMLCompound("D"): 2, pymet.SBMLCompound("E"): 1},
# {pymet.SBMLCompound("F"): 1}, True))
# nt.assert_equal(self.comp_3, pymet.SBMLReaction("snafu",
# {pymet.SBMLCompartmentCompound(pymet.SBMLCompound("X"),
# pymet.SBMLCompartment("cyt")): 2,
# pymet.SBMLCompartmentCompound(pymet.SBMLCompound("Y"),
# pymet.SBMLCompartment("cyt")): 1},
# {pymet.SBMLCompartmentCompound(pymet.SBMLCompound("Z"),
# pymet.SBMLCompartment("cyt")): 1}))
#
# def test__ne__(self):
# for (x, y) in itertools.combinations([self.comp_1, self.comp_2,
# self.comp_3], 2):
# nt.assert_not_equal(x, y)
#
# def test__gt__(self):
# nt.assert_raises(NotImplementedError, self.comp_1.__gt__, (self.comp_2,))
#
# def test__ge__(self):
# nt.assert_raises(NotImplementedError, self.comp_1.__ge__, (self.comp_2,))
#
# def test__cmp__(self):
# nt.assert_raises(NotImplementedError, self.comp_1.__cmp__, (self.comp_2,))
#
# def test_reversibility(self):
# nt.assert_false(self.comp_1.reversible)
# nt.assert_true(self.comp_2.reversible)
# nt.assert_equal(self.comp_3.reversible, self.comp_1.reversible)
#
#
#class TestMetabolicSystem(object):
#
# def __init__(self):
# self.options = OptionsManager()
# self.options.reversible_suffix = "r"
# self.parser = self.options.get_parser()
# self.system = self.parser.parse(os.path.join(os.path.dirname(__file__),
# "..", "..", "tests", "data", "Ec_core_flux1.xml"))
# self.empty = None
#
# def setup(self):
# self.empty = pymet.MetabolicSystem()
#
# def test_add(self):
# c = pymet.BasicCompound()
# self.empty.add(c)
# sc = pymet.SBMLCompound("foo")
# self.empty.add(sc)
# sc2 = pymet.SBMLCompound("man")
# self.empty.add(sc2)
# co = pymet.SBMLCompartment("choo")
# self.empty.add(co)
# r = pymet.BasicReaction()
# self.empty.add(r)
# sr = pymet.SBMLReaction("stuff", substrates={sc2: 2}, products={sc: 1})
# self.empty.add(sr)
# nt.assert_equals(len(self.empty.compounds), 3)
# nt.assert_equals(len(self.empty.reactions), 2)
# nt.assert_equals(len(self.empty.compartments), 1)
# self.empty.add(sc)
# nt.assert_equals(len(self.empty.compounds), 3)
# nt.assert_raises(PyMetabolismError, self.empty.add, 4)
#
# def test_update(self):
# c = pymet.BasicCompound()
# sc = pymet.SBMLCompound("foo")
# sc2 = pymet.SBMLCompound("man")
# self.empty.update([c], type(c))
# self.empty.update([sc, sc2], type(sc))
# co = pymet.SBMLCompartment("choo")
# self.empty.update([co], type(co))
# r = pymet.BasicReaction()
# sr = pymet.SBMLReaction("stuff", substrates={sc2: 2}, products={sc: 1})
# self.empty.update([r], type(r))
# self.empty.update([sr], type(sr))
# nt.assert_equals(len(self.empty.compounds), 3)
# nt.assert_equals(len(self.empty.reactions), 2)
# nt.assert_equals(len(self.empty.compartments), 1)
# self.empty.update([c], type(c))
# nt.assert_equals(len(self.empty.compounds), 3)
# nt.assert_raises(PyMetabolismError, self.empty.update, [4], type(4))
#
# def test_verify_consistency(self):
# nt.assert_false(self.system.verify_consistency())
# r = pymet.SBMLReaction("Biomass_Ecoli_core_N__w_GAM_", {}, {})
# self.system.reactions.remove(r)
# nt.assert_true(self.system.verify_consistency())
# self.system = self.parser.parse(os.path.join(os.path.dirname(__file__),
# "..", "..", "tests", "data", "Ec_core_flux1.xml"))
#
# def test_detect_unconserved_metabolites(self):
# pass
#
# def test_generate_fba_model(self):
# pass
#
# def test_generate_network(self):
# network = self.system.generate_network(stoichiometric_coefficients=True)
# nt.assert_equals(network.order(), len(self.system.compounds) +
# len(self.system.reactions))
# nt.assert_equals(network.size(), sum(len(rxn) for rxn in\
# self.system.reactions))
#
| Midnighter/pymetabolism | pymetabolism/metabolism/tests/test_metabolism.py | Python | bsd-3-clause | 13,210 |
from selenium import webdriver
from selenium.webdriver.common.by import By
import time
baseUrl = "https://forum-testing.herokuapp.com/v1.0/demo"
driver = webdriver.Firefox()
driver.maximize_window()
driver.get(baseUrl)
# Find parent handle -> Main Window
parentHandle = driver.current_window_handle
print("Parent Handle: " + parentHandle)
# Find open window button and click it
driver.find_element(By.ID, "openwindow").click()
time.sleep(2)
# Find all handles; there should be two handles after clicking the open window button
handles = driver.window_handles
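# Note: the ordering of window_handles is not guaranteed, so identify the new
# window by comparing each handle against the parent handle.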
# Switch to window and search course
for handle in handles:
print("Handle: " + handle)
    if handle != parentHandle:
driver.switch_to.window(handle)
print("Switched to window:: " + handle)
searchBox = driver.find_element(By.ID, "create_user")
searchBox.click()
time.sleep(2)
driver.close()
break
# Switch back to the parent handle
driver.switch_to.window(parentHandle)
driver.find_element(By.ID, "name").send_keys("Test Successful")
time.sleep(2)
driver.quit()
| twiindan/selenium_lessons | 04_Selenium/05_switch_window_iframe/switch_to_window.py | Python | apache-2.0 | 1,087 |
# Copyright 2015 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
from __future__ import print_function
from basics.inventory import device_control, inventory_mounted, mounted, device_dismount, inventory_unmounted
from helpers import mount_from_settings
from unittest.case import TestCase
from unittest import main
class Test(TestCase):
def setUp(self):
mounted_list = inventory_mounted()
if mounted_list:
for device_name in mounted_list:
device_dismount(device_name)
self.assertFalse(mounted(device_name), 'Expected dismount: ' + device_name)
def test_device_mount(self):
device_names = inventory_unmounted()
self.assertTrue(device_names, 'One or more devices must be configured.')
for device_name in device_names:
expected = mount_from_settings(device_name)
self.assertTrue(mounted(device_name), 'Expected mounted: ' + device_name)
actual = device_control(device_name)
self.assertEqual(device_name, actual.device_name)
self.assertEqual(expected.device_name, actual.device_name)
self.assertEqual(expected.address, actual.address)
self.assertEqual(expected.port, actual.port)
self.assertEqual(expected.username, actual.username)
self.assertEqual(expected.password, actual.password)
if __name__ == '__main__':
main()
| tbarrongh/cosc-learning-labs | test/test_device_mount.py | Python | apache-2.0 | 1,963 |
# Copyright 2009-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you
# may not use this file except in compliance with the License. You
# may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Tools for connecting to MongoDB.
.. seealso:: :doc:`/examples/high_availability` for examples of connecting
to replica sets or sets of mongos servers.
To get a :class:`~pymongo.database.Database` instance from a
:class:`MongoClient` use either dictionary-style or attribute-style
access:
.. doctest::
>>> from pymongo import MongoClient
>>> c = MongoClient()
>>> c.test_database
Database(MongoClient('localhost', 27017), u'test_database')
>>> c['test-database']
Database(MongoClient('localhost', 27017), u'test-database')
"""
import contextlib
import datetime
import threading
import warnings
import weakref
from collections import defaultdict
from bson.py3compat import (integer_types,
string_type)
from pymongo import (common,
database,
message,
periodic_executor,
uri_parser)
from pymongo.client_options import ClientOptions
from pymongo.cursor_manager import CursorManager
from pymongo.errors import (AutoReconnect,
ConfigurationError,
ConnectionFailure,
InvalidOperation,
InvalidURI,
NetworkTimeout,
NotMasterError,
OperationFailure)
from pymongo.read_preferences import ReadPreference
from pymongo.server_selectors import (writable_preferred_server_selector,
writable_server_selector)
from pymongo.server_type import SERVER_TYPE
from pymongo.topology import Topology
from pymongo.topology_description import TOPOLOGY_TYPE
from pymongo.settings import TopologySettings
class MongoClient(common.BaseObject):
HOST = "localhost"
PORT = 27017
# Define order to retrieve options from ClientOptions for __repr__.
# No host/port; these are retrieved from TopologySettings.
_constructor_args = ('document_class', 'tz_aware', 'connect')
def __init__(
self,
host=None,
port=None,
document_class=dict,
tz_aware=False,
connect=True,
**kwargs):
"""Client for a MongoDB instance, a replica set, or a set of mongoses.
The client object is thread-safe and has connection-pooling built in.
If an operation fails because of a network error,
:class:`~pymongo.errors.ConnectionFailure` is raised and the client
reconnects in the background. Application code should handle this
exception (recognizing that the operation failed) and then continue to
execute.
The `host` parameter can be a full `mongodb URI
<http://dochub.mongodb.org/core/connections>`_, in addition to
a simple hostname. It can also be a list of hostnames or
URIs. Any port specified in the host string(s) will override
the `port` parameter. If multiple mongodb URIs containing
database or auth information are passed, the last database,
username, and password present will be used. For username and
passwords reserved characters like ':', '/', '+' and '@' must be
escaped following RFC 2396.
:Parameters:
- `host` (optional): hostname or IP address of the
instance to connect to, or a mongodb URI, or a list of
hostnames / mongodb URIs. If `host` is an IPv6 literal
it must be enclosed in '[' and ']' characters following
the RFC2732 URL syntax (e.g. '[::1]' for localhost)
- `port` (optional): port number on which to connect
- `document_class` (optional): default class to use for
documents returned from queries on this client
- `tz_aware` (optional): if ``True``,
:class:`~datetime.datetime` instances returned as values
in a document by this :class:`MongoClient` will be timezone
aware (otherwise they will be naive)
- `connect` (optional): if ``True`` (the default), immediately
begin connecting to MongoDB in the background. Otherwise connect
on the first operation.
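        A few illustrative connection forms (host names are placeholders)::
            MongoClient('mongodb://host1:27017,host2:27017/?replicaSet=rs0')
            MongoClient('example.com', 27017, connect=False)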
| **Other optional parameters can be passed as keyword arguments:**
- `maxPoolSize` (optional): The maximum number of connections
that the pool will open simultaneously. If this is set, operations
will block if there are `maxPoolSize` outstanding connections
from the pool. Defaults to 100. Cannot be 0.
- `socketTimeoutMS`: (integer or None) Controls how long (in
milliseconds) the driver will wait for a response after sending an
ordinary (non-monitoring) database operation before concluding that
a network error has occurred. Defaults to ``None`` (no timeout).
- `connectTimeoutMS`: (integer or None) Controls how long (in
milliseconds) the driver will wait during server monitoring when
connecting a new socket to a server before concluding the server
is unavailable. Defaults to ``20000`` (20 seconds).
- `serverSelectionTimeoutMS`: (integer) Controls how long (in
milliseconds) the driver will wait to find an available,
appropriate server to carry out a database operation; while it is
waiting, multiple server monitoring operations may be carried out,
each controlled by `connectTimeoutMS`. Defaults to ``30000`` (30
seconds).
- `waitQueueTimeoutMS`: (integer or None) How long (in milliseconds)
a thread will wait for a socket from the pool if the pool has no
free sockets. Defaults to ``None`` (no timeout).
- `waitQueueMultiple`: (integer or None) Multiplied by maxPoolSize
to give the number of threads allowed to wait for a socket at one
time. Defaults to ``None`` (no limit).
- `socketKeepAlive`: (boolean) Whether to send periodic keep-alive
packets on connected sockets. Defaults to ``False`` (do not send
keep-alive packets).
| **Write Concern options:**
| (Only set if passed. No default values.)
- `w`: (integer or string) If this is a replica set, write operations
will block until they have been replicated to the specified number
or tagged set of servers. `w=<int>` always includes the replica set
primary (e.g. w=3 means write to the primary and wait until
replicated to **two** secondaries). Passing w=0 **disables write
acknowledgement** and all other write concern options.
- `wtimeout`: (integer) Used in conjunction with `w`. Specify a value
in milliseconds to control how long to wait for write propagation
to complete. If replication does not complete in the given
timeframe, a timeout exception is raised.
- `j`: If ``True`` block until write operations have been committed
to the journal. Cannot be used in combination with `fsync`. Prior
to MongoDB 2.6 this option was ignored if the server was running
without journaling. Starting with MongoDB 2.6 write operations will
fail with an exception if this option is used when the server is
running without journaling.
- `fsync`: If ``True`` and the server is running without journaling,
blocks until the server has synced all data files to disk. If the
server is running with journaling, this acts the same as the `j`
option, blocking until write operations have been committed to the
journal. Cannot be used in combination with `j`.
| **Replica set keyword arguments for connecting with a replica set
- either directly or via a mongos:**
- `replicaSet`: (string or None) The name of the replica set to
connect to. The driver will verify that all servers it connects to
match this name. Implies that the hosts specified are a seed list
and the driver should attempt to find all members of the set.
Defaults to ``None``.
- `read_preference`: The read preference for this client. If
connecting directly to a secondary then a read preference mode
*other* than PRIMARY is required - otherwise all queries will throw
:class:`~pymongo.errors.AutoReconnect` "not master".
See :class:`~pymongo.read_preferences.ReadPreference` for all
available read preference options. Defaults to ``PRIMARY``.
| **SSL configuration:**
- `ssl`: If ``True``, create the connection to the server using SSL.
Defaults to ``False``.
- `ssl_keyfile`: The private keyfile used to identify the local
connection against mongod. If included with the ``certfile`` then
only the ``ssl_certfile`` is needed. Implies ``ssl=True``.
Defaults to ``None``.
- `ssl_certfile`: The certificate file used to identify the local
connection against mongod. Implies ``ssl=True``. Defaults to
``None``.
- `ssl_cert_reqs`: Specifies whether a certificate is required from
the other side of the connection, and whether it will be validated
if provided. It must be one of the three values ``ssl.CERT_NONE``
(certificates ignored), ``ssl.CERT_OPTIONAL``
(not required, but validated if provided), or ``ssl.CERT_REQUIRED``
(required and validated). If the value of this parameter is not
``ssl.CERT_NONE`` and a value is not provided for ``ssl_ca_certs``
PyMongo will attempt to load system provided CA certificates.
If the python version in use does not support loading system CA
certificates then the ``ssl_ca_certs`` parameter must point
to a file of CA certificates. Implies ``ssl=True``. Defaults to
``ssl.CERT_REQUIRED`` if not provided and ``ssl=True``.
- `ssl_ca_certs`: The ca_certs file contains a set of concatenated
"certification authority" certificates, which are used to validate
certificates passed from the other end of the connection.
Implies ``ssl=True``. Defaults to ``None``.
- `ssl_match_hostname`: If ``True`` (the default), and
`ssl_cert_reqs` is not ``ssl.CERT_NONE``, enables hostname
verification using the :func:`~ssl.match_hostname` function from
python's :mod:`~ssl` module. Think very carefully before setting
this to ``False`` as that could make your application vulnerable to
man-in-the-middle attacks.
.. mongodoc:: connections
.. versionchanged:: 3.0
:class:`~pymongo.mongo_client.MongoClient` is now the one and only
client class for a standalone server, mongos, or replica set.
It includes the functionality that had been split into
:class:`~pymongo.mongo_client.MongoReplicaSetClient`: it can connect
to a replica set, discover all its members, and monitor the set for
stepdowns, elections, and reconfigs.
The :class:`~pymongo.mongo_client.MongoClient` constructor no
longer blocks while connecting to the server or servers, and it no
longer raises :class:`~pymongo.errors.ConnectionFailure` if they
are unavailable, nor :class:`~pymongo.errors.ConfigurationError`
if the user's credentials are wrong. Instead, the constructor
returns immediately and launches the connection process on
background threads.
Therefore the ``alive`` method is removed since it no longer
provides meaningful information; even if the client is disconnected,
it may discover a server in time to fulfill the next operation.
In PyMongo 2.x, :class:`~pymongo.MongoClient` accepted a list of
standalone MongoDB servers and used the first it could connect to::
MongoClient(['host1.com:27017', 'host2.com:27017'])
A list of multiple standalones is no longer supported; if multiple
servers are listed they must be members of the same replica set, or
mongoses in the same sharded cluster.
The behavior for a list of mongoses is changed from "high
availability" to "load balancing". Before, the client connected to
the lowest-latency mongos in the list, and used it until a network
error prompted it to re-evaluate all mongoses' latencies and
reconnect to one of them. In PyMongo 3, the client monitors its
network latency to all the mongoses continuously, and distributes
operations evenly among those with the lowest latency. See
:ref:`mongos-load-balancing` for more information.
The ``connect`` option is added.
The ``start_request``, ``in_request``, and ``end_request`` methods
are removed, as well as the ``auto_start_request`` option.
The ``copy_database`` method is removed, see the
:doc:`copy_database examples </examples/copydb>` for alternatives.
The :meth:`MongoClient.disconnect` method is removed; it was a
synonym for :meth:`~pymongo.MongoClient.close`.
:class:`~pymongo.mongo_client.MongoClient` no longer returns an
instance of :class:`~pymongo.database.Database` for attribute names
with leading underscores. You must use dict-style lookups instead::
client['__my_database__']
Not::
client.__my_database__
"""
if host is None:
host = self.HOST
if isinstance(host, string_type):
host = [host]
if port is None:
port = self.PORT
if not isinstance(port, int):
raise TypeError("port must be an instance of int")
seeds = set()
username = None
password = None
dbase = None
opts = {}
for entity in host:
if "://" in entity:
if entity.startswith("mongodb://"):
res = uri_parser.parse_uri(entity, port, False)
seeds.update(res["nodelist"])
username = res["username"] or username
password = res["password"] or password
dbase = res["database"] or dbase
opts = res["options"]
else:
idx = entity.find("://")
raise InvalidURI("Invalid URI scheme: "
"%s" % (entity[:idx],))
else:
seeds.update(uri_parser.split_hosts(entity, port))
if not seeds:
raise ConfigurationError("need to specify at least one host")
# _pool_class, _monitor_class, and _condition_class are for deep
# customization of PyMongo, e.g. Motor.
pool_class = kwargs.pop('_pool_class', None)
monitor_class = kwargs.pop('_monitor_class', None)
condition_class = kwargs.pop('_condition_class', None)
opts['document_class'] = document_class
opts['tz_aware'] = tz_aware
opts['connect'] = connect
opts.update(kwargs)
self.__options = options = ClientOptions(
username, password, dbase, opts)
self.__default_database_name = dbase
self.__lock = threading.Lock()
self.__cursor_manager = CursorManager(self)
self.__kill_cursors_queue = []
# Cache of existing indexes used by ensure_index ops.
self.__index_cache = {}
super(MongoClient, self).__init__(options.codec_options,
options.read_preference,
options.write_concern)
self.__all_credentials = {}
creds = options.credentials
if creds:
self._cache_credentials(creds.source, creds)
self._topology_settings = TopologySettings(
seeds=seeds,
replica_set_name=options.replica_set_name,
pool_class=pool_class,
pool_options=options.pool_options,
monitor_class=monitor_class,
condition_class=condition_class,
local_threshold_ms=options.local_threshold_ms,
server_selection_timeout=options.server_selection_timeout)
self._topology = Topology(self._topology_settings)
if connect:
self._topology.open()
def target():
client = self_ref()
if client is None:
return False # Stop the executor.
MongoClient._process_kill_cursors_queue(client)
return True
executor = periodic_executor.PeriodicExecutor(
condition_class=self._topology_settings.condition_class,
interval=common.KILL_CURSOR_FREQUENCY,
min_interval=0,
target=target)
# We strongly reference the executor and it weakly references us via
# this closure. When the client is freed, stop the executor soon.
self_ref = weakref.ref(self, executor.close)
self._kill_cursors_executor = executor
executor.open()
def _cache_credentials(self, source, credentials, connect=False):
"""Save a set of authentication credentials.
The credentials are used to login a socket whenever one is created.
If `connect` is True, verify the credentials on the server first.
"""
# Don't let other threads affect this call's data.
all_credentials = self.__all_credentials.copy()
if source in all_credentials:
# Nothing to do if we already have these credentials.
if credentials == all_credentials[source]:
return
raise OperationFailure('Another user is already authenticated '
'to this database. You must logout first.')
if connect:
server = self._get_topology().select_server(
writable_preferred_server_selector)
# get_socket() logs out of the database if logged in with old
# credentials, and logs in with new ones.
with server.get_socket(all_credentials) as sock_info:
sock_info.authenticate(credentials)
# If several threads run _cache_credentials at once, last one wins.
self.__all_credentials[source] = credentials
def _purge_credentials(self, source):
"""Purge credentials from the authentication cache."""
self.__all_credentials.pop(source, None)
def _cached(self, dbname, coll, index):
"""Test if `index` is cached."""
cache = self.__index_cache
now = datetime.datetime.utcnow()
return (dbname in cache and
coll in cache[dbname] and
index in cache[dbname][coll] and
now < cache[dbname][coll][index])
def _cache_index(self, dbname, collection, index, cache_for):
"""Add an index to the index cache for ensure_index operations."""
now = datetime.datetime.utcnow()
expire = datetime.timedelta(seconds=cache_for) + now
        if dbname not in self.__index_cache:
self.__index_cache[dbname] = {}
self.__index_cache[dbname][collection] = {}
self.__index_cache[dbname][collection][index] = expire
elif collection not in self.__index_cache[dbname]:
self.__index_cache[dbname][collection] = {}
self.__index_cache[dbname][collection][index] = expire
else:
self.__index_cache[dbname][collection][index] = expire
def _purge_index(self, database_name,
collection_name=None, index_name=None):
"""Purge an index from the index cache.
If `index_name` is None purge an entire collection.
If `collection_name` is None purge an entire database.
"""
        if database_name not in self.__index_cache:
return
if collection_name is None:
del self.__index_cache[database_name]
return
        if collection_name not in self.__index_cache[database_name]:
return
if index_name is None:
del self.__index_cache[database_name][collection_name]
return
if index_name in self.__index_cache[database_name][collection_name]:
del self.__index_cache[database_name][collection_name][index_name]
def _server_property(self, attr_name, default=None):
"""An attribute of the current server's description.
Returns "default" while there is no current server, primary, or mongos.
Not threadsafe if used multiple times in a single method, since
the server may change. In such cases, store a local reference to a
ServerDescription first, then use its properties.
"""
try:
server = self._topology.select_server(
writable_server_selector, server_selection_timeout=0)
return getattr(server.description, attr_name)
except ConnectionFailure:
return default
@property
def address(self):
"""(host, port) of the current standalone, primary, or mongos, or None.
Accessing :attr:`address` raises :exc:`~.errors.InvalidOperation` if
the client is load-balancing among mongoses, since there is no single
address. Use :attr:`nodes` instead.
.. versionadded:: 3.0
"""
try:
return self._topology.get_direct_or_primary()
except InvalidOperation:
# Only one case where Topology throws InvalidOperation.
raise InvalidOperation(
'Cannot use "address" property when load balancing among'
' mongoses, use "nodes" instead.')
@property
def primary(self):
"""The (host, port) of the current primary of the replica set.
Returns ``None`` if this client is not connected to a replica set,
there is no primary, or this client was created without the
`replicaSet` option.
.. versionadded:: 3.0
MongoClient gained this property in version 3.0 when
MongoReplicaSetClient's functionality was merged in.
"""
return self._topology.get_primary()
@property
def secondaries(self):
"""The secondary members known to this client.
A sequence of (host, port) pairs. Empty if this client is not
connected to a replica set, there are no visible secondaries, or this
client was created without the `replicaSet` option.
.. versionadded:: 3.0
MongoClient gained this property in version 3.0 when
MongoReplicaSetClient's functionality was merged in.
"""
return self._topology.get_secondaries()
@property
def arbiters(self):
"""Arbiters in the replica set.
A sequence of (host, port) pairs. Empty if this client is not
connected to a replica set, there are no arbiters, or this client was
created without the `replicaSet` option.
"""
return self._topology.get_arbiters()
@property
def is_primary(self):
"""If this client if connected to a server that can accept writes.
True if the current server is a standalone, mongos, or the primary of
a replica set.
"""
return self._server_property('is_writable', False)
@property
def is_mongos(self):
"""If this client is connected to mongos.
"""
return self._server_property('server_type') == SERVER_TYPE.Mongos
@property
def max_pool_size(self):
"""The maximum number of sockets the pool will open concurrently.
When the pool has reached `max_pool_size`, operations block waiting for
a socket to be returned to the pool. If ``waitQueueTimeoutMS`` is set,
a blocked operation will raise :exc:`~pymongo.errors.ConnectionFailure`
after a timeout. By default ``waitQueueTimeoutMS`` is not set.
"""
return self.__options.pool_options.max_pool_size
@property
def nodes(self):
"""Set of all currently connected servers.
.. warning:: When connected to a replica set the value of :attr:`nodes`
can change over time as :class:`MongoClient`'s view of the replica
set changes. :attr:`nodes` can also be an empty set when
:class:`MongoClient` is first instantiated and hasn't yet connected
to any servers, or a network partition causes it to lose connection
to all servers.
"""
description = self._topology.description
return frozenset(s.address for s in description.known_servers)
@property
def max_bson_size(self):
"""The largest BSON object the connected server accepts in bytes.
Defaults to 16MB if not connected to a server.
"""
return self._server_property('max_bson_size', common.MAX_BSON_SIZE)
@property
def max_message_size(self):
"""The largest message the connected server accepts in bytes.
Defaults to 32MB if not connected to a server.
"""
return self._server_property(
'max_message_size', common.MAX_MESSAGE_SIZE)
@property
def max_write_batch_size(self):
"""The maxWriteBatchSize reported by the server.
Returns a default value when connected to server versions prior to
MongoDB 2.6.
"""
return self._server_property(
'max_write_batch_size', common.MAX_WRITE_BATCH_SIZE)
@property
def local_threshold_ms(self):
"""The local threshold for this instance."""
return self.__options.local_threshold_ms
@property
def server_selection_timeout(self):
"""The server selection timeout for this instance in seconds."""
return self.__options.server_selection_timeout
def _is_writable(self):
"""Attempt to connect to a writable server, or return False.
"""
topology = self._get_topology() # Starts monitors if necessary.
try:
svr = topology.select_server(writable_server_selector)
# When directly connected to a secondary, arbiter, etc.,
# select_server returns it, whatever the selector. Check
# again if the server is writable.
return svr.description.is_writable
except ConnectionFailure:
return False
def close(self):
"""Disconnect from MongoDB.
Close all sockets in the connection pools and stop the monitor threads.
If this instance is used again it will be automatically re-opened and
the threads restarted.
"""
self._topology.close()
def set_cursor_manager(self, manager_class):
"""Set this client's cursor manager.
Raises :class:`TypeError` if `manager_class` is not a subclass of
:class:`~pymongo.cursor_manager.CursorManager`. A cursor manager
handles closing cursors. Different managers can implement different
policies in terms of when to actually kill a cursor that has
been closed.
:Parameters:
- `manager_class`: cursor manager to use
.. versionchanged:: 3.0
Undeprecated.
"""
manager = manager_class(self)
if not isinstance(manager, CursorManager):
raise TypeError("manager_class must be a subclass of "
"CursorManager")
self.__cursor_manager = manager
def _get_topology(self):
"""Get the internal :class:`~pymongo.topology.Topology` object.
If this client was created with "connect=False", calling _get_topology
launches the connection process in the background.
"""
self._topology.open()
return self._topology
@contextlib.contextmanager
def _get_socket(self, selector):
server = self._get_topology().select_server(selector)
try:
with server.get_socket(self.__all_credentials) as sock_info:
yield sock_info
except NetworkTimeout:
# The socket has been closed. Don't reset the server.
# Server Discovery And Monitoring Spec: "When an application
# operation fails because of any network error besides a socket
# timeout...."
raise
except NotMasterError:
# "When the client sees a "not master" error it MUST replace the
# server's description with type Unknown. It MUST request an
# immediate check of the server."
self._reset_server_and_request_check(server.description.address)
raise
except ConnectionFailure:
# "Client MUST replace the server's description with type Unknown
# ... MUST NOT request an immediate check of the server."
self.__reset_server(server.description.address)
raise
def _socket_for_writes(self):
return self._get_socket(writable_server_selector)
@contextlib.contextmanager
def _socket_for_reads(self, read_preference):
preference = read_preference or ReadPreference.PRIMARY
# Get a socket for a server matching the read preference, and yield
# sock_info, slave_ok. Server Selection Spec: "slaveOK must be sent to
# mongods with topology type Single. If the server type is Mongos,
# follow the rules for passing read preference to mongos, even for
# topology type Single."
# Thread safe: if the type is single it cannot change.
topology = self._get_topology()
single = topology.description.topology_type == TOPOLOGY_TYPE.Single
with self._get_socket(read_preference) as sock_info:
slave_ok = (single and not sock_info.is_mongos) or (
preference != ReadPreference.PRIMARY)
yield sock_info, slave_ok
def _send_message_with_response(self, operation, read_preference=None,
exhaust=False, address=None):
"""Send a message to MongoDB and return a Response.
:Parameters:
- `operation`: a _Query or _GetMore object.
- `read_preference` (optional): A ReadPreference.
- `exhaust` (optional): If True, the socket used stays checked out.
It is returned along with its Pool in the Response.
- `address` (optional): Optional address when sending a message
to a specific server, used for getMore.
"""
with self.__lock:
# If needed, restart kill-cursors thread after a fork.
self._kill_cursors_executor.open()
topology = self._get_topology()
if address:
server = topology.select_server_by_address(address)
if not server:
raise AutoReconnect('server %s:%d no longer available'
% address)
else:
selector = read_preference or writable_server_selector
server = topology.select_server(selector)
# A _Query's slaveOk bit is already set for queries with non-primary
# read preference. If this is a direct connection to a mongod, override
# and *always* set the slaveOk bit. See bullet point 2 in
# server-selection.rst#topology-type-single.
set_slave_ok = (
topology.description.topology_type == TOPOLOGY_TYPE.Single
and server.description.server_type != SERVER_TYPE.Mongos)
return self._reset_on_error(
server,
server.send_message_with_response,
operation,
set_slave_ok,
self.__all_credentials,
exhaust)
def _reset_on_error(self, server, func, *args, **kwargs):
"""Execute an operation. Reset the server on network error.
Returns fn()'s return value on success. On error, clears the server's
pool and marks the server Unknown.
Re-raises any exception thrown by fn().
"""
try:
return func(*args, **kwargs)
except NetworkTimeout:
# The socket has been closed. Don't reset the server.
raise
except ConnectionFailure:
self.__reset_server(server.description.address)
raise
def __reset_server(self, address):
"""Clear our connection pool for a server and mark it Unknown."""
self._topology.reset_server(address)
def _reset_server_and_request_check(self, address):
"""Clear our pool for a server, mark it Unknown, and check it soon."""
self._topology.reset_server_and_request_check(address)
def __eq__(self, other):
if isinstance(other, self.__class__):
return self.address == other.address
return NotImplemented
def __ne__(self, other):
return not self == other
def _repr_helper(self):
def option_repr(option, value):
"""Fix options whose __repr__ isn't usable in a constructor."""
if option == 'document_class':
if value is dict:
return 'document_class=dict'
else:
return 'document_class=%s.%s' % (value.__module__,
value.__name__)
return '%s=%r' % (option, value)
# Host first...
options = ['host=%r' % [
'%s:%d' % (host, port)
for host, port in self._topology_settings.seeds]]
# ... then everything in self._constructor_args...
options.extend(
option_repr(key, self.__options._options[key])
for key in self._constructor_args)
# ... then everything else.
options.extend(
option_repr(key, self.__options._options[key])
for key in self.__options._options
if key not in set(self._constructor_args))
return ', '.join(options)
def __repr__(self):
return ("MongoClient(%s)" % (self._repr_helper(),))
def __getattr__(self, name):
"""Get a database by name.
Raises :class:`~pymongo.errors.InvalidName` if an invalid
database name is used.
:Parameters:
- `name`: the name of the database to get
"""
if name.startswith('_'):
raise AttributeError(
"MongoClient has no attribute %r. To access the %s"
" database, use client[%r]." % (name, name, name))
return self.__getitem__(name)
def __getitem__(self, name):
"""Get a database by name.
Raises :class:`~pymongo.errors.InvalidName` if an invalid
database name is used.
:Parameters:
- `name`: the name of the database to get
"""
return database.Database(self, name)
def close_cursor(self, cursor_id, address=None):
"""Close a single database cursor.
Raises :class:`TypeError` if `cursor_id` is not an instance of
``(int, long)``. What closing the cursor actually means
depends on this client's cursor manager.
:Parameters:
- `cursor_id`: id of cursor to close
- `address` (optional): (host, port) pair of the cursor's server.
If it is not provided, the client attempts to close the cursor on
the primary or standalone, or a mongos server.
.. versionchanged:: 3.0
Added ``address`` parameter.
"""
if not isinstance(cursor_id, integer_types):
raise TypeError("cursor_id must be an instance of (int, long)")
self.__cursor_manager.close(cursor_id, address)
def kill_cursors(self, cursor_ids, address=None):
"""Send a kill cursors message soon with the given ids.
Raises :class:`TypeError` if `cursor_ids` is not an instance of
``list``.
This method may be called from a :class:`~pymongo.cursor.Cursor`
destructor during garbage collection, so it isn't safe to take a
lock or do network I/O. Instead, we schedule the cursor to be closed
soon on a background thread.
:Parameters:
- `cursor_ids`: list of cursor ids to kill
- `address` (optional): (host, port) pair of the cursor's server.
If it is not provided, the client attempts to close the cursor on
the primary or standalone, or a mongos server.
.. versionchanged:: 3.0
Now accepts an `address` argument. Schedules the cursors to be
closed on a background thread instead of sending the message
immediately.
"""
if not isinstance(cursor_ids, list):
raise TypeError("cursor_ids must be a list")
# "Atomic", needs no lock.
self.__kill_cursors_queue.append((address, cursor_ids))
# This method is run periodically by a background thread.
def _process_kill_cursors_queue(self):
"""Process any pending kill cursors requests."""
address_to_cursor_ids = defaultdict(list)
# Other threads or the GC may append to the queue concurrently.
while True:
try:
address, cursor_ids = self.__kill_cursors_queue.pop()
except IndexError:
break
address_to_cursor_ids[address].extend(cursor_ids)
        # Don't re-open the topology if it's closed and there are no pending cursors.
if address_to_cursor_ids:
topology = self._get_topology()
for address, cursor_ids in address_to_cursor_ids.items():
try:
if address:
server = topology.select_server_by_address(address)
else:
# Application called close_cursor() with no address.
server = topology.select_server(
writable_server_selector)
server.send_message(message.kill_cursors(cursor_ids),
self.__all_credentials)
except ConnectionFailure as exc:
warnings.warn("couldn't close cursor on %s: %s"
% (address, exc))
def server_info(self):
"""Get information about the MongoDB server we're connected to."""
return self.admin.command("buildinfo",
read_preference=ReadPreference.PRIMARY)
def database_names(self):
"""Get a list of the names of all databases on the connected server."""
return [db["name"] for db in
self.admin.command(
"listDatabases",
read_preference=ReadPreference.PRIMARY)["databases"]]
def drop_database(self, name_or_database):
"""Drop a database.
Raises :class:`TypeError` if `name_or_database` is not an instance of
:class:`basestring` (:class:`str` in python 3) or
:class:`~pymongo.database.Database`.
:Parameters:
- `name_or_database`: the name of a database to drop, or a
:class:`~pymongo.database.Database` instance representing the
database to drop
"""
name = name_or_database
if isinstance(name, database.Database):
name = name.name
if not isinstance(name, string_type):
raise TypeError("name_or_database must be an instance "
"of %s or a Database" % (string_type.__name__,))
self._purge_index(name)
self[name].command("dropDatabase",
read_preference=ReadPreference.PRIMARY)
def get_default_database(self):
"""Get the database named in the MongoDB connection URI.
>>> uri = 'mongodb://host/my_database'
>>> client = MongoClient(uri)
>>> db = client.get_default_database()
>>> assert db.name == 'my_database'
Useful in scripts where you want to choose which database to use
based only on the URI in a configuration file.
"""
if self.__default_database_name is None:
raise ConfigurationError('No default database defined')
return self[self.__default_database_name]
def get_database(self, name, codec_options=None,
read_preference=None, write_concern=None):
"""Get a :class:`~pymongo.database.Database` with the given name and
options.
Useful for creating a :class:`~pymongo.database.Database` with
different codec options, read preference, and/or write concern from
this :class:`MongoClient`.
>>> client.read_preference
Primary()
>>> db1 = client.test
>>> db1.read_preference
Primary()
>>> from pymongo import ReadPreference
>>> db2 = client.get_database(
... 'test', read_preference=ReadPreference.SECONDARY)
>>> db2.read_preference
Secondary(tag_sets=None)
:Parameters:
- `name`: The name of the database - a string.
- `codec_options` (optional): An instance of
:class:`~bson.codec_options.CodecOptions`. If ``None`` (the
default) the :attr:`codec_options` of this :class:`MongoClient` is
used.
- `read_preference` (optional): The read preference to use. If
``None`` (the default) the :attr:`read_preference` of this
:class:`MongoClient` is used. See :mod:`~pymongo.read_preferences`
for options.
- `write_concern` (optional): An instance of
:class:`~pymongo.write_concern.WriteConcern`. If ``None`` (the
default) the :attr:`write_concern` of this :class:`MongoClient` is
used.
"""
return database.Database(
self, name, codec_options, read_preference, write_concern)
@property
def is_locked(self):
"""Is this server locked? While locked, all write operations
are blocked, although read operations may still be allowed.
Use :meth:`unlock` to unlock.
"""
ops = self.admin.current_op()
return bool(ops.get('fsyncLock', 0))
def fsync(self, **kwargs):
"""Flush all pending writes to datafiles.
:Parameters:
Optional parameters can be passed as keyword arguments:
- `lock`: If True lock the server to disallow writes.
- `async`: If True don't block while synchronizing.
.. warning:: `async` and `lock` can not be used together.
.. warning:: MongoDB does not support the `async` option
on Windows and will raise an exception on that
platform.
"""
self.admin.command("fsync",
read_preference=ReadPreference.PRIMARY, **kwargs)
def unlock(self):
"""Unlock a previously locked server.
"""
coll = self.admin.get_collection(
"$cmd.sys.unlock", read_preference=ReadPreference.PRIMARY)
coll.find_one()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
def __iter__(self):
return self
def __next__(self):
raise TypeError("'MongoClient' object is not iterable")
next = __next__
| macdiesel/mongo-python-driver | pymongo/mongo_client.py | Python | apache-2.0 | 44,468 |
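# A minimal usage sketch for the MongoClient surface shown above. It assumes
# a pymongo 3.0-era install and a mongod reachable on localhost:27017; the
# URI and database name are illustrative, not taken from the source file.
from pymongo import MongoClient, ReadPreference
client = MongoClient('mongodb://localhost:27017/demo_db')
# Topology-derived properties fall back to defaults until a server is known.
print(client.is_primary)           # True once connected to a writable server
print(client.max_bson_size)        # server limit, or the 16MB default
db = client.get_default_database()            # 'demo_db', named in the URI
analytics = client.get_database(              # same client, different options
    'demo_db', read_preference=ReadPreference.SECONDARY_PREFERRED)
client.close()  # drops pooled sockets; the client reopens lazily if reused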
"""Defines a sync module for Blink."""
import logging
from requests.structures import CaseInsensitiveDict
from blinkpy import api
from blinkpy.camera import BlinkCamera, BlinkCameraMini, BlinkDoorbell
from blinkpy.helpers.util import time_to_seconds
from blinkpy.helpers.constants import ONLINE
_LOGGER = logging.getLogger(__name__)
class BlinkSyncModule:
"""Class to initialize sync module."""
def __init__(self, blink, network_name, network_id, camera_list):
"""
Initialize Blink sync module.
:param blink: Blink class instantiation
"""
self.blink = blink
self.network_id = network_id
self.region_id = blink.auth.region_id
self.name = network_name
self.serial = None
self.status = "offline"
self.sync_id = None
self.host = None
self.summary = None
self.network_info = None
self.events = []
self.cameras = CaseInsensitiveDict({})
self.motion_interval = blink.motion_interval
self.motion = {}
self.last_record = {}
self.camera_list = camera_list
self.available = False
@property
def attributes(self):
"""Return sync attributes."""
attr = {
"name": self.name,
"id": self.sync_id,
"network_id": self.network_id,
"serial": self.serial,
"status": self.status,
"region_id": self.region_id,
}
return attr
@property
def urls(self):
"""Return device urls."""
return self.blink.urls
@property
def online(self):
"""Return boolean system online status."""
try:
return ONLINE[self.status]
except KeyError:
_LOGGER.error("Unknown sync module status %s", self.status)
self.available = False
return False
@property
def arm(self):
"""Return status of sync module: armed/disarmed."""
try:
return self.network_info["network"]["armed"]
except (KeyError, TypeError):
self.available = False
return None
@arm.setter
def arm(self, value):
"""Arm or disarm camera."""
if value:
return api.request_system_arm(self.blink, self.network_id)
return api.request_system_disarm(self.blink, self.network_id)
def start(self):
"""Initialize the system."""
response = self.sync_initialize()
if not response:
return False
try:
self.sync_id = self.summary["id"]
self.serial = self.summary["serial"]
self.status = self.summary["status"]
except KeyError:
_LOGGER.error("Could not extract some sync module info: %s", response)
is_ok = self.get_network_info()
self.check_new_videos()
if not is_ok or not self.update_cameras():
return False
self.available = True
return True
def sync_initialize(self):
"""Initialize a sync module."""
response = api.request_syncmodule(self.blink, self.network_id)
try:
self.summary = response["syncmodule"]
self.network_id = self.summary["network_id"]
except (TypeError, KeyError):
_LOGGER.error(
"Could not retrieve sync module information with response: %s", response
)
return False
return response
def update_cameras(self, camera_type=BlinkCamera):
"""Update cameras from server."""
try:
for camera_config in self.camera_list:
if "name" not in camera_config:
break
blink_camera_type = camera_config.get("type", "")
name = camera_config["name"]
self.motion[name] = False
owl_info = self.get_owl_info(name)
lotus_info = self.get_lotus_info(name)
if blink_camera_type == "mini":
camera_type = BlinkCameraMini
if blink_camera_type == "lotus":
camera_type = BlinkDoorbell
self.cameras[name] = camera_type(self)
camera_info = self.get_camera_info(
camera_config["id"], owl_info=owl_info, lotus_info=lotus_info
)
self.cameras[name].update(camera_info, force_cache=True, force=True)
except KeyError:
_LOGGER.error("Could not create camera instances for %s", self.name)
return False
return True
def get_owl_info(self, name):
"""Extract owl information."""
try:
for owl in self.blink.homescreen["owls"]:
if owl["name"] == name:
return owl
except (TypeError, KeyError):
pass
return None
def get_lotus_info(self, name):
"""Extract lotus information."""
try:
for doorbell in self.blink.homescreen["doorbells"]:
if doorbell["name"] == name:
return doorbell
except (TypeError, KeyError):
pass
return None
def get_events(self, **kwargs):
"""Retrieve events from server."""
force = kwargs.pop("force", False)
response = api.request_sync_events(self.blink, self.network_id, force=force)
try:
return response["event"]
except (TypeError, KeyError):
_LOGGER.error("Could not extract events: %s", response)
return False
def get_camera_info(self, camera_id, **kwargs):
"""Retrieve camera information."""
owl = kwargs.get("owl_info", None)
if owl is not None:
return owl
lotus = kwargs.get("lotus_info", None)
if lotus is not None:
return lotus
response = api.request_camera_info(self.blink, self.network_id, camera_id)
try:
return response["camera"][0]
except (TypeError, KeyError):
_LOGGER.error("Could not extract camera info: %s", response)
return {}
def get_network_info(self):
"""Retrieve network status."""
self.network_info = api.request_network_update(self.blink, self.network_id)
try:
if self.network_info["network"]["sync_module_error"]:
raise KeyError
except (TypeError, KeyError):
self.available = False
return False
return True
def refresh(self, force_cache=False):
"""Get all blink cameras and pulls their most recent status."""
if not self.get_network_info():
return
self.check_new_videos()
for camera_name in self.cameras.keys():
camera_id = self.cameras[camera_name].camera_id
camera_info = self.get_camera_info(
camera_id,
owl_info=self.get_owl_info(camera_name),
lotus_info=self.get_lotus_info(camera_name),
)
self.cameras[camera_name].update(camera_info, force_cache=force_cache)
self.available = True
def check_new_videos(self):
"""Check if new videos since last refresh."""
try:
interval = self.blink.last_refresh - self.motion_interval * 60
except TypeError:
# This is the first start, so refresh hasn't happened yet.
# No need to check for motion.
return False
resp = api.request_videos(self.blink, time=interval, page=1)
for camera in self.cameras.keys():
self.motion[camera] = False
try:
info = resp["media"]
except (KeyError, TypeError):
_LOGGER.warning("Could not check for motion. Response: %s", resp)
return False
for entry in info:
try:
name = entry["device_name"]
clip = entry["media"]
timestamp = entry["created_at"]
if self.check_new_video_time(timestamp):
self.motion[name] = True and self.arm
self.last_record[name] = {"clip": clip, "time": timestamp}
except KeyError:
_LOGGER.debug("No new videos since last refresh.")
return True
def check_new_video_time(self, timestamp):
"""Check if video has timestamp since last refresh."""
return time_to_seconds(timestamp) > self.blink.last_refresh
class BlinkOwl(BlinkSyncModule):
"""Representation of a sync-less device."""
def __init__(self, blink, name, network_id, response):
"""Initialize a sync-less object."""
cameras = [{"name": name, "id": response["id"]}]
super().__init__(blink, name, network_id, cameras)
self.sync_id = response["id"]
self.serial = response["serial"]
self.status = response["enabled"]
if not self.serial:
self.serial = f"{network_id}-{self.sync_id}"
def sync_initialize(self):
"""Initialize a sync-less module."""
self.summary = {
"id": self.sync_id,
"name": self.name,
"serial": self.serial,
"status": self.status,
"onboarded": True,
"account_id": self.blink.account_id,
"network_id": self.network_id,
}
return self.summary
def update_cameras(self, camera_type=BlinkCameraMini):
"""Update sync-less cameras."""
return super().update_cameras(camera_type=BlinkCameraMini)
def get_camera_info(self, camera_id, **kwargs):
"""Retrieve camera information."""
try:
for owl in self.blink.homescreen["owls"]:
if owl["name"] == self.name:
self.status = owl["enabled"]
return owl
except (TypeError, KeyError):
pass
return None
def get_network_info(self):
"""Get network info for sync-less module."""
return True
@property
def network_info(self):
"""Format owl response to resemble sync module."""
return {
"network": {
"id": self.network_id,
"name": self.name,
"armed": self.status,
"sync_module_error": False,
"account_id": self.blink.account_id,
}
}
@network_info.setter
def network_info(self, value):
"""Set network_info property."""
class BlinkLotus(BlinkSyncModule):
"""Representation of a sync-less device."""
def __init__(self, blink, name, network_id, response):
"""Initialize a sync-less object."""
cameras = [{"name": name, "id": response["id"]}]
super().__init__(blink, name, network_id, cameras)
self.sync_id = response["id"]
self.serial = response["serial"]
self.status = response["enabled"]
if not self.serial:
self.serial = f"{network_id}-{self.sync_id}"
def sync_initialize(self):
"""Initialize a sync-less module."""
self.summary = {
"id": self.sync_id,
"name": self.name,
"serial": self.serial,
"status": self.status,
"onboarded": True,
"account_id": self.blink.account_id,
"network_id": self.network_id,
}
return self.summary
def update_cameras(self, camera_type=BlinkDoorbell):
"""Update sync-less cameras."""
return super().update_cameras(camera_type=BlinkDoorbell)
def get_camera_info(self, camera_id, **kwargs):
"""Retrieve camera information."""
try:
for doorbell in self.blink.homescreen["doorbells"]:
if doorbell["name"] == self.name:
self.status = doorbell["enabled"]
return doorbell
except (TypeError, KeyError):
pass
return None
def get_network_info(self):
"""Get network info for sync-less module."""
return True
@property
def network_info(self):
"""Format lotus response to resemble sync module."""
return {
"network": {
"id": self.network_id,
"name": self.name,
"armed": self.status,
"sync_module_error": False,
"account_id": self.blink.account_id,
}
}
@network_info.setter
def network_info(self, value):
"""Set network_info property."""
| fronzbot/blinkpy | blinkpy/sync_module.py | Python | mit | 12,603 |
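# A minimal sketch of wiring up a sync module by hand, roughly what the Blink
# setup code does internally. It assumes an authenticated session; the network
# id, network name, and camera entry below are illustrative values.
from blinkpy.blinkpy import Blink
from blinkpy.sync_module import BlinkSyncModule
blink = Blink()
blink.start()  # authenticates and populates blink.networks / blink.homescreen
cameras = [{"name": "Front Door", "id": 123456, "type": "lotus"}]
sync = BlinkSyncModule(blink, "Home", network_id=987654, camera_list=cameras)
if sync.start():               # pulls summary + network info, builds cameras
    sync.arm = True            # the property setter posts the arm request
    sync.refresh(force_cache=True)
    print(sync.attributes, "online:", sync.online)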
#! /usr/bin/env python
#
# This file is part of khmer, https://github.com/dib-lab/khmer/, and is
# Copyright (C) Michigan State University, 2009-2015. It is licensed under
# the three-clause BSD license; see LICENSE.
# Contact: [email protected]
#
from __future__ import print_function
import sys
import khmer
import os
try:
from pylab import *
except ImportError:
pass
def main():
hashfile = sys.argv[1]
filename = sys.argv[2]
figure = sys.argv[3]
ht = khmer.load_counting_hash(hashfile)
outabund = open(os.path.basename(filename) + '.counts', 'w')
counts = []
d = {}
    for sequence in open(filename):
sequence = sequence.strip()
count = ht.get(sequence)
counts.append(count)
d[count] = d.get(count, 0) + 1
if count > 1000:
print(sequence, count, file=outabund)
outfp = open(figure + '.countshist', 'w')
sofar = 0
sofar_cumu = 0
for k in sorted(d.keys()):
sofar += d[k]
sofar_cumu += k * d[k]
print(k, d[k], sofar, sofar_cumu, file=outfp)
hist(counts, normed=True, cumulative=True, bins=100, range=(1, 1000))
savefig(figure)
if __name__ == '__main__':
main()
| Winterflower/khmer | sandbox/stoptag-abundance-hist.py | Python | bsd-3-clause | 1,222 |
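# Illustrative invocation of the script above (the file names are made up):
#
#     python stoptag-abundance-hist.py table.ct stoptags.txt abund.png
#
# It writes per-sequence counts to stoptags.txt.counts, a cumulative count
# histogram to abund.png.countshist, and, when pylab imports cleanly, the
# plot itself to abund.png.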
from automlk.dataset import get_dataset_list, create_graph_data
from automlk.store import set_key_store
"""
module specifically designed to update feature graphs after new version
"""
for dt in get_dataset_list():
print('-'*60)
print(dt.name)
create_graph_data(dt.dataset_id)
| pierre-chaville/automlk | scripts/update_graphs.py | Python | mit | 290 |
import random
from unittest import TestCase
from hamcrest import *
from chapter14.textbook14_3 import interval_search, overlap, interval_insert, interval_delete
from datastructures.interval import Interval
from datastructures.red_black_tree import RedBlackTree, IntervalNode
from tree_util import assert_parent_pointers_consistent, get_binary_tree_keys, get_binary_tree_nodes, \
get_random_interval_tree, assert_interval_tree
class TestTextbook14_3(TestCase):
def test_interval_insert(self):
keys = [random.randrange(949) for _ in range(20)]
tree = RedBlackTree(sentinel=IntervalNode(None, None))
for key in keys:
interval_insert(tree, IntervalNode(key, Interval(key, key + random.randint(0, 50))))
assert_interval_tree(tree)
assert_parent_pointers_consistent(tree, sentinel=tree.nil)
actual_keys = get_binary_tree_keys(tree, sentinel=tree.nil)
assert_that(actual_keys, contains_inanyorder(*keys))
def test_interval_delete(self):
tree, _, keys = get_random_interval_tree()
nodes = get_binary_tree_nodes(tree, sentinel=tree.nil)
while nodes:
node = random.choice(nodes)
keys.remove(node.key)
interval_delete(tree, node)
assert_interval_tree(tree)
assert_parent_pointers_consistent(tree, sentinel=tree.nil)
actual_keys = get_binary_tree_keys(tree, sentinel=tree.nil)
assert_that(actual_keys, contains_inanyorder(*keys))
nodes = get_binary_tree_nodes(tree, sentinel=tree.nil)
def test_interval_search(self):
tree, nodes, keys = get_random_interval_tree()
low_endpoint = random.randint(0, 949)
high_endpoint = low_endpoint + random.randint(0, 50)
interval = Interval(low_endpoint, high_endpoint)
actual_found = interval_search(tree, interval)
if actual_found is not tree.nil:
assert_that(overlap(actual_found.int, interval))
else:
for node in nodes:
assert_that(not_(overlap(node.int, interval)))
| wojtask/CormenPy | test/test_chapter14/test_textbook14_3.py | Python | gpl-3.0 | 2,117 |
##
# Copyright (c) 2005 Apple Computer, Inc. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# DRI: Wilfredo Sanchez, [email protected]
##
"""
WebDAV XML parsing.
This module provides XML utilities for use with WebDAV.
See RFC 2518: http://www.ietf.org/rfc/rfc2518.txt (WebDAV)
"""
__all__ = [
"registerElements",
"WebDAVContentHandler",
"WebDAVDocument",
]
import StringIO
import xml.dom.minidom
import xml.sax
from higgins.http.dav.element.base import *
from higgins.http.dav.element.util import PrintXML, encodeXMLName
##
# Parsing
##
def registerElements(module):
"""
Register XML elements defined in the given module with the parser.
"""
element_names = []
for element_class_name in dir(module):
element_class = getattr(module, element_class_name)
if type(element_class) is type and issubclass(element_class, WebDAVElement):
if element_class.namespace is None: continue
if element_class.name is None: continue
if element_class.unregistered: continue
qname = element_class.namespace, element_class.name
if qname in elements_by_tag_name:
raise AssertionError(
"Attempting to register qname %s multiple times: (%r, %r)"
% (qname, elements_by_tag_name[qname], element_class)
)
if not (qname in elements_by_tag_name and issubclass(element_class, elements_by_tag_name[qname])):
elements_by_tag_name[qname] = element_class
element_names.append(element_class.__name__)
return element_names
def lookupElement(qname):
"""
Return the element class for the element with the given qname.
"""
return elements_by_tag_name[qname]
elements_by_tag_name = {}
class WebDAVContentHandler (xml.sax.handler.ContentHandler):
def setDocumentLocator(self, locator): self.locator = locator
locator = None
def location(self):
return "line %d, column %d" % (self.locator.getLineNumber(), self.locator.getColumnNumber())
def startDocument(self):
self.stack = [{
"name" : None,
"class" : None,
"attributes" : None,
"children" : [],
}]
def endDocument(self):
top = self.stack[-1]
assert top["name"] is None
assert top["class"] is None
assert top["attributes"] is None
        assert len(top["children"]) == 1, "Must have exactly one root element, got %d" % len(top["children"])
self.dom = WebDAVDocument(top["children"][0])
def startElementNS(self, name, qname, attributes):
attributes_dict = {}
        if attributes.getLength() != 0:
for attr_name in attributes.getNames():
attributes_dict[encodeXMLName(attr_name)] = attributes.getValue(attr_name)
tag_namespace, tag_name = name
if (name not in elements_by_tag_name):
class UnknownElement (WebDAVUnknownElement):
namespace = tag_namespace
name = tag_name
element_class = UnknownElement
else:
element_class = elements_by_tag_name[name]
self.stack.append({
"name" : name,
"class" : element_class,
"attributes" : attributes_dict,
"children" : [],
})
def endElementNS(self, name, qname):
# Pop the current element from the stack...
top = self.stack[-1]
del(self.stack[-1])
assert top["name"] == name, "Last item on stack is %s while closing %s" % (top["name"], name)
# ...then instantiate the element and add it to the parent's list of
# children.
try:
element = top["class"](*top["children"], **top["attributes"])
except ValueError, e:
e.args = ("%s at %s" % (e.args[0], self.location()),) + e.args[1:]
raise # Re-raises modified e, but preserves traceback
self.stack[-1]["children"].append(element)
def characters(self, content):
self.stack[-1]["children"].append(PCDATAElement(content))
def ignorableWhitespace(self, whitespace):
        self.characters(whitespace)
def startElement(self, name, attributes):
raise AssertionError("startElement() should not be called by namespace-aware parser")
def endElement(self, name):
raise AssertionError("endElement() should not be called by namespace-aware parser")
def processingInstruction(self, target, data):
raise AssertionError("processing instructions are not allowed")
def skippedEntity(self, name):
raise AssertionError("skipped entities are not allowed")
class WebDAVDocument (object):
"""
WebDAV XML document.
"""
def _parse(source_is_string):
def parse(source):
handler = WebDAVContentHandler()
parser = xml.sax.make_parser()
parser.setContentHandler(handler)
parser.setFeature(xml.sax.handler.feature_namespaces, True)
if source_is_string: source = StringIO.StringIO(source)
try:
parser.parse(source)
except xml.sax.SAXParseException, e:
raise ValueError(e)
return handler.dom
return parse
fromStream = staticmethod(_parse(False))
fromString = staticmethod(_parse(True ))
def __init__(self, root_element):
"""
root_element must be a WebDAVElement instance.
"""
super(WebDAVDocument, self).__init__()
if not isinstance(root_element, WebDAVElement):
raise ValueError("Not a WebDAVElement: %r" % (obj,))
self.root_element = root_element
def __str__(self):
output = StringIO.StringIO()
self.writeXML(output)
return output.getvalue()
def __eq__(self, other):
if isinstance(other, WebDAVDocument):
return self.root_element == other.root_element
else:
return NotImplemented
def writeXML(self, output):
document = xml.dom.minidom.Document()
self.root_element.addToDOM(document, None)
PrintXML(document, stream=output)
def toxml(self):
output = StringIO.StringIO()
self.writeXML(output)
return output.getvalue()
| msfrank/Higgins | higgins/http/dav/element/parser.py | Python | lgpl-2.1 | 7,425 |
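# A short round-trip sketch for the parser above. The XML body is an
# illustrative WebDAV PROPFIND request; element names that were never
# registered via registerElements() simply parse as WebDAVUnknownElement.
from higgins.http.dav.element.parser import WebDAVDocument
PROPFIND = """<?xml version="1.0" encoding="utf-8"?>
<propfind xmlns="DAV:">
  <prop>
    <getcontentlength/>
    <displayname/>
  </prop>
</propfind>"""
doc = WebDAVDocument.fromString(PROPFIND)  # parses via WebDAVContentHandler
print doc.toxml()                          # serializes through xml.dom.minidom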
from __future__ import absolute_import
import errno
import select
import sys
from mock import Mock, patch
from time import time
from celery.worker import autoreload
from celery.worker.autoreload import (
WorkerComponent,
file_hash,
BaseMonitor,
StatMonitor,
KQueueMonitor,
InotifyMonitor,
default_implementation,
Autoreloader,
)
from celery.tests.case import AppCase, Case, mock_open
class test_WorkerComponent(AppCase):
def test_create_threaded(self):
w = Mock()
w.use_eventloop = False
x = WorkerComponent(w)
x.instantiate = Mock()
r = x.create(w)
x.instantiate.assert_called_with(w.autoreloader_cls, w)
self.assertIs(r, w.autoreloader)
@patch('select.kevent', create=True)
@patch('select.kqueue', create=True)
def test_create_ev(self, kqueue, kevent):
w = Mock()
w.use_eventloop = True
x = WorkerComponent(w)
x.instantiate = Mock()
r = x.create(w)
x.instantiate.assert_called_with(w.autoreloader_cls, w)
x.register_with_event_loop(w, w.hub)
self.assertIsNone(r)
w.hub.on_close.add.assert_called_with(
w.autoreloader.on_event_loop_close,
)
class test_file_hash(Case):
def test_hash(self):
with mock_open() as a:
a.write('the quick brown fox\n')
a.seek(0)
A = file_hash('foo')
with mock_open() as b:
b.write('the quick brown bar\n')
b.seek(0)
B = file_hash('bar')
self.assertNotEqual(A, B)
class test_BaseMonitor(Case):
def test_start_stop_on_change(self):
x = BaseMonitor(['a', 'b'])
with self.assertRaises(NotImplementedError):
x.start()
x.stop()
x.on_change([])
x._on_change = Mock()
x.on_change('foo')
x._on_change.assert_called_with('foo')
class test_StatMonitor(Case):
@patch('os.stat')
def test_start(self, stat):
class st(object):
st_mtime = time()
stat.return_value = st()
x = StatMonitor(['a', 'b'])
def on_is_set():
if x.shutdown_event.is_set.call_count > 3:
return True
return False
x.shutdown_event = Mock()
x.shutdown_event.is_set.side_effect = on_is_set
x.start()
x.shutdown_event = Mock()
stat.side_effect = OSError()
x.start()
@patch('os.stat')
def test_mtime_stat_raises(self, stat):
stat.side_effect = ValueError()
x = StatMonitor(['a', 'b'])
x._mtime('a')
class test_KQueueMonitor(Case):
@patch('select.kqueue', create=True)
@patch('os.close')
def test_stop(self, close, kqueue):
x = KQueueMonitor(['a', 'b'])
x.poller = Mock()
x.filemap['a'] = 10
x.stop()
x.poller.close.assert_called_with()
close.assert_called_with(10)
close.side_effect = OSError()
close.side_effect.errno = errno.EBADF
x.stop()
def test_register_with_event_loop(self):
x = KQueueMonitor(['a', 'b'])
hub = Mock()
x.add_events = Mock()
x.register_with_event_loop(hub)
x.add_events.assert_called_with(hub.poller)
self.assertEqual(
hub.poller.on_file_change,
x.handle_event,
)
def test_on_event_loop_close(self):
x = KQueueMonitor(['a', 'b'])
x.close = Mock()
hub = Mock()
x.on_event_loop_close(hub)
x.close.assert_called_with(hub.poller)
def test_handle_event(self):
x = KQueueMonitor(['a', 'b'])
x.on_change = Mock()
eA = Mock()
eA.ident = 'a'
eB = Mock()
eB.ident = 'b'
x.fdmap = {'a': 'A', 'b': 'B'}
x.handle_event([eA, eB])
x.on_change.assert_called_with(['A', 'B'])
@patch('kombu.utils.eventio.kqueue', create=True)
@patch('kombu.utils.eventio.kevent', create=True)
@patch('os.open')
@patch('select.kqueue', create=True)
def test_start(self, _kq, osopen, kevent, kqueue):
from kombu.utils import eventio
prev_poll, eventio.poll = eventio.poll, kqueue
prev = {}
flags = ['KQ_FILTER_VNODE', 'KQ_EV_ADD', 'KQ_EV_ENABLE',
'KQ_EV_CLEAR', 'KQ_NOTE_WRITE', 'KQ_NOTE_EXTEND']
for i, flag in enumerate(flags):
prev[flag] = getattr(eventio, flag, None)
if not prev[flag]:
setattr(eventio, flag, i)
try:
kq = kqueue.return_value = Mock()
class ev(object):
ident = 10
filter = eventio.KQ_FILTER_VNODE
fflags = eventio.KQ_NOTE_WRITE
kq.control.return_value = [ev()]
x = KQueueMonitor(['a'])
osopen.return_value = 10
calls = [0]
def on_is_set():
calls[0] += 1
if calls[0] > 2:
return True
return False
x.shutdown_event = Mock()
x.shutdown_event.is_set.side_effect = on_is_set
x.start()
finally:
for flag in flags:
if prev[flag]:
setattr(eventio, flag, prev[flag])
else:
delattr(eventio, flag)
eventio.poll = prev_poll
class test_InotifyMonitor(Case):
@patch('celery.worker.autoreload.pyinotify')
def test_start(self, inotify):
x = InotifyMonitor(['a'])
inotify.IN_MODIFY = 1
inotify.IN_ATTRIB = 2
x.start()
inotify.WatchManager.side_effect = ValueError()
with self.assertRaises(ValueError):
x.start()
x.stop()
x._on_change = None
x.process_(Mock())
x._on_change = Mock()
x.process_(Mock())
self.assertTrue(x._on_change.called)
class test_default_implementation(Case):
@patch('select.kqueue', create=True)
def test_kqueue(self, kqueue):
self.assertEqual(default_implementation(), 'kqueue')
@patch('celery.worker.autoreload.pyinotify')
def test_inotify(self, pyinotify):
kq = getattr(select, 'kqueue', None)
try:
delattr(select, 'kqueue')
except AttributeError:
pass
platform, sys.platform = sys.platform, 'linux'
try:
self.assertEqual(default_implementation(), 'inotify')
ino, autoreload.pyinotify = autoreload.pyinotify, None
try:
self.assertEqual(default_implementation(), 'stat')
finally:
autoreload.pyinotify = ino
finally:
if kq:
select.kqueue = kq
sys.platform = platform
class test_Autoreloader(AppCase):
def test_register_with_event_loop(self):
x = Autoreloader(Mock(), modules=[__name__])
hub = Mock()
x._monitor = None
x.on_init = Mock()
def se(*args, **kwargs):
x._monitor = Mock()
x.on_init.side_effect = se
x.register_with_event_loop(hub)
x.on_init.assert_called_with()
x._monitor.register_with_event_loop.assert_called_with(hub)
x._monitor.register_with_event_loop.reset_mock()
x.register_with_event_loop(hub)
x._monitor.register_with_event_loop.assert_called_with(hub)
def test_on_event_loop_close(self):
x = Autoreloader(Mock(), modules=[__name__])
hub = Mock()
x._monitor = Mock()
x.on_event_loop_close(hub)
x._monitor.on_event_loop_close.assert_called_with(hub)
x._monitor = None
x.on_event_loop_close(hub)
@patch('celery.worker.autoreload.file_hash')
def test_start(self, fhash):
x = Autoreloader(Mock(), modules=[__name__])
x.Monitor = Mock()
mon = x.Monitor.return_value = Mock()
mon.start.side_effect = OSError()
mon.start.side_effect.errno = errno.EINTR
x.body()
mon.start.side_effect.errno = errno.ENOENT
with self.assertRaises(OSError):
x.body()
mon.start.side_effect = None
x.body()
@patch('celery.worker.autoreload.file_hash')
@patch('os.path.exists')
def test_maybe_modified(self, exists, fhash):
exists.return_value = True
fhash.return_value = 'abcd'
x = Autoreloader(Mock(), modules=[__name__])
x._hashes = {}
x._hashes[__name__] = 'dcba'
self.assertTrue(x._maybe_modified(__name__))
x._hashes[__name__] = 'abcd'
self.assertFalse(x._maybe_modified(__name__))
exists.return_value = False
self.assertFalse(x._maybe_modified(__name__))
def test_on_change(self):
x = Autoreloader(Mock(), modules=[__name__])
mm = x._maybe_modified = Mock(0)
mm.return_value = True
x._reload = Mock()
x.file_to_module[__name__] = __name__
x.on_change([__name__])
self.assertTrue(x._reload.called)
mm.return_value = False
x.on_change([__name__])
def test_reload(self):
x = Autoreloader(Mock(), modules=[__name__])
x._reload([__name__])
x.controller.reload.assert_called_with([__name__], reload=True)
def test_stop(self):
x = Autoreloader(Mock(), modules=[__name__])
x._monitor = None
x.stop()
x._monitor = Mock()
x.stop()
x._monitor.stop.assert_called_with()
| sivaprakashniet/push_pull | p2p/lib/python2.7/site-packages/celery/tests/worker/test_autoreload.py | Python | bsd-3-clause | 9,595 |
import os
from os import path
from . import getClient
from docker.errors import APIError
from tsdesktop import config
class ImageInfo(object):
"""docker image information and status"""
name = None
status = None
def __init__(self, name, info=None, missing=False, error=False):
self.name = name
if error:
self.status = 'error'
elif missing:
self.status = 'missing'
elif info:
self.status = 'ok'
def repo(self):
"""returns image repository part of the name (repo:tag)"""
return self.name.split(':')[0]
def tag(self):
"""returns image tag part of the name (repo:tag)"""
try:
return self.name.split(':')[1]
except IndexError:
return ''
class Service(object):
"""manages docker images and containers for services"""
name = None
dedicated = False
site = None
container = None
containerName = None
ports = []
volumes = []
hostConfig = {}
environ = {}
URI = None
URIDesc = None
def __init__(self, site=None):
self.site = site
self.containerName = self._contName()
self._defaultEnviron()
def __str__(self):
return '<Service: %s>' % self.containerName
def __repr__(self):
return str(self)
def _defaultEnviron(self):
"""sets common environment variables for all containers"""
self.environ.update({
'TSDESKTOP_UID': os.getuid(),
'TSDESKTOP_GID': os.getgid(),
})
def status(self):
"""returns container status ('error'|'running'|'exit'|'')"""
cli = getClient()
l = cli.containers(all=True, filters={'name': self.containerName})
for s in l:
if '/%s' % self.containerName in s.get('Names', []):
stat = s.get('Status', None)
if stat is None:
return 'error'
elif stat.startswith('Up'):
return 'running'
elif stat.startswith('Exited'):
return 'exit'
else:
return 'error'
else:
return ''
def _contName(self):
"""builds and returns container name, based on the service name"""
n = 'tsdesktop-'+self.name
if self.site is not None:
n = n+'-'+self.site
return n
def imageInfo(self):
"""returns service image information (ImageInfo)"""
imgName = self._imgName()
cli = getClient()
il = cli.images(name='tsadm/desktop')
if il:
for s in il:
rt = s.get('RepoTags', [])
if imgName in rt:
return ImageInfo(imgName, info=s)
return ImageInfo(imgName, missing=True)
else:
return ImageInfo(imgName, error=True)
def _imgName(self):
"""builds and returns image name, based on the service name"""
return 'tsadm/desktop:'+self.name
def _rmContainer(self, cli):
"""removes docker container"""
cli.remove_container(container=self.containerName, v=True)
self.container = None
def _mkContainer(self):
"""creates docker container"""
cli = getClient()
self.container = cli.create_container(
name=self.containerName,
image=self._imgName(),
ports=self.ports,
volumes=self.volumes,
environment=self.environ,
host_config=cli.create_host_config(**self.hostConfig),
)
def start(self):
"""starts service container"""
cli = getClient()
stat = self.status()
if stat in ('exit', 'error'):
self._rmContainer(cli)
elif stat == 'running':
return self.containerName+': already running'
try:
self._mkContainer()
err = cli.start(container=self.container.get('Id'))
if err is not None:
return self.containerName+': error - '+str(err)
except APIError as e:
return '%s - %s' % (self.containerName, e.explanation.decode())
return None
def stop(self):
"""stops service container"""
cli = getClient()
stat = self.status()
if stat == 'exit':
self._rmContainer(cli)
return None
elif stat == 'running':
try:
cli.stop(self.containerName)
except APIError as e:
return '%s: %s' % (self.containerName, e)
self._rmContainer(cli)
return None
return self.containerName+': not running'
def run(self, cmd):
cli = getClient()
proc = cli.exec_create(container=self.containerName, cmd=cmd)
return cli.exec_start(proc).decode()
def volAdd(self, orig, dest, mode='rw'):
"""adds a volume to the docker container"""
if self.container is not None:
raise RuntimeError('volAdd called after container was created')
self.volumes.append(dest)
if not 'binds' in self.hostConfig.keys():
self.hostConfig['binds'] = {}
self.hostConfig['binds'][orig] = {
'bind': dest,
'mode': mode,
}
def cachePath(self, *args):
p = path.expanduser(config.cfg.get('user', 'cachedir'))
return path.abspath(path.join(p, self.name, *args))
class _httpd(Service):
"""http (apache2) service manager"""
name = 'httpd'
dedicated = True
ports = [80, 443]
hostConfig = {
'port_bindings': {
80: ('127.0.0.1', 4080),
443: ('127.0.0.1', 4443),
},
}
URI = 'http://localhost:4080/'
def start(self):
dbserver = config.cfg.get('site:'+self.site, 'dbserver')
k = classMap.get(dbserver, None)
if k is not None:
dbs = k()
if dbs.status() != 'running':
return '%s service should be started first (%s)' % (dbserver, dbs.status())
err = dbs.createDB('%sdb' % self.site, self.site, self.containerName)
if err:
return err
self.environ['TSDESKTOP_SITE'] = self.site
return super(_httpd, self).start()
class _mysqld(Service):
"""mysql service container manager"""
name = 'mysqld'
ports = [80, 3306]
hostConfig = {
'port_bindings': {
80: ('127.0.0.1', 4980),
3306: ('127.0.0.1', 4936),
},
}
URI = 'http://localhost:4980/phpmyadmin'
URIDesc = 'login as tsdesktop:tsdesktop'
def start(self):
self.volAdd(self.cachePath('datadir'), '/var/lib/mysql')
self.volAdd(self.cachePath('upload'), '/var/tmp/upload')
return super(_mysqld, self).start()
def createDB(self, dbname, user, host):
return self.run('/opt/tsdesktop/mysqld.createdb %s %s %s' % (dbname, user, host))
classMap = {
'mysqld': _mysqld,
'httpd': _httpd,
}
def classList():
return [classMap[k]() for k in sorted(classMap.keys())]
| tsadm/desktop | lib/tsdesktop/dockman/services.py | Python | bsd-3-clause | 7,130 |
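# A minimal sketch of driving the service classes above; it assumes a
# reachable Docker daemon and a configured tsdesktop site named "demo".
from tsdesktop.dockman.services import classList, classMap
for svc in classList():              # one instance per registered service
    print(svc.name, svc.status() or 'not created')
db = classMap['mysqld']()            # shared database service
err = db.start()                     # returns an error string, or None on success
if err is None:
    web = classMap['httpd']('demo')  # dedicated per-site httpd container
    err = web.start()                # also provisions the site database
print(err or 'services running')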
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'pootle.settings'
from optparse import make_option
from django.core.management.base import NoArgsCommand
from pootle.core.models import Revision
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option('--restore', action='store_true', default=False, dest='restore',
help='Restore the current revision number from the DB.'),
)
help = "Print the number of the current revision."
def handle_noargs(self, **options):
if options.get('restore'):
from pootle_store.models import Unit
Revision.set(Unit.max_revision())
self.stdout.write('%s' % Revision.get())
| electrolinux/pootle | pootle/apps/pootle_app/management/commands/revision.py | Python | gpl-3.0 | 1,022 |
from nose.tools import *
from RoboTableProject.wiimote import Wiimote
from RoboTableProject.robot import Robot
import math
class SensorTest(Wiimote):
"""Simple SensorTest class"""
def __init__(self):
Wiimote.__init__(self, test=True)
def get_leds(self):
front_led = {"X": 3, "Y": 10}
back_left = {"X": 5, "Y": 1}
back_right = {"X": 2, "Y": 1}
return [front_led, back_left, back_right]
sensor = SensorTest()
robot = Robot(sensor)
def test_robot():
led1 = {"X": 7, "Y": 3}
led2 = {"X": 1, "Y": 1}
assert_equal(robot._get_distance_between_2_points(led1, led2), math.sqrt(40))
assert_equal(robot._get_midpoint(led1, led2), {"X": 4, "Y": 2})
def test_led_position():
leds = sensor.get_leds()
expected_leds = {'front': leds[0], 'left': leds[1], 'right': leds[2]}
obtained_leds = robot.leds
assert_equal(obtained_leds, expected_leds)
def test_centre_property():
expected_centreX = 3.25
expected_centreY = 5.5
centre = robot.centre
centreX = centre[0]
centreY = centre[1]
assert_equal(expected_centreX, centreX)
assert_equal(expected_centreY, centreY)
| arnaudchenyensu/RoboTableProject | tests/robot_tests.py | Python | gpl-2.0 | 1,167 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 Deepin, Inc.
# 2011 Wang Yong
#
# Author: Wang Yong <[email protected]>
# Maintainer: Wang Yong <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from constant import *
from lang import __, getDefaultLanguage
from utils import *
import socket
import subprocess
import sys
class CommandProxy:
'''Command proxy.'''
def __init__(self):
'''Init command proxy.'''
self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) # make sure socket port always work
self.socket.bind(SOCKET_COMMANDPROXY_ADDRESS)
self.noExit = len(sys.argv) == 2 and sys.argv[1] == "--daemon"
self.run()
def run(self):
'''Run.'''
print "* Command proxy listen ..."
cmd, addr = self.socket.recvfrom(2048)
print "* Command proxy received: '%s' from %s" % (cmd, addr)
if cmd != "exit":
try:
runCommand(cmd)
except Exception, e:
print "Got error `%s` when execute `%s`." % (e, cmd)
finally:
self.run()
elif self.noExit:
self.run()
print "* Command proxy exit."
if __name__ == "__main__":
CommandProxy()
| manateelazycat/deepin-software-center | src/commandProxy.py | Python | gpl-3.0 | 2,004 |
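# Companion sketch: the proxy above speaks a bare UDP protocol, so a client
# just sends the command line as a datagram. SOCKET_COMMANDPROXY_ADDRESS is
# assumed to come from the project's constant module; the command shown is
# purely illustrative.
import socket
from constant import SOCKET_COMMANDPROXY_ADDRESS
client = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
client.sendto("apt-get update", SOCKET_COMMANDPROXY_ADDRESS)
client.sendto("exit", SOCKET_COMMANDPROXY_ADDRESS)  # stops a non-daemon proxy
client.close()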
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from lxml import etree
from openerp.osv import fields, osv
import openerp.addons.decimal_precision as dp
from openerp.tools.translate import _
from openerp.tools import float_compare
from openerp.report import report_sxw
import openerp
from openerp.exceptions import UserError
class res_currency(osv.osv):
_inherit = "res.currency"
def _get_current_rate(self, cr, uid, ids, raise_on_no_rate=True, context=None):
if context is None:
context = {}
res = super(res_currency, self)._get_current_rate(cr, uid, ids, raise_on_no_rate, context=context)
if context.get('voucher_special_currency') in ids and context.get('voucher_special_currency_rate'):
res[context.get('voucher_special_currency')] = context.get('voucher_special_currency_rate')
return res
class account_voucher(osv.osv):
def _check_paid(self, cr, uid, ids, name, args, context=None):
res = {}
for voucher in self.browse(cr, uid, ids, context=context):
            res[voucher.id] = any(
                line.account_id.type in ('receivable', 'payable')
                and line.reconcile_id for line in voucher.move_ids)
return res
def _get_type(self, cr, uid, context=None):
if context is None:
context = {}
return context.get('type', False)
def _get_period(self, cr, uid, context=None):
if context is None: context = {}
if context.get('period_id', False):
return context.get('period_id')
periods = self.pool.get('account.period').find(cr, uid, context=context)
return periods and periods[0] or False
def _make_journal_search(self, cr, uid, ttype, context=None):
journal_pool = self.pool.get('account.journal')
return journal_pool.search(cr, uid, [('type', '=', ttype)], limit=1)
def _get_journal(self, cr, uid, context=None):
if context is None: context = {}
invoice_pool = self.pool.get('account.invoice')
journal_pool = self.pool.get('account.journal')
if context.get('invoice_id', False):
invoice = invoice_pool.browse(cr, uid, context['invoice_id'], context=context)
journal_id = journal_pool.search(cr, uid, [
('currency', '=', invoice.currency_id.id), ('company_id', '=', invoice.company_id.id)
], limit=1, context=context)
return journal_id and journal_id[0] or False
if context.get('journal_id', False):
return context.get('journal_id')
if not context.get('journal_id', False) and context.get('search_default_journal_id', False):
return context.get('search_default_journal_id')
ttype = context.get('type', 'bank')
if ttype in ('payment', 'receipt'):
ttype = 'bank'
res = self._make_journal_search(cr, uid, ttype, context=context)
return res and res[0] or False
def _get_tax(self, cr, uid, context=None):
if context is None: context = {}
journal_pool = self.pool.get('account.journal')
journal_id = context.get('journal_id', False)
if not journal_id:
ttype = context.get('type', 'bank')
res = journal_pool.search(cr, uid, [('type', '=', ttype)], limit=1)
if not res:
return False
journal_id = res[0]
if not journal_id:
return False
journal = journal_pool.browse(cr, uid, journal_id, context=context)
account_id = journal.default_credit_account_id or journal.default_debit_account_id
if account_id and account_id.tax_ids:
tax_id = account_id.tax_ids[0].id
return tax_id
return False
def _get_payment_rate_currency(self, cr, uid, context=None):
"""
Return the default value for field payment_rate_currency_id: the currency of the journal
if there is one, otherwise the currency of the user's company
"""
if context is None: context = {}
journal_pool = self.pool.get('account.journal')
journal_id = context.get('journal_id', False)
if journal_id:
journal = journal_pool.browse(cr, uid, journal_id, context=context)
if journal.currency:
return journal.currency.id
#no journal given in the context, use company currency as default
return self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.currency_id.id
def _get_currency(self, cr, uid, context=None):
if context is None: context = {}
journal_pool = self.pool.get('account.journal')
journal_id = context.get('journal_id', False)
if journal_id:
if isinstance(journal_id, (list, tuple)):
# sometimes journal_id is a pair (id, display_name)
journal_id = journal_id[0]
journal = journal_pool.browse(cr, uid, journal_id, context=context)
if journal.currency:
return journal.currency.id
return self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.currency_id.id
def _get_partner(self, cr, uid, context=None):
if context is None: context = {}
return context.get('partner_id', False)
def _get_reference(self, cr, uid, context=None):
if context is None: context = {}
return context.get('reference', False)
def _get_narration(self, cr, uid, context=None):
if context is None: context = {}
return context.get('narration', False)
def _get_amount(self, cr, uid, context=None):
if context is None:
context= {}
return context.get('amount', 0.0)
def name_get(self, cr, uid, ids, context=None):
if not ids:
return []
if context is None: context = {}
return [(r['id'], (r['number'] or _('Voucher'))) for r in self.read(cr, uid, ids, ['number'], context, load='_classic_write')]
def fields_view_get(self, cr, uid, view_id=None, view_type=False, context=None, toolbar=False, submenu=False):
mod_obj = self.pool.get('ir.model.data')
if context is None: context = {}
if view_type == 'form':
if not view_id and context.get('invoice_type'):
if context.get('invoice_type') in ('out_invoice', 'out_refund'):
result = mod_obj.get_object_reference(cr, uid, 'account_voucher', 'view_vendor_receipt_form')
else:
result = mod_obj.get_object_reference(cr, uid, 'account_voucher', 'view_vendor_payment_form')
result = result and result[1] or False
view_id = result
if not view_id and context.get('line_type'):
if context.get('line_type') == 'customer':
result = mod_obj.get_object_reference(cr, uid, 'account_voucher', 'view_vendor_receipt_form')
else:
result = mod_obj.get_object_reference(cr, uid, 'account_voucher', 'view_vendor_payment_form')
result = result and result[1] or False
view_id = result
res = super(account_voucher, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=submenu)
doc = etree.XML(res['arch'])
if context.get('type', 'sale') in ('purchase', 'payment'):
nodes = doc.xpath("//field[@name='partner_id']")
for node in nodes:
node.set('context', "{'default_customer': 0, 'search_default_supplier': 1, 'default_supplier': 1}")
if context.get('invoice_type','') in ('in_invoice', 'in_refund'):
node.set('string', _("Supplier"))
res['arch'] = etree.tostring(doc)
return res
def _compute_writeoff_amount(self, cr, uid, line_dr_ids, line_cr_ids, amount, type):
debit = credit = 0.0
sign = type == 'payment' and -1 or 1
for l in line_dr_ids:
if isinstance(l, dict):
debit += l['amount']
for l in line_cr_ids:
if isinstance(l, dict):
credit += l['amount']
return amount - sign * (credit - debit)
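    # Worked example (illustrative figures, not from the source): a customer
    # receipt (type != 'payment', so sign = 1) of amount 100.0 with 80.0
    # allocated on credit lines and 10.0 on debit lines leaves
    #   100.0 - 1 * (80.0 - 10.0) = 30.0
    # open as the write-off amount.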
def onchange_line_ids(self, cr, uid, ids, line_dr_ids, line_cr_ids, amount, voucher_currency, type, context=None):
context = context or {}
if not line_dr_ids and not line_cr_ids:
return {'value':{'writeoff_amount': 0.0}}
# resolve lists of commands into lists of dicts
line_dr_ids = self.resolve_2many_commands(cr, uid, 'line_dr_ids', line_dr_ids, ['amount'], context)
line_cr_ids = self.resolve_2many_commands(cr, uid, 'line_cr_ids', line_cr_ids, ['amount'], context)
#compute the field is_multi_currency that is used to hide/display options linked to secondary currency on the voucher
is_multi_currency = False
        #loop on the voucher lines to see if one of them has a secondary currency. If yes, we need to show the options
for voucher_line in line_dr_ids+line_cr_ids:
line_id = voucher_line.get('id') and self.pool.get('account.voucher.line').browse(cr, uid, voucher_line['id'], context=context).move_line_id.id or voucher_line.get('move_line_id')
if line_id and self.pool.get('account.move.line').browse(cr, uid, line_id, context=context).currency_id:
is_multi_currency = True
break
return {'value': {'writeoff_amount': self._compute_writeoff_amount(cr, uid, line_dr_ids, line_cr_ids, amount, type), 'is_multi_currency': is_multi_currency}}
def _get_journal_currency(self, cr, uid, ids, name, args, context=None):
res = {}
for voucher in self.browse(cr, uid, ids, context=context):
res[voucher.id] = voucher.journal_id.currency and voucher.journal_id.currency.id or voucher.company_id.currency_id.id
return res
def _get_writeoff_amount(self, cr, uid, ids, name, args, context=None):
if not ids: return {}
currency_obj = self.pool.get('res.currency')
res = {}
        for voucher in self.browse(cr, uid, ids, context=context):
            # reset the totals per voucher so amounts don't leak across records
            debit = credit = 0.0
            sign = voucher.type == 'payment' and -1 or 1
for l in voucher.line_dr_ids:
debit += l.amount
for l in voucher.line_cr_ids:
credit += l.amount
currency = voucher.currency_id or voucher.company_id.currency_id
res[voucher.id] = currency_obj.round(cr, uid, currency, voucher.amount - sign * (credit - debit))
return res
def _paid_amount_in_company_currency(self, cr, uid, ids, name, args, context=None):
if context is None:
context = {}
res = {}
ctx = context.copy()
for v in self.browse(cr, uid, ids, context=context):
ctx.update({'date': v.date})
#make a new call to browse in order to have the right date in the context, to get the right currency rate
voucher = self.browse(cr, uid, v.id, context=ctx)
ctx.update({
'voucher_special_currency': voucher.payment_rate_currency_id and voucher.payment_rate_currency_id.id or False,
'voucher_special_currency_rate': voucher.currency_id.rate * voucher.payment_rate,})
res[voucher.id] = self.pool.get('res.currency').compute(cr, uid, voucher.currency_id.id, voucher.company_id.currency_id.id, voucher.amount, context=ctx)
return res
def _get_currency_help_label(self, cr, uid, currency_id, payment_rate, payment_rate_currency_id, context=None):
"""
This function builds a string to help the users to understand the behavior of the payment rate fields they can specify on the voucher.
This string is only used to improve the usability in the voucher form view and has no other effect.
:param currency_id: the voucher currency
:type currency_id: integer
:param payment_rate: the value of the payment_rate field of the voucher
:type payment_rate: float
:param payment_rate_currency_id: the value of the payment_rate_currency_id field of the voucher
:type payment_rate_currency_id: integer
:return: translated string giving a tip on what's the effect of the current payment rate specified
:rtype: str
"""
rml_parser = report_sxw.rml_parse(cr, uid, 'currency_help_label', context=context)
currency_pool = self.pool.get('res.currency')
currency_str = payment_rate_str = ''
if currency_id:
currency_str = rml_parser.formatLang(1, currency_obj=currency_pool.browse(cr, uid, currency_id, context=context))
if payment_rate_currency_id:
payment_rate_str = rml_parser.formatLang(payment_rate, currency_obj=currency_pool.browse(cr, uid, payment_rate_currency_id, context=context))
currency_help_label = _('At the operation date, the exchange rate was\n%s = %s') % (currency_str, payment_rate_str)
return currency_help_label
def _fnct_currency_help_label(self, cr, uid, ids, name, args, context=None):
res = {}
for voucher in self.browse(cr, uid, ids, context=context):
res[voucher.id] = self._get_currency_help_label(cr, uid, voucher.currency_id.id, voucher.payment_rate, voucher.payment_rate_currency_id.id, context=context)
return res
_name = 'account.voucher'
_description = 'Accounting Voucher'
_inherit = ['mail.thread']
_order = "date desc, id desc"
_columns = {
'type':fields.selection([
('sale','Sale'),
('purchase','Purchase'),
('payment','Payment'),
('receipt','Receipt'),
],'Default Type', readonly=True, states={'draft':[('readonly',False)]}),
'name':fields.char('Memo', readonly=True, states={'draft':[('readonly',False)]}),
'date':fields.date('Date', readonly=True, select=True, states={'draft':[('readonly',False)]},
help="Effective date for accounting entries", copy=False),
'journal_id':fields.many2one('account.journal', 'Journal', required=True, readonly=True, states={'draft':[('readonly',False)]}),
'account_id':fields.many2one('account.account', 'Account', required=True, readonly=True, states={'draft':[('readonly',False)]}),
'line_ids':fields.one2many('account.voucher.line', 'voucher_id', 'Voucher Lines',
readonly=True, copy=True,
states={'draft':[('readonly',False)]}),
'line_cr_ids':fields.one2many('account.voucher.line','voucher_id','Credits',
domain=[('type','=','cr')], context={'default_type':'cr'}, readonly=True, states={'draft':[('readonly',False)]}),
'line_dr_ids':fields.one2many('account.voucher.line','voucher_id','Debits',
domain=[('type','=','dr')], context={'default_type':'dr'}, readonly=True, states={'draft':[('readonly',False)]}),
'period_id': fields.many2one('account.period', 'Period', required=True, readonly=True, states={'draft':[('readonly',False)]}),
'narration':fields.text('Notes', readonly=True, states={'draft':[('readonly',False)]}),
'currency_id': fields.function(_get_journal_currency, type='many2one', relation='res.currency', string='Currency', readonly=True, required=True),
'company_id': fields.many2one('res.company', 'Company', required=True, readonly=True, states={'draft':[('readonly',False)]}),
'state':fields.selection(
[('draft','Draft'),
('cancel','Cancelled'),
('proforma','Pro-forma'),
('posted','Posted')
], 'Status', readonly=True, track_visibility='onchange', copy=False,
            help=' * The \'Draft\' status is used when a user is encoding a new and unconfirmed voucher. \
                        \n* The \'Pro-forma\' status is used when the voucher is in pro-forma state: it does not have a voucher number yet. \
                        \n* The \'Posted\' status is used when the user confirms the voucher: a voucher number is generated and the voucher entries are created in the accounts. \
                        \n* The \'Cancelled\' status is used when the user cancels the voucher.'),
'amount': fields.float('Total', digits_compute=dp.get_precision('Account'), required=True, readonly=True, states={'draft':[('readonly',False)]}),
'tax_amount':fields.float('Tax Amount', digits_compute=dp.get_precision('Account'), readonly=True),
'reference': fields.char('Reference', readonly=True, states={'draft':[('readonly',False)]},
help="Transaction reference number.", copy=False),
'number': fields.char('Number', readonly=True, copy=False),
'move_id':fields.many2one('account.move', 'Account Entry', copy=False),
'move_ids': fields.related('move_id','line_id', type='one2many', relation='account.move.line', string='Journal Items', readonly=True),
'partner_id':fields.many2one('res.partner', 'Partner', change_default=1, readonly=True, states={'draft':[('readonly',False)]}),
'audit': fields.related('move_id','to_check', type='boolean', help='Check this box if you are unsure of that journal entry and if you want to note it as \'to be reviewed\' by an accounting expert.', relation='account.move', string='To Review'),
'paid': fields.function(_check_paid, string='Paid', type='boolean', help="The Voucher has been totally paid."),
'pay_now':fields.selection([
('pay_now','Pay Directly'),
('pay_later','Pay Later or Group Funds'),
],'Payment', select=True, readonly=True, states={'draft':[('readonly',False)]}),
'tax_id': fields.many2one('account.tax', 'Tax', readonly=True, states={'draft':[('readonly',False)]}, domain=[('price_include','=', False)], help="Only for tax excluded from price"),
'pre_line':fields.boolean('Previous Payments ?', required=False),
'date_due': fields.date('Due Date', readonly=True, select=True, states={'draft':[('readonly',False)]}),
'payment_option':fields.selection([
('without_writeoff', 'Keep it open'),
('with_writeoff', 'Reconcile payment balance'),
], 'Payment Difference', required=True, readonly=True, states={'draft': [('readonly', False)]}, help="This field helps you to choose what you want to do with the eventual difference between the paid amount and the sum of allocated amounts. You can either choose to keep open this difference on the partner's account, or reconcile it with the payment(s)"),
'writeoff_acc_id': fields.many2one('account.account', 'Counterpart Account', readonly=True, states={'draft': [('readonly', False)]}),
'comment': fields.char('Counterpart Comment', required=True, readonly=True, states={'draft': [('readonly', False)]}),
'analytic_id': fields.many2one('account.analytic.account','Write-Off Analytic Account', readonly=True, states={'draft': [('readonly', False)]}),
'writeoff_amount': fields.function(_get_writeoff_amount, string='Difference Amount', type='float', readonly=True, help="Computed as the difference between the amount stated in the voucher and the sum of allocation on the voucher lines."),
'payment_rate_currency_id': fields.many2one('res.currency', 'Payment Rate Currency', required=True, readonly=True, states={'draft':[('readonly',False)]}),
'payment_rate': fields.float('Exchange Rate', digits=(12,6), required=True, readonly=True, states={'draft': [('readonly', False)]},
help='The specific rate that will be used, in this voucher, between the selected currency (in \'Payment Rate Currency\' field) and the voucher currency.'),
'paid_amount_in_company_currency': fields.function(_paid_amount_in_company_currency, string='Paid Amount in Company Currency', type='float', readonly=True),
        'is_multi_currency': fields.boolean('Multi Currency Voucher', help='Field for internal purposes only, indicating whether the voucher is a multi-currency one or not'),
        'currency_help_label': fields.function(_fnct_currency_help_label, type='text', string="Helping Sentence", help="This sentence helps you understand the effect of the payment rate you specify, by showing the resulting conversion"),
}
_defaults = {
'period_id': _get_period,
'partner_id': _get_partner,
'journal_id':_get_journal,
'currency_id': _get_currency,
'reference': _get_reference,
'narration':_get_narration,
'amount': _get_amount,
'type':_get_type,
'state': 'draft',
'pay_now': 'pay_now',
'name': '',
'date': lambda *a: time.strftime('%Y-%m-%d'),
'company_id': lambda self,cr,uid,c: self.pool.get('res.company')._company_default_get(cr, uid, 'account.voucher',context=c),
'tax_id': _get_tax,
'payment_option': 'without_writeoff',
'comment': _('Write-Off'),
'payment_rate': 1.0,
'payment_rate_currency_id': _get_payment_rate_currency,
}
def compute_tax(self, cr, uid, ids, context=None):
tax_pool = self.pool.get('account.tax')
partner_pool = self.pool.get('res.partner')
position_pool = self.pool.get('account.fiscal.position')
voucher_line_pool = self.pool.get('account.voucher.line')
voucher_pool = self.pool.get('account.voucher')
if context is None: context = {}
for voucher in voucher_pool.browse(cr, uid, ids, context=context):
voucher_amount = 0.0
for line in voucher.line_ids:
voucher_amount += line.untax_amount or line.amount
line.amount = line.untax_amount or line.amount
voucher_line_pool.write(cr, uid, [line.id], {'amount':line.amount, 'untax_amount':line.untax_amount})
if not voucher.tax_id:
self.write(cr, uid, [voucher.id], {'amount':voucher_amount, 'tax_amount':0.0})
continue
tax = [tax_pool.browse(cr, uid, voucher.tax_id.id, context=context)]
partner = partner_pool.browse(cr, uid, voucher.partner_id.id, context=context) or False
taxes = position_pool.map_tax(cr, uid, partner and partner.property_account_position or False, tax)
tax = tax_pool.browse(cr, uid, taxes, context=context)
total = voucher_amount
total_tax = 0.0
if not tax[0].price_include:
for line in voucher.line_ids:
for tax_line in tax_pool.compute_all(cr, uid, tax, line.amount, 1).get('taxes', []):
total_tax += tax_line.get('amount', 0.0)
total += total_tax
else:
for line in voucher.line_ids:
line_total = 0.0
line_tax = 0.0
for tax_line in tax_pool.compute_all(cr, uid, tax, line.untax_amount or line.amount, 1).get('taxes', []):
line_tax += tax_line.get('amount', 0.0)
line_total += tax_line.get('price_unit')
total_tax += line_tax
untax_amount = line.untax_amount or line.amount
voucher_line_pool.write(cr, uid, [line.id], {'amount':line_total, 'untax_amount':untax_amount})
self.write(cr, uid, [voucher.id], {'amount':total, 'tax_amount':total_tax})
return True
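    # Illustrative figures for the two branches above (assumed 10% tax on a 100.0 line):
    # with a price-excluded tax the voucher total becomes 110.0 and tax_amount 10.0;
    # with a price-included tax the line amounts are rewritten from compute_all(), the
    # total stays 100.0 and tax_amount carries the ~9.09 extracted from it.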
def onchange_price(self, cr, uid, ids, line_ids, tax_id, partner_id=False, context=None):
context = context or {}
tax_pool = self.pool.get('account.tax')
partner_pool = self.pool.get('res.partner')
position_pool = self.pool.get('account.fiscal.position')
if not line_ids:
line_ids = []
res = {
'tax_amount': False,
'amount': False,
}
voucher_total = 0.0
# resolve the list of commands into a list of dicts
line_ids = self.resolve_2many_commands(cr, uid, 'line_ids', line_ids, ['amount'], context)
total_tax = 0.0
for line in line_ids:
line_amount = 0.0
line_amount = line.get('amount',0.0)
if tax_id:
tax = [tax_pool.browse(cr, uid, tax_id, context=context)]
if partner_id:
partner = partner_pool.browse(cr, uid, partner_id, context=context) or False
taxes = position_pool.map_tax(cr, uid, partner and partner.property_account_position or False, tax)
tax = tax_pool.browse(cr, uid, taxes, context=context)
if not tax[0].price_include:
for tax_line in tax_pool.compute_all(cr, uid, tax, line_amount, 1).get('taxes', []):
total_tax += tax_line.get('amount')
voucher_total += line_amount
total = voucher_total + total_tax
res.update({
'amount': total or voucher_total,
'tax_amount': total_tax
})
return {
'value': res
}
def onchange_term_id(self, cr, uid, ids, term_id, amount):
term_pool = self.pool.get('account.payment.term')
terms = False
due_date = False
default = {'date_due':False}
if term_id and amount:
terms = term_pool.compute(cr, uid, term_id, amount)
if terms:
due_date = terms[-1][0]
default.update({
'date_due':due_date
})
return {'value':default}
def onchange_journal_voucher(self, cr, uid, ids, line_ids=False, tax_id=False, price=0.0, partner_id=False, journal_id=False, ttype=False, company_id=False, context=None):
"""price
Returns a dict that contains new values and context
@param partner_id: latest value from user input for field partner_id
@param args: other arguments
@param context: context arguments, like lang, time zone
@return: Returns a dict which contains new values, and context
"""
default = {
'value':{},
}
if not partner_id or not journal_id:
return default
partner_pool = self.pool.get('res.partner')
journal_pool = self.pool.get('account.journal')
journal = journal_pool.browse(cr, uid, journal_id, context=context)
partner = partner_pool.browse(cr, uid, partner_id, context=context)
account_id = False
tr_type = False
if journal.type in ('sale','sale_refund'):
account_id = partner.property_account_receivable.id
tr_type = 'sale'
elif journal.type in ('purchase', 'purchase_refund','expense'):
account_id = partner.property_account_payable.id
tr_type = 'purchase'
else:
if not journal.default_credit_account_id or not journal.default_debit_account_id:
raise UserError(_('Please define default credit/debit accounts on the journal "%s".') % (journal.name))
account_id = journal.default_credit_account_id.id or journal.default_debit_account_id.id
tr_type = 'receipt'
default['value']['account_id'] = account_id
default['value']['type'] = ttype or tr_type
vals = self.onchange_journal(cr, uid, ids, journal_id, line_ids, tax_id, partner_id, time.strftime('%Y-%m-%d'), price, ttype, company_id, context)
default['value'].update(vals.get('value'))
return default
def onchange_rate(self, cr, uid, ids, rate, amount, currency_id, payment_rate_currency_id, company_id, context=None):
res = {'value': {'paid_amount_in_company_currency': amount, 'currency_help_label': self._get_currency_help_label(cr, uid, currency_id, rate, payment_rate_currency_id, context=context)}}
if rate and amount and currency_id:
company_currency = self.pool.get('res.company').browse(cr, uid, company_id, context=context).currency_id
#context should contain the date, the payment currency and the payment rate specified on the voucher
amount_in_company_currency = self.pool.get('res.currency').compute(cr, uid, currency_id, company_currency.id, amount, context=context)
res['value']['paid_amount_in_company_currency'] = amount_in_company_currency
return res
def onchange_amount(self, cr, uid, ids, amount, rate, partner_id, journal_id, currency_id, ttype, date, payment_rate_currency_id, company_id, context=None):
if context is None:
context = {}
ctx = context.copy()
ctx.update({'date': date})
#read the voucher rate with the right date in the context
currency_id = currency_id or self.pool.get('res.company').browse(cr, uid, company_id, context=ctx).currency_id.id
voucher_rate = self.pool.get('res.currency').read(cr, uid, [currency_id], ['rate'], context=ctx)[0]['rate']
ctx.update({
'voucher_special_currency': payment_rate_currency_id,
'voucher_special_currency_rate': rate * voucher_rate})
res = self.recompute_voucher_lines(cr, uid, ids, partner_id, journal_id, amount, currency_id, ttype, date, context=ctx)
vals = self.onchange_rate(cr, uid, ids, rate, amount, currency_id, payment_rate_currency_id, company_id, context=ctx)
for key in vals.keys():
res[key].update(vals[key])
return res
def recompute_payment_rate(self, cr, uid, ids, vals, currency_id, date, ttype, journal_id, amount, context=None):
if context is None:
context = {}
#on change of the journal, we need to set also the default value for payment_rate and payment_rate_currency_id
currency_obj = self.pool.get('res.currency')
journal = self.pool.get('account.journal').browse(cr, uid, journal_id, context=context)
company_id = journal.company_id.id
payment_rate = 1.0
currency_id = currency_id or journal.company_id.currency_id.id
payment_rate_currency_id = currency_id
ctx = context.copy()
ctx.update({'date': date})
o2m_to_loop = False
if ttype == 'receipt':
o2m_to_loop = 'line_cr_ids'
elif ttype == 'payment':
o2m_to_loop = 'line_dr_ids'
if o2m_to_loop and 'value' in vals and o2m_to_loop in vals['value']:
for voucher_line in vals['value'][o2m_to_loop]:
if not isinstance(voucher_line, dict):
continue
if voucher_line['currency_id'] != currency_id:
# we take as default value for the payment_rate_currency_id, the currency of the first invoice that
# is not in the voucher currency
payment_rate_currency_id = voucher_line['currency_id']
tmp = currency_obj.browse(cr, uid, payment_rate_currency_id, context=ctx).rate
payment_rate = tmp / currency_obj.browse(cr, uid, currency_id, context=ctx).rate
break
vals['value'].update({
'payment_rate': payment_rate,
'currency_id': currency_id,
'payment_rate_currency_id': payment_rate_currency_id
})
#read the voucher rate with the right date in the context
voucher_rate = self.pool.get('res.currency').read(cr, uid, [currency_id], ['rate'], context=ctx)[0]['rate']
ctx.update({
'voucher_special_currency_rate': payment_rate * voucher_rate,
'voucher_special_currency': payment_rate_currency_id})
res = self.onchange_rate(cr, uid, ids, payment_rate, amount, currency_id, payment_rate_currency_id, company_id, context=ctx)
for key in res.keys():
vals[key].update(res[key])
return vals
def basic_onchange_partner(self, cr, uid, ids, partner_id, journal_id, ttype, context=None):
partner_pool = self.pool.get('res.partner')
journal_pool = self.pool.get('account.journal')
res = {'value': {'account_id': False}}
if not partner_id or not journal_id:
return res
journal = journal_pool.browse(cr, uid, journal_id, context=context)
partner = partner_pool.browse(cr, uid, partner_id, context=context)
account_id = False
if journal.type in ('sale','sale_refund'):
account_id = partner.property_account_receivable.id
elif journal.type in ('purchase', 'purchase_refund','expense'):
account_id = partner.property_account_payable.id
else:
account_id = journal.default_credit_account_id.id or journal.default_debit_account_id.id
res['value']['account_id'] = account_id
return res
def onchange_partner_id(self, cr, uid, ids, partner_id, journal_id, amount, currency_id, ttype, date, context=None):
if not journal_id:
return {}
if context is None:
context = {}
#TODO: comment me and use me directly in the sales/purchases views
res = self.basic_onchange_partner(cr, uid, ids, partner_id, journal_id, ttype, context=context)
if ttype in ['sale', 'purchase']:
return res
ctx = context.copy()
# not passing the payment_rate currency and the payment_rate in the context but it's ok because they are reset in recompute_payment_rate
ctx.update({'date': date})
vals = self.recompute_voucher_lines(cr, uid, ids, partner_id, journal_id, amount, currency_id, ttype, date, context=ctx)
vals2 = self.recompute_payment_rate(cr, uid, ids, vals, currency_id, date, ttype, journal_id, amount, context=context)
for key in vals.keys():
res[key].update(vals[key])
for key in vals2.keys():
res[key].update(vals2[key])
#TODO: can probably be removed now
        #TODO: onchange_partner_id() should not return [pre_line, line_dr_ids, payment_rate...] for type sale, nor
        # [pre_line, line_cr_ids, payment_rate...] for type purchase.
        # We should definitely split the account.voucher object in two and make distinct on_change functions. In the
        # meantime, the lines below must stay because these fields aren't present in the view, which crashes if the
        # onchange returns a value for them.
if ttype == 'sale':
del(res['value']['line_dr_ids'])
del(res['value']['pre_line'])
del(res['value']['payment_rate'])
elif ttype == 'purchase':
del(res['value']['line_cr_ids'])
del(res['value']['pre_line'])
del(res['value']['payment_rate'])
return res
def recompute_voucher_lines(self, cr, uid, ids, partner_id, journal_id, price, currency_id, ttype, date, context=None):
"""
Returns a dict that contains new values and context
@param partner_id: latest value from user input for field partner_id
@param args: other arguments
@param context: context arguments, like lang, time zone
@return: Returns a dict which contains new values, and context
"""
def _remove_noise_in_o2m():
"""if the line is partially reconciled, then we must pay attention to display it only once and
in the good o2m.
This function returns True if the line is considered as noise and should not be displayed
"""
if line.reconcile_partial_id:
if currency_id == line.currency_id.id:
if line.amount_residual_currency <= 0:
return True
else:
if line.amount_residual <= 0:
return True
return False
if context is None:
context = {}
context_multi_currency = context.copy()
currency_pool = self.pool.get('res.currency')
move_line_pool = self.pool.get('account.move.line')
partner_pool = self.pool.get('res.partner')
journal_pool = self.pool.get('account.journal')
line_pool = self.pool.get('account.voucher.line')
#set default values
default = {
'value': {'line_dr_ids': [], 'line_cr_ids': [], 'pre_line': False},
}
# drop existing lines
line_ids = ids and line_pool.search(cr, uid, [('voucher_id', '=', ids[0])])
for line in line_pool.browse(cr, uid, line_ids, context=context):
if line.type == 'cr':
default['value']['line_cr_ids'].append((2, line.id))
else:
default['value']['line_dr_ids'].append((2, line.id))
if not partner_id or not journal_id:
return default
journal = journal_pool.browse(cr, uid, journal_id, context=context)
partner = partner_pool.browse(cr, uid, partner_id, context=context)
currency_id = currency_id or journal.company_id.currency_id.id
total_credit = 0.0
total_debit = 0.0
account_type = None
if context.get('account_id'):
account_type = self.pool['account.account'].browse(cr, uid, context['account_id'], context=context).type
if ttype == 'payment':
if not account_type:
account_type = 'payable'
total_debit = price or 0.0
else:
total_credit = price or 0.0
if not account_type:
account_type = 'receivable'
if not context.get('move_line_ids', False):
ids = move_line_pool.search(cr, uid, [('state','=','valid'), ('account_id.type', '=', account_type), ('reconcile_id', '=', False), ('partner_id', '=', partner_id)], context=context)
else:
ids = context['move_line_ids']
invoice_id = context.get('invoice_id', False)
company_currency = journal.company_id.currency_id.id
move_lines_found = []
        #order the lines oldest first
ids.reverse()
account_move_lines = move_line_pool.browse(cr, uid, ids, context=context)
#compute the total debit/credit and look for a matching open amount or invoice
for line in account_move_lines:
if _remove_noise_in_o2m():
continue
if invoice_id:
if line.invoice.id == invoice_id:
#if the invoice linked to the voucher line is equal to the invoice_id in context
#then we assign the amount on that line, whatever the other voucher lines
move_lines_found.append(line.id)
elif currency_id == company_currency:
                #otherwise the treatment is the same but with other field names
if line.amount_residual == price:
                    #if the residual amount equals the voucher amount, we assign it to that voucher
                    #line, whatever the other voucher lines
move_lines_found.append(line.id)
break
                #otherwise we will split the voucher amount on each line (oldest first)
total_credit += line.credit or 0.0
total_debit += line.debit or 0.0
elif currency_id == line.currency_id.id:
if line.amount_residual_currency == price:
move_lines_found.append(line.id)
break
total_credit += line.credit and line.amount_currency or 0.0
total_debit += line.debit and line.amount_currency or 0.0
remaining_amount = price
#voucher line creation
for line in account_move_lines:
if _remove_noise_in_o2m():
continue
if line.currency_id and currency_id == line.currency_id.id:
amount_original = abs(line.amount_currency)
amount_unreconciled = abs(line.amount_residual_currency)
else:
#always use the amount booked in the company currency as the basis of the conversion into the voucher currency
amount_original = currency_pool.compute(cr, uid, company_currency, currency_id, line.credit or line.debit or 0.0, context=context_multi_currency)
amount_unreconciled = currency_pool.compute(cr, uid, company_currency, currency_id, abs(line.amount_residual), context=context_multi_currency)
line_currency_id = line.currency_id and line.currency_id.id or company_currency
rs = {
'name':line.move_id.name,
'type': line.credit and 'dr' or 'cr',
'move_line_id':line.id,
'account_id':line.account_id.id,
'amount_original': amount_original,
'amount': (line.id in move_lines_found) and min(abs(remaining_amount), amount_unreconciled) or 0.0,
'date_original':line.date,
'date_due':line.date_maturity,
'amount_unreconciled': amount_unreconciled,
'currency_id': line_currency_id,
}
remaining_amount -= rs['amount']
            #in case a corresponding move_line hasn't been found, we now try to assign the voucher amount
            #on existing invoices: we split the voucher amount oldest first, but only over lines in the same currency
if not move_lines_found:
if currency_id == line_currency_id:
if line.credit:
amount = min(amount_unreconciled, abs(total_debit))
rs['amount'] = amount
total_debit -= amount
else:
amount = min(amount_unreconciled, abs(total_credit))
rs['amount'] = amount
total_credit -= amount
if rs['amount_unreconciled'] == rs['amount']:
rs['reconcile'] = True
if rs['type'] == 'cr':
default['value']['line_cr_ids'].append(rs)
else:
default['value']['line_dr_ids'].append(rs)
if len(default['value']['line_cr_ids']) > 0:
default['value']['pre_line'] = 1
elif len(default['value']['line_dr_ids']) > 0:
default['value']['pre_line'] = 1
default['value']['writeoff_amount'] = self._compute_writeoff_amount(cr, uid, default['value']['line_dr_ids'], default['value']['line_cr_ids'], price, ttype)
return default
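    # Illustrative allocation (assumed figures): paying 150.0 against two open customer
    # invoices of 100.0 (oldest) and 80.0 yields voucher lines of 100.0 (reconcile=True)
    # and 50.0; the remaining 30.0 on the second invoice stays unreconciled.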
def onchange_payment_rate_currency(self, cr, uid, ids, currency_id, payment_rate, payment_rate_currency_id, date, amount, company_id, context=None):
if context is None:
context = {}
res = {'value': {}}
if currency_id:
#set the default payment rate of the voucher and compute the paid amount in company currency
ctx = context.copy()
ctx.update({'date': date})
#read the voucher rate with the right date in the context
voucher_rate = self.pool.get('res.currency').read(cr, uid, [currency_id], ['rate'], context=ctx)[0]['rate']
ctx.update({
'voucher_special_currency_rate': payment_rate * voucher_rate,
'voucher_special_currency': payment_rate_currency_id})
vals = self.onchange_rate(cr, uid, ids, payment_rate, amount, currency_id, payment_rate_currency_id, company_id, context=ctx)
for key in vals.keys():
res[key].update(vals[key])
return res
def onchange_date(self, cr, uid, ids, date, currency_id, payment_rate_currency_id, amount, company_id, context=None):
"""
@param date: latest value from user input for field date
@param args: other arguments
@param context: context arguments, like lang, time zone
@return: Returns a dict which contains new values, and context
"""
if context is None:
context ={}
res = {'value': {}}
#set the period of the voucher
period_pool = self.pool.get('account.period')
currency_obj = self.pool.get('res.currency')
ctx = context.copy()
ctx.update({'company_id': company_id, 'account_period_prefer_normal': True})
voucher_currency_id = currency_id or self.pool.get('res.company').browse(cr, uid, company_id, context=ctx).currency_id.id
pids = period_pool.find(cr, uid, date, context=ctx)
if pids:
res['value'].update({'period_id':pids[0]})
if payment_rate_currency_id:
ctx.update({'date': date})
payment_rate = 1.0
if payment_rate_currency_id != currency_id:
tmp = currency_obj.browse(cr, uid, payment_rate_currency_id, context=ctx).rate
payment_rate = tmp / currency_obj.browse(cr, uid, voucher_currency_id, context=ctx).rate
vals = self.onchange_payment_rate_currency(cr, uid, ids, voucher_currency_id, payment_rate, payment_rate_currency_id, date, amount, company_id, context=context)
vals['value'].update({'payment_rate': payment_rate})
for key in vals.keys():
res[key].update(vals[key])
return res
def onchange_journal(self, cr, uid, ids, journal_id, line_ids, tax_id, partner_id, date, amount, ttype, company_id, context=None):
if context is None:
context = {}
if not journal_id:
return False
journal_pool = self.pool.get('account.journal')
journal = journal_pool.browse(cr, uid, journal_id, context=context)
account_id = journal.default_credit_account_id or journal.default_debit_account_id
tax_id = False
if account_id and account_id.tax_ids:
tax_id = account_id.tax_ids[0].id
vals = {'value':{} }
if ttype in ('sale', 'purchase'):
vals = self.onchange_price(cr, uid, ids, line_ids, tax_id, partner_id, context)
vals['value'].update({'tax_id':tax_id,'amount': amount})
currency_id = False
if journal.currency:
currency_id = journal.currency.id
else:
currency_id = journal.company_id.currency_id.id
period_ids = self.pool['account.period'].find(cr, uid, context=dict(context, company_id=company_id))
vals['value'].update({
'currency_id': currency_id,
'payment_rate_currency_id': currency_id,
'period_id': period_ids and period_ids[0] or False
})
        #in case we want to register the payment directly from an invoice, it's confusing to allow switching the journal
#without seeing that the amount is expressed in the journal currency, and not in the invoice currency. So to avoid
#this common mistake, we simply reset the amount to 0 if the currency is not the invoice currency.
if context.get('payment_expected_currency') and currency_id != context.get('payment_expected_currency'):
vals['value']['amount'] = 0
amount = 0
if partner_id:
res = self.onchange_partner_id(cr, uid, ids, partner_id, journal_id, amount, currency_id, ttype, date, context)
for key in res.keys():
vals[key].update(res[key])
return vals
def onchange_company(self, cr, uid, ids, partner_id, journal_id, currency_id, company_id, context=None):
"""
If the company changes, check that the journal is in the right company.
If not, fetch a new journal.
"""
journal_pool = self.pool['account.journal']
journal = journal_pool.browse(cr, uid, journal_id, context=context)
if journal.company_id.id != company_id:
# can not guess type of journal, better remove it
return {'value': {'journal_id': False}}
return {}
def button_proforma_voucher(self, cr, uid, ids, context=None):
self.signal_workflow(cr, uid, ids, 'proforma_voucher')
return {'type': 'ir.actions.act_window_close'}
def proforma_voucher(self, cr, uid, ids, context=None):
self.action_move_line_create(cr, uid, ids, context=context)
return True
def action_cancel_draft(self, cr, uid, ids, context=None):
self.create_workflow(cr, uid, ids)
self.write(cr, uid, ids, {'state':'draft'})
return True
def cancel_voucher(self, cr, uid, ids, context=None):
reconcile_pool = self.pool.get('account.move.reconcile')
move_pool = self.pool.get('account.move')
move_line_pool = self.pool.get('account.move.line')
for voucher in self.browse(cr, uid, ids, context=context):
# refresh to make sure you don't unlink an already removed move
voucher.refresh()
for line in voucher.move_ids:
# refresh to make sure you don't unreconcile an already unreconciled entry
line.refresh()
if line.reconcile_id:
move_lines = [move_line.id for move_line in line.reconcile_id.line_id]
move_lines.remove(line.id)
reconcile_pool.unlink(cr, uid, [line.reconcile_id.id])
if len(move_lines) >= 2:
move_line_pool.reconcile_partial(cr, uid, move_lines, 'auto',context=context)
if voucher.move_id:
move_pool.button_cancel(cr, uid, [voucher.move_id.id])
move_pool.unlink(cr, uid, [voucher.move_id.id])
res = {
'state':'cancel',
'move_id':False,
}
self.write(cr, uid, ids, res)
return True
def unlink(self, cr, uid, ids, context=None):
for t in self.read(cr, uid, ids, ['state'], context=context):
if t['state'] not in ('draft', 'cancel'):
raise UserError(_('Cannot delete voucher(s) which are already opened or paid.'))
return super(account_voucher, self).unlink(cr, uid, ids, context=context)
def onchange_payment(self, cr, uid, ids, pay_now, journal_id, partner_id, ttype='sale'):
res = {}
if not partner_id:
return res
res = {}
partner_pool = self.pool.get('res.partner')
journal_pool = self.pool.get('account.journal')
if pay_now == 'pay_later':
partner = partner_pool.browse(cr, uid, partner_id)
journal = journal_pool.browse(cr, uid, journal_id)
if journal.type in ('sale','sale_refund'):
account_id = partner.property_account_receivable.id
elif journal.type in ('purchase', 'purchase_refund','expense'):
account_id = partner.property_account_payable.id
else:
account_id = journal.default_credit_account_id.id or journal.default_debit_account_id.id
if account_id:
res['account_id'] = account_id
return {'value':res}
def _sel_context(self, cr, uid, voucher_id, context=None):
"""
        Select the context to use, depending on whether the voucher needs to be multi-currency or not.
        :param voucher_id: Id of the current voucher
        :return: The returned context is the same as the one given in parameter if the voucher currency is the same
                 as the company currency; otherwise it's a copy of the parameter with an extra key 'date' containing
                 the date of the voucher.
:rtype: dict
"""
company_currency = self._get_company_currency(cr, uid, voucher_id, context)
current_currency = self._get_current_currency(cr, uid, voucher_id, context)
if current_currency <> company_currency:
context_multi_currency = context.copy()
voucher = self.pool.get('account.voucher').browse(cr, uid, voucher_id, context)
context_multi_currency.update({'date': voucher.date})
return context_multi_currency
return context
def first_move_line_get(self, cr, uid, voucher_id, move_id, company_currency, current_currency, context=None):
'''
        Return a dict used to create the first account move line of the given voucher.
        :param voucher_id: Id of the voucher for which we are creating the account move.
        :param move_id: Id of the account move where this line will be added.
        :param company_currency: id of the currency of the company to which the voucher belongs
        :param current_currency: id of the currency of the voucher
:return: mapping between fieldname and value of account move line to create
:rtype: dict
'''
voucher = self.pool.get('account.voucher').browse(cr,uid,voucher_id,context)
debit = credit = 0.0
        # TODO: is there any other alternative than the voucher type ??
        # ANSWER: We can have payment and receipt "In Advance".
        # TODO: Make this logic available.
        # - for sale and purchase we have the type, but for payment and receipt we cannot tell
        #   from the bank/cash journal alone whether it is a payment or a receipt
if voucher.type in ('purchase', 'payment'):
credit = voucher.paid_amount_in_company_currency
elif voucher.type in ('sale', 'receipt'):
debit = voucher.paid_amount_in_company_currency
if debit < 0: credit = -debit; debit = 0.0
if credit < 0: debit = -credit; credit = 0.0
sign = debit - credit < 0 and -1 or 1
#set the first line of the voucher
move_line = {
'name': voucher.name or '/',
'debit': debit,
'credit': credit,
'account_id': voucher.account_id.id,
'move_id': move_id,
'journal_id': voucher.journal_id.id,
'period_id': voucher.period_id.id,
'partner_id': voucher.partner_id.id,
'currency_id': company_currency <> current_currency and current_currency or False,
'amount_currency': (sign * abs(voucher.amount) # amount < 0 for refunds
if company_currency != current_currency else 0.0),
'date': voucher.date,
'date_maturity': voucher.date_due
}
return move_line
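    # Sign-handling sketch (illustrative): a sale voucher encoded with a negative amount
    # (a refund) first yields debit = -50.0; the normalization above turns it into
    # credit = 50.0 / debit = 0.0, and amount_currency keeps the negative sign (sign = -1)
    # on multi-currency moves.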
def account_move_get(self, cr, uid, voucher_id, context=None):
'''
        This method prepares the creation of the account move related to the given voucher.
        :param voucher_id: Id of the voucher for which we are creating the account move.
:return: mapping between fieldname and value of account move to create
:rtype: dict
'''
seq_obj = self.pool.get('ir.sequence')
voucher = self.pool.get('account.voucher').browse(cr,uid,voucher_id,context)
if voucher.number:
name = voucher.number
elif voucher.journal_id.sequence_id:
if not voucher.journal_id.sequence_id.active:
                raise UserError(_('Please activate the sequence of the selected journal!'))
c = dict(context)
c.update({'ir_sequence_date': voucher.period_id.date_start})
name = seq_obj.next_by_id(cr, uid, voucher.journal_id.sequence_id.id, context=c)
else:
raise UserError(_('Please define a sequence on the journal.'))
ref = voucher.reference or name
move = {
'name': name,
'journal_id': voucher.journal_id.id,
'narration': voucher.narration,
'date': voucher.date,
'ref': ref,
'period_id': voucher.period_id.id,
}
return move
def _get_exchange_lines(self, cr, uid, line, move_id, amount_residual, company_currency, current_currency, context=None):
'''
        Prepare the two lines in company currency due to the currency rate difference.
        :param line: browse record of the voucher.line for which we want to create the currency rate difference
            accounting entries
        :param move_id: Account move where the move lines will be added.
        :param amount_residual: Amount to be posted.
        :param company_currency: id of the currency of the company to which the voucher belongs
        :param current_currency: id of the currency of the voucher
:return: the account move line and its counterpart to create, depicted as mapping between fieldname and value
:rtype: tuple of dict
'''
if amount_residual > 0:
account_id = line.voucher_id.company_id.expense_currency_exchange_account_id
if not account_id:
model, action_id = self.pool['ir.model.data'].get_object_reference(cr, uid, 'account', 'action_account_form')
msg = _("You should configure the 'Loss Exchange Rate Account' to manage automatically the booking of accounting entries related to differences between exchange rates.")
raise openerp.exceptions.RedirectWarning(msg, action_id, _('Go to the configuration panel'))
else:
account_id = line.voucher_id.company_id.income_currency_exchange_account_id
if not account_id:
model, action_id = self.pool['ir.model.data'].get_object_reference(cr, uid, 'account', 'action_account_form')
msg = _("You should configure the 'Gain Exchange Rate Account' to manage automatically the booking of accounting entries related to differences between exchange rates.")
raise openerp.exceptions.RedirectWarning(msg, action_id, _('Go to the configuration panel'))
# Even if the amount_currency is never filled, we need to pass the foreign currency because otherwise
        # the receivable/payable account may have a secondary currency, which renders this field mandatory
if line.account_id.currency_id:
account_currency_id = line.account_id.currency_id.id
else:
account_currency_id = company_currency <> current_currency and current_currency or False
move_line = {
'journal_id': line.voucher_id.journal_id.id,
'period_id': line.voucher_id.period_id.id,
'name': _('change')+': '+(line.name or '/'),
'account_id': line.account_id.id,
'move_id': move_id,
'partner_id': line.voucher_id.partner_id.id,
'currency_id': account_currency_id,
'amount_currency': 0.0,
'quantity': 1,
'credit': amount_residual > 0 and amount_residual or 0.0,
'debit': amount_residual < 0 and -amount_residual or 0.0,
'date': line.voucher_id.date,
}
move_line_counterpart = {
'journal_id': line.voucher_id.journal_id.id,
'period_id': line.voucher_id.period_id.id,
'name': _('change')+': '+(line.name or '/'),
'account_id': account_id.id,
'move_id': move_id,
'amount_currency': 0.0,
'partner_id': line.voucher_id.partner_id.id,
'currency_id': account_currency_id,
'quantity': 1,
'debit': amount_residual > 0 and amount_residual or 0.0,
'credit': amount_residual < 0 and -amount_residual or 0.0,
'date': line.voucher_id.date,
}
return (move_line, move_line_counterpart)
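    # Illustrative outcome: with amount_residual = +10.0 (a loss), the first line credits
    # the voucher line's account by 10.0 and the counterpart debits the configured
    # 'Loss Exchange Rate Account' by 10.0; a negative residual books the mirror entry
    # on the gain account.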
def _convert_amount(self, cr, uid, amount, voucher_id, context=None):
'''
        This function converts the given amount into the company currency. It takes either the rate set on the
        voucher (if the payment_rate_currency_id is relevant) or the rate encoded in the system.
        :param amount: float. The amount to convert
        :param voucher_id: id of the voucher for which we want the conversion
        :param context: the context to use for the conversion. It may contain the key 'date' set to the voucher date
            field in order to select the right rate to use.
:return: the amount in the currency of the voucher's company
:rtype: float
'''
if context is None:
context = {}
currency_obj = self.pool.get('res.currency')
voucher = self.browse(cr, uid, voucher_id, context=context)
return currency_obj.compute(cr, uid, voucher.currency_id.id, voucher.company_id.currency_id.id, amount, context=context)
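    # Minimal usage sketch (hypothetical ids): pass the voucher date in the context so
    # that the historical rate, or the voucher-specific payment rate, is picked up:
    #   ctx = dict(context or {}, date=voucher.date)
    #   company_amount = self._convert_amount(cr, uid, line.amount, voucher.id, context=ctx)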
def voucher_move_line_create(self, cr, uid, voucher_id, line_total, move_id, company_currency, current_currency, context=None):
'''
        Create one account move line, on the given account move, per voucher line where the amount is not 0.0.
        It returns a tuple (tot_line, rec_lst_ids): the total difference between debit and credit, and
        a list of lists of ids to be reconciled together.
        :param voucher_id: id of the voucher we are working with
        :param line_total: Amount of the first line, which corresponds to the amount we should split among all the voucher lines.
        :param move_id: Account move where those lines will be added.
:param company_currency: id of currency of the company to which the voucher belong
:param current_currency: id of currency of the voucher
:return: Tuple build as (remaining amount not allocated on voucher lines, list of account_move_line created in this method)
:rtype: tuple(float, list of int)
'''
if context is None:
context = {}
move_line_obj = self.pool.get('account.move.line')
currency_obj = self.pool.get('res.currency')
tax_obj = self.pool.get('account.tax')
tot_line = line_total
rec_lst_ids = []
date = self.read(cr, uid, [voucher_id], ['date'], context=context)[0]['date']
ctx = context.copy()
ctx.update({'date': date})
voucher = self.pool.get('account.voucher').browse(cr, uid, voucher_id, context=ctx)
voucher_currency = voucher.journal_id.currency or voucher.company_id.currency_id
ctx.update({
'voucher_special_currency_rate': voucher_currency.rate * voucher.payment_rate ,
'voucher_special_currency': voucher.payment_rate_currency_id and voucher.payment_rate_currency_id.id or False,})
prec = self.pool.get('decimal.precision').precision_get(cr, uid, 'Account')
for line in voucher.line_ids:
#create one move line per voucher line where amount is not 0.0
            # AND (second part of the clause) only if the original move line did not have debit = credit = 0 (which is a legal value)
if not line.amount and not (line.move_line_id and not float_compare(line.move_line_id.debit, line.move_line_id.credit, precision_digits=prec) and not float_compare(line.move_line_id.debit, 0.0, precision_digits=prec)):
continue
# convert the amount set on the voucher line into the currency of the voucher's company
            # this calls res_currency.compute() with the right context, so that it will take either the rate on the voucher if it is relevant or will use the default behaviour
amount = self._convert_amount(cr, uid, line.untax_amount or line.amount, voucher.id, context=ctx)
# if the amount encoded in voucher is equal to the amount unreconciled, we need to compute the
# currency rate difference
if line.amount == line.amount_unreconciled:
if not line.move_line_id:
raise UserError(_("The invoice you are willing to pay is not valid anymore."))
sign = line.type =='dr' and -1 or 1
currency_rate_difference = sign * (line.move_line_id.amount_residual - amount)
else:
currency_rate_difference = 0.0
move_line = {
'journal_id': voucher.journal_id.id,
'period_id': voucher.period_id.id,
'name': line.name or '/',
'account_id': line.account_id.id,
'move_id': move_id,
'partner_id': voucher.partner_id.id,
'currency_id': line.move_line_id and (company_currency <> line.move_line_id.currency_id.id and line.move_line_id.currency_id.id) or False,
'analytic_account_id': line.account_analytic_id and line.account_analytic_id.id or False,
'quantity': 1,
'credit': 0.0,
'debit': 0.0,
'date': voucher.date
}
if amount < 0:
amount = -amount
if line.type == 'dr':
line.type = 'cr'
else:
line.type = 'dr'
if (line.type=='dr'):
tot_line += amount
move_line['debit'] = amount
else:
tot_line -= amount
move_line['credit'] = amount
if voucher.tax_id and voucher.type in ('sale', 'purchase'):
move_line.update({
'account_tax_id': voucher.tax_id.id,
})
if move_line.get('account_tax_id', False):
tax_data = tax_obj.browse(cr, uid, [move_line['account_tax_id']], context=context)[0]
if not (tax_data.base_code_id and tax_data.tax_code_id):
raise UserError(_("You have to configure account base code and account tax code on the '%s' tax!") % (tax_data.name))
# compute the amount in foreign currency
foreign_currency_diff = 0.0
amount_currency = False
if line.move_line_id:
                # We want to set it on the account move line whenever the original line had a foreign currency
if line.move_line_id.currency_id and line.move_line_id.currency_id.id != company_currency:
# we compute the amount in that foreign currency.
if line.move_line_id.currency_id.id == current_currency:
# if the voucher and the voucher line share the same currency, there is no computation to do
sign = (move_line['debit'] - move_line['credit']) < 0 and -1 or 1
amount_currency = sign * (line.amount)
else:
# if the rate is specified on the voucher, it will be used thanks to the special keys in the context
# otherwise we use the rates of the system
amount_currency = currency_obj.compute(cr, uid, company_currency, line.move_line_id.currency_id.id, move_line['debit']-move_line['credit'], context=ctx)
if line.amount == line.amount_unreconciled:
foreign_currency_diff = line.move_line_id.amount_residual_currency - abs(amount_currency)
move_line['amount_currency'] = amount_currency
voucher_line = move_line_obj.create(cr, uid, move_line)
rec_ids = [voucher_line, line.move_line_id.id]
if not currency_obj.is_zero(cr, uid, voucher.company_id.currency_id, currency_rate_difference):
                # Exchange rate difference entry in company currency
exch_lines = self._get_exchange_lines(cr, uid, line, move_id, currency_rate_difference, company_currency, current_currency, context=context)
new_id = move_line_obj.create(cr, uid, exch_lines[0],context)
move_line_obj.create(cr, uid, exch_lines[1], context)
rec_ids.append(new_id)
if line.move_line_id and line.move_line_id.currency_id and not currency_obj.is_zero(cr, uid, line.move_line_id.currency_id, foreign_currency_diff):
                # Exchange rate difference entry in voucher currency
move_line_foreign_currency = {
'journal_id': line.voucher_id.journal_id.id,
'period_id': line.voucher_id.period_id.id,
'name': _('change')+': '+(line.name or '/'),
'account_id': line.account_id.id,
'move_id': move_id,
'partner_id': line.voucher_id.partner_id.id,
'currency_id': line.move_line_id.currency_id.id,
'amount_currency': -1 * foreign_currency_diff,
'quantity': 1,
'credit': 0.0,
'debit': 0.0,
'date': line.voucher_id.date,
}
new_id = move_line_obj.create(cr, uid, move_line_foreign_currency, context=context)
rec_ids.append(new_id)
if line.move_line_id.id:
rec_lst_ids.append(rec_ids)
return (tot_line, rec_lst_ids)
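    # Shape of the returned value (illustrative): (0.0, [[voucher_ml_id, invoice_ml_id], ...])
    # when the voucher is fully allocated; each inner list groups the move line ids that
    # action_move_line_create() will hand to reconcile_partial().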
def writeoff_move_line_get(self, cr, uid, voucher_id, line_total, move_id, name, company_currency, current_currency, context=None):
'''
        Return a dict used to create the write-off move line.
        :param voucher_id: Id of the voucher for which we are creating the account move.
:param line_total: Amount remaining to be allocated on lines.
:param move_id: Id of account move where this line will be added.
:param name: Description of account move line.
:param company_currency: id of currency of the company to which the voucher belong
:param current_currency: id of currency of the voucher
:return: mapping between fieldname and value of account move line to create
:rtype: dict
'''
currency_obj = self.pool.get('res.currency')
move_line = {}
voucher = self.pool.get('account.voucher').browse(cr,uid,voucher_id,context)
current_currency_obj = voucher.currency_id or voucher.journal_id.company_id.currency_id
if not currency_obj.is_zero(cr, uid, current_currency_obj, line_total):
diff = line_total
account_id = False
write_off_name = ''
if voucher.payment_option == 'with_writeoff':
account_id = voucher.writeoff_acc_id.id
write_off_name = voucher.comment
elif voucher.partner_id:
if voucher.type in ('sale', 'receipt'):
account_id = voucher.partner_id.property_account_receivable.id
else:
account_id = voucher.partner_id.property_account_payable.id
else:
# fallback on account of voucher
account_id = voucher.account_id.id
sign = voucher.type == 'payment' and -1 or 1
move_line = {
'name': write_off_name or name,
'account_id': account_id,
'move_id': move_id,
'partner_id': voucher.partner_id.id,
'date': voucher.date,
'credit': diff > 0 and diff or 0.0,
'debit': diff < 0 and -diff or 0.0,
'amount_currency': company_currency <> current_currency and (sign * -1 * voucher.writeoff_amount) or 0.0,
'currency_id': company_currency <> current_currency and current_currency or False,
'analytic_account_id': voucher.analytic_id and voucher.analytic_id.id or False,
}
return move_line
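    # Illustrative outcome: with payment_option == 'with_writeoff' and 2.0 left unallocated
    # on a customer receipt, the returned dict books a 2.0 credit on writeoff_acc_id; with
    # 'without_writeoff' the 2.0 is booked back on the partner's receivable account and
    # stays open there.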
def _get_company_currency(self, cr, uid, voucher_id, context=None):
'''
        Get the currency of the voucher's company.
        :param voucher_id: Id of the voucher whose company currency we want to obtain.
:return: currency id of the company of the voucher
:rtype: int
'''
return self.pool.get('account.voucher').browse(cr,uid,voucher_id,context).journal_id.company_id.currency_id.id
def _get_current_currency(self, cr, uid, voucher_id, context=None):
'''
Get the currency of the voucher.
        :param voucher_id: Id of the voucher whose currency we want to obtain.
:return: currency id of the voucher
:rtype: int
'''
voucher = self.pool.get('account.voucher').browse(cr,uid,voucher_id,context)
return voucher.currency_id.id or self._get_company_currency(cr,uid,voucher.id,context)
def action_move_line_create(self, cr, uid, ids, context=None):
'''
Confirm the vouchers given in ids and create the journal entries for each of them
'''
if context is None:
context = {}
move_pool = self.pool.get('account.move')
move_line_pool = self.pool.get('account.move.line')
for voucher in self.browse(cr, uid, ids, context=context):
local_context = dict(context, force_company=voucher.journal_id.company_id.id)
if voucher.move_id:
continue
company_currency = self._get_company_currency(cr, uid, voucher.id, context)
current_currency = self._get_current_currency(cr, uid, voucher.id, context)
            # we select the context to use depending on whether it's a multicurrency case or not
context = self._sel_context(cr, uid, voucher.id, context)
# But for the operations made by _convert_amount, we always need to give the date in the context
ctx = context.copy()
ctx.update({'date': voucher.date})
# Create the account move record.
move_id = move_pool.create(cr, uid, self.account_move_get(cr, uid, voucher.id, context=context), context=context)
# Get the name of the account_move just created
name = move_pool.browse(cr, uid, move_id, context=context).name
# Create the first line of the voucher
move_line_id = move_line_pool.create(cr, uid, self.first_move_line_get(cr,uid,voucher.id, move_id, company_currency, current_currency, local_context), local_context)
move_line_brw = move_line_pool.browse(cr, uid, move_line_id, context=context)
line_total = move_line_brw.debit - move_line_brw.credit
rec_list_ids = []
if voucher.type == 'sale':
line_total = line_total - self._convert_amount(cr, uid, voucher.tax_amount, voucher.id, context=ctx)
elif voucher.type == 'purchase':
line_total = line_total + self._convert_amount(cr, uid, voucher.tax_amount, voucher.id, context=ctx)
# Create one move line per voucher line where amount is not 0.0
line_total, rec_list_ids = self.voucher_move_line_create(cr, uid, voucher.id, line_total, move_id, company_currency, current_currency, context)
# Create the writeoff line if needed
ml_writeoff = self.writeoff_move_line_get(cr, uid, voucher.id, line_total, move_id, name, company_currency, current_currency, local_context)
if ml_writeoff:
move_line_pool.create(cr, uid, ml_writeoff, local_context)
# We post the voucher.
self.write(cr, uid, [voucher.id], {
'move_id': move_id,
'state': 'posted',
'number': name,
})
if voucher.journal_id.entry_posted:
move_pool.post(cr, uid, [move_id], context={})
# We automatically reconcile the account move lines.
reconcile = False
for rec_ids in rec_list_ids:
if len(rec_ids) >= 2:
reconcile = move_line_pool.reconcile_partial(cr, uid, rec_ids, writeoff_acc_id=voucher.writeoff_acc_id.id, writeoff_period_id=voucher.period_id.id, writeoff_journal_id=voucher.journal_id.id)
return True
def _track_subtype(self, cr, uid, ids, init_values, context=None):
if 'state' in init_values:
return 'account_voucher.mt_voucher_state_change'
return super(account_voucher, self)._track_subtype(cr, uid, ids, init_values, context=context)
class account_voucher_line(osv.osv):
_name = 'account.voucher.line'
_description = 'Voucher Lines'
_order = "move_line_id"
    # If the payment is in the same currency as the invoice, we keep the same amount
# Otherwise, we compute from invoice currency to payment currency
def _compute_balance(self, cr, uid, ids, name, args, context=None):
currency_pool = self.pool.get('res.currency')
rs_data = {}
for line in self.browse(cr, uid, ids, context=context):
ctx = context.copy()
ctx.update({'date': line.voucher_id.date})
voucher_rate = self.pool.get('res.currency').read(cr, uid, line.voucher_id.currency_id.id, ['rate'], context=ctx)['rate']
ctx.update({
'voucher_special_currency': line.voucher_id.payment_rate_currency_id and line.voucher_id.payment_rate_currency_id.id or False,
'voucher_special_currency_rate': line.voucher_id.payment_rate * voucher_rate})
res = {}
company_currency = line.voucher_id.journal_id.company_id.currency_id.id
voucher_currency = line.voucher_id.currency_id and line.voucher_id.currency_id.id or company_currency
move_line = line.move_line_id or False
if not move_line:
res['amount_original'] = 0.0
res['amount_unreconciled'] = 0.0
elif move_line.currency_id and voucher_currency==move_line.currency_id.id:
res['amount_original'] = abs(move_line.amount_currency)
res['amount_unreconciled'] = abs(move_line.amount_residual_currency)
else:
#always use the amount booked in the company currency as the basis of the conversion into the voucher currency
res['amount_original'] = currency_pool.compute(cr, uid, company_currency, voucher_currency, move_line.credit or move_line.debit or 0.0, context=ctx)
res['amount_unreconciled'] = currency_pool.compute(cr, uid, company_currency, voucher_currency, abs(move_line.amount_residual), context=ctx)
rs_data[line.id] = res
return rs_data
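    # Sketch of the three branches above (currencies assumed for
    # illustration):
    #   no move line                          -> both amounts are 0.0
    #   move line booked in the voucher
    #   currency (e.g. both in USD)           -> abs(amount_currency)
    #   any other case (e.g. EUR invoice paid
    #   by a USD voucher)                     -> convert the company-currency
    #                                            debit/credit at the voucher
    #                                            date rate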
def _currency_id(self, cr, uid, ids, name, args, context=None):
'''
This function returns the currency id of a voucher line. It's either the currency of the
associated move line (if any) or the currency of the voucher or the company currency.
'''
res = {}
for line in self.browse(cr, uid, ids, context=context):
move_line = line.move_line_id
if move_line:
res[line.id] = move_line.currency_id and move_line.currency_id.id or move_line.company_id.currency_id.id
else:
res[line.id] = line.voucher_id.currency_id and line.voucher_id.currency_id.id or line.voucher_id.company_id.currency_id.id
return res
_columns = {
'voucher_id':fields.many2one('account.voucher', 'Voucher', required=1, ondelete='cascade'),
'name':fields.char('Description',),
'account_id':fields.many2one('account.account','Account', required=True),
'partner_id':fields.related('voucher_id', 'partner_id', type='many2one', relation='res.partner', string='Partner'),
'untax_amount':fields.float('Untax Amount'),
'amount':fields.float('Amount', digits_compute=dp.get_precision('Account')),
'reconcile': fields.boolean('Full Reconcile'),
'type':fields.selection([('dr','Debit'),('cr','Credit')], 'Dr/Cr'),
'account_analytic_id': fields.many2one('account.analytic.account', 'Analytic Account'),
'move_line_id': fields.many2one('account.move.line', 'Journal Item', copy=False),
'date_original': fields.related('move_line_id','date', type='date', relation='account.move.line', string='Date', readonly=1),
'date_due': fields.related('move_line_id','date_maturity', type='date', relation='account.move.line', string='Due Date', readonly=1),
'amount_original': fields.function(_compute_balance, multi='dc', type='float', string='Original Amount', store=True, digits_compute=dp.get_precision('Account')),
'amount_unreconciled': fields.function(_compute_balance, multi='dc', type='float', string='Open Balance', store=True, digits_compute=dp.get_precision('Account')),
'company_id': fields.related('voucher_id','company_id', relation='res.company', type='many2one', string='Company', store=True, readonly=True),
'currency_id': fields.function(_currency_id, string='Currency', type='many2one', relation='res.currency', readonly=True),
}
_defaults = {
'name': '',
}
def onchange_reconcile(self, cr, uid, ids, reconcile, amount, amount_unreconciled, context=None):
vals = {'amount': 0.0}
if reconcile:
vals = { 'amount': amount_unreconciled}
return {'value': vals}
def onchange_amount(self, cr, uid, ids, amount, amount_unreconciled, context=None):
vals = {}
if amount:
vals['reconcile'] = (amount == amount_unreconciled)
return {'value': vals}
def onchange_move_line_id(self, cr, user, ids, move_line_id, context=None):
"""
Returns a dict that contains new values and context
@param move_line_id: latest value from user input for field move_line_id
@param context: context arguments, like lang, time zone
@return: Returns a dict which contains new values, and context
"""
res = {}
move_line_pool = self.pool.get('account.move.line')
if move_line_id:
move_line = move_line_pool.browse(cr, user, move_line_id, context=context)
if move_line.credit:
ttype = 'dr'
else:
ttype = 'cr'
res.update({
'account_id': move_line.account_id.id,
'type': ttype,
'currency_id': move_line.currency_id and move_line.currency_id.id or move_line.company_id.currency_id.id,
})
return {
'value':res,
}
def default_get(self, cr, user, fields_list, context=None):
"""
Returns default values for fields
@param fields_list: list of fields, for which default values are required to be read
@param context: context arguments, like lang, time zone
@return: Returns a dict that contains default values for fields
"""
if context is None:
context = {}
journal_id = context.get('journal_id', False)
partner_id = context.get('partner_id', False)
journal_pool = self.pool.get('account.journal')
partner_pool = self.pool.get('res.partner')
values = super(account_voucher_line, self).default_get(cr, user, fields_list, context=context)
if (not journal_id) or ('account_id' not in fields_list):
return values
journal = journal_pool.browse(cr, user, journal_id, context=context)
account_id = False
ttype = 'cr'
if journal.type in ('sale', 'sale_refund'):
account_id = journal.default_credit_account_id and journal.default_credit_account_id.id or False
ttype = 'cr'
elif journal.type in ('purchase', 'expense', 'purchase_refund'):
account_id = journal.default_debit_account_id and journal.default_debit_account_id.id or False
ttype = 'dr'
elif partner_id:
partner = partner_pool.browse(cr, user, partner_id, context=context)
if context.get('type') == 'payment':
ttype = 'dr'
account_id = partner.property_account_payable.id
elif context.get('type') == 'receipt':
account_id = partner.property_account_receivable.id
values.update({
'account_id':account_id,
'type':ttype
})
return values
| addition-it-solutions/project-all | addons/account_voucher/account_voucher.py | Python | agpl-3.0 | 84,758 |
# -*- coding: utf-8 -*-
from willow.willow import *
from wifunc_v2 import *
def session(me):
# 0 : The First Price Auction
# 1 : Vickrey Auction
# 2 : The Reference Rule Auction
auction = 1
Round = 3
if me == 0:
        # Set up variables
plnum = 0
i1list = []
i2list = []
jlist = []
i1bid = 0
i2bid = 0
jbid = 0
i1num = 0
i2num = 0
jnum = 0
i1price = 0
i2price = 0
jprice = 0
revenue = 0
add(open("web/index.html"))
add("Auction For Complements : Monitor Display<br />", "#catch")
add("パッケージ付きオークション 実験者", "#catch")
# Initial Setting
# Player Number
numset()
take({"id": "go"})
plnum = get_num()
numset_end(plnum)
wait(1)
# Open the Datasets
f1 = open('data/data1.csv', 'rb')
f2 = open('data/data2.csv', 'rb')
f3 = open('data/data3.csv', 'rb')
dataReader1 = csv.reader(f1)
dataReader2 = csv.reader(f2)
dataReader3 = csv.reader(f3)
i1list = [row for row in dataReader1]
i2list = [row for row in dataReader2]
jlist = [row for row in dataReader3]
# Instruction(a)
for i in range(len(i1list[0])):
put({"tag": "a", "type": "i1", "plnum": plnum})
for i in range(len(i2list[0])):
put({"tag": "a", "type": "i2", "plnum": plnum})
for i in range(len(jlist[0])):
put({"tag": "a", "type": "j", "plnum": plnum})
# Ready to start(b)
for i in range(plnum):
take({"tag": "b"})
waithide(1)
start(1)
take({"client": me})
starthide(1)
add("<h2>実験開始</h2>", "#main")
log("mechanism", "is", auction)
# Start!(c)
for time in range(Round):
add("<h2>第%sラウンド</h2>" % (time+1), "#main")
# Reset variables
i1s = i1list[time]
i2s = i2list[time]
js = jlist[time]
i1bid = 0
i2bid = 0
jbid = 0
i1price = 0
i2price = 0
jprice = 0
i1num = 0
i2num = 0
jnum = 0
averagebid = 0
averageprice = 0
info = []
print i1s, i2s
# Put Value and Estimates(c)
for i in range(len(i1s)):
put({"tag": "c", "type": "i1", "v": int(i1s[i])})
for i in range(len(i2s)):
put({"tag": "c", "type": "i2", "v": int(i2s[i])})
for i in range(len(js)):
put({"tag": "c", "type": "j", "v": int(js[i])})
# Take Bids(d)
add("Take Bids<br />", "#main")
for i in range(plnum):
msg = take({"tag": "d"})
add("タイプ%s被験者No.%sが%s円の入札を行いました。<br />" % (msg["type"], msg["client"], msg["bid"]), "#main")
if msg["type"] == "i1":
if int(msg["bid"]) > i1bid:
i1bid = int(msg["bid"])
i1num = int(msg["client"])
elif msg["type"] == "i2":
if int(msg["bid"]) > i2bid:
i2bid = int(msg["bid"])
i2num = int(msg["client"])
elif msg["type"] == "j":
if int(msg["bid"]) > jbid:
jbid = int(msg["bid"])
jnum = int(msg["client"])
# Allocation and Price(e)
add("Allocation and Price<br />", "#main")
# 0 : The First Price Auction
if auction == 0:
if i1bid + i2bid > jbid:
add("Itype1被験者No.%sが%s円で財1を、Itype2被験者No.%sが%s円で財2を落札しました。<br />"
% (i1num, i1bid, i2num, i2bid), "#main")
log("winner", "seller", i1bid + i2bid)
revenue += i1bid + i2bid
for i in range(plnum):
put({"tag": "e", "win":"i", "i1num":i1num, "i1price":i1bid,
"i2num":i2num, "i2price":i2bid, "client": i+1})
else:
add("Jtype被験者No.%sが%s円で財1・財2を落札しました。<br />" % (jnum, jbid), "#main")
log("winner", "seller", jbid)
revenue += jbid
for i in range(plnum):
put({"tag": "e", "win":"j", "jnum":jnum, "jprice":jbid, "client": i+1})
# 1 : Vickrey Auction
if auction == 1:
if i1bid + i2bid > jbid:
i1price = max(jbid - i2bid, 0)
i2price = max(jbid - i1bid, 0)
add("Itype1被験者No.%sが%s円で財1を、Itype2被験者No.%sが%s円で財2を落札しました。<br />"
"価格は財1が%s円、財2が%s円に決まりました。<br />"
% (i1num, i1bid, i2num, i2bid, i1price, i2price), "#main")
log("winner", "seller", i1price + i2price)
revenue += i1price + i2price
for i in range(plnum):
put({"tag": "e", "win":"i", "i1num":i1num, "i1price":i1price,
"i2num":i2num, "i2price":i2price, "client": i+1})
else:
jprice = i1bid + i2bid
add("Jtype被験者No.%sが%s円で財1・財2を落札しました。<br />"
"価格は%s円に決まりました。<br />" % (jnum, jbid, jprice), "#main")
log("winner", "seller", jprice)
revenue += jprice
for i in range(plnum):
put({"tag": "e", "win":"j", "jnum":jnum, "jprice":jprice, "client": i+1})
# 2 : The Reference Rule Auction
if auction == 2:
if i1bid + i2bid > jbid:
if jbid % 2 == 0:
i1price = jbid/2
i2price = jbid/2
else:
i1price = (jbid + 1)/2
i2price = (jbid + 1)/2
add("Itype1被験者No.%sが%s円で財1を、Itype2被験者No.%sが%s円で財2を落札しました。<br />"
"価格は財1が%s円、財2が%s円に決まりました。<br />"
% (i1num, i1bid, i2num, i2bid, i1price, i2price), "#main")
log("winner", "seller", i1price + i2price)
revenue += i1price + i2price
for i in range(plnum):
put({"tag": "e", "win":"i", "i1num":i1num, "i1price":i1price,
"i2num":i2num, "i2price":i2price, "client": i+1})
else:
jprice = i1bid + i2bid
add("Jtype被験者No.%sが%s円で財1・財2を落札しました。<br />"
"価格は%s円に決まりました。<br />" % (jnum, jbid, jprice), "#main")
log("winner", "seller", jprice)
revenue += jprice
for i in range(plnum):
put({"tag": "e", "win":"j", "jnum":jnum, "jprice":jprice, "client": i+1})
# Next Round(g)
if time != Round-1:
for i in range(plnum):
take({"tag": "g"})
start(time+2)
take({"client": me})
starthide(time+2)
else:
result = []
# Take Profit(g)
for i in range(plnum):
msg = take({"tag": "g"})
result.append({"client": msg["client"], "profit": msg["profit"]})
# Ranked
result = sorted(result, key=lambda x:x["profit"], reverse=True)
add("<p id='start%s'>被験者の準備が整いました。<br />"
"ボタンを押すと結果発表にうつります。<br />"
"<input id='go' type='submit'></p>" % (time+2), "#main")
take({"client": me})
starthide(time+2)
# Put Result(h)
wiput(plnum, {"tag": "h", "result": result, "revenue": revenue})
add("<h2>結果発表</h2>", "#main")
for i in range(plnum):
add("%s位:被験者No.%s, %s円<br />" % (i+1, result[i]["client"], result[i]["profit"]), "#main")
add("収益:%s円<br />" % (revenue), "#main")
add("<p>これで実験を終了します。</p>", "#main")
else:
add(open("web/index.html"))
add("Auction For Complements : Participants Display<br />", "#catch")
add("パッケージオークション 参加者No.%s" % me, "#catch")
wait(1)
# Instruction(a)
msg = take({"tag": "a"})
waithide(1)
mytype = msg["type"]
plnum = msg["plnum"]
if mytype == "i1":
add(open("AFC_Itype1.html"), "#main")
elif mytype == "i2":
add(open("AFC_Itype2.html"), "#main")
elif mytype == "j":
add(open("AFC_Jtype.html"), "#main")
if auction == 0:
add(open("FP.html"), "#instruction")
elif auction == 1:
add(open("VA.html"), "#instruction")
elif auction == 2:
add(open("RR.html"), "#instruction")
# Ready to start(b)
ready()
take({"client": me})
put({"tag": "b"})
hide("#ready")
wait(2)
# Start!(c)
profit = 0
for time in range(Round):
counter = 0
msg = take({"tag": "c", "type": mytype})
if time == 0:
waithide(2)
hide("#instruction")
add("<h2>情報板</h2>"
"<p>タイプ:%s</p>"
"<p>ラウンド数:第<span id='roundnum'>1</span>ラウンド</p>"
"<p>現在の合計利潤:<span id='profit'>0</span>円</p>"
"<p id='info'></p>" % mytype, "#experiment")
elif time >= 1:
let(time+1, "#roundnum")
waitinfohide(2*time)
boardhide(time-1)
board_AFC(time)
value = msg["v"]
add(value, "#value%s" % time)
let("入札価格記入欄に価格を入力して送信してください。", "#info")
while True:
bid_AFC(time, counter)
take({"client": me})
bid = peek("#%sbid%s" % (time, counter))
if bid.isdigit():
bid = int(bid)
if (bid >= 0) and (bid <= 200):
let("", "#info")
log(value, mytype, bid)
break
else:
counter += 1
let("<font color='red'>入札価格は注意書きに従った金額を入力してください。</font>", "#info")
else:
counter += 1
let("<font color='red'>入札価格は注意書きに従った金額を入力してください。</font>", "#info")
hide("#offer%s" % time)
waitinfo(2*(time+1) - 1)
# Submit Bids(d)
put({"tag": "d", "type": mytype, "bid": bid, "client": me})
# Result(e)
msg = take({"tag": "e", "client": me})
if msg["win"] == "i":
add("Itype1被験者No.%sが%s円で財1を、Itype2被験者No.%sが%s円で財2を落札しました。<br />"
% (msg["i1num"], msg["i1price"], msg["i2num"], msg["i2price"]), "#info")
if me == msg["i1num"]:
add("私的価値%s円と取得価格との差分の%s円の効用を得ました。<br />" % (value, value-msg["i1price"]), "#info")
profit += value-msg["i1price"]
log("winner", "i1type", profit)
elif me == msg["i2num"]:
add("私的価値%s円と取得価格との差分の%s円の効用を得ました。<br />" % (value, value-msg["i2price"]), "#info")
profit = value - msg["i2price"]
log("winner", "i2type", profit)
else:
add("財を取得できませんでした。<br />", "#info")
profit += 0
elif msg["win"] == "j":
add("Jtype被験者No.%sが%s円で財1・財2を落札しました。<br />" % (msg["jnum"], msg["jprice"]), "#info")
if me == msg["jnum"]:
add("私的価値%s円と取得価格との差分の%s円の効用を得ました。<br />" % (value, value-msg["jprice"]), "#info")
profit = value - msg["jprice"]
log("winner", "jtype", profit)
else:
add("財を取得できませんでした。<br />", "#info")
profit += 0
# Information(f)
waitinfohide(2*(time+1) - 1)
add("<p>あなたの合計利潤は%s円になりました。</p>" % profit, "#info")
let(profit, "#profit")
# Next Round(g)
if time != Round-1:
nextTA(time)
take({"client": me})
nextTAhide(time)
put({"tag": "g"})
waitinfo(2*(time+1))
else:
add("<p id='next%s'>結果発表にうつる準備ができましたら、"
"ボタンを押してください。<br />"
"<input id='go' type='submit'></p>" % time, "#info")
take({"client": me})
nextTAhide(time)
# Put Profit(g)
put({"tag": "g", "client": me, "profit": profit})
waitinfo(2*(time+1))
# Take Result(h)
msg = take({"tag": "h"})
let("", "#info")
result = msg["result"]
for i in range(plnum):
add("%s位:被験者No.%s, %s円<br />" % (i+1, result[i]["client"], result[i]["profit"]), "#info")
add("売り手の合計収益:%s円<br />" % (msg["revenue"]), "#info")
add("<p>これで実験を終了します。</p>", "#info")
run(session)
| ogaway/WillowProject | AuctionForComplements/AuctionForComplements.py | Python | gpl-3.0 | 14,693 |
from twisted.web.resource import Resource
import sqlite3
import string
import cgi
import sys
from libs.templates import template_html
class snippets(Resource):
isLeaf = True
def __init__(self):
Resource.__init__(self)
self.m_conn = sqlite3.connect("./dbs/snippets.db")
self.m_cursor = self.m_conn.cursor()
self.m_users = {}
self.load_users()
def load_users(self):
self.m_cursor.execute("SELECT id, name FROM users")
for row in self.m_cursor:
id = row[0]
name = row[1]
self.m_users[id] = name
#def get_tags_list(self):
# self.m_cursor.execute("SELECT DISTINCT(category) FROM expenses ORDER BY category asc")
def get_snippets(self):
snippets = []
self.m_cursor.execute("SELECT id, cdate, ctime, language, label, author, description FROM snippets ORDER BY id DESC")
for row in self.m_cursor:
snippet = {"id" : row[0],
"date" : row[1],
"time" : row[2],
"language" : row[3],
"label" : row[4],
"author" : row[5],
"description" : row[6]}
snippets.append(snippet)
return snippets
def get_snippet(self, id, revid):
self.m_cursor.execute("SELECT cdate, ctime, language, label, author, description FROM snippets WHERE id=? ORDER BY id DESC", (id))
snippet = None
for row in self.m_cursor:
snippet = {"date" : row[0],
"time" : row[1],
"language" : row[2],
"label" : row[3],
"author" : row[4],
"description" : row[5]}
snippet["data"] = ""
if(revid != None):
self.m_cursor.execute("SELECT data FROM revisions WHERE sid=? and id=?", (id, revid))
else:
self.m_cursor.execute("SELECT data FROM revisions WHERE sid=? ORDER by id DESC LIMIT 1", (id))
for row in self.m_cursor:
snippet["data"] = row[0]
return snippet
def get_revisions(self, id):
self.m_cursor.execute("SELECT id FROM revisions WHERE sid=?", (id))
json_data = []
for row in self.m_cursor:
revid = row[0]
json_data.append("'%s':'%s'" % (revid,revid))
json_data = ','.join(json_data)
return json_data
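    # Example of the value built above for a snippet with two revisions,
    # ids 1 and 3 (ids assumed): "'1':'1','3':'3'". render() splices it
    # into the JavaScript object literal used by the jeditable select.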
def render_summary(self, request):
styles = """
div.row
{
clear:both;border-bottom:1px solid #ccc;background-color:#ffffff;height:25px;width:100%;'
}
div.row_odd
{
clear:both;border-bottom:1px solid #ccc;background-color:#f7f7f7;height:25px;width:100%;'
}
div.row:hover,
div.row_odd:hover
{
background-color:#e0e0e0;
}
"""
script = ''
body = """<h1>Snippets</h1>
<input type='button' onclick='return snippet_add();' value='Add'/>
<br/>
<div style='float:left;padding:20px;width:90%;'>
<div style='clear:both;border-bottom:1px solid #ccc;background-color:#f0f0f0;height:25px;width:100%;'>
<div style="float:left;font-weight:bold;width:5%;min-width:5%;">ID</div>
<div style="float:left;font-weight:bold;width:10%;min-width:10%;">Author</div>
<div style="float:left;font-weight:bold;width:20%;min-width:20%;">Label</div>
<div style="float:left;font-weight:bold;width:20%;min-width:20%;">Date</div>
<div style="float:left;font-weight:bold;width:20%;min-width:20%;">Time</div>
<div style="float:left;font-weight:bold;width:20%;min-width:20%;">Language</div>
</div>
"""
snippets = self.get_snippets()
i = 0
for snippet in snippets:
id = snippet["id"]
cdate = snippet["date"]
ctime = snippet["time"]
language = snippet["language"]
label = snippet["label"]
if(snippet.has_key("author") and snippet["author"] != None):
author = self.m_users[snippet["author"]]
else:
author = " "
#data = snippet["data"]
cls = "row"
if(i & 1):
cls = "row_odd"
i += 1
body += string.Template("""
<div class='$class' onclick='snippet_open(${id})'>
<div style="float:left;width:5%;min-width:5%;">${id}</div>
<div style="float:left;width:10%;min-width:10%;">${author}</div>
<div style="float:left;width:20%;min-width:20%;">${label}</div>
<div style="float:left;width:20%;min-width:20%;">${date}</div>
<div style="float:left;width:20%;min-width:20%;">${time}</div>
<div style="float:left;width:20%;min-width:20%;">${language}</div>
</div>
""").substitute({"id" : id, "label" : label, "class" : cls, "date" : cdate, "time" : ctime, "language" : language, "author" : author})
json_categories = '';
script = """
$(document).ready(function() {
$('.edit').editable('/snippets/update');
});
$(document).ready(function() {
$('.edit_items').editable('/snippets/update', {
data : "{ %s }",
type: 'select',
submit : 'OK'
})});
""" % json_categories
html = template_html.substitute({
"body" : body,
"onload" : "",
"version" : "0.1",
"title" : "Snippets",
"styles" : styles,
"script" : script
}
)
return html
def render(self, request):
print request.uri
if(request.uri.startswith("/snippets/add")):
self.m_cursor.execute("INSERT INTO snippets (cdate, ctime, language, label) VALUES(date('now', 'localtime'), time('now', 'localtime'), 'c', 'No Name')")
self.m_conn.commit()
id = self.m_cursor.lastrowid
return "/snippets/edit?id=%d" % id
elif(request.uri.startswith("/snippets/save")):
print request.args
id = cgi.escape(request.args["id"][0])
data = cgi.escape(request.args["data"][0])
print "Saving snippet %s" % id
print "Data: %s" % data
# First query the last revision and only save the
# changes if there is a delta.
self.m_cursor.execute("INSERT INTO revisions (sid, cdate, ctime, data) VALUES (?, date('now', 'localtime'), time('now', 'localtime'), ?)", (id, data))
self.m_conn.commit()
print "Saved data"
return data
elif(request.uri.startswith("/snippets/update")):
print "In /snippets/update"
print request.args
id = cgi.escape(request.args["id"][0])
value = cgi.escape(request.args["value"][0])
parts = id.split("_")
id = parts[1]
field = parts[2]
self.m_cursor.execute("UPDATE snippets SET %s=? WHERE ROWID=?" % field, (value, id))
self.m_conn.commit()
return value
elif(request.uri.startswith("/snippets/edit")):
id = cgi.escape(request.args["id"][0])
revid = None
if(request.args.has_key("revid")):
revid = cgi.escape(request.args["revid"][0])
snippet = self.get_snippet(id, revid)
language = snippet["language"]
languages = [
{"name" : "c", "lexer" : "clike", "mime" : "x-c++src"},
{"name" : "css", "mime" : "x-css"},
{"name" : "dotreg", "mime" : "dotreg", "styles" : '''
.cm-s-default .cm-comment {color: #009900;}
.cm-s-default .cm-comment-block{color: #a0a0a0;}
.cm-s-default .cm-comment-body {color: #00aa00;font-style:italic;margin-left:10px;}
.cm-s-default .cm-keyword {color: #990033;}
.cm-s-default .cm-register {color: #0000ff;font-weight:bold;}
.cm-s-default .cm-bitfield {color: #cc00cc;font-weight:bold;}
'''},
{"name" : "makefile","mime" : "x-makefile"},
{"name" : "markdown","mime" : "x-markdown"},
{"name" : "python", "mime" : "x-python"},
{"name" : "verilog", "mime" : "x-systemverilog"},
{"name" : "shorte", "mime" : "shorte"},
{"name" : "sql", "mime" : "x-sql"}
]
json_languages = []
for l in languages:
if(l["name"] == language):
language = l
json_languages.append("'%s' : '%s'" % (l["name"], l["name"]))
json_languages = ",".join(json_languages)
json_revisions = self.get_revisions(id)
script = """
$(document).ready(function() {
$('.edit').editable('/snippets/update');
});
$(document).ready(function() {
$('.edit_description').editable('/snippets/update', {
type : 'textarea',
submit : 'OK'
})});
$(document).ready(function() {
$('.edit_revisions').editable(function(value, settings) {
window.location.href = "/snippets/edit?id=%s&revid=" + value
}, {
data : "{%s}",
type: 'select',
submit : 'OK'
})});
$(document).ready(function() {
$('.edit_languages').editable('/snippets/update', {
data : "{%s}",
type: 'select',
submit : 'OK'
})});
$(document).ready(function() {
$('.edit_users').editable('/snippets/update', {
data : "{'1' : 'guest', '2' : 'belliott', '3' : 'dlinnington'}",
type: 'select',
submit : 'OK'
})});
""" % (id, json_revisions, json_languages)
lexer = language['name']
if(language.has_key("lexer")):
lexer = language["lexer"]
styles = ""
if(language.has_key("styles")):
styles = language["styles"]
mime = language['mime']
author = ""
if(snippet.has_key("author") and snippet["author"] != None):
author = self.m_users[snippet["author"]]
description = ""
if(snippet["description"] != None):
description = snippet["description"]
body = string.Template("""
<style>
div.field_label {float:left;margin-left:20px;font-weight:bold;padding-right:20px;}
</style>
<!-- This message is used to display a message when generating the document -->
<div id='loading' style='display:none;position:fixed;top:0;left:0;background:#c0c0c0;width:100%;height:100%;z-index:1000;'>
<div style='width:300px;height:200px;border:2px solid #aaa;border-radius:5px;background-color:white;text-align:center;position:absolute;left:50%;top:50%;margin-left:-150px;margin-top:-100px;'>
<b>Generating PDF ... please be patient ...</b>
</div>
</div>
<!-- A warning message used to display information to the user -->
<div id='warning' style='display:none;position:fixed;top:0;left:0;background:#c0c0c0;width:100%;height:100%;z-index:1000;'>
<div style='width:500px;height:180px;border:2px solid #aaa;border-radius:5px;background-color:white;text-align:center;position:absolute;left:50%;top:50%;margin-left:-250px;margin-top:-90px;'>
<div style='width:100%;height:24px;background-color:#ccc;border-bottom:1px solid #aaa;'><b>Warning:</b></div>
<div id='warning_message' style='height:128px;margin-top:10px;'></div>
<div style='height:27px;background-color:#ddd;'>
<input type='button' style='margin-top:5px;' value='Close' onclick="$$('#warning').hide()"></input>
</div>
</div>
</div>
<div style='border-top:1px solid #ccc;background-color:#f7f7f7;padding:4px;margin:20px;margin-bottom:0px;'>
<div class='field_label'>Name:</div><div style='float:left;' class='edit' id='snippet_${id}_label' name='snippet_${id}_label'>${label}</div>
<div class='field_label'>Language:</div><div style='float:left;' class='edit_languages' id='snippet_${id}_language' name='snippet_${id}_language'>${language}</div>
<div class='field_label'>Author:</div><div style='float:left;' class='edit_users' id='snippet_${id}_author' name='snippet_${id}_author'>${author}</div>
<div class='field_label'>Revisions:</div><div style='float:left;' class='edit_revisions' id='snippet_${id}_revisions' name='snippet_${id}_revisions'>...</div>
<div class='field_label' style='float:right;'><input type='button' onclick='return snippet_add();' value='Create'/></div>
<div class='field_label' style='float:right;'><input type='button' onclick='return snippet_save(${id});' value='Save'/></div>
<div class='field_label' style='float:right;'><input type="button" value="HTML" onclick='generate(editor, FORMAT_HTML, "${label}", "${language}");'/></div>
<div class='field_label' style='float:right;'><input type="button" value="PDF" onclick='generate(editor, FORMAT_PDF, "${label}", "${language}");'/></div>
<div style='clear:both;'></div>
</div>
<div style='border-top:1px solid #ccc;background-color:#fff;padding:10px;margin:20px;margin-top:0px;margin-bottom:0px;padding-left:20px;'>
<div style='white-space:pre;' class='edit_description' id='snippet_${id}_description' name='snippet_${id}_description'>${description}</div>
</div>
<textarea id="snippet" name="snippet">${data}</textarea>
<script src="/codemirror/mode/${lexer}/${lexer}.js"></script>
<script>
function betterTab(cm) {
if (cm.somethingSelected()) {
cm.indentSelection("add");
} else {
cm.replaceSelection(cm.getOption("indentWithTabs")? "\t":
Array(cm.getOption("indentUnit") + 1).join(" "), "end", "+input");
}
}
var editor = CodeMirror.fromTextArea(document.getElementById("snippet"), {
mode: "text/${mime}",
lineNumbers: true,
viewportMargin: Infinity,
tabSize: 4,
indentWithTabs: false,
indentUnit: 4,
extraKeys: {"Tab": betterTab, "Ctrl-Space" : "autocomplete"}
});
</script>
""").substitute({"id" : id, "data" : snippet["data"],
"lexer" : lexer, "mime" : mime, "label" : snippet["label"], "language" : snippet["language"],
"author" : author,
"description" : description})
html = template_html.substitute({
"body" : body,
"onload" : "",
"version" : "0.1",
"title" : "Snippet",
"styles" : styles,
"script" : script
}
)
return html.encode('utf-8')
html = self.render_summary(request)
return html.encode('utf-8')
| bradfordelliott/snippets | libs/snippets.py | Python | lgpl-2.1 | 14,478 |
# -*- coding: utf-8 -*-
"""
glashammer.bundles.i18n.request
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Adds i18n functionality to the request objects.
:copyright: 2010, The Glashammer Authors
:license: MIT
"""
from glashammer.bundles.i18n import select_locale, load_translations, \
has_section, Locale
from werkzeug.utils import cached_property
class I18NRequestMixin(object):
"""A mixing that adds methods to the request to detect the current
locale."""
_locale = None
def _get_locale(self):
"""The locale of the incoming request. If a locale is unsupported, the
        default English locale is used. If the locale is assigned, it will be
        stored in the session so that language changes are persistent.
"""
if self._locale is not None:
return self._locale
rv = self.session.get('locale')
if rv is not None:
rv = Locale.parse(rv)
# we could trust the cookie here because it's signed, but we do not
# because the configuration could have changed in the meantime.
if not has_section(rv):
rv = None
if rv is None:
rv = select_locale(self.accept_languages)
self._locale = rv
return rv
def _set_locale(self, locale):
self._locale = Locale.parse(locale)
self.__dict__.pop('translations', None)
self.session['locale'] = str(self._locale)
locale = property(_get_locale, _set_locale)
del _get_locale, _set_locale
@cached_property
def translations(self):
"""The translations for this request."""
return load_translations(self.locale)
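    # Usage sketch (the request class below is hypothetical; the names are
    # illustrative only, glashammer wires the mixin into its own request
    # object):
    #
    #   class Request(BaseRequest, I18NRequestMixin):
    #       pass
    #
    #   request.locale                 # parsed from session or Accept-Language
    #   request.locale = 'de'          # persists the choice in the session
    #   request.translations.gettext(u'Hello')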
| passy/glashammer-rdrei | glashammer/bundles/i18n/request.py | Python | mit | 1,675 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-09-05 02:31
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('secnews', '0002_auto_20170830_1310'),
]
operations = [
migrations.AlterField(
model_name='secnewsitem',
name='img_link',
field=models.URLField(default='', verbose_name='illustration link'),
),
]
| skiyer/xuanwu | secnews/migrations/0003_auto_20170905_1031.py | Python | mit | 488 |
from six.moves.urllib.parse import unquote
from wptserve.utils import isomorphic_decode, isomorphic_encode
def redirect_response(request, response, visited_count):
# |visited_count| is used as a unique id to differentiate responses
# every time.
location = b'empty.js'
if b'Redirect' in request.GET:
location = isomorphic_encode(unquote(isomorphic_decode(request.GET[b'Redirect'])))
return (301,
[
(b'Cache-Control', b'no-cache, must-revalidate'),
(b'Pragma', b'no-cache'),
(b'Content-Type', b'application/javascript'),
(b'Location', location),
],
u'/* %s */' % str(visited_count))
def not_found_response():
return 404, [(b'Content-Type', b'text/plain')], u"Page not found"
def ok_response(request, response, visited_count,
extra_body=u'', mime_type=b'application/javascript'):
# |visited_count| is used as a unique id to differentiate responses
# every time.
return (
[
(b'Cache-Control', b'no-cache, must-revalidate'),
(b'Pragma', b'no-cache'),
(b'Content-Type', mime_type)
],
u'/* %s */ %s' % (str(visited_count), extra_body))
def main(request, response):
key = request.GET[b"Key"]
mode = request.GET[b"Mode"]
visited_count = request.server.stash.take(key)
if visited_count is None:
visited_count = 0
# Keep how many times the test requested this resource.
visited_count += 1
request.server.stash.put(key, visited_count)
# Return a response based on |mode| only when it's the second time (== update).
if visited_count == 2:
if mode == b'normal':
return ok_response(request, response, visited_count)
if mode == b'bad_mime_type':
return ok_response(request, response, visited_count, mime_type=b'text/html')
if mode == b'not_found':
return not_found_response()
if mode == b'redirect':
return redirect_response(request, response, visited_count)
if mode == b'syntax_error':
return ok_response(request, response, visited_count, extra_body=u'badsyntax(isbad;')
if mode == b'throw_install':
return ok_response(request, response, visited_count, extra_body=u"addEventListener('install', function(e) { throw new Error('boom'); });")
return ok_response(request, response, visited_count)
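# Usage sketch (URL shown for illustration; the stash key just needs to be
# unique per test):
#
#   update-worker.py?Key=some-unique-key&Mode=redirect&Redirect=other.js
#
# The first fetch always returns a normal script body; only the second
# fetch (the update check) honours Mode, which is one of: normal,
# bad_mime_type, not_found, redirect, syntax_error, throw_install.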
| asajeffrey/servo | tests/wpt/web-platform-tests/service-workers/service-worker/resources/update-worker.py | Python | mpl-2.0 | 2,267 |
from errbot import BotPlugin, botcmd
from errbot.version import VERSION
from errbot.utils import get_class_that_defined_method
class Help(BotPlugin):
min_err_version = VERSION # don't copy paste that for your plugin, it is just because it is a bundled plugin !
max_err_version = VERSION
MSG_HELP_TAIL = 'Type help <command name> to get more info ' \
'about that specific command.'
MSG_HELP_UNDEFINED_COMMAND = 'That command is not defined.'
# noinspection PyUnusedLocal
@botcmd
def about(self, mess, args):
""" Returns some information about this err instance"""
result = 'Err version %s \n\n' % VERSION
result += ('Authors: Mondial Telecom, Guillaume BINET, Tali PETROVER, '
'Ben VAN DAELE, Paul LABEDAN and others.\n\n')
return result
# noinspection PyUnusedLocal
@botcmd
def apropos(self, mess, args):
""" Returns a help string listing available options.
Automatically assigned to the "help" command."""
if not args:
return 'Usage: ' + self._bot.prefix + 'apropos search_term'
description = 'Available commands:\n'
clazz_commands = {}
for (name, command) in self._bot.commands.items():
clazz = get_class_that_defined_method(command)
            clazz = clazz.__module__ + '.' + clazz.__name__  # makes the fully qualified name
commands = clazz_commands.get(clazz, [])
if not self.bot_config.HIDE_RESTRICTED_COMMANDS or self._bot.check_command_access(mess, name)[0]:
commands.append((name, command))
clazz_commands[clazz] = commands
usage = ''
for clazz in sorted(clazz_commands):
usage += '\n'.join(sorted([
'\t' + self._bot.prefix + '%s: %s' % (
name.replace('_', ' ', 1),
(command.__doc__ or '(undocumented)').strip().split('\n', 1)[0]
)
for (name, command) in clazz_commands[clazz]
if args is not None and
command.__doc__ is not None and
args.lower() in command.__doc__.lower() and
name != 'help' and not command._err_command_hidden
]))
usage += '\n\n'
top = self._bot.top_of_help_message()
bottom = self._bot.bottom_of_help_message()
return ''.join(filter(None, [top, description, usage, bottom])).strip()
@botcmd
def help(self, mess, args):
""" Returns a help string listing available options.
Automatically assigned to the "help" command."""
def may_access_command(cmd):
            msg, _, _ = self._bot._process_command_filters(mess, cmd, None, True)
            return msg is not None
usage = ''
if not args:
description = '### Available help\n\n'
command_classes = sorted(set(self._bot.get_command_classes()), key=lambda c: c.__name__)
usage = '\n'.join(
'- **' + self._bot.prefix + 'help %s** \- %s' %
(clazz.__name__, clazz.__errdoc__.strip() or '(undocumented)') for clazz in command_classes)
elif args == 'full':
description = '### Available commands\n\n'
clazz_commands = {}
for (name, command) in self._bot.commands.items():
clazz = get_class_that_defined_method(command)
commands = clazz_commands.get(clazz, [])
if not self.bot_config.HIDE_RESTRICTED_COMMANDS or may_access_command(name):
commands.append((name, command))
clazz_commands[clazz] = commands
for clazz in sorted(set(clazz_commands), key=lambda c: c.__name__):
usage += '\n\n**%s** \- %s\n' % (clazz.__name__, clazz.__errdoc__ or '')
usage += '\n'.join(sorted(['**' +
self._bot.prefix +
'%s** %s' % (name.replace('_', ' ', 1),
(self._bot.get_doc(command).strip()).split('\n', 1)[0])
for (name, command) in clazz_commands[clazz]
if name != 'help' and not command._err_command_hidden and
(not self.bot_config.HIDE_RESTRICTED_COMMANDS or may_access_command(name))
]))
usage += '\n\n'
elif args in (clazz.__name__ for clazz in self._bot.get_command_classes()):
# filter out the commands related to this class
commands = [(name, command) for (name, command) in self._bot.commands.items() if
get_class_that_defined_method(command).__name__ == args]
description = '### Available commands for %s\n\n' % args
usage += '\n'.join(sorted([
'- **' + self._bot.prefix + '%s** \- %s' % (name.replace('_', ' ', 1),
(self._bot.get_doc(command).strip()).split('\n', 1)[0])
for (name, command) in commands
if not command._err_command_hidden and
(not self.bot_config.HIDE_RESTRICTED_COMMANDS or may_access_command(name))
]))
else:
description = ''
if args in self._bot.commands:
usage = (self._bot.commands[args].__doc__ or
'undocumented').strip()
else:
usage = self.MSG_HELP_UNDEFINED_COMMAND
top = self._bot.top_of_help_message()
bottom = self._bot.bottom_of_help_message()
return ''.join(filter(None, [top, description, usage, bottom]))
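    # Typical invocations handled above (assuming the bot prefix is '!'):
    #
    #   !help            -> list the plugin classes providing commands
    #   !help full       -> list every command grouped by class
    #   !help Help       -> list the commands of one class
    #   !help about      -> show the docstring of a single command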
| javierwilson/err | errbot/core_plugins/help.py | Python | gpl-3.0 | 5,846 |
from modules.front_motor import FrontMotor
from modules.back_motor import BackMotor
from services.temperature_sensor import TemperatureSensor
modules = {}
def get_module(module_name, **data):
return modules[module_name](**data)
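# Usage sketch (the keyword arguments are assumptions; the real constructor
# signatures live in the imported module classes):
#
#   motor = get_module('FrontMotor', speed=10)
#   sensor = get_module('TemperatureSensor')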
def register_module(module_name, cls):
modules[module_name] = cls
register_module('FrontMotor', FrontMotor)
register_module('BackMotor', BackMotor)
register_module('TemperatureSensor', TemperatureSensor)
| astagi/chickenfoot | chickenfoot/modules_register.py | Python | mit | 443
# -*- encoding: utf-8 -*-
from cliff.interactive import InteractiveApp
class FakeApp(object):
NAME = 'Fake'
def make_interactive_app(*command_names):
fake_command_manager = [(x, None) for x in command_names]
return InteractiveApp(FakeApp, fake_command_manager,
stdin=None, stdout=None)
def _test_completenames(expecteds, prefix):
app = make_interactive_app('hips', 'hippo', 'nonmatching')
assert set(app.completenames(prefix)) == set(expecteds)
def test_cmd2_completenames():
# cmd2.Cmd define do_help method
_test_completenames(['help'], 'he')
def test_cliff_completenames():
_test_completenames(['hips', 'hippo'], 'hip')
def test_no_completenames():
_test_completenames([], 'taz')
def test_both_completenames():
# cmd2.Cmd define do_hi and do_history methods
_test_completenames(['hi', 'history', 'hips', 'hippo'], 'hi')
def _test_completedefault(expecteds, line, begidx):
command_names = set(['show file', 'show folder', 'show long', 'list all'])
app = make_interactive_app(*command_names)
observeds = app.completedefault(None, line, begidx, None)
assert set(observeds) == set(expecteds)
assert set([line[:begidx] + x for x in observeds]) <= command_names
def test_empty_text_completedefault():
# line = 'show ' + begidx = 5 implies text = ''
_test_completedefault(['file', 'folder', ' long'], 'show ', 5)
def test_nonempty_text_completedefault2():
    # line = 'show f' + begidx = 5 implies text = 'f'
_test_completedefault(['file', 'folder'], 'show f', 5)
def test_long_completedefault():
_test_completedefault(['long'], 'show ', 6)
def test_no_completedefault():
_test_completedefault([], 'taz ', 4)
| sjsucohort6/openstack | python/venv/lib/python2.7/site-packages/cliff/tests/test_interactive.py | Python | mit | 1,743 |
"""
Module containing error objects we return to clients
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import flask.ext.api as api
class FrontendException(Exception):
def __init__(self):
super(FrontendException, self).__init__()
self.message = ""
self.code = None
self.description = ""
self.httpStatus = None
class BadRequestException(FrontendException):
def __init__(self):
super(BadRequestException, self).__init__()
self.httpStatus = 400
self.message = "Bad request"
self.code = 1
class BadPageSizeException(BadRequestException):
def __init__(self):
super(BadPageSizeException, self).__init__()
self.message = "Request page size invalid"
self.code = 2
class BadPageTokenException(BadRequestException):
def __init__(self):
super(BadPageTokenException, self).__init__()
self.message = "Request page token invalid"
self.code = 3
class NotFoundException(FrontendException):
def __init__(self):
super(NotFoundException, self).__init__()
self.httpStatus = 404
self.message = "A resource was not found"
self.code = 4
class PathNotFoundException(NotFoundException):
def __init__(self):
super(PathNotFoundException, self).__init__()
self.message = "The request path was not found"
self.code = 5
class ObjectNotFoundException(NotFoundException):
def __init__(self):
super(ObjectNotFoundException, self).__init__()
self.message = "The requested object was not found"
self.code = 6
class ServerException(FrontendException):
def __init__(self):
super(ServerException, self).__init__()
self.httpStatus = 500
self.message = "Internal server error"
self.code = 7
class UnsupportedMediaTypeException(FrontendException):
def __init__(self):
        super(UnsupportedMediaTypeException, self).__init__()
self.httpStatus = 415
self.message = "Unsupported media type"
self.code = 8
# exceptions thrown by the underlying system that we want to
# translate to exceptions that we define before they are
# serialized and returned to the client
exceptionMap = {
api.exceptions.UnsupportedMediaType: UnsupportedMediaTypeException,
}
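# Minimal usage sketch, not part of the original module: a request handler
# could translate low-level exceptions through exceptionMap before
# serialising them. The function name is illustrative only.
def convertException(exception):
    """Return the FrontendException mapped to a low-level exception."""
    frontendClass = exceptionMap.get(type(exception), ServerException)
    return frontendClass()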
| jeromekelleher/server | ga4gh/frontend_exceptions.py | Python | apache-2.0 | 2,392 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2014 Martin Raspaud
# Author(s):
# Martin Raspaud <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Interface to HRPT level 0 format. Needs pyorbital.
Since the loading and calibration goes quite fast, all channels are calibrated
at the same time, so don't hesitate to load all the channels anyway.
Contrarily to AAPP, no smoothing, sigma or gross filtering is taking place.
TODO:
- Faster navigation (pyorbital).
"""
from ConfigParser import ConfigParser
import os
import logging
import glob
import numpy as np
import numexpr as ne
from mpop.plugin_base import Reader
from mpop import CONFIG_PATH
from pyresample.geometry import SwathDefinition
logger = logging.getLogger(__name__)
# Constants
c1 = 1.1910427e-5 #mW/(m2-sr-cm-4)
c2 = 1.4387752 #cm-K
calib = {"noaa 15": # copy from noaa 16
{
# VIS
"intersections": np.array([497.5, 500.3, 498.7]),
"slopes_l": np.array([0.0523, 0.0513, 0.0262]),
"slopes_h": np.array([0.1528, 0.1510, 0.1920]),
"intercepts_l": np.array([-2.016, -1.943, -1.01]),
"intercepts_h": np.array([-51.91, -51.77, -84.2]),
# IR
"d0": np.array([276.355, 276.142, 275.996, 276.132, 0]),
"d1": np.array([0.0562, 0.05605, 0.05486, 0.0594, 0]),
"d2": np.array([-1.590e-5, -1.707e-5, -1.223e-5, -1.344e-5, 0]),
"d3": np.array([2.486e-8, 2.595e-8, 1.862e-8, 2.112e-8, 0]),
"d4": np.array([-1.199e-11, -1.224e-11,
-0.853e-11, -1.001e-11, 0]),
"prt_weights": np.array((.25, .25, .25, .25)),
"vc": np.array((2700.1148, 917.2289, 838.1255)),
"A": np.array((1.592459, 0.332380, 0.674623)),
"B": np.array((0.998147, 0.998522, 0.998363)),
"N_S": np.array([0, -2.467, -2.009]),
"b0": np.array([0, 2.96, 2.25]),
"b1": np.array([0, -0.05411, -0.03665]),
"b2": np.array([0, 0.00024532, 0.00014854]),
},
"noaa 16":
{
# VIS
"intersections": np.array([497.5, 500.3, 498.7]),
"slopes_l": np.array([0.0523, 0.0513, 0.0262]),
"slopes_h": np.array([0.1528, 0.1510, 0.1920]),
"intercepts_l": np.array([-2.016, -1.943, -1.01]),
"intercepts_h": np.array([-51.91, -51.77, -84.2]),
# IR
"d0": np.array([276.355, 276.142, 275.996, 276.132, 0]),
"d1": np.array([0.0562, 0.05605, 0.05486, 0.0594, 0]),
"d2": np.array([-1.590e-5, -1.707e-5, -1.223e-5, -1.344e-5, 0]),
"d3": np.array([2.486e-8, 2.595e-8, 1.862e-8, 2.112e-8, 0]),
"d4": np.array([-1.199e-11, -1.224e-11,
-0.853e-11, -1.001e-11, 0]),
"prt_weights": np.array((.25, .25, .25, .25)),
"vc": np.array((2700.1148, 917.2289, 838.1255)),
"A": np.array((1.592459, 0.332380, 0.674623)),
"B": np.array((0.998147, 0.998522, 0.998363)),
"N_S": np.array([0, -2.467, -2.009]),
"b0": np.array([0, 2.96, 2.25]),
"b1": np.array([0, -0.05411, -0.03665]),
"b2": np.array([0, 0.00024532, 0.00014854]),
},
"noaa 18": # FIXME: copy of noaa 19
{
# VIS
"intersections": np.array([496.43, 500.37, 496.11]),
"slopes_l": np.array([0.055091, 0.054892, 0.027174]),
"slopes_h": np.array([0.16253, 0.16325, 0.18798]),
"intercepts_l": np.array([-2.1415, -2.1288, -1.0881]),
"intercepts_h": np.array([-55.863, -56.445, -81.491]),
# IR
"d0": np.array([276.601, 276.683, 276.565, 276.615, 0]),
"d1": np.array([0.05090, 0.05101, 0.05117, 0.05103, 0]),
"d2": np.array([1.657e-6, 1.482e-6, 1.313e-6, 1.484e-6, 0]),
"d3": np.array([0, 0, 0, 0, 0]),
"d4": np.array([0, 0, 0, 0, 0]),
"prt_weights": np.array((1, 1, 1, 1)),
"vc": np.array((2659.7952, 928.1460, 833.2532)),
"A": np.array((1.698704, 0.436645, 0.253179)),
"B": np.array((0.996960, 0.998607, 0.999057)),
"N_S": np.array([0, -5.49, -3.39]),
"b0": np.array([0, 5.70, 3.58]),
"b1": np.array([0, -0.11187, -0.05991]),
"b2": np.array([0, 0.00054668, 0.00024985]),
},
"noaa 19":
{
# VIS
"intersections": np.array([496.43, 500.37, 496.11]),
"slopes_l": np.array([0.055091, 0.054892, 0.027174]),
"slopes_h": np.array([0.16253, 0.16325, 0.18798]),
"intercepts_l": np.array([-2.1415, -2.1288, -1.0881]),
"intercepts_h": np.array([-55.863, -56.445, -81.491]),
# IR
"d0": np.array([276.601, 276.683, 276.565, 276.615, 0]),
"d1": np.array([0.05090, 0.05101, 0.05117, 0.05103, 0]),
"d2": np.array([1.657e-6, 1.482e-6, 1.313e-6, 1.484e-6, 0]),
"d3": np.array([0, 0, 0, 0, 0]),
"d4": np.array([0, 0, 0, 0, 0]),
"prt_weights": np.array((1, 1, 1, 1)),
"vc": np.array((2659.7952, 928.1460, 833.2532)),
"A": np.array((1.698704, 0.436645, 0.253179)),
"B": np.array((0.996960, 0.998607, 0.999057)),
"N_S": np.array([0, -5.49, -3.39]),
"b0": np.array([0, 5.70, 3.58]),
"b1": np.array([0, -0.11187, -0.05991]),
"b2": np.array([0, 0.00054668, 0.00024985]),
},
"metop-a":
{
# VIS
"intersections": np.array([501, 500, 502]),
"slopes_l": np.array([0.0537, 0.0545, 0.0264]),
"slopes_h": np.array([0.1587, 0.1619, 0.1837]),
"intercepts_l": np.array([-2.1719, -2.167, -1.0868]),
"intercepts_h": np.array([-54.7824, -55.913, -80.0116]),
# IR
"d0": np.array([276.6194, 276.6511, 276.6597, 276.3685, 0]),
"d1": np.array([0.050919, 0.050892, 0.050845, 0.050992, 0]),
"d2": np.array([1.470892e-6, 1.489e-6, 1.520646e-6, 1.48239e-6, 0]),
"d3": np.array([0, 0, 0, 0, 0]),
"d4": np.array([0, 0, 0, 0, 0]),
"prt_weights": np.array((1, 1, 1, 1)) / 4.0,
"vc": np.array((2687, 927.2, 837.7)),
"A": np.array((2.06699, 0.55126, 0.34716)),
"B": np.array((0.996577, 0.998509, 0.998947)),
"N_S": np.array([0, -4.98, -3.40]),
"b0": np.array([0, 5.44, 3.84]),
"b1": np.array([0, 0.89848 - 1, 0.93751 - 1]),
"b2": np.array([0, 0.00046964, 0.00025239]),
},
"metop-b":
{
# VIS
"intersections": np.array([501, 503, 501]),
"slopes_l": np.array([0.053572113, 0.051817433, 0.023518528]),
"slopes_h": np.array([0.15871941, 0.15264062, 0.16376181]),
"intercepts_l": np.array([-2.1099778, -2.0923391, -0.9879577]),
"intercepts_h": np.array([-54.751018, -52.806460, -71.229881]),
# IR
"d0": np.array([276.5853, 276.5335, 276.5721, 276.5750, 0]),
"d1": np.array([0.050933, 0.051033, 0.051097, 0.05102, 0]),
"d2": np.array([1.54333e-6, 1.49751e-6, 1.42928e-6, 1.50841e-6, 0]),
"d3": np.array([0, 0, 0, 0, 0]),
"d4": np.array([0, 0, 0, 0, 0]),
"prt_weights": np.array((1, 1, 1, 1)) / 4.0,
"vc": np.array((2687, 927.2, 837.7)),
"A": np.array((2.06699, 0.55126, 0.34716)),
"B": np.array((0.996577, 0.998509, 0.998947)),
"N_S": np.array([0, -4.75, -4.39]),
"b0": np.array([0, 4.85, 4.36]),
"b1": np.array([0, 0.903229 - 1, 0.923365 - 1]),
"b2": np.array([0, 0.00048091, 0.00033524]),
}
}
SATELLITES = {7: "noaa 15",
3: "noaa 16",
5: "noaa 18",
13: "noaa 18",
15: "noaa 19"}
def bfield(array, bit):
"""return the bit array.
"""
return (array & 2**(9 - bit + 1)).astype(np.bool)
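# Worked example (word value assumed): with bits numbered 1..10 from the
# most significant bit of the 10-bit minor frame word,
#   bfield(np.uint16(512), 1)  -> True   (512 == 0b1000000000)
#   bfield(np.uint16(512), 10) -> False
# The reader uses bit 10 of the id word to flag channel 3a scans.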
class HRPTReader(Reader):
"""HRPT minor frame reader.
"""
pformat = "hrpt_hmf"
def load(self, satscene):
"""Read data from file and load it into *satscene*.
"""
conf = ConfigParser()
conf.read(os.path.join(CONFIG_PATH, satscene.fullname + ".cfg"))
options = {}
for option, value in conf.items(satscene.instrument_name + "-level2",
raw = True):
options[option] = value
CASES[satscene.instrument_name](self, satscene, options)
def load_avhrr(self, satscene, options):
"""Read avhrr data from file and load it into *satscene*.
"""
if "filename" not in options:
raise IOError("No filename given, cannot load.")
filename = os.path.join(
options["dir"],
(satscene.time_slot.strftime(options["filename"])))
file_list = glob.glob(satscene.time_slot.strftime(filename))
if len(file_list) > 1:
raise IOError("More than one hrpt file matching!")
elif len(file_list) == 0:
raise IOError("No hrpt file matching!: " +
satscene.time_slot.strftime(filename))
filename = file_list[0]
array = read_file(filename)
sat = (array["id"]["id"] & (2 ** 6 - 1)) >> 3
sat = SATELLITES[sat[len(sat) / 2]]
lon, lat, alt = navigate(array["timecode"], sat)
area = SwathDefinition(lon.reshape(2048, -1), lat.reshape(2048, -1))
satscene.area = area
vis = vis_cal(array["image_data"][:, :, :3], sat)
ir_ = ir_cal(array["image_data"][:, :, 2:], array["telemetry"]["PRT"],
array["back_scan"], array["space_data"], sat)
channels = np.empty(array["image_data"].shape, dtype=np.float64)
channels[:, :, :2] = vis[:, :, :2]
channels[:, :, 3:] = ir_[:, :, 1:]
ch3a = bfield(array["id"]["id"], 10)
ch3b = np.logical_not(ch3a)
channels[ch3a, :, 2] = vis[ch3a, :, 2]
channels[ch3b, :, 2] = ir_[ch3b, :, 0]
for chan in satscene.channels_to_load:
if chan == "1":
satscene["1"] = np.ma.array(vis[:, :, 0])
if chan == "2":
satscene["2"] = np.ma.array(vis[:, :, 1])
if chan == "3A":
satscene["3A"] = np.ma.array(vis[:, :, 2],
mask=np.tile(ch3a, (1, 2048)))
if chan == "3B":
satscene["3B"] = np.ma.array(ir_[:, :, 0],
mask=np.tile(ch3b, (1, 2048)))
if chan == "4":
satscene["4"] = np.ma.array(ir_[:, :, 1])
if chan == "5":
satscene["5"] = np.ma.array(ir_[:, :, 2])
## Reading
## http://www.ncdc.noaa.gov/oa/pod-guide/ncdc/docs/klm/html/c4/sec4-1.htm#t413-1
def read_file(filename):
"""Read the file using numpy
"""
dtype = np.dtype([('frame_sync', '>u2', (6, )),
('id', [('id', '>u2'),
('spare', '>u2')]),
('timecode', '>u2', (4, )),
('telemetry', [("ramp_calibration", '>u2', (5, )),
("PRT", '>u2', (3, )),
("ch3_patch_temp", '>u2'),
("spare", '>u2'),]),
('back_scan', '>u2', (10, 3)),
('space_data', '>u2', (10, 5)),
('sync', '>u2'),
('TIP_data', '>u2', (520, )),
('spare', '>u2', (127, )),
('image_data', '>u2', (2048, 5)),
('aux_sync', '>u2', (100, ))])
arr = np.memmap(filename, dtype=dtype)
#arr = arr.newbyteorder()
return arr
## navigation
from pyorbital.orbital import Orbital
from datetime import datetime, timedelta, time
from pyorbital.geoloc import ScanGeometry, compute_pixels, get_lonlatalt
def timecode(tc_array):
word = tc_array[0]
day = word >> 1
word = tc_array[1]
msecs = ((127) & word) * 1024
word = tc_array[2]
msecs += word & 1023
msecs *= 1024
word = tc_array[3]
msecs += word & 1023
return datetime(2014, 1, 1) + timedelta(days=int(day) - 1,
milliseconds=int(msecs))
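# Worked example (sample words chosen for illustration; note that the epoch
# year 2014 is hardcoded above):
#   timecode(np.array([64, 3, 443, 640], dtype='>u2'))
#   day word 64 -> day 32 of the year
#   27-bit counter: (3 * 1024 + 443) * 1024 + 640 -> 3600000 ms
#   result: datetime(2014, 2, 1, 1, 0)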
def navigate(timecodes, satellite):
orb = Orbital(satellite)
first_time = timecode(timecodes[0])
first_time = datetime(first_time.year, first_time.month, first_time.day)
hrpttimes = [timecode(x) - first_time for x in timecodes]
hrpttimes = np.array([x.seconds + x.microseconds / 1000000.0
for x in hrpttimes])
scan_points = np.arange(2048)
if satellite == "noaa 16":
scan_angle = 55.25
else:
scan_angle = 55.37
scans_nb = len(hrpttimes)
avhrr_inst = np.vstack(((scan_points / 1023.5 - 1)
* np.deg2rad(-scan_angle),
np.zeros((len(scan_points),)))).T
avhrr_inst = np.tile(avhrr_inst, [scans_nb, 1])
offset = hrpttimes
times = (np.tile(scan_points * 0.000025, [scans_nb, 1])
+ np.expand_dims(offset, 1))
sgeom = ScanGeometry(avhrr_inst, times.ravel())
s_times = sgeom.times(first_time)
rpy = (0, 0, 0)
pixels_pos = compute_pixels((orb.tle._line1, orb.tle._line2), sgeom, s_times, rpy)
pos_time = get_lonlatalt(pixels_pos, s_times)
return pos_time
## VIS calibration
def vis_cal(vis_data, sat):
"""Calibrates the visual data using dual gain.
"""
logger.debug("Visual calibration")
vis = np.empty(vis_data.shape, dtype=np.float64)
for i in range(3):
ch = vis_data[:, :, i]
intersect = calib[sat]["intersections"][i]
slope_l = calib[sat]["slopes_l"][i]
slope_h = calib[sat]["slopes_h"][i]
intercept_l = calib[sat]["intercepts_l"][i]
intercept_h = calib[sat]["intercepts_h"][i]
vis[:, :, i] = ne.evaluate("where(ch < intersect, ch * slope_l + intercept_l, ch * slope_h + intercept_h)")
return vis
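# Worked example for "noaa 19", channel 1 (counts assumed): a count of 400
# lies below the intersection (496.43), so the low gain applies:
#   400 * 0.055091 - 2.1415 ~= 19.89
# while a count of 600 uses the high gain:
#   600 * 0.16253 - 55.863 ~= 41.66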
## IR calibration
def ir_cal(ir_data, telemetry, back_scan, space_data, sat):
alen = ir_data.shape[0]
logger.debug("IR calibration")
logger.debug(" Preparing telemetry...")
    factor = int(np.ceil(alen / 5.0)) + 1
displacement = (telemetry[0:5, :] == np.array([0, 0, 0])).sum(1).argmax() + 1
offset = 4 - (displacement - 1)
    # Inject the calibration constants for this satellite (d0..d4, vc, A, B,
    # N_S, b0..b2, prt_weights) into the module namespace so the formulas
    # below can use them by name.
    globals().update(calib[sat])
bd0 = np.tile(d0.reshape(-1, 1), (factor, 3))[offset:offset + alen]
bd1 = np.tile(d1.reshape(-1, 1), (factor, 3))[offset:offset + alen]
bd2 = np.tile(d2.reshape(-1, 1), (factor, 3))[offset:offset + alen]
bd3 = np.tile(d3.reshape(-1, 1), (factor, 3))[offset:offset + alen]
bd4 = np.tile(d4.reshape(-1, 1), (factor, 3))[offset:offset + alen]
PRT = telemetry
T_PRT = bd0 + PRT * (bd1 + PRT * (bd2 + PRT * (bd3 + PRT * bd4)))
    sublen = int(np.floor((T_PRT.shape[0] - displacement) / 5.0)) * 5
TMP_PRT = T_PRT[displacement:displacement + sublen]
logger.debug(" Computing blackbody temperatures...")
MEAN = ((TMP_PRT[::5] +
TMP_PRT[1::5] +
TMP_PRT[2::5] +
TMP_PRT[3::5]) / 4).repeat(5, 0)
if displacement == 0:
T_BB_beg = None
elif displacement == 1:
T_BB_beg = MEAN[0]
else:
T_BB_beg = np.tile(T_PRT[:displacement].sum(0) / (displacement - 1), (displacement, 1))
    if sublen + displacement >= T_PRT.shape[0]:
T_BB_end = None
else:
T_BB_end = np.tile(T_PRT[sublen+displacement:].mean(0), (T_PRT.shape[0] - sublen - displacement, 1))
if T_BB_beg is not None:
to_stack = [T_BB_beg, MEAN]
else:
to_stack = [MEAN]
if T_BB_end is not None:
to_stack.append(T_BB_end)
T_BB = np.vstack(to_stack)
if sat in ["noaa 15", "noaa 16"]:
# three readings for klm
T_BB = T_BB.mean(0)
T_BB_star = A + B * T_BB
N_BB = (c1 * vc ** 3) / (np.exp((c2 * vc)/(T_BB_star)) - 1)
    C_S = space_data[:, :, 2:].mean(1)
C_BB = back_scan.mean(1)
C_E = ir_data
# aapp style
#G = (N_BB - N_S) / (C_BB - C_S)
#k1 = G**2 * b2
#k2 = (b1 + 1) *G - 2 * k1 * C_S + 2*b2 * G * N_S
#k3 = b0 + (b1 + 1) * N_S - (b1 + 1) *G * C_S + b2 * (N_S - G * N_S) ** 2
#N_E = k1[:, np.newaxis, :] * C_E * C_E + k2[:, np.newaxis, :] * C_E + k3[:, np.newaxis, :]
logger.debug(" Computing linear part of radiances...")
C_Sr = C_S[:, np.newaxis, :]
Cr = ((N_BB - N_S) / (C_S - C_BB))[:, np.newaxis, :]
N_lin = ne.evaluate("(N_S + (Cr * (C_Sr - C_E)))")
logger.debug(" Computing radiance correction...")
# the +1 (N_lin) here is for Ne = Nlin + Ncor
N_E = ne.evaluate("((b2 * N_lin + b1 + 1) * N_lin + b0)")
logger.debug(" Computing channels brightness temperatures...")
T_E_star = ne.evaluate("(c2 * vc / (log(1 + c1 * vc**3 / N_E)))")
T_E = ne.evaluate("(T_E_star - A) / B")
return T_E
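# Summary of the three steps above: N_lin is the two-point linear radiance
# estimate between the space and blackbody views, N_E adds the quadratic
# non-linearity correction (b0, b1, b2), and T_E inverts the Planck
# function (c1, c2, vc) with the band-correction constants A and B to get
# brightness temperatures.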
CASES = {
"avhrr": HRPTReader.load_avhrr
}
if __name__ == '__main__':
import sys
array = read_file(sys.argv[1])
sat = (array["id"]["id"] & (2 ** 6 - 1)) >> 3
sat = int(np.round(np.mean(sat)))
sat = SATELLITES[sat]
vis = vis_cal(array["image_data"][:, :, :3], sat)
ir_ = ir_cal(array["image_data"][:, :, 2:], array["telemetry"]["PRT"],
array["back_scan"], array["space_data"], sat)
channels = np.empty(array["image_data"].shape, dtype=np.float64)
channels[:, :, :2] = vis[:, :, :2]
channels[:, :, 3:] = ir_[:, :, 1:]
ch3a = bfield(array["id"]["id"], 10)
ch3b = np.logical_not(ch3a)
channels[ch3a, :, 2] = vis[ch3a, :, 2]
channels[ch3b, :, 2] = ir_[ch3b, :, 0]
lon, lat, alt = navigate(array["timecode"], sat)
area = SwathDefinition(lon.reshape(2048, -1), lat.reshape(2048, -1))
| mraspaud/mpop | mpop/satin/hrpt_hmf.py | Python | gpl-3.0 | 18,933 |
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test classes for code snippet for modeling article."""
from google.appengine.ext import ndb
from tests import AppEngineTestbedCase
from . import relation_model_models as models
class ContactTestCase(AppEngineTestbedCase):
"""A test case for the Contact model with relationship model."""
def setUp(self):
"""Creates 1 contact and 1 company.
Assuming the contact belongs to tmatsuo's addressbook.
"""
super(ContactTestCase, self).setUp()
self.myaddressbook_key = ndb.Key('AddressBook', 'tmatsuo')
mary = models.Contact(parent=self.myaddressbook_key, name='Mary')
mary.put()
self.mary_key = mary.key
google = models.Company(name='Google')
google.put()
self.google_key = google.key
candit = models.Company(name='Candit')
candit.put()
self.candit_key = candit.key
def test_relationship(self):
"""Two companies hire Mary."""
mary = self.mary_key.get()
google = self.google_key.get()
candit = self.candit_key.get()
# first google hires Mary
models.ContactCompany(parent=self.myaddressbook_key,
contact=mary.key,
company=google.key,
title='engineer').put()
# then another company named 'candit' hires Mary too
models.ContactCompany(parent=self.myaddressbook_key,
contact=mary.key,
company=candit.key,
title='president').put()
# get the list of companies that Mary belongs to
self.assertEqual(len(mary.companies), 2)
| JPO1/python-docs-samples | appengine/ndb/modeling/relation_model_models_test.py | Python | apache-2.0 | 2,299 |
# Copyright 2017-2019 The FIAAS Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from .common import ObjectMeta
from ..base import Model
from ..fields import Field
class ConfigMap(Model):
class Meta:
list_url = "/api/v1/configmaps"
url_template = "/api/v1/namespaces/{namespace}/configmaps/{name}"
metadata = Field(ObjectMeta)
data = Field(dict)
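# Minimal usage sketch (assumes a configured API client elsewhere; saving
# via .save() comes from the Model base class and is an assumption here,
# not shown in this file):
#
#   cm = ConfigMap(metadata=ObjectMeta(name="app-config", namespace="default"),
#                  data={"LOG_LEVEL": "debug"})
#   cm.save()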
| fiaas/k8s | k8s/models/configmap.py | Python | apache-2.0 | 928 |
#!/usr/bin/python
import re, nltk, random, tweepy, time
from words import wordpos
consumer_key = ""
consumer_secret = ""
access_token = ""
access_token_secret = ""
trumpismsbegin = ['Frankly, ', 'To be honest, ', 'You know what? ', 'Listen, ', "I'm the best. ", "I have a great brain and ", "Let's be honest, ", "This is huge, ", 'China ', "I'm smart, "]
trumpismsend = [' We need a wall.', ' Sad!', ' THAT much I know!', ' THAT much I can tell you!', ' Make America Great Again', ' We will win.', ' It is HUGE!', ' Amazing!', ' Fantastic!', " We're gonna win!", ' #makeamericagreatagain']
class TwitterWrapper:
def __init__(self, consumer_key, consumer_secret, access_token, access_token_secret):
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
self.api = tweepy.API(auth)
def update_status(self, message):
self.api.update_status(status=message)
shouldbp = random.randint(1, 10)
shouldep = random.randint(1, 10)
def tweetit():
d = random.randint(0,(len(wordpos['DT'])-1))
n = random.randint(0,(len(wordpos['NN'])-1))
v = random.randint(0,(len(wordpos['VBZ'])-1))
j = random.randint(0,(len(wordpos['JJ'])-1))
it = wordpos['DT'][d] + ' ' + wordpos['NN'][n] + ' ' + wordpos['VBZ'][v] + ' ' + wordpos['JJ'][j]
bp = random.randint(0, (len(trumpismsbegin)-1))
bp = trumpismsbegin[bp]
ep = random.randint(0, (len(trumpismsend)-1))
ep = trumpismsend[ep]
pick = random.randint(1, 3)
if pick == 1:
ep = ''
elif pick == 3:
bp = ''
it = re.sub('[!.?,:()"]', '', it)
it = bp + it.lower() + '.' + ep
twitter = TwitterWrapper(consumer_key, consumer_secret, access_token, access_token_secret)
twitter.update_status(it)
print(it)
# Post once every four hours; a loop avoids the unbounded recursion of the
# original tail call, which would eventually overflow the call stack.
while True:
    tweetit()
    time.sleep(14400)
| jac0bbennett/Trump-Quote-Bot | trumpbot/gentweet.py | Python | mit | 1,972 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# LORZE erasandcad, a 2D CAD with an intuitive user interface,
# simple and easy. <http://erasand.jimdo.com/projekte/lorze/>
# (C) 2013, Andreas Ulrich
#
# This file is part of "LORZE erasandcad"
# "LORZE erasandcad" is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# "LORZE erasandcad" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU General Public License for more details. You should
# have received a copy of the GNU General Public License along with
# "LORZE erasandcad". If not, see <http://www.gnu.org/licenses/>.
import locale
import wx
from lrzgui import LorzeGUI
from lrzgui import GraphicPanel
from lrzdlgs import LorzeDlgOptions
from lrzdlgs import LorzeAbout
from lrzdlgs import LorzeDlgHelper
from lrzdlgs import LorzeHelp
from lrzdlgs import LorzeDlgSelDrawing
from lrzdlgs import LorzeDlgSelCLSW
from lrzdlgs import LorzeDlgLog
from lrzadmin import LorzeOptions
from lrzadmin import LorzeLog
from lrzadmin import LorzeValueCheck
from lrzadmin import LorzeAttribute
from lrzadmin import LorzeRestart
from lrzadmin import LorzeDrawManager
from lrzadmin import LorzeSelection
from lrzdraw import LorzeGeoHelp
class TestFrame(wx.Frame):
''' object for testing'''
def __init__(self):
# Subclass
wx.Frame.__init__(self, None, title=u'testing lrzadmin.py',
size=(600, 600))
self.__restart = LorzeRestart()
self.__valchck = LorzeValueCheck(self)
self.__options = LorzeOptions(self, self.__valchck)
self.__dlg = LorzeDlgHelper(self, self.__options)
self.__geohelp = LorzeGeoHelp(self.__options)
self.__log = LorzeLog()
self.__attribute = LorzeAttribute(self.__options, self.__log,
self.__valchck)
self.__drawman = LorzeDrawManager(self.__attribute, self.__log,
self.__geohelp)
self.__selection = LorzeSelection(self.__drawman,
self.__geohelp)
self.__gdi = GraphicPanel(self, self.__options, self.__geohelp,
self.__log, self.__valchck,
self.__attribute, self.__drawman,
self.__selection)
# Set attributes, drawings, drawing elements
self.set_attr_draw_elem()
self.__gdi.set_wx_colordic()
self.__gdi.set_wx_styledic()
self.__gdi.set_wx_widthdic()
self.__testing = ('GUI with GDI panel',
'Options dialogue',
'About dialogue',
'Help dialogue',
'Load dialogue',
'Save dialogue',
'Color dialogue',
'Text dialogue',
'List dialogue',
'Selection drawing dialogue | default',
'Selection drawing dialogue | 1 selected',
'Selection drawing dialogue | 3 selected',
'Selection colour dialogue | default',
'Selection colour dialogue | options',
'Selection colour dialogue | 1 selected',
'Selection colour dialogue | 3 selected',
'Selection layer dialogue | default',
'Selection layer dialogue | options',
'Selection layer dialogue | 1 selected',
'Selection layer dialogue | 3 selected',
'Selection line style dialogue | default',
'Selection line style dialogue | options',
'Selection line style dialogue | 1 selected',
'Selection line style dialogue | 3 selected',
                          'LorzeLog dialogue',
'')
# Listbox
self.__list = wx.ListBox(self, size=(-1, -1),
choices=self.__testing)
self.__list.SetSelection(0)
buttonexit = wx.Button(self, label=u'Exit')
# bindings
buttonexit.Bind(wx.EVT_BUTTON, self.on_exit)
self.__list.Bind(wx.EVT_LISTBOX_DCLICK, self.on_list)
self.__list.Bind(wx.EVT_KEY_DOWN, self.on_key_down)
# layout
vbox = wx.BoxSizer(wx.VERTICAL)
vbox.Add(self.__list, 1, wx.EXPAND | wx.ALL, 5)
vbox.Add(buttonexit, 0, wx.EXPAND | wx.ALL, 5)
self.__list.SetFocus()
self.SetSizer(vbox)
self.Centre()
self.Show()
def on_exit(self, event):
'''Event, exit program'''
self.exit_frame()
def on_list(self, event):
'''Event, read selection in the list'''
self.get_selection()
def on_key_down(self, event):
'''Event, get key'''
if event.GetKeyCode() == wx.WXK_ESCAPE:
self.exit_frame()
elif event.GetKeyCode() == wx.WXK_RETURN:
self.get_selection()
elif event.GetKeyCode() == wx.WXK_NUMPAD_ENTER:
self.get_selection()
def exit_frame(self):
'''Close the program'''
self.Close()
def get_selection(self):
'''Run the entry in the list'''
self.__cmd = self.__testing[self.__list.GetSelection()]
if self.__cmd == 'GUI with GDI panel':
LorzeGUI(self.__restart)
        elif self.__cmd == 'Options dialogue':
self.show_options_dlg()
elif self.__cmd == 'About dialogue':
LorzeAbout(self.__options)
elif self.__cmd == 'Help dialogue':
self.show_help_dlg()
elif self.__cmd == 'Load dialogue':
self.show_load_dlg()
elif self.__cmd == 'Save dialogue':
self.show_save_dlg()
elif self.__cmd == 'Color dialogue':
self.show_color_dlg()
elif self.__cmd == 'Text dialogue':
self.show_text_dlg()
elif self.__cmd == 'List dialogue':
self.show_list_dlg()
# drawing selection
elif self.__cmd == 'Selection drawing dialogue | default':
self.show_seldraw_dlg('default')
elif self.__cmd == 'Selection drawing dialogue | 1 selected':
self.show_seldraw_dlg('1 selected')
elif self.__cmd == 'Selection drawing dialogue | 3 selected':
self.show_seldraw_dlg('3 selected')
# colour selection
elif self.__cmd == 'Selection colour dialogue | default':
self.show_selclsw_dlg('default', 'color')
elif self.__cmd == 'Selection colour dialogue | options':
self.show_selclsw_dlg('options', 'color')
elif self.__cmd == 'Selection colour dialogue | 1 selected':
self.show_selclsw_dlg('1 selected', 'color')
elif self.__cmd == 'Selection colour dialogue | 3 selected':
self.show_selclsw_dlg('3 selected', 'color')
# layer selection
elif self.__cmd == 'Selection layer dialogue | default':
self.show_selclsw_dlg('default', 'layer')
elif self.__cmd == 'Selection layer dialogue | options':
self.show_selclsw_dlg('options', 'layer')
elif self.__cmd == 'Selection layer dialogue | 1 selected':
self.show_selclsw_dlg('1 selected', 'layer')
elif self.__cmd == 'Selection layer dialogue | 3 selected':
self.show_selclsw_dlg('3 selected', 'layer')
# style selection
elif self.__cmd == 'Selection line style dialogue | default':
self.show_selclsw_dlg('default', 'style')
elif self.__cmd == 'Selection line style dialogue | options':
self.show_selclsw_dlg('options', 'style')
elif self.__cmd == 'Selection line style dialogue | 1 selected':
self.show_selclsw_dlg('1 selected', 'style')
elif self.__cmd == 'Selection line style dialogue | 3 selected':
self.show_selclsw_dlg('3 selected', 'style')
# lorzelog
        elif self.__cmd == 'LorzeLog dialogue':
self.show_lorzelog_dlg()
def get_gdi_size(self):
'''Simulate the method from LorzeGUI, return framesize'''
return(self.GetSize())
def set_status_mouse(self, x, y):
'''Simulate the method from LorzeGUI, do nothing'''
pass
def get_borders_dict(self):
'''Get __bordersdict.'''
return({'simple': wx.SIMPLE_BORDER,
'raised': wx.RAISED_BORDER,
'sunken': wx.SUNKEN_BORDER,
'no': wx.NO_BORDER})
def get_cursors_dict(self):
'''Get __cursorsdict.'''
return({'arrow': wx.CURSOR_ARROW,
'right arrow': wx.CURSOR_RIGHT_ARROW,
'blank': wx.CURSOR_BLANK,
'bullseye': wx.CURSOR_BULLSEYE,
'char': wx.CURSOR_CHAR,
'cross': wx.CURSOR_CROSS,
'hand': wx.CURSOR_HAND,
'ibeam': wx.CURSOR_IBEAM,
'left button': wx.CURSOR_LEFT_BUTTON,
'magnifier': wx.CURSOR_MAGNIFIER,
'middle button': wx.CURSOR_MIDDLE_BUTTON,
'no entry': wx.CURSOR_NO_ENTRY,
'paint brush': wx.CURSOR_PAINT_BRUSH,
'pencil': wx.CURSOR_PENCIL,
'point left': wx.CURSOR_POINT_LEFT,
'point right': wx.CURSOR_POINT_RIGHT,
'question arrow': wx.CURSOR_QUESTION_ARROW,
'right button': wx.CURSOR_RIGHT_BUTTON,
                'sizenesw': wx.CURSOR_SIZENESW,
'sizens': wx.CURSOR_SIZENS,
'sizenwse': wx.CURSOR_SIZENWSE,
'sizewe': wx.CURSOR_SIZEWE,
'sizing': wx.CURSOR_SIZING,
'spraycan': wx.CURSOR_SPRAYCAN,
'wait': wx.CURSOR_WAIT,
'watch': wx.CURSOR_WATCH,
'arrowwait': wx.CURSOR_ARROWWAIT,
'default': wx.CURSOR_DEFAULT})
def get_line_style_dict(self):
'''Get __linstyldict.'''
return({'solid': wx.SOLID,
'dot': wx.DOT,
'long dash': wx.LONG_DASH,
'short dash': wx.SHORT_DASH,
'dot dash': wx.DOT_DASH,
'user dash': wx.USER_DASH})
def set_attr_draw_elem(self):
'''Set attributes and elements for testing.'''
        # Add colour entries.
for i in [('Red', 'FF0000', 'base color'),
('Blue', '0000FF', 'base color'),
('Green', '008000', 'base color'),
('Yellow', 'FFFF00', 'base color'),
('Gray', '808080', 'base color'),
('White', 'FFFFFF', 'base color')]:
self.__attribute.add_color_entry(i[0], i[1], i[2])
        # Add layer entries.
for i in [('Layer 1', 'default'),
('Layer 2', 'default'),
('Layer 3', 'default'),
('Layer 4', 'default'),
('Layer 5', 'default'),
('Layer 6', 'default')]:
self.__attribute.add_layer_entry(i[0], i[1])
        # Add line style entries.
for i in [('Solid', ('solid | '), 'standard'),
('Dot', ('dot | '), 'standard'),
('Long dash', ('long dash | '), 'standard'),
('Short dash', ('short dash | '), 'standard'),
('Dot dash', ('dot dash | '), 'standard'),
('Axis 13mm', ('user dash | 10, 2, 1, 2'),
'standard')]:
self.__attribute.add_style_entry(i[0], i[1], i[2])
        # Add line width entries.
for i in [('0.13mm', '0.13', 'Rotring'),
('0.18mm', '0.18', 'Rotring'),
('0.25mm', '0.25', 'Rotring'),
('0.35mm', '0.35', 'Rotring'),
('0.50mm', '0.50', 'Rotring'),
('0.70mm', '0.70', 'Rotring')]:
self.__attribute.add_width_entry(i[0], i[1], i[2])
# Add 3 drawings
self.__drawman.add_drawing('lrzwxtest 1')
self.__drawman.add_drawing('lrzwxtest 2')
self.__drawman.add_drawing('lrzwxtest 3')
# 6 lines
line1 = ('L', ('Red', 'Layer 1', 'Solid', '0.13mm',
(1, 1, 10, 10)))
line2 = ('L', ('Blue', 'Layer 2', 'Dot', '0.18mm',
(2, 2, 20, 20)))
line3 = ('L', ('Green', 'Layer 3', 'Long dash', '0.25mm',
(3, 3, 30, 30)))
line4 = ('L', ('Yellow', 'Layer 4', 'Short dash', '0.35mm',
(4, 4, 40, 40)))
line5 = ('L', ('Gray', 'Layer 5', 'Dot dash', '0.50mm',
(5, 5, 50, 50)))
line6 = ('L', ('White', 'Layer 6', 'Axis 13mm', '0.70mm',
(6, 6, 60, 60)))
# Add 6 lines in 3 drawings
for i in [('lrzwxtest 1', line1, line2),
('lrzwxtest 2', line3, line4),
('lrzwxtest 3', line5, line6)]:
drawing, element1, element2 = i
self.__drawman.set_default_draw(drawing)
for j in (element1, element2):
typ, attr = j
self.__drawman.get_drawing(drawing).add_(typ, attr)
def show_options_dlg(self):
dlg = LorzeDlgOptions(self, self.__options, self.__valchck,
self.__dlg)
dlg.ShowModal()
restart = dlg.get_restart()
dlg.Destroy()
if restart:
text = ''
for i in ('gdiborder',
'cursorwin',
'cursordraw',
'cursorselect',
'sccolpoint',
'sccolsegm',
'sccolelem',
'prevcol',
'prevstyle',
'selcol',
'defcolorname',
'defcolorvalue',
'defcolorlabel',
'deflayername',
'deflayerlabel',
'defstylename',
'defstylevalue',
'defstylelabel',
'defwidthname',
'defwidthvalue',
'defwidthlabel',
'framewidth',
'frameheight',
'dezplcs',
'dlgborder',
'dlgoptwidth',
'dlgoptheight',
'sbrange',
'sbsize',
'sensitive',
'maxstylescale',
'maxuserscale',
'scsize',
'scwidth',
'prevwidth',
'stcoordw',
'optlistwidth0',
'optlistwidth1',
'dlghelpwidth',
'dlghelpheight',
'dlglistselwidth',
'dlglistselheight',
'dlgseldrawwidth',
'dlgseldrawheight',
'dlgselcolwidth',
'dlgselcolheight',
'collistwidth0',
'collistwidth1',
'collistwidth2',
'collistwidth3',
'dlgnewcolwidth',
'dlgnewcolheight',
'wheelmagnif',
'wheelreduce',
'scmarksize'):
text = text + i + ': ' + \
str(self.__options.get_(i)) + '\n'
print(self.__cmd)
print(text)
dlg = wx.MessageDialog(self, 'The options are changed. ' + \
'You have to restart lrzwxtest.py', self.__cmd,
style=wx.OK)
dlg.ShowModal()
dlg.Destroy()
self.Close()
def show_help_dlg(self):
dlg = LorzeHelp(self, self.__options)
dlg.ShowModal()
dlg.Destroy()
def show_load_dlg(self):
text = self.__dlg.open_dialog('Load dialog', '/', 'Test.txt',
'Text (*.txt) | *.txt | All (*.*) | *.*')
print(self.__cmd)
print(text)
def show_save_dlg(self):
text = self.__dlg.save_dialog('Save dialog', '/', 'Test.txt',
'Text (*.txt) | *.txt | All (*.*) | *.*', '.txt')
print(self.__cmd)
print(text)
def show_color_dlg(self):
text = self.__dlg.color_dialog('#FFFFFF')
print(self.__cmd)
print(text)
def show_text_dlg(self):
text = self.__dlg.text_dialog('Test', 'Testeingabe ..', '123')
print(self.__cmd)
print(text)
def show_list_dlg(self):
text = self.__dlg.list_selection('List-Test', ['a', 'b', 'c',
'd', 'e', 'f', 'g', 'h', 'i'],
'e')
print(self.__cmd)
print(text)
def show_seldraw_dlg(self, mode):
if mode == 'default':
self.__selection.clear()
elif mode == '1 selected':
self.__selection.clear()
self.__selection.add_remove('L0', 'lrzwxtest 1')
elif mode == '3 selected':
self.__selection.clear()
self.__selection.add_remove('L0', 'lrzwxtest 1')
self.__selection.add_remove('L0', 'lrzwxtest 2')
self.__selection.add_remove('L0', 'lrzwxtest 3')
seldlg = LorzeDlgSelDrawing(self, self.__options,
self.__drawman, self.__selection)
seldlg.ShowModal()
drawing = seldlg.get_choice()
seldlg.Destroy()
#~ dlg = wx.MessageDialog(self, drawing, self.__cmd, style=wx.OK)
#~ dlg.ShowModal()
#~ dlg.Destroy()
print(self.__cmd)
print(drawing)
def show_lorzelog_dlg(self):
dlg = LorzeDlgLog(self, self.__options, self.__log)
dlg.ShowModal()
dlg.Destroy()
def show_selclsw_dlg(self, mode, typ):
if mode == 'default':
self.__selection.clear()
elif mode == 'options':
self.__selection.clear()
elif mode == '1 selected':
self.__selection.clear()
self.__selection.add_remove('L0', 'lrzwxtest 1')
            mode = 'selected'
elif mode == '3 selected':
self.__selection.clear()
self.__selection.add_remove('L0', 'lrzwxtest 1')
self.__selection.add_remove('L0', 'lrzwxtest 2')
self.__selection.add_remove('L0', 'lrzwxtest 3')
            mode = 'selected'
seldlg = LorzeDlgSelCLSW(self, typ, mode, self.__options,
self.__attribute, self.__drawman,
self.__selection, self.__dlg,
self.__valchck)
seldlg.ShowModal()
attr = seldlg.get_choice()
newlist = seldlg.get_new_created()
seldlg.Destroy()
print(self.__cmd)
if attr == '__cancel':
print('Cancel')
elif attr == '__default':
print('No changes in ' + typ)
            print('Newly created ' + typ + 's', newlist)
else:
print('Chosen ' + typ, attr)
            print('Newly created ' + typ + 's', newlist)
class TestTextDlg(wx.Dialog):
'''TestTextDlg(parent, title, text) - show text in a dialogue'''
def __init__(self, parent, title, text):
wx.Dialog.__init__(self, parent, wx.ID_ANY, title,
size=parent.GetSize(),
style=wx.DEFAULT_DIALOG_STYLE |
wx.RESIZE_BORDER)
textctrl = wx.TextCtrl(self, value=text, style=wx.TE_MULTILINE)
buttonok = wx.Button(self, label='OK')
textctrl.Bind(wx.EVT_KEY_DOWN, self.on_key_down)
buttonok.Bind(wx.EVT_BUTTON, self.on_ok)
vbox = wx.BoxSizer(wx.VERTICAL)
vbox.Add(textctrl, 1, wx.EXPAND | wx.ALL, 5)
vbox.Add(buttonok, 0, wx.EXPAND | wx.ALL, 5)
textctrl.SetFocus()
self.SetSizer(vbox)
self.Centre()
def on_ok(self, event):
'''Event, close the dialogue'''
self.Close()
def on_key_down(self, event):
'''Event, get key'''
if event.GetKeyCode() == wx.WXK_ESCAPE:
self.Close()
elif event.GetKeyCode() == wx.WXK_RETURN:
self.Close()
elif event.GetKeyCode() == wx.WXK_NUMPAD_ENTER:
self.Close()
if __name__ == '__main__':
app = wx.App()
# internationalization
wxloc = wx.Locale()
wxloc.AddCatalogLookupPathPrefix('./in18')
# get system language ('xx_XX', 'CHARSET')
wxlang = locale.getdefaultlocale()
wxlang = wxlang[0][:2]
# select translations
if wxlang == 'de':
wxloc.AddCatalog('lorze_de')
frame = TestFrame()
app.MainLoop()
| ulrich3110/LORZE | Modules/lrzwxtest.py | Python | gpl-3.0 | 21,386 |
__author__ = 'haukurk'
from components.emailserver.server import email_watcher
from components.smsinterpreter import sms
import asyncore
from utils.logger import logger
def component_proxy(message):
"""
Proxy between email component and sms interpreter.
    Email component event returns an email.Message object and the
    interpreter takes 2 arguments: a mobile number and the message text.
"""
response_dictionary = sms.send_sms(str(message["To"].split('@')[0]), message.get_payload())
if response_dictionary["status"] == 200:
logger.info("SMS Sent out successfully to: " + str(message["To"].split('@')[0]))
else:
logger.info("SMS Sent out to " + str(message["To"].split('@')[0]) + " with the status code: " + str(
response_dictionary["status"]))
# Listen for emails and deliver event to the component proxy.
email_watcher.emailArrived += component_proxy
print "Bridge started! We are listening on port 25."
logger.info("Bridge started! We are listening on port 25.")
# Leave for LAST. Listen for email events from async socket handler for email server.
asyncore.loop() | haukurk/email-to-smsapi | run.py | Python | mit | 1,123 |
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
from pymatgen.core import Structure
from pymatgen.util.string import (
disordered_formula,
formula_double_format,
htmlify,
latexify,
latexify_spacegroup,
transformation_to_string,
unicodeify,
unicodeify_spacegroup,
unicodeify_species,
Stringify,
)
class SubStr(Stringify):
def __str__(self):
return "Fe8O12"
class SupStr(Stringify):
STRING_MODE = "SUPERSCRIPT"
def to_pretty_string(self) -> str:
return "Fe2+"
def __str__(self):
return "Fe**2+"
class StringifyTest(unittest.TestCase):
def test_to_latex_string(self):
self.assertEqual(SubStr().to_latex_string(), "Fe$_{8}$O$_{12}$")
self.assertEqual(SupStr().to_latex_string(), "Fe$^{2+}$")
def test_to_html_string(self):
self.assertEqual(SubStr().to_html_string(), "Fe<sub>8</sub>O<sub>12</sub>")
self.assertEqual(SupStr().to_html_string(), "Fe<sup>2+</sup>")
def test_to_unicode_string(self):
self.assertEqual(SubStr().to_unicode_string(), "Fe₈O₁₂")
self.assertEqual(SupStr().to_unicode_string(), "Fe²⁺")
class FuncTest(unittest.TestCase):
def test_latexify(self):
self.assertEqual(latexify("Li3Fe2(PO4)3"), "Li$_{3}$Fe$_{2}$(PO$_{4}$)$_{3}$")
self.assertEqual(latexify("Li0.2Na0.8Cl"), "Li$_{0.2}$Na$_{0.8}$Cl")
def test_latexify_spacegroup(self):
self.assertEqual(latexify_spacegroup("Fd-3m"), "Fd$\\overline{3}$m")
self.assertEqual(latexify_spacegroup("P2_1/c"), "P2$_{1}$/c")
def test_htmlify(self):
self.assertEqual(
htmlify("Li3Fe2(PO4)3"),
"Li<sub>3</sub>Fe<sub>2</sub>(PO<sub>4</sub>)<sub>3</sub>",
)
self.assertEqual(htmlify("Li0.2Na0.8Cl"), "Li<sub>0.2</sub>Na<sub>0.8</sub>Cl")
def test_unicodeify(self):
self.assertEqual(unicodeify("Li3Fe2(PO4)3"), "Li₃Fe₂(PO₄)₃")
self.assertRaises(ValueError, unicodeify, "Li0.2Na0.8Cl")
self.assertEqual(unicodeify_species("O2+"), "O²⁺")
self.assertEqual(unicodeify_spacegroup("F-3m"), "F3̅m")
def test_formula_double_format(self):
self.assertEqual(formula_double_format(1.00), "")
self.assertEqual(formula_double_format(2.00), "2")
self.assertEqual(formula_double_format(2.10), "2.1")
self.assertEqual(formula_double_format(2.10000000002), "2.1")
def test_transformation_to_string(self):
m = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
t = [0, 0, 0]
s = "x,y,z"
ms = "mx,my,mz"
abc = "a,b,c"
self.assertEqual(s, transformation_to_string(m, t))
self.assertEqual(ms, transformation_to_string(m, t, c="m"))
self.assertEqual(abc, transformation_to_string(m, t, components=("a", "b", "c")))
m = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
t = [11, 12, 13]
s = "x+2y+3z+11,4x+5y+6z+12,7x+8y+9z+13"
self.assertEqual(s, transformation_to_string(m, t))
m = [
[-1 / 2, -2 / 3, -3 / 4],
[-5 / 6, -6 / 7, -7 / 8],
[-8 / 9, -9 / 10, -10 / 11],
]
t = [-11 / 12, -12 / 13, -13 / 14]
s = "-x/2-2y/3-3z/4-11/12,-5x/6-6y/7-7z/8-12/13,-8x/9-9y/10-10z/11-13/14"
self.assertEqual(s, transformation_to_string(m, t))
def test_disordered_formula(self):
disordered_struct = Structure(
[[10, 0, 0], [0, 10, 0], [0, 0, 10]],
[{"Cu": 0.25, "Au": 0.75}],
[[0, 0, 0]],
)
formula_plain = disordered_formula(disordered_struct, fmt="plain")
formula_latex = disordered_formula(disordered_struct, fmt="LaTeX")
formula_html = disordered_formula(disordered_struct, fmt="HTML")
self.assertEqual(formula_plain, "CuxAu1-x x=0.25")
self.assertEqual(formula_latex, "Cu_{x}Au_{1-x} x=0.25")
self.assertEqual(formula_html, "Cu<sub>x</sub>Au<sub>1-x</sub> x=0.25")
if __name__ == "__main__":
unittest.main()
| gmatteo/pymatgen | pymatgen/util/tests/test_string.py | Python | mit | 4,104 |
#!/usr/bin/env python
__author__ = 'waroquiers'
import unittest
import os
import json
import numpy as np
import shutil
from monty.tempfile import ScratchDir
from pymatgen.util.testing import PymatgenTest
from pymatgen.analysis.chemenv.coordination_environments.structure_environments import StructureEnvironments
from pymatgen.analysis.chemenv.coordination_environments.structure_environments import LightStructureEnvironments
from pymatgen.core.periodic_table import Specie
from pymatgen.analysis.chemenv.coordination_environments.chemenv_strategies import SimplestChemenvStrategy
from pymatgen.analysis.chemenv.coordination_environments.chemenv_strategies import MultiWeightsChemenvStrategy
se_files_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..", "..", "..",
'test_files', "chemenv", "structure_environments_files")
class StructureEnvironmentsTest(PymatgenTest):
def test_structure_environments(self):
with ScratchDir("."):
            with open("{}/{}".format(se_files_dir, 'se_mp-7000.json'), 'r') as f:
                dd = json.load(f)
se = StructureEnvironments.from_dict(dd)
isite = 6
csm_and_maps_fig, csm_and_maps_subplot = se.get_csm_and_maps(isite=isite)
np.testing.assert_array_almost_equal(csm_and_maps_subplot.lines[0].get_xydata().flatten(), [0.0, 0.53499332])
np.testing.assert_array_almost_equal(csm_and_maps_subplot.lines[1].get_xydata().flatten(), [1.0, 0.47026441])
np.testing.assert_array_almost_equal(csm_and_maps_subplot.lines[2].get_xydata().flatten(), [2.0, 0.00988778])
environments_figure, environments_subplot = se.get_environments_figure(isite=isite)
np.testing.assert_array_almost_equal(np.array(environments_subplot.patches[0].get_xy()),
[[1., 1.],
[1., 0.99301365],
[1.00179228, 0.99301365],
[1.00179228, 1.],
[1., 1.]])
np.testing.assert_array_almost_equal(np.array(environments_subplot.patches[1].get_xy()),
[[1., 0.99301365],
[1., 0.],
[1.00179228, 0.],
[1.00179228, 0.99301365],
[1., 0.99301365]])
np.testing.assert_array_almost_equal(np.array(environments_subplot.patches[2].get_xy()),
[[1.00179228, 1.],
[1.00179228, 0.99301365],
[2.25, 0.99301365],
[2.25, 1.],
[1.00179228, 1.]])
np.testing.assert_array_almost_equal(np.array(environments_subplot.patches[3].get_xy()),
[[1.00179228, 0.99301365],
[1.00179228, 0.],
[2.22376156, 0.],
[2.22376156, 0.0060837],
[2.25, 0.0060837],
[2.25, 0.99301365],
[1.00179228, 0.99301365]])
np.testing.assert_array_almost_equal(np.array(environments_subplot.patches[4].get_xy()),
[[2.22376156, 0.0060837],
[2.22376156, 0.],
[2.25, 0.],
[2.25, 0.0060837],
[2.22376156, 0.0060837]])
se.save_environments_figure(isite=isite, imagename='image.png')
self.assertTrue(os.path.exists('image.png'))
self.assertEqual(len(se.differences_wrt(se)), 0)
self.assertFalse(se.__ne__(se))
ce = se.ce_list[isite][4][0]
self.assertTrue(ce.__len__(), 4)
symbol, mingeom = ce.minimum_geometry(symmetry_measure_type='csm_wocs_ctwocc')
self.assertEqual(symbol, 'T:4')
self.assertAlmostEqual(mingeom['symmetry_measure'], 0.00988778424054)
np.testing.assert_array_almost_equal(mingeom['other_symmetry_measures']['rotation_matrix_wcs_csc'],
[[-0.8433079817973094, -0.19705747216466898, 0.5000000005010193],
[0.4868840909509757, 0.11377118475194581, 0.8660254034951744],
[-0.22754236927612112, 0.9737681809261427, 1.3979531202869064e-13]])
self.assertEqual(mingeom['detailed_voronoi_index'], {'index': 0, 'cn': 4})
self.assertAlmostEqual(mingeom['other_symmetry_measures']['scaling_factor_wocs_ctwocc'], 1.6270605877934026)
ce_string = ce.__str__()
self.assertTrue('csm1 (with central site) : 0.00988' in ce_string)
self.assertTrue('csm2 (without central site) : 0.00981' in ce_string)
self.assertTrue('csm1 (with central site) : 12.987' in ce_string)
self.assertTrue('csm2 (without central site) : 11.827' in ce_string)
self.assertTrue('csm1 (with central site) : 32.466' in ce_string)
self.assertTrue('csm2 (without central site) : 32.466' in ce_string)
self.assertTrue('csm1 (with central site) : 34.644' in ce_string)
self.assertTrue('csm2 (without central site) : 32.466' in ce_string)
mingeoms = ce.minimum_geometries(symmetry_measure_type='csm_wocs_ctwocc', max_csm=12.0)
self.assertEqual(len(mingeoms), 2)
mingeoms = ce.minimum_geometries(symmetry_measure_type='csm_wocs_ctwcc', max_csm=12.0)
self.assertEqual(len(mingeoms), 1)
mingeoms = ce.minimum_geometries(n=3)
self.assertEqual(len(mingeoms), 3)
ce2 = se.ce_list[7][4][0]
self.assertTrue(ce.is_close_to(ce2, rtol=0.01, atol=1e-4))
self.assertFalse(ce.is_close_to(ce2, rtol=0.0, atol=1e-8))
self.assertFalse(ce.__eq__(ce2))
self.assertTrue(ce.__ne__(ce2))
def test_light_structure_environments(self):
with ScratchDir("."):
            with open("{}/{}".format(se_files_dir, 'se_mp-7000.json'), 'r') as f:
                dd = json.load(f)
se = StructureEnvironments.from_dict(dd)
strategy = SimplestChemenvStrategy()
lse = LightStructureEnvironments.from_structure_environments(structure_environments=se, strategy=strategy,
valences='undefined')
isite = 6
nb_set = lse.neighbors_sets[isite][0]
neighb_coords = [np.array([0.2443798, 1.80409653, -1.13218359]),
np.array([1.44020353, 1.11368738, 1.13218359]),
np.array([2.75513098, 2.54465207, -0.70467298]),
np.array([0.82616785, 3.65833945, 0.70467298])]
neighb_indices = [0, 3, 5, 1]
neighb_images = [[0, 0, -1], [0, 0, 0], [0, 0, -1], [0, 0, 0]]
np.testing.assert_array_almost_equal(neighb_coords, nb_set.neighb_coords)
np.testing.assert_array_almost_equal(neighb_coords, [s.coords for s in nb_set.neighb_sites])
nb_sai = nb_set.neighb_sites_and_indices
np.testing.assert_array_almost_equal(neighb_coords, [sai['site'].coords for sai in nb_sai])
np.testing.assert_array_almost_equal(neighb_indices, [sai['index'] for sai in nb_sai])
nb_iai = nb_set.neighb_indices_and_images
np.testing.assert_array_almost_equal(neighb_indices, [iai['index'] for iai in nb_iai])
np.testing.assert_array_equal(neighb_images, [iai['image_cell'] for iai in nb_iai])
self.assertEqual(nb_set.__len__(), 4)
self.assertEqual(nb_set.__hash__(), 4)
self.assertFalse(nb_set.__ne__(nb_set))
self.assertEqual(nb_set.__str__(), 'Neighbors Set for site #6 :\n'
' - Coordination number : 4\n'
' - Neighbors sites indices : 0, 1, 2, 3\n')
stats = lse.get_statistics()
neighbors = lse.strategy.get_site_neighbors(site=lse.structure[isite])
self.assertArrayAlmostEqual(neighbors[0].coords, np.array([ 0.2443798, 1.80409653, -1.13218359]))
self.assertArrayAlmostEqual(neighbors[1].coords, np.array([ 1.44020353, 1.11368738, 1.13218359]))
self.assertArrayAlmostEqual(neighbors[2].coords, np.array([ 2.75513098, 2.54465207, -0.70467298]))
self.assertArrayAlmostEqual(neighbors[3].coords, np.array([ 0.82616785, 3.65833945, 0.70467298]))
equiv_site_index_and_transform = lse.strategy.equivalent_site_index_and_transform(neighbors[0])
self.assertEqual(equiv_site_index_and_transform[0], 0)
self.assertArrayAlmostEqual(equiv_site_index_and_transform[1], [0.0, 0.0, 0.0])
self.assertArrayAlmostEqual(equiv_site_index_and_transform[2], [0.0, 0.0, -1.0])
equiv_site_index_and_transform = lse.strategy.equivalent_site_index_and_transform(neighbors[1])
self.assertEqual(equiv_site_index_and_transform[0], 3)
self.assertArrayAlmostEqual(equiv_site_index_and_transform[1], [0.0, 0.0, 0.0])
self.assertArrayAlmostEqual(equiv_site_index_and_transform[2], [0.0, 0.0, 0.0])
self.assertEqual(stats['atom_coordination_environments_present'], {'Si': {'T:4': 3.0}})
self.assertEqual(stats['coordination_environments_atom_present'], {'T:4': {'Si': 3.0}})
self.assertEqual(stats['fraction_atom_coordination_environments_present'], {'Si': {'T:4': 1.0}})
site_info_ce = lse.get_site_info_for_specie_ce(specie=Specie('Si', 4), ce_symbol='T:4')
np.testing.assert_array_almost_equal(site_info_ce['fractions'], [1.0, 1.0, 1.0])
np.testing.assert_array_almost_equal(site_info_ce['csms'],
[0.009887784240541068, 0.009887786546730826, 0.009887787384385317])
self.assertEqual(site_info_ce['isites'], [6, 7, 8])
site_info_allces = lse.get_site_info_for_specie_allces(specie=Specie('Si', 4))
self.assertEqual(site_info_allces['T:4'], site_info_ce)
self.assertFalse(lse.contains_only_one_anion('I-'))
self.assertFalse(lse.contains_only_one_anion_atom('I'))
self.assertTrue(lse.site_contains_environment(isite=isite, ce_symbol='T:4'))
self.assertFalse(lse.site_contains_environment(isite=isite, ce_symbol='S:4'))
self.assertFalse(lse.structure_contains_atom_environment(atom_symbol='Si', ce_symbol='S:4'))
self.assertTrue(lse.structure_contains_atom_environment(atom_symbol='Si', ce_symbol='T:4'))
self.assertFalse(lse.structure_contains_atom_environment(atom_symbol='O', ce_symbol='T:4'))
self.assertTrue(lse.uniquely_determines_coordination_environments)
self.assertFalse(lse.__ne__(lse))
envs = lse.strategy.get_site_coordination_environments(lse.structure[6])
self.assertEqual(len(envs), 1)
self.assertEqual(envs[0][0], 'T:4')
multi_strategy = MultiWeightsChemenvStrategy.stats_article_weights_parameters()
lse_multi = LightStructureEnvironments.from_structure_environments(strategy=multi_strategy,
structure_environments=se,
valences='undefined')
self.assertAlmostEqual(lse_multi.coordination_environments[isite][0]['csm'], 0.009887784240541068)
self.assertAlmostEqual(lse_multi.coordination_environments[isite][0]['ce_fraction'], 1.0)
self.assertEqual(lse_multi.coordination_environments[isite][0]['ce_symbol'], 'T:4')
if __name__ == "__main__":
unittest.main() | dongsenfo/pymatgen | pymatgen/analysis/chemenv/coordination_environments/tests/test_structure_environments.py | Python | mit | 12,696 |
import math
import pyglet
import resources.indices as ind
#######################################
# Core Classes.
class Vector:
"""Basic 2D vectors"""
def __init__(self, x=0, y=0):
"""Initialization.
Args:
x: x position
y: y position
"""
self.x = x
self.y = y
def __add__(self, other):
"""Vector addition."""
if isinstance(other, Vector):
x = self.x + other.x
y = self.y + other.y
else:
x = self.x + other
y = self.y + other
return Vector(x, y)
def __sub__(self, other):
"""Vector substraction."""
if isinstance(other, Vector):
x = self.x - other.x
y = self.y - other.y
else:
x = self.x - other
y = self.y - other
return Vector(x, y)
def __mul__(self, other):
"""Vector dot product."""
if isinstance(other, Vector):
x = self.x * other.x
y = self.y * other.y
else:
x = self.x * other
y = self.y * other
return Vector(x, y)
def __rmul__(self, other):
return self.__mul__(other)
def __truediv__(self, other):
"""Division by scalar."""
x = self.x / other
y = self.y / other
return Vector(x, y)
def angle_deg(self):
"""Return angle in degrees."""
return math.degrees(math.atan2(self.y, self.x))
def angle_rad(self):
"""Return angle in radians."""
return math.atan2(self.y, self.x)
def mag(self):
"""Return vector magnitude."""
return math.sqrt(self.x ** 2 + self.y ** 2)
def norm(self):
"""Return vector magnitude."""
mag = self.mag()
x = self.x / mag
y = self.y / mag
return Vector(x, y)
class Rect:
"""Basic class for managing rectangles."""
def __init__(self, x=0, y=0, width=0, height=0):
self.x = x
self.y = y
self.width = width
self.height = height
#######################################
# Core functions.
def energy(moon, planets, gravity=0):
"""Determine the current energy of the moon.
Args:
moon (Moon): Moon object to determine energy for.
planets (list of Planet): Planets objects setting up the
gravitational field for the moon.
gravity (float): Gravity constant.
Returns: dict
ind.TOTAL (float): Total energy of the moon.
ind.KINETIC (float): Kinetic energy of the moon.
ind.POTENTIAL (float): Potential energy of the moon.
"""
pe = 0
for planet in planets:
r = planet.locus - moon.locus
if r.mag() > (planet.width / 2 + moon.width / 2):
pe -= gravity * planet.mass / r.mag()
ke = moon.mass * (moon.velocity.mag() ** 2) / 2
te = ke + pe
return {ind.TOTAL: te, ind.KINETIC: ke, ind.POTENTIAL: pe}
def update(dt, moon, planets, gravity=0):
"""Updates the current postion and velocity of the moon.
Args:
dt (float): Time step in seconds.
moon (Moon): Moon object to be updated.
planets (list of Planet): Planets setting up the
gravitational field.
Returns:
Nothing.
The moon is update over the timestep dt using the inverse square
law of gravitation given the field set up by the planets. The
integration uses the fourth-order Runge-Kutta algorithm.
"""
if moon.crashed:
return 0
rkv1 = Vector(0, 0)
for planet in planets:
r = planet.locus - moon.locus
if r.mag() > (planet.width / 2 + moon.width / 2):
rkv1 += gravity * planet.mass * r.norm() * dt / r.mag() ** 2
else:
moon.crash()
return 0
rkx1 = moon.velocity * dt
rkv2 = Vector(0, 0)
for planet in planets:
        r = planet.locus - (moon.locus + rkx1 / 2)
if r.mag() > (planet.width / 2 + moon.width / 2):
rkv2 += gravity * planet.mass * r.norm() * dt / r.mag() ** 2
rkx2 = (moon.velocity + rkv1 / 2) * dt
    rkv3 = Vector(0, 0)
    for planet in planets:
        r = planet.locus - (moon.locus + rkx2 / 2)
if r.mag() > (planet.width / 2 + moon.width / 2):
rkv3 += gravity * planet.mass * r.norm() * dt / r.mag() ** 2
rkx3 = (moon.velocity + rkv2 / 2) * dt
    rkv4 = Vector(0, 0)
    for planet in planets:
        r = planet.locus - (moon.locus + rkx3)
if r.mag() > (planet.width / 2 + moon.width / 2):
rkv4 += gravity * planet.mass * r.norm() * dt / r.mag() ** 2
rkx4 = (moon.velocity + rkv3) * dt
moon.velocity += (rkv1 + 2 * rkv2 + 2 * rkv3 + rkv4) / 6
moon.locus += (rkx1 + 2 * rkx2 + 2 * rkx3 + rkx4) / 6
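# Minimal usage sketch (hypothetical Moon/Planet objects exposing the
# .locus, .velocity, .mass and .width attributes assumed above):
#
#   moon, earth = Moon(), Planet()
#   for _ in range(600):              # one simulated minute at dt = 0.1 s
#       update(0.1, moon, [earth], gravity=6.674e-11)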
def inrect(x, y, rect):
"""Determines whether a point lies in a rectangle.
Args:
x (int or float): X-coordinate of point.
y (int or float): Y-coordinate of point.
rect (Rect): Rectangle to check for point containment.
The rectangle must be perpendicular/parallel to the coordinates.
If this is not the case, it must first be rotated."""
in_rect_x = x > rect.x and x < rect.x + rect.width
in_rect_y = y > rect.y and y < rect.y + rect.height
return in_rect_x and in_rect_y
| MWRuszczycky/SimpleMoonSimulator | moonsim/model/engine.py | Python | bsd-3-clause | 5,361 |
import discord
from sigma.core.utilities.data_processing import user_avatar
def count_all_commands(db, user):
cmd_items = db[db.db_cfg.database]['CommandStats'].aggregate(
[
{'$match': {
'author': user.id
}},
{"$group": {
"_id": {
"command": "$command.name",
},
"count": {"$sum": 1}
}}
]
)
output = {}
total = 0
for x in cmd_items:
output.update({x['_id']['command']: x['count']})
total += x['count']
return output, total
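# Shape of the return value (illustrative data only):
#   ({'ping': 40, 'help': 10}, 50)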
async def profile(cmd, message, args):
if message.mentions:
target = message.mentions[0]
else:
target = message.author
avatar = user_avatar(target)
commands, total_commands = count_all_commands(cmd.db, target)
exp = cmd.db.get_experience(target, message.guild)
global_level = int((exp['global'] // ((((exp['global'] // 690) * 0.0125) + 1) * 690)))
top_cmd = {'cmd': None, 'val': 0}
for command in commands:
if commands[command] > top_cmd['val']:
top_cmd = {'cmd': command, 'val': commands[command]}
if total_commands != 0:
cmd_percentage = int((top_cmd['val'] / total_commands) * 100)
else:
cmd_percentage = 0
cmd_stats = f'Level: {global_level}'
cmd_stats += f'\nMost Used: {top_cmd["cmd"]}'
cmd_stats += f'\nCount: {top_cmd["val"]} ({cmd_percentage}%)'
cmd_stats += f'\nTotal: {total_commands} Commands'
response = discord.Embed(color=target.color)
response.set_thumbnail(url=avatar)
response.add_field(name=f'{target.display_name}\'s Profile', value=cmd_stats)
await message.channel.send(embed=response)
| AngryBork/apex-sigma-plugins | statistics/internals/profile.py | Python | gpl-3.0 | 1,746 |
#!/usr/bin/python
# coding=utf-8
# This file is part of the MLizard library published under the GPL3 license.
# Copyright (C) 2012 Klaus Greff
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division, unicode_literals, print_function
from nose.tools import istest, nottest, with_setup
from nose.tools import assert_true as _assert_true
from nose.tools import assert_not_equal as _assert_not_equal
from nose.tools import raises
from numpy.testing import assert_almost_equal, assert_equal, assert_allclose
from numpy.testing import assert_array_less as assert_less
def assert_true(expr, msg=None):
_assert_true(expr, msg)
def assert_not_equal(first, second, msg=None):
_assert_not_equal(first, second, msg)
# Use the same flag as unittest itself to prevent descent into these functions:
__unittest = 1
| Qwlouse/MLizard | mlizard/test/helpers.py | Python | gpl-3.0 | 928 |
#!/usr/bin/python3
# template.py by Bill Weinman [http://bw.org/]
# created for Python 3 Essential Training on lynda.com
# Copyright 2010 The BearHeart Group, LLC
from xml.dom.minidom import parse
from urllib.request import urlopen
from html.parser import HTMLParser
DEFAULT_NAMESPACES = (
    None, # RSS 0.91, 0.92, 0.93, 0.94, 2.0
'http://purl.org/rss/1.0/', # RSS 1.0
'http://my.netscape.com/rdf/simple/0.9/', # RSS 0.90
'http://www.w3.org/2005/Atom', # ATOM
'http://purl.org/dc/elements/1.1/' # dublin core namespace
)
class HTMLDataOnly(HTMLParser):
''' only gets data (text) from HTML -- no tags! '''
def handle_data(self, data):
        self._data = ' '.join([self._data, data]) if hasattr(self, '_data') else data
def get_data(self):
return self._data
class RSS:
def __init__(self, url):
self.feed = parse(urlopen(url))
# rss or atom?
for t in ('item', 'entry'):
self.node = self.getElementsByTagName(self.feed, t)
if self.node: break
self.feedTitle = self.textOf(self.first(self.feed, 'title'))
self.feedDescription = self.textOf(self.first(self.feed, 'description'))
self.feedURL = url
        self._index = 0
def next_index(self, i = None):
print("next_index", self._index)
if i is None: self._index += 1
elif i < 0: self._index = None
else: self._index = i
if self._index >= len(self.node): self._index = None
return self._index
def title(self, n = None):
return self.textOfNode('title', n).strip()
# atom uses an href attribute for the link
def link(self, n = None):
if n is None: n = self.node[self._index]
l = self.textOfNode('link', n).strip()
return l if l else self.attrOf(n, 'link', 'href').strip()
def description(self, n = None):
htmldata = HTMLDataOnly()
for t in ('description', 'summary'):
text = self.textOfNode(t, n)
if text:
htmldata.feed(text)
return htmldata.get_data().strip()
return ''
def date(self):
for t in ('date', 'pubDate'):
s = self.textOfNode(t)
if s: return s
def getElementsByTagName(self, node, tagName, possibleNamespaces=DEFAULT_NAMESPACES):
for namespace in possibleNamespaces:
children = node.getElementsByTagNameNS(namespace, tagName)
if len(children): return children
return []
def first(self, node, tagName, possibleNamespaces=DEFAULT_NAMESPACES):
children = self.getElementsByTagName(node, tagName, possibleNamespaces)
return children[0] if len(children) else None
def attrOf(self, node, element, attr):
n = self.first(node, element)
return n.getAttribute(attr) if n else ''
def textOf(self, node):
return ''.join([child.data for child in node.childNodes]) if node else ''
def textOfNode(self, tagName, n = None):
if n is None: n = self.node[self._index]
return self.textOf(self.first(n, tagName))
def record(self, n):
return {
'title': self.title(n),
'link': self.link(n),
'description': self.description(n),
'index': self.node.index(n)
}
def records(self):
for n in self.node:
yield self.record(n)
def main():
for url in (
'http://feeds.nytimes.com/nyt/rss/Books',
'http://billweinman.wordpress.com/feed/',
'http://perlhacks.com/atom.xml'
):
rss = RSS(url)
for r in rss.records():
print("node {} of {}".format(r['index'] + 1, len(rss.node)))
print(r['title'])
print(r['link'])
print(r['description'])
if __name__ == "__main__": main()
| hckrtst/learnpython | py3_essential_training/19 Projects/Extras/RSS/rss.py | Python | mit | 3,843 |
"""
dyn_pages/publish_targets/flickr.py
Author: Josh Williams
Date Added: Sun Feb 25 14:55:23 CST 2007
Flickr. Bleh.
"""
## STD LIBS
import md5
from xml.dom import minidom
## OUR LIBS
## 3RD PARTY LIBS
from nevow import rend, inevow, url
from twisted.web.client import HTTPClientFactory
from twisted.internet import reactor
from twisted.internet.defer import Deferred
API_KEY = "831f709dc56117f7875c8489f1571bd9"
SECRET = "987a44f2f546d235"
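# Flickr's legacy auth flow: each call is signed with an md5 digest of the
# shared secret followed by the parameters in alphabetical order (e.g.
# "<secret>api_key<key>frob<frob>methodflickr.auth.getToken"), and the
# one-time frob is then exchanged for a permanent token.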
class flickr_token_getter(HTTPClientFactory):
def __init__(self, frob, log):
self.frob = frob
self.log = log
self.sig = md5.md5("%sapi_key%sfrob%smethodflickr.auth.getToken" % (SECRET, API_KEY, self.frob)).hexdigest()
url = "http://api.flickr.com/services/rest/?method=flickr.auth.getToken&api_key=%s&frob=%s&api_sig=%s" % (API_KEY, self.frob, self.sig)
self.log.debug("url: %s" % url)
HTTPClientFactory.__init__(self, url, agent="Zoto/3.0.1")
self.deferred.addCallback(self.handle_response)
self.deferred.addErrback(self.handle_error)
self.token = None
def get_token(self):
self.d = Deferred()
reactor.connectTCP("www.flickr.com", 80, self)
return self.d
def handle_response(self, page):
self.log.debug("response:\n%s" % page)
dom = minidom.parseString(page)
auth = dom.documentElement
token = ""
for node in auth.getElementsByTagName("token")[0].childNodes:
if node.nodeType == node.TEXT_NODE:
token += node.data
perms = ""
for node in auth.getElementsByTagName("perms")[0].childNodes:
if node.nodeType == node.TEXT_NODE:
perms += node.data
user_node = auth.getElementsByTagName("user")[0]
username = user_node.getAttribute('username')
user_id = user_node.getAttribute('nsid')
self.d.callback((token, username, user_id))
def handle_error(self, error):
self.d.errback(error)
class flickr(rend.Page):
def __init__(self, username, app, log):
rend.Page.__init__(self)
self.username = username
self.app = app
self.log = log
self.error = None
self.token = None
self.flickr_user = None
self.flickr_user_id = None
def locateChild(self, ctx, segments):
if len(segments) > 0 and segments[0]:
if segments[0] == "auth":
## Step1. Authenticate with Flickr.
request = inevow.IRequest(ctx)
sig = md5.md5("%sapi_key%spermswrite" % (SECRET, API_KEY)).hexdigest()
api_url = url.URL(scheme="http", netloc="www.flickr.com", pathsegs=["services", "auth"], querysegs=[["api_key", API_KEY], ["perms", "write"], ["api_sig", sig]])
return api_url, []
elif segments[0] == "auth-response":
request = inevow.IRequest(ctx)
if request.args.has_key("frob"):
frob = request.args['frob'][0]
token_getter = flickr_token_getter(frob, self.log)
d = token_getter.get_token()
d.addCallback(self.handle_perm_token)
d.addErrback(self.handle_failure)
return d
else:
self.frob = None
return self, []
def handle_perm_token(self, result):
self.token, self.flickr_user, self.flickr_user_id = result
self.error = None
return self, []
def handle_failure(self, error):
self.error = error
return self, []
def renderHTTP(self, ctx):
auth_successful = 1
if self.error:
auth_successful = 0
doc = """<!DOCTYPE html
PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html>
<head>
<title>Thanks</title>
<script type="text/javascript" src="/js/3.05/third_party/MochiKit/MochiKit.js"></script>
<script type="text/javascript">
function closeWindow() {
window.opener=top;
window.opener.child_window_closed(%s, ["%s", "%s", "%s"]);
window.self.close();
}
</script>
</head>
<body onload="javascript:closeWindow();">
</body>
</html>
""" % (auth_successful, str(self.token) or "NO TOKEN", str(self.flickr_user) or "NO USER", str(self.flickr_user_id) or "NO USER_ID")
return doc
| kordless/zoto-server | aztk/web/publish_targets/flickr.py | Python | bsd-3-clause | 3,849 |
#!/usr/bin/env python3
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import apache_beam as beam
import csv
def addtimezone(lat, lon):
try:
import timezonefinder
tf = timezonefinder.TimezoneFinder()
tz = tf.timezone_at(lng=float(lon), lat=float(lat))
if tz is None:
tz = 'UTC'
return (lat, lon, tz)
except ValueError:
return (lat, lon, 'TIMEZONE') # header
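# Hypothetical example: addtimezone('37.77', '-122.42') returns
# ('37.77', '-122.42', 'America/Los_Angeles'); the CSV header row raises
# ValueError in float() and is tagged 'TIMEZONE' instead.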
if __name__ == '__main__':
with beam.Pipeline('DirectRunner') as pipeline:
airports = (pipeline
| beam.io.ReadFromText('airports.csv.gz')
| beam.Map(lambda line: next(csv.reader([line])))
| beam.Map(lambda fields: (fields[0], addtimezone(fields[21], fields[26])))
)
airports | beam.Map(lambda f: '{},{}'.format(f[0], ','.join(f[1])) )| beam.io.textio.WriteToText('airports_with_tz')
pipeline.run()
| GoogleCloudPlatform/training-data-analyst | quests/data-science-on-gcp-edition1_tf2/04_streaming/simulate/df02.py | Python | apache-2.0 | 1,395 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
class ConfigError(Exception):
pass
| weberwang/WeRoBot | werobot/exceptions.py | Python | mit | 122 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('order', '0003_auto_20150113_1629'),
]
operations = [
migrations.AlterModelOptions(
name='line',
options={'ordering': ['pk'], 'verbose_name': 'Order Line', 'verbose_name_plural': 'Order Lines'},
),
]
| michaelkuty/django-oscar | src/oscar/apps/order/migrations/0004_auto_20160111_1108.py | Python | bsd-3-clause | 431 |
#
# Solution to Project Euler problem 61
# Philippe Legault
#
# https://github.com/Bathlamos/Project-Euler-Solutions
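# Euler 61: find a cyclic set of 4-digit numbers, one from each polygonal
# family (triangle through octagonal), where the last two digits of each
# number are the first two of the next. The ranges below keep every
# generated figurate number within 1000..9999.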
def compute():
triangle = frozenset(map(str, [n * (n + 1) / 2 for n in range(45, 141)]))
square = frozenset(map(str, [n ** 2 for n in range(32, 100)]))
pentagonal = frozenset(map(str, [n * (3 * n - 1) / 2 for n in range(26, 82)]))
hexagonal = frozenset(map(str, [n * (2 * n - 1) for n in range(23, 71)]))
heptagonal = frozenset(map(str, [n * (5 * n - 3) / 2 for n in range(21, 64)]))
	octagonal = frozenset(map(str, [n * (3 * n - 2) for n in range(19, 59)]))
poss_chains = []
for i in triangle:
		poss_chains.append((i, [i], i, set([square, pentagonal, hexagonal, heptagonal, octagonal])))
while poss_chains:
i, array, first_num, sets_to_check = poss_chains.pop()
if len(sets_to_check) == 0 and first_num[:2] == i[-2:]:
return sum(map(int, array))
for s in sets_to_check:
for t in filter(lambda x: i[-2:] == x[:2], s):
poss_chains.append((t, array + [t], first_num, set(sets_to_check) - set([s])))
if __name__ == "__main__":
print(compute()) | Bathlamos/Project-Euler-Solutions | solutions/p061.py | Python | mit | 1,103 |
from django.forms.widgets import HiddenInput
from django.urls import reverse
from django.utils.http import urlencode
from consents.forms import RequiredConsentsForm
from consents.models import Term
from consents.tests.base import ConsentTestBase
from consents.util import person_has_consented_to_required_terms
from workshops.models import Person
class TestActionRequiredTermView(ConsentTestBase):
def setUp(self):
super().setUp()
self.neville = Person.objects.create(
personal="Neville",
family="Longbottom",
email="[email protected]",
gender="M",
username="longbottom_neville",
airport=self.airport_0_0,
is_active=True,
)
def test_agreement_already_set(self):
"""Make sure the view redirect somewhere if person has already agreed
to the required terms."""
# force login Neville
self.client.force_login(self.neville)
url = reverse("action_required_terms")
# form renders
rv = self.client.get(url)
self.assertEqual(rv.status_code, 200)
# Neville decided to agree to all terms on the page
kwargs = {
"initial": {"person": self.neville},
"widgets": {"person": HiddenInput()},
}
self.person_agree_to_terms(
self.neville, RequiredConsentsForm(**kwargs).get_terms()
)
# form throws 404
rv = self.client.get(url)
self.assertEqual(rv.status_code, 404)
def test_optional_agreements_are_optional(self):
"""Make sure the view redirect somewhere if person has agreed
to the required terms."""
# force login Neville
self.client.force_login(self.neville)
url = reverse("action_required_terms")
# form renders
rv = self.client.get(url)
self.assertEqual(rv.status_code, 200)
# Neville decided to agree to only the required terms
self.person_agree_to_terms(
self.neville,
Term.objects.filter(required_type=Term.PROFILE_REQUIRE_TYPE),
)
# form throws 404
rv = self.client.get(url)
self.assertEqual(rv.status_code, 404)
def test_required_agreement_submit(self):
"Make sure the form passes only when required terms are set."
# setup sample data
kwargs = {
"initial": {"person": self.neville},
"widgets": {"person": HiddenInput()},
}
terms = RequiredConsentsForm(**kwargs).get_terms()
data = {
term.slug: term.options[0].pk
for term in terms.exclude(required_type=Term.PROFILE_REQUIRE_TYPE)
}
data["person"] = self.neville.pk
# make sure it doesn't pass without the required consents
form = RequiredConsentsForm(data, initial={"person": self.neville})
self.assertFalse(form.is_valid())
# let's try with consent for required terms
for term in terms.filter(required_type=Term.PROFILE_REQUIRE_TYPE):
data[term.slug] = term.options[0].pk
form = RequiredConsentsForm(data, initial={"person": self.neville})
self.assertTrue(form.is_valid())
class TestTermsMiddleware(ConsentTestBase):
def setUp(self):
super().setUp()
self.neville = Person.objects.create(
personal="Neville",
family="Longbottom",
email="[email protected]",
gender="M",
username="longbottom_neville",
airport=self.airport_0_0,
is_active=True,
)
self.form_url = reverse("action_required_terms")
def test_anonymous_user(self):
"""Ensure anonymous user is not redirected by the Terms middleware."""
urls = [
reverse("login"),
reverse("api:root"),
reverse("training_request"),
reverse("training_request_confirm"),
reverse("workshop_request"),
reverse("workshop_request_confirm"),
]
# ensure we're not logged in
self.client.logout()
for url in urls:
rv = self.client.get(url)
# no redirects!
self.assertEqual(rv.status_code, 200)
# user indeed is anonymous
self.assertEqual(rv.wsgi_request.user.is_anonymous, True)
def test_logged_in_user(self):
"""Ensure logged-in user who has not consented to
the required terms is redirected to the form."""
urls = [
reverse("admin-dashboard"),
reverse("trainee-dashboard"),
]
# ensure we're logged in
self.client.force_login(self.neville)
# ensure we have not yet agreed to the required consents
self.assertEqual(person_has_consented_to_required_terms(self.neville), False)
with self.terms_middleware():
for url in urls:
rv = self.client.get(url)
action_required_url = "{}?next={}".format(
reverse("action_required_terms"), url
)
self.assertRedirects(rv, action_required_url)
def test_no_more_redirects_after_agreement(self):
"""Ensure user is no longer forcefully redirected to accept the
required terms."""
url = reverse("trainee-dashboard")
# ensure we're logged in
self.client.force_login(self.neville)
self.assertEqual(person_has_consented_to_required_terms(self.neville), False)
with self.terms_middleware():
# we can't get to the url because we're redirected to the form
rv = self.client.get(url)
action_required_url = "{}?next={}".format(
reverse("action_required_terms"), url
)
self.assertRedirects(rv, action_required_url)
# agree on the required terms
self.person_agree_to_terms(
self.neville,
Term.objects.filter(required_type=Term.PROFILE_REQUIRE_TYPE),
)
# now the dashboard is easily reachable
rv = self.client.get(url)
self.assertEqual(rv.status_code, 200)
def test_allowed_urls(self):
url = reverse("logout")
# ensure we're logged in
self.client.force_login(self.neville)
self.assertEqual(person_has_consented_to_required_terms(self.neville), False)
with self.terms_middleware():
rv = self.client.get(url)
# doesn't redirect to the form
# But logout does redirect to login
self.assertRedirects(rv, reverse("login"))
def test_next_param(self):
"""Ensure a non-dispatch URL is reachable through `?next` query
string."""
url = reverse("autoupdate_profile")
form_url = "{}?{}".format(
reverse("action_required_terms"), urlencode({"next": url})
)
# ensure we're logged in
self.client.force_login(self.neville)
self.assertEqual(person_has_consented_to_required_terms(self.neville), False)
with self.terms_middleware():
# fill in and submit form
terms = Term.objects.filter(
required_type=Term.PROFILE_REQUIRE_TYPE
).prefetch_active_options()
data = {"person": self.neville.pk}
for term in terms:
data[term.slug] = term.options[0].pk
rv = self.client.post(form_url, data=data)
self.assertRedirects(rv, url)
def test_old_terms_do_not_affect_terms_middleware(self):
"""
User is redirected even if old terms are false.
"""
urls = [
reverse("admin-dashboard"),
reverse("trainee-dashboard"),
]
harry = Person.objects.create(
personal="Harry",
family="Potter",
email="[email protected]",
gender="M",
username="harry_potter",
airport=self.airport_0_0,
is_active=True,
# Setting old terms to False.
may_contact=False,
publish_profile=False,
data_privacy_agreement=False,
)
# ensure we're logged in
self.client.force_login(harry)
# ensure we have not yet agreed to the required consents
self.assertEqual(person_has_consented_to_required_terms(harry), False)
with self.terms_middleware():
for url in urls:
rv = self.client.get(url)
# redirects to the form
action_required_url = "{}?next={}".format(
reverse("action_required_terms"), url
)
self.assertRedirects(rv, action_required_url)
| pbanaszkiewicz/amy | amy/consents/tests/test_action_required_view.py | Python | mit | 8,809 |
"""Helper module for gui test suite
"""
import os
import sys
import hashlib
import logging
from PyQt4 import QtGui, QtCore
from qgis.core import (QgsApplication,
QgsVectorLayer,
QgsRasterLayer,
QgsRectangle,
QgsCoordinateReferenceSystem)
from qgis.gui import QgsMapCanvas
from qgis_interface import QgisInterface
# For testing and demoing
from safe.common.testing import TESTDATA
from safe_qgis.safe_interface import (readKeywordsFromFile,
temp_dir,
unique_filename)
LOGGER = logging.getLogger('InaSAFE')
QGISAPP = None  # Static variable used to hold handle to running QGis app
CANVAS = None
PARENT = None
IFACE = None
GEOCRS = 4326 # constant for EPSG:GEOCRS Geographic CRS id
GOOGLECRS = 900913 # constant for EPSG:GOOGLECRS Google Mercator id
DEVNULL = open(os.devnull, 'w')
CONTROL_IMAGE_DIR = os.path.join(os.path.dirname(__file__),
'test_data/test_images')
def assertHashesForFile(theHashes, theFilename):
"""Assert that a files has matches one of a list of expected hashes"""
myHash = hashForFile(theFilename)
myMessage = ('Unexpected hash'
'\nGot: %s'
'\nExpected: %s'
'\nPlease check graphics %s visually '
'and add to list of expected hashes '
'if it is OK on this platform.'
% (myHash, theHashes, theFilename))
assert myHash in theHashes, myMessage
def assertHashForFile(theHash, theFilename):
"""Assert that a files has matches its expected hash"""
myHash = hashForFile(theFilename)
myMessage = ('Unexpected hash'
'\nGot: %s'
'\nExpected: %s' % (myHash, theHash))
assert myHash == theHash, myMessage
def hashForFile(theFilename):
"""Return an md5 checksum for a file"""
myPath = theFilename
    with open(myPath, 'rb') as myFile:
        myData = myFile.read()
myHash = hashlib.md5()
myHash.update(myData)
myHash = myHash.hexdigest()
return myHash
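# A minimal usage sketch for the hash helpers above (the file path and the
# second hash below are hypothetical, not part of this module):
#
#   myHash = hashForFile('/tmp/rendered_map.png')
#   assertHashForFile(myHash, '/tmp/rendered_map.png')  # trivially passes
#   assertHashesForFile([myHash, 'someOtherPlatformHash'],
#                       '/tmp/rendered_map.png')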
def getQgisTestApp():
""" Start one QGis application to test agaist
Input
NIL
Output
handle to qgis app
If QGis is already running the handle to that app will be returned
"""
global QGISAPP # pylint: disable=W0603
if QGISAPP is None:
        myGuiFlag = True  # All tests will run QGIS in GUI mode
QGISAPP = QgsApplication(sys.argv, myGuiFlag)
# Note: This block is not needed for QGIS > 1.8 which will
# automatically check the QGIS_PREFIX_PATH var so it is here
# for backwards compatibility only
if 'QGIS_PREFIX_PATH' in os.environ:
myPath = os.environ['QGIS_PREFIX_PATH']
myUseDefaultPathFlag = True
QGISAPP.setPrefixPath(myPath, myUseDefaultPathFlag)
QGISAPP.initQgis()
s = QGISAPP.showSettings()
LOGGER.debug(s)
global PARENT # pylint: disable=W0603
if PARENT is None:
PARENT = QtGui.QWidget()
global CANVAS # pylint: disable=W0603
if CANVAS is None:
CANVAS = QgsMapCanvas(PARENT)
CANVAS.resize(QtCore.QSize(400, 400))
global IFACE # pylint: disable=W0603
if IFACE is None:
# QgisInterface is a stub implementation of the QGIS plugin interface
IFACE = QgisInterface(CANVAS)
return QGISAPP, CANVAS, IFACE, PARENT
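# A minimal usage sketch, mirroring how this helper is typically consumed at
# the top of a test module (the names on the left are conventions only):
#
#   QGISAPP, CANVAS, IFACE, PARENT = getQgisTestApp()
#
# Repeated calls are safe: the module level singletons above ensure only one
# QgsApplication is ever created per process.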
def unitTestDataPath(theSubdir=None):
"""Return the absolute path to the InaSAFE unit test data dir.
.. note:: This is not the same thing as the SVN inasafe_data dir. Rather
this is a new dataset where the test datasets are all tiny for fast
testing and the datasets live in the same repo as the code.
Args:
* theSubdir: (Optional) Additional subdir to add to the path - typically
'hazard' or 'exposure'.
"""
from safe.common.testing import UNITDATA
myPath = UNITDATA
if theSubdir is not None:
myPath = os.path.abspath(os.path.join(myPath,
theSubdir))
return myPath
def loadLayer(theLayerFile, theDirectory=TESTDATA):
"""Helper to load and return a single QGIS layer
Args:
theLayerFile: Pathname to raster or vector file
DIR: Optional parameter stating the parent dir. If None,
pathname is assumed to be absolute
Returns: QgsMapLayer, str (for layer type)
"""
# Extract basename and absolute path
myFilename = os.path.split(theLayerFile)[-1] # In case path was absolute
myBaseName, myExt = os.path.splitext(myFilename)
if theDirectory is None:
myPath = theLayerFile
else:
myPath = os.path.join(theDirectory, theLayerFile)
myKeywordPath = myPath[:-4] + '.keywords'
# Determine if layer is hazard or exposure
myKeywords = readKeywordsFromFile(myKeywordPath)
myType = 'undefined'
if 'category' in myKeywords:
myType = myKeywords['category']
myMessage = 'Could not read %s' % myKeywordPath
assert myKeywords is not None, myMessage
# Create QGis Layer Instance
if myExt in ['.asc', '.tif']:
myLayer = QgsRasterLayer(myPath, myBaseName)
elif myExt in ['.shp']:
myLayer = QgsVectorLayer(myPath, myBaseName, 'ogr')
else:
myMessage = 'File %s had illegal extension' % myPath
raise Exception(myMessage)
myMessage = 'Layer "%s" is not valid' % str(myLayer.source())
assert myLayer.isValid(), myMessage
return myLayer, myType
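# A minimal usage sketch (the layer name is hypothetical; it would need to
# exist in TESTDATA together with its .keywords companion file):
#
#   myLayer, myType = loadLayer('example_hazard.tif')
#   assert myType in ('hazard', 'exposure', 'undefined')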
def setCanvasCrs(theEpsgId, theOtfpFlag=False):
"""Helper to set the crs for the CANVAS before a test is run.
Args:
* theEpsgId - Valid EPSG identifier (int)
* theOtfpFlag - whether on the fly projections should be enabled
on the CANVAS. Default to False.
"""
    # Enable or disable on-the-fly reprojection as requested
CANVAS.mapRenderer().setProjectionsEnabled(theOtfpFlag)
# Create CRS Instance
myCrs = QgsCoordinateReferenceSystem()
myCrs.createFromId(theEpsgId, QgsCoordinateReferenceSystem.EpsgCrsId)
# Reproject all layers to WGS84 geographic CRS
CANVAS.mapRenderer().setDestinationCrs(myCrs)
def setPadangGeoExtent():
"""Zoom to an area occupied by both both Padang layers"""
myRect = QgsRectangle(100.21, -1.05, 100.63, -0.84)
CANVAS.setExtent(myRect)
def setJakartaGeoExtent():
"""Zoom to an area occupied by both Jakarta layers in Geo"""
myRect = QgsRectangle(106.52, -6.38, 107.14, -6.07)
CANVAS.setExtent(myRect)
def setJakartaGoogleExtent():
"""Zoom to an area occupied by both Jakarta layers in 900913 crs
"""
myRect = QgsRectangle(11873524, -695798, 11913804, -675295)
CANVAS.setExtent(myRect)
def setBatemansBayGeoExtent():
"""Zoom to an area occupied by both Batemans Bay
layers in geo crs"""
myRect = QgsRectangle(150.152, -35.710, 150.187, -35.7013)
CANVAS.setExtent(myRect)
def setYogyaGeoExtent():
"""Zoom to an area occupied by both Jakarta layers in Geo"""
myRect = QgsRectangle(110.348, -7.732, 110.368, -7.716)
CANVAS.setExtent(myRect)
def setGeoExtent(theBoundingBox):
"""Zoom to an area specified given bounding box (list)"""
myRect = QgsRectangle(*theBoundingBox)
CANVAS.setExtent(myRect)
def checkImages(theControlImages, theTestImagePath, theTolerance=1000):
"""Compare a test image against a collection of known good images.
Args:
        * theControlImages: list of file names. Give only the basename + ext
            as the control image dir (CONTROL_IMAGE_DIR) will be prepended.
* theTestImagePath: The Image being checked (must have same dimensions
as the control image). Must be full path to image.
* theTolerance: How many pixels may be different between the
two images.
Returns:
        (bool, str) where:
        * bool is a success or failure indicator
        * str is the concatenated comparison messages for the images tried
Raises:
None
"""
    myMessages = ''
    myFlag = False
    for myControlImage in theControlImages:
myFullPath = os.path.join(CONTROL_IMAGE_DIR,
myControlImage)
myFlag, myMessage = checkImage(myFullPath,
theTestImagePath,
theTolerance)
myMessages += myMessage
# As soon as one passes we are done!
if myFlag:
break
return myFlag, myMessages
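# A minimal usage sketch (image names are hypothetical; control images live
# in CONTROL_IMAGE_DIR while the test image is given as a full path):
#
#   myFlag, myMessages = checkImages(
#       ['expectedMap-linux.png', 'expectedMap-osx.png'],
#       '/tmp/renderedMap.png',
#       theTolerance=1000)
#   assert myFlag, myMessages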
def checkImage(theControlImagePath, theTestImagePath, theTolerance=1000):
"""Compare a test image against a known good image.
Args:
* theControlImagePath: The image representing expected output
* theTestImagePath: The Image being checked (must have same dimensions
as the control image).
* theTolerance: How many pixels may be different between the
two images.
    Returns:
        (bool, str) where:
        * bool is a success or failure indicator
        * str is a message providing analysis comparison notes
Raises:
None
"""
try:
if not os.path.exists(theTestImagePath):
raise OSError
myTestImage = QtGui.QImage(theTestImagePath)
except OSError:
myMessage = 'Test image:\n%s\ncould not be loaded' % theTestImagePath
return False, myMessage
try:
if not os.path.exists(theControlImagePath):
raise OSError
myControlImage = QtGui.QImage(theControlImagePath)
except OSError:
myMessage = ('Control image:\n%s\ncould not be loaded.\n'
'Test image is:\n%s\n' % (
theControlImagePath,
theTestImagePath))
return False, myMessage
if (myControlImage.width() != myTestImage.width()
or myControlImage.height() != myTestImage.height()):
myMessage = ('Control and test images are different sizes.\n'
'Control image : %s (%i x %i)\n'
'Test image : %s (%i x %i)\n'
'If this test has failed look at the above images '
'to try to determine what may have change or '
'adjust the tolerance if needed.' %
(theControlImagePath,
myControlImage.width(),
myControlImage.height(),
theTestImagePath,
myTestImage.width(),
myTestImage.height()))
return False, myMessage
myImageWidth = myControlImage.width()
myImageHeight = myControlImage.height()
myMismatchCount = 0
myDifferenceImage = QtGui.QImage(myImageWidth,
myImageHeight,
QtGui.QImage.Format_ARGB32_Premultiplied)
myDifferenceImage.fill(152 + 219 * 256 + 249 * 256 * 256)
myControlPixel = QtGui.QColor().rgb()
myTestPixel = QtGui.QColor().rgb()
for myY in range(myImageHeight):
for myX in range(myImageWidth):
myControlPixel = myControlImage.pixel(myX, myY)
myTestPixel = myTestImage.pixel(myX, myY)
if (myControlPixel != myTestPixel):
myMismatchCount = myMismatchCount + 1
myDifferenceImage.setPixel(myX, myY, QtGui.qRgb(255, 0, 0))
myDifferenceFilePath = unique_filename(prefix='difference',
suffix='.png',
dir=temp_dir('test'))
myDifferenceImage.save(myDifferenceFilePath, "PNG")
    # Note: theTolerance is an absolute pixel count, not a percentage
myPixelCount = myImageWidth * myImageHeight
# FIXME (Ole): Use relative error i.e. mismatchcount/total pixels
if myMismatchCount > theTolerance:
mySuccessFlag = False
else:
mySuccessFlag = True
myMessage = ('%i of %i pixels are mismatched. Tolerance is %i.\n'
'Control image : %s\n'
'Test image : %s\n'
'Difference image: %s\n'
'If this test has failed look at the above images '
'to try to determine what may have change or '
'adjust the tolerance if needed.' %
(myMismatchCount,
myPixelCount,
theTolerance,
theControlImagePath,
theTestImagePath,
myDifferenceFilePath))
return mySuccessFlag, myMessage
class RedirectStdStreams(object):
"""Context manager for redirection of stdout and stderr
This is from
http://stackoverflow.com/questions/6796492/
python-temporarily-redirect-stdout-stderr
In this context, the class is used to get rid of QGIS
output in the test suite - BUT IT DOESN'T WORK (Maybe
because QGIS starts its providers in a different process?)
Usage:
devnull = open(os.devnull, 'w')
print('Fubar')
with RedirectStdStreams(stdout=devnull, stderr=devnull):
print("You'll never see me")
print("I'm back!")
"""
def __init__(self, stdout=None, stderr=None):
self._stdout = stdout or sys.stdout
self._stderr = stderr or sys.stderr
def __enter__(self):
self.old_stdout, self.old_stderr = sys.stdout, sys.stderr
self.old_stdout.flush()
self.old_stderr.flush()
sys.stdout, sys.stderr = self._stdout, self._stderr
def __exit__(self, exc_type, exc_value, traceback):
self._stdout.flush()
self._stderr.flush()
sys.stdout = self.old_stdout
sys.stderr = self.old_stderr
| gvallarelli/inasafe | safe_qgis/utilities_test.py | Python | gpl-3.0 | 13,793 |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2014 Michael Davidsaver
License is GPL3+, see file LICENSE for details
"""
import unittest
import os.path
from .. import io
from ..util import TempDir
# test data directory
_dir = os.path.dirname(__file__)
class TestSpice(unittest.TestCase):
def test_read(self):
F = os.path.join(_dir, 'opsim-real-bin.raw')
D = io.loadspice(F)
self.assertEqual(1, len(D))
V = D['default']
self.assertEqual(32, len(V))
if __name__ == '__main__':
unittest.main()
| mdavidsaver/spicetools | spicetools/test/test_io.py | Python | gpl-3.0 | 541 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sublime
from glob import glob
from ..api import deviot
from ..platformio.project_recognition import ProjectRecognition
from .quick_panel import quick_panel
from .tools import get_setting, save_setting, save_sysetting
from .preferences_bridge import PreferencesBridge
from .I18n import I18n
class QuickMenu(PreferencesBridge):
def __init__(self):
super(QuickMenu, self).__init__()
self.index = 0
self.quick_list = []
self.deeper = 0
self.history = {}
self.translate = I18n().translate
def set_list(self, quick_list):
"""Set List
Set the list with the items to be shown in the
quick panel
Arguments:
quick_list {list} -- list of items
"""
self.quick_list = quick_list
def show_quick_panel(self, callback):
"""Quick Panel
        Show the quick panel with the given items, previously set
in the quick_list object. The callback can set the index
object to selected an item when the panel is called.
Arguments:
callback {obj} -- callback to call after the selection
"""
quick_panel(self.quick_list, callback, index=self.index)
def callback_board(self, selected):
"""Board Callback
The quick panel returns the index of the option selected,
this index is used to get the id of the board and the id
is stored in the setting file.
        last_action is used to re-run the compilation or upload if that
        was the action requested before the board list was shown
Arguments:
selected {int} -- index of the selected board
"""
if(selected == -1):
save_sysetting('last_action', None)
return
boards_list = self.boards_list()
board_select = boards_list[selected][-1]
board_id = board_select.split("|")[-1].strip()
self.save_selected_board(board_id)
self.run_last_action()
self.set_status_information()
def boards_list(self):
"""Boards List
PlatformIO returns a JSON list with all information of the boards,
the quick panel requires a list with a different format. We will only
show the name (caption), id and vendor.
Returns:
list -- boards list
"""
from .file import File
selected_boards = self.get_selected_boards()
boards_path = deviot.boards_file_path()
boards_file = File(boards_path)
boards = boards_file.read_json()
# rebuild boards file automatically
if(boards == []):
from ..beginning.install_pio import save_board_list
save_board_list()
boards_file = File(boards_path)
boards = boards_file.read_json()
boards_list = []
start = ''
for board in boards:
id = board['id']
vendor = board['vendor']
if(id in selected_boards):
start = '* '
else:
start = ''
caption = start + board['name']
extra = "%s | %s" % (vendor, id)
boards_list.append([caption, extra])
return boards_list
def callback_environment(self, selected):
"""Environment Callback
Callback to store the select environment
        last_action is used to re-run the compilation or upload if that
        was the action requested before the environment list was shown
Arguments:
selected {int} -- option selected (index)
"""
if(selected == -1):
save_sysetting('last_action', None)
return
environments_list = self.environment_list()
environment_select = environments_list[selected][1]
environment = environment_select.split("|")[-1].strip()
self.save_environment(environment)
self.run_last_action()
self.set_status_information()
def environment_list(self):
"""
gets a list with all selected environments and format it
to be shown in the quick panel
"""
from .file import File
environments_list = []
boards = self.boards_list()
environments = self.get_selected_boards()
environment = self.get_environment()
new_environments = environments
index = 0
total = len(environments)
count = total
if(environments):
for value in boards:
data = value[1].split("|")
caption = value[0][2:]
id = data[1].strip()
vendor = data[0].strip()
for listed in environments:
if(listed == id):
vendor = "%s | %s" % (vendor, id)
environments_list.append([caption, vendor])
new_environments.remove(listed)
count -= 1
if(environment == listed):
self.index = index
index += 1
if(not count):
break
if(new_environments):
for board in new_environments:
caption = board
vendor = self.translate('unknown') + ' | {0}'.format(board)
environments_list.append([caption, vendor])
return environments_list
def callback_overwrite_baud(self, selected):
"""Baud rate callback
Stores the option selected in the preferences file
Arguments:
selected {int} -- index of the selected baud rate
"""
if(selected == -1):
return
selected = self.quick_list[selected]
selected = None if selected == 'None' else selected
save_setting('upload_speed', selected)
def overwrite_baud_list(self):
"""Baud rate list
List of baud rates used to overwrite the upload speed
Returns:
list -- list of baud rates
"""
current = get_setting('upload_speed', 'None')
items = QuickMenu.baudrate_list()
self.index = items.index(str(current))
return items
def callback_serial_ports(self, selected):
"""Selected Port Callback
Stores the selected serial port in the preferences file
Arguments:
            selected {int} -- index of the selected port in the quick panel
"""
if(selected <= 0):
save_sysetting('last_action', None)
return
if(selected == 1):
self.window.run_command('deviot_set_ip')
return
if(selected == 2):
port_selected = "not"
else:
if(self.quick_list[selected][2] == 'yes' or
self.quick_list[selected][2] == 'no'):
port_selected = self.quick_list[selected][1]
else:
port_selected = self.quick_list[selected][2]
save_setting('port_id', port_selected)
self.run_last_action()
self.set_status_information()
def serial_list(self):
"""Serial Port List
        Gets the list of serial ports and mDNS services and
        returns it
Returns:
list -- available serial ports/mdns services
"""
index = 2
header = self.translate('port_list').upper()
ports_list = self.get_ports_list()
ports_list.insert(0, [header, self.translate('select_port_list')])
ports_list.insert(1, [self.translate('menu_add_ip'),
self.translate('add_ip_subtitle')])
ports_list.insert(2, [self.translate('menu_not_used_port'),
self.translate('not_used_subtitle')])
current = get_setting('port_id', None)
if(len(ports_list) < 2):
ports_list = [self.translate('menu_no_serial_mdns').upper()]
for port in ports_list[2:]:
if(current in port):
self.index = index
index += 1
return ports_list
def language_list(self):
"""Language List
        Builds the list of available languages in Deviot
Returns:
list -- English language / Language String list
"""
i18n = I18n()
index = 0
language_list = []
lang_ids = i18n.get_lang_ids()
current = get_setting('lang_id', 'en')
for lang_id in lang_ids:
language = i18n.get_lang_name(lang_id)
language_list.append([language[1], language[0]])
if(current == lang_id):
self.index = index
index += 1
return language_list
def callback_language(self, selected):
"""Language Callback
Stores the user language selection
Arguments:
selected {int} -- user selection
"""
if(selected == -1):
return
from .top_menu import TopMenu
lang_ids = I18n().get_lang_ids()
port_selected = lang_ids[selected]
save_setting('lang_id', port_selected)
save_setting('compile_lang', True)
self.window.run_command('deviot_reload')
def callback_import(self, selected):
"""Import Callback
        After selecting the library, the deviot_insert_library command
        inserts the corresponding include directive using the library path
Arguments:
selected {int} -- user index selection
"""
if(selected <= 0):
return
libraries_list = self.import_list()
library_import = libraries_list[selected][2]
window = sublime.active_window()
window.run_command('deviot_insert_library', {'path': library_import})
def import_list(self):
"""Import List
        To generate the list of libraries, it searches two main folders.
        The first one, '~/.platformio/packages', contains the libraries
        included by default by each platform (avr, espressif, etc). The
        second one, '~/platformio/lib', stores all the libraries installed
        by the user from the library manager
Returns:
[list] -- quick panel list with libraries
"""
from .libraries import get_library_list
platform = self.get_platform()
platform = platform if(platform) else 'all'
quick_list = get_library_list(platform=platform)
quick_list.insert(0, [self.translate('select_library').upper()])
if(len(quick_list) <= 1):
quick_list = [[self.translate('menu_no_libraries')]]
return quick_list
def quick_libraries(self):
"""List of libraries
Show the list of libraries availables. The callback will show
the list of examples.
"""
from .libraries import get_library_list
platform = self.get_platform()
platform = platform if(platform) else 'all'
self.quick_list = get_library_list(
example_list=True, platform=platform)
self.quick_list.insert(
0, [self.translate('select_library').upper(), ''])
if(len(self.quick_list) <= 1):
self.quick_list = [[self.translate('menu_no_examples')]]
self.show_quick_panel(self.callback_library)
def callback_library(self, selected):
"""Show Examples
        After the previous selection of the library, each folder found
        inside its "examples" folder is treated as an example that can
        be opened
Arguments:
selected {int} -- user index selection
"""
# cancel
if(selected <= 0):
return
option_len = len(self.quick_list[selected])
if(selected == 1 and option_len == 1):
self.index -= 1
if(selected == 1 and self.index == 0 and option_len == 1):
self.quick_libraries()
self.history = {}
return
if(selected == 1 and self.index > 0):
library_path = self.history[self.index - 1]
del self.history[len(self.history) - 1]
# valid option
if(selected > 0):
try:
library_path = self.quick_list[selected][2]
except IndexError:
library_path = self.quick_list[selected][1]
if('examples' not in library_path):
library_path = os.path.join(library_path, 'examples')
if(self.open_file(library_path)):
return
self.history[self.index] = library_path
self.index += 1
library_path = os.path.join(library_path, '*')
self.quick_list = [[self.translate('select_example').upper()]]
self.quick_list.append([self.translate('_previous').upper()])
for files in glob(library_path):
caption = os.path.basename(files)
self.quick_list.append([caption, files])
self.show_quick_panel(self.callback_library)
def serial_baudrate_list(self):
"""Serial Baudrate
List of baud rates to use with the serial monitor.
        It checks if there is already an option selected and
        sets it in the index object.
        Returns:
            [list] -- list of baud rates
"""
current = get_setting('baudrate', "9600")
items = QuickMenu.baudrate_list()
try:
self.index = items.index(current)
except ValueError:
self.index = 0
return items
def callback_serial_baudrate(self, selected):
"""Serial baud rate callback
callback to select the baud rate used in the serial
monitor. The option is stored in the preferences file
Arguments:
selected {int} -- index of the selected baud rate
"""
if(selected == -1):
return
selected = self.quick_list[selected]
selected = None if selected == 'None' else selected
save_setting('baudrate', selected)
def line_endings_list(self):
"""Serial ending strings
List of ending string used in the monitor serial
"""
items = [
['None'],
['New Line', '\n'],
['Carriage Return', '\r'],
['Both NL & CR', '\r\n']
]
current = get_setting('line_ending', None)
simplified = [i[1] for i in items if len(i) > 1]
simplified.insert(0, None)
self.index = simplified.index(current)
return items
def callback_line_endings(self, selected):
"""Callback line endings
Stores the line ending selected by the user
Arguments:
selected {int} -- index user selection
"""
if(selected == -1):
return
try:
selected = self.quick_list[selected][1]
except IndexError:
selected = None
save_setting('line_ending', selected)
def display_mode_list(self):
"""Display modes
List of display modes
"""
items = [['Text'], ['ASCII'], ['HEX'], ['Mix']]
current = get_setting('display_mode', 'Text')
self.index = items.index([current])
return items
def callback_display_mode(self, selected):
"""Display mode callback
Stores the display mode selected by the user
Arguments:
selected {int} -- index user selection
"""
if(selected == -1):
return
selected = self.quick_list[selected][0]
save_setting('display_mode', selected)
@staticmethod
def baudrate_list():
"""Baudrate list
List of baud rates shown in the monitor serial and upload speed
quick panels.
"""
baudrate_list = ['None',
'1200',
'1800',
'2400',
'4800',
'9600',
'19200',
'38400',
'57600',
'74880',
'115200',
'230400',
'460800',
'500000',
'576000',
'921600',
'1000000',
'1152000']
return baudrate_list
def open_file(self, sketch_path):
"""Open sketch
        Searches the given path for a .ino or .pde file
        and opens it in a new window when found
Arguments:
sketch_path {str} -- path (file/folder) where to search
Returns:
[bool] -- true if it open file, false if not
"""
window = sublime.active_window()
if(sketch_path.endswith(('.ino', '.pde'))):
window.open_file(sketch_path)
return True
for file in os.listdir(sketch_path):
if(file.endswith(('.ino', '.pde'))):
file = os.path.join(sketch_path, file)
window.open_file(file)
return True
return False
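# A minimal usage sketch of the list/callback pairing used throughout this
# class (to be run from within Sublime Text, where the `sublime` module and
# an active window exist):
#
#   menu = QuickMenu()
#   menu.set_list(menu.boards_list())
#   menu.show_quick_panel(menu.callback_board)
#
# The same pattern applies to the other pairs, e.g. serial_list() with
# callback_serial_ports() or language_list() with callback_language().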
| gepd/Deviot | libraries/quick_menu.py | Python | apache-2.0 | 17,317 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Increase text size for MySQL (not relevant for other DBs' text types)
Revision ID: d2ae31099d61
Revises: 947454bf1dff
Create Date: 2017-08-18 17:07:16.686130
"""
from alembic import op
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = 'd2ae31099d61'
down_revision = '947454bf1dff'
branch_labels = None
depends_on = None
def upgrade():
conn = op.get_bind()
if conn.dialect.name == "mysql":
op.alter_column(table_name='variable', column_name='val', type_=mysql.MEDIUMTEXT)
def downgrade():
conn = op.get_bind()
if conn.dialect.name == "mysql":
op.alter_column(table_name='variable', column_name='val', type_=mysql.TEXT)
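# For reference, the upgrade above is roughly equivalent to running the
# following SQL by hand on MySQL (a sketch, not part of the migration):
#
#   ALTER TABLE variable MODIFY val MEDIUMTEXT;
#
# MEDIUMTEXT raises the column limit from 64KB (TEXT) to 16MB.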
| apache/incubator-airflow | airflow/migrations/versions/d2ae31099d61_increase_text_size_for_mysql.py | Python | apache-2.0 | 1,488 |
from openerp.osv import fields, osv
from mako.template import Template
from mako.lookup import TemplateLookup
import os, inspect, subprocess, shutil, signal
# add permissions sudo with visudo
# visudo
# add this 2 lines in the file
# openerp ALL = NOPASSWD : /usr/bin/service openerp-* *
# openerp ALL = NOPASSWD : /usr/sbin/update-rc.d * openerp-* *
#
class server_manager(osv.osv):
_name = 'server.manager'
    def _get_path_server(self, cr, uid, ids, name, arg, context=None):
        res = dict.fromkeys(ids, False)
        server_path = self.pool.get('ir.config_parameter').get_param(cr, uid, "server_path", context=context)
        for line in self.browse(cr, uid, ids, context=context):
            res[line.id] = server_path + 'openerp-' + line.name
        return res
    def _get_path_server_configuration(self, cr, uid, ids, name, arg, context=None):
res = dict.fromkeys(ids, False)
conf_path = self.pool.get('ir.config_parameter').get_param(cr, uid, "conf_path", context=context)
for line in self.browse(cr, uid, ids, context=context):
res[line.id] = conf_path + 'openerp-server-' + line.name + '.conf'
return res
def _status_server(self, cr, uid, ids, name, arg=None, context=None):
if not len(ids):
return False
res = {}
for reg in self.browse(cr, uid, ids, context):
name = 'openerp-server-' + reg.name
service = "ps -ax | grep " + name +" | grep 'python' | awk '{ print $1}'"
proc = os.popen(service).read()
proc = proc.split()
proc = proc[:-1]
try:
num = len(proc)
pid = map(int, proc)
self.write(cr, uid, [reg.id], {'pid': pid})
res[reg.id] = num
except:
return res
return res
_columns = {
'name': fields.char('Name', size=50, required=True),
'conf': fields.char('Name file configuration', size=50, required=True),
'path_server': fields.function(_get_path_server, type='char', string='path server'),
'path_configuration': fields.function(_get_path_server_configuration, type='char', string='path server configuration'),
'debug_mode':fields.boolean('Debug mode'),
'db_name': fields.char('Database Name', size=100,required=True),
'db_filter':fields.char('Database Filter', size=100,required=True),
'list_db':fields.boolean('List database'),
'db_user':fields.char('Database User', size=100,required=True),
'db_password':fields.char('Database password', size=100,required=True),
        'netrpc_interface':fields.char('netrpc interface', size=20,required=True),
'xmlrpc_interface':fields.char('xmlrpc interface', size=20,required=True),
'xmlrpc_port':fields.char('xmlrpc port', size=5,required=True),
'static_http_document_root':fields.char('static http document root', size=200,required=True),
'state': fields.selection([('draft','Draft'), ('conf','Conf created'),('daemon','Daemon Created'), ('active','Active'), ('disable','Disable')], 'State', readonly=True, help="The state.", select=True),
'admin_passwd': fields.char('admin password', size=64, required=True),
'log':fields.char('log path', size=100, required=True),
'notes':fields.text('notes'),
'active_process':fields.function(_status_server, type='integer', string='active processes',store=False),
'pid':fields.text('pid list', readonly=True, store=False),
'autostart':fields.boolean('Autostart'),
}
_defaults = {
'db_filter': '.*',
'db_name': 'False',
'db_user': 'openerp',
'db_password': 'postgres',
'netrpc_interface':'localhost',
'xmlrpc_interface':'localhost',
'xmlrpc_port':'65450',
'static_http_document_root':'/var/www/',
'state': 'draft',
'log': '/var/log/openerp/openerp.log'
}
_sql_constraints = [
('name_uniq', 'unique(name)', 'Name must be unique per Company!'),
]
    def action_autostart(self, cr, uid, ids, context=None):
        obj = self.pool.get('server.manager')
        if not ids:
            return False
        for line in obj.browse(cr, uid, ids):
            if context['autostart']:
                service = 'sudo update-rc.d -f ' + 'openerp-' + line.name + ' defaults'
            else:
                service = 'sudo update-rc.d -f ' + 'openerp-' + line.name + ' remove'
            # register/deregister the init script for every selected server
            subprocess.call([service], shell=True)
        return True
def create_conf(self, cr, uid, ids, context=None):
currentPath = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
absfilePath = os.path.abspath(os.path.join(currentPath, 'templates/'))
lookup = TemplateLookup(directories=[absfilePath])
obj = self.pool.get('server.manager')
for line in obj.browse(cr, uid, ids):
template = Template("""<%include file="conf.mako"/>""", lookup=lookup)
templateRendered = template.render(admin_passwd=line.admin_passwd, \
db_name=line.db_name, \
db_password=line.db_password, \
db_user=line.db_user, \
db_filter=line.db_filter, \
debug_mode=line.debug_mode, \
list_db=line.list_db, \
log=line.log, \
netrpc_interface=line.netrpc_interface, \
static_http_document_root=line.static_http_document_root, \
xmlrpc_interface=line.xmlrpc_interface, \
xmlrpc_port=line.xmlrpc_port, \
)
virtualhostPath = line.path_configuration
f = open(virtualhostPath, 'w')
#os.chmod(virtualhostPath, 0755)
f.write(templateRendered)
f.close()
return True
def create_daemon(self, cr, uid, ids, context=None):
currentPath = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
absfilePath = os.path.abspath(os.path.join(currentPath, 'templates/'))
lookup = TemplateLookup(directories=[absfilePath])
obj = self.pool.get('server.manager')
for line in obj.browse(cr, uid, ids):
name = 'openerp-server-'+line.name
template = Template("""<%include file="daemon.mako"/>""", lookup=lookup)
templateRendered = template.render(
PIDFILE1='${PIDFILE}', \
CASE='${1}' ,\
DAEMON1='${DAEMON}', \
DAEMON_OPTS1='${DAEMON_OPTS}', \
NAME1='${NAME}', \
DESC1='${DESC}', \
USER1='${USER}', \
NAME=name, \
DESC=name, \
CONFIGFILE=line.path_configuration, \
USER=line.db_user, \
)
virtualhostPath = line.path_server
f = open(virtualhostPath, 'w')
os.chmod(virtualhostPath, 0755)
f.write(templateRendered)
f.close()
return True
def action_start_server(self, cr, uid, ids, context=None):
obj = self.pool.get('server.manager')
for line in obj.browse(cr, uid, ids):
service ='sudo service ' + 'openerp-'+line.name +' start'
proc = subprocess.call([service], shell=True)
#self.write(cr, uid, [line.id], {'notes':proc})
self.action_status_server( cr, uid, ids, context)
return True
def action_stop_server(self, cr, uid, ids, context=None):
try:
if context['name'] and context['name'] == cr.dbname:
return False
pids = self.action_status_server( cr, uid, ids, context)
for pid in pids:
os.kill(pid, signal.SIGKILL)
self.action_status_server( cr, uid, ids, context)
return True
except:
self.action_status_server( cr, uid, ids, context)
return False
def action_restart_server(self, cr, uid, ids, context=None):
self.action_stop_server(cr, uid, ids, context)
self.action_start_server(cr, uid, ids, context)
return True
def action_status_server(self, cr, uid, ids, context=None):
if not len(ids):
return False
for reg in self.browse(cr, uid, ids, context):
name = 'openerp-server-' + reg.name
service = "ps -ax | grep " + name +" | grep 'python' | awk '{ print $1}'"
proc = os.popen(service).read()
proc = proc.split()
proc = proc[:-1]
try:
num = len(proc)
pid = map(int, proc)
self.write(cr, uid, [reg.id], {'pid': pid,'active_process':num})
return pid
except:
return []
def action_workflow_draft(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, { 'state' : 'draft' }, context=context)
return True
def action_workflow_conf(self, cr, uid, ids, context=None):
self.create_conf( cr, uid, ids, context)
self.write(cr, uid, ids, { 'state' : 'conf' }, context=context)
return True
def action_workflow_daemon(self, cr, uid, ids, context=None):
self.create_daemon( cr, uid, ids, context)
self.write(cr, uid, ids, { 'state' : 'daemon' }, context=context)
return True
def action_workflow_active(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, { 'state' : 'active' }, context=context)
return True
def action_workflow_disable(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, { 'state' : 'disable' }, context=context)
return True
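# A sketch of the intended lifecycle, pieced together from the workflow
# methods above (state names come from the 'state' selection field):
#
#   draft -> conf (create_conf writes openerp-server-<name>.conf)
#         -> daemon (create_daemon writes the openerp-<name> init script)
#         -> active (start/stop via `sudo service openerp-<name> ...`)
#         -> disable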
| jmesteve/saas3 | openerp/addons_extra/server_manager/server_manager.py | Python | agpl-3.0 | 10,953 |
#!/usr/bin/python
# -- Content-Encoding: UTF-8 --
"""
Herald HTTP beans definition
:author: Thomas Calmant
:copyright: Copyright 2014, isandlaTech
:license: Apache License 2.0
:version: 0.0.3
:status: Alpha
..
Copyright 2014 isandlaTech
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Module version
__version_info__ = (0, 0, 3)
__version__ = ".".join(str(x) for x in __version_info__)
# Documentation strings format
__docformat__ = "restructuredtext en"
# ------------------------------------------------------------------------------
# Herald HTTP
from . import ACCESS_ID
# Standard library
import functools
# ------------------------------------------------------------------------------
@functools.total_ordering
class HTTPAccess(object):
"""
Description of an HTTP access
"""
def __init__(self, host, port, path):
"""
Sets up the access
:param host: HTTP server host
:param port: HTTP server port
:param path: Path to the Herald service
"""
# Normalize path
if path[0] == '/':
path = path[1:]
self.__host = host
self.__port = int(port)
self.__path = path
def __hash__(self):
"""
Hash is based on the access tuple
"""
return hash(self.access)
def __eq__(self, other):
"""
        Equality based on the access tuple
"""
if isinstance(other, HTTPAccess):
return self.access == other.access
return False
def __lt__(self, other):
"""
        Ordering based on the access tuple
"""
if isinstance(other, HTTPAccess):
return self.access < other.access
return False
def __str__(self):
"""
String representation
"""
return "http://{0}:{1}/{2}".format(self.__host, self.__port,
self.__path)
@property
def access_id(self):
"""
Retrieves the access ID associated to this kind of access
"""
return ACCESS_ID
@property
def access(self):
"""
Returns the access to the peer as a 3-tuple (host, port, path)
"""
return self.__host, self.__port, self.__path
@property
def address(self):
"""
Returns the address of the HTTP server to access the peer (host, port)
"""
return self.__host, self.__port
@property
def host(self):
"""
Retrieves the host address of the associated peer
"""
return self.__host
@property
def port(self):
"""
Retrieves the host port of the associated peer
"""
return self.__port
@property
def path(self):
"""
Retrieves the path to the Herald service
"""
return self.__path
def dump(self):
"""
Returns the content to store in a directory dump to describe this
access
"""
return self.access
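# A minimal usage sketch (host/port/path values are hypothetical):
#
#   access = HTTPAccess("192.168.0.10", 8080, "/herald")
#   access.access == ("192.168.0.10", 8080, "herald")   # leading '/' stripped
#   str(access) == "http://192.168.0.10:8080/herald"
#   access.dump() == access.access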
| librallu/cohorte-herald | python/herald/transports/http/beans.py | Python | apache-2.0 | 3,526 |
# -*- coding: utf-8 -*-
import random
# Animal data from Wikipedia circa Dec 2014.
animals = [
'Abditomys',
'Abeomelomys',
'Abrawayaomys',
'Abrocoma',
'Abrothrix',
'Acerodon',
'Acinonyx',
'Acomys',
'Aconaemys',
'Acrobates',
'Addax',
'Aegialomys',
'Aepeomys',
'Aepyceros',
'Aepyprymnus',
'Aeretes',
'Aeromys',
'Aethalops',
'Aethomys',
'Ailuropoda',
'Ailurops',
'Ailurus',
'Akodon',
'Alcelaphus',
'Alces',
'Alionycteris',
'Allactaga',
'Allactodipus',
'Allenopithecus',
'Allocebus',
'Allochrocebus',
'Allocricetulus',
'Alouatta',
'Alticola',
'Amblonyx',
'Amblysomus',
'Ametrida',
'Ammodillus',
'Ammodorcas',
'Ammospermophilus',
'Ammotragus',
'Amorphochilus',
'Amphinectomys',
'Anathana',
'Andalgalomys',
'Andinomys',
'Anisomys',
'Anomalurus',
'Anonymomys',
'Anotomys',
'Anoura',
'Anourosorex',
'Antechinomys',
'Antechinus',
'Anthops',
'Antidorcas',
'Antilocapra',
'Antrozous',
'Aonyx',
'Aplodontia',
'Apodemus',
'Apomys',
'Aproteles',
'Arabitragus',
'Arborimus',
'Archboldomys',
'Arctictis',
'Arctocebus',
'Arctocephalus',
'Arctogalidia',
'Arctonyx',
'Ardops',
'Arielulus',
'Ariteus',
'Artibeus',
'Arvicanthis',
'Arvicola',
'Asellia',
'Aselliscus',
'Atelerix',
'Ateles',
'Atelocynus',
'Atherurus',
'Atilax',
'Atlantoxerus',
'Auliscomys',
'Babyrousa',
'Baiomys',
'Baiyankamys',
'Balaena',
'Balaenoptera',
'Balantiopteryx',
'Balionycteris',
'Bandicota',
'Barbastella',
'Bassaricyon',
'Bassariscus',
'Bathyergus',
'Batomys',
'Bauerus',
'Bdeogale',
'Beamys',
'Beatragus',
'Belomys',
'Berardius',
'Berylmys',
'Bettongia',
'Bibimys',
'Biswamoyopterus',
'Blanfordimys',
'Blarina',
'Blarinella',
'Blarinomys',
'Blastocerus',
'Bos',
'Boselaphus',
'Brachiones',
'Brachylagus',
'Brachyphylla',
'Brachytarsomys',
'Brachyteles',
'Brachyuromys',
'Bradypus',
'Brucepattersonius',
'Bubalus',
'Budorcas',
'Bullimus',
'Bunolagus',
'Bunomys',
'Burramys',
'Cabassous',
'Cacajao',
'Caenolestes',
'Calcochloris',
'Callibella',
'Callicebus',
'Callimico',
'Callistomys',
'Callithrix',
'Callorhinus',
'Callosciurus',
'Calomys',
'Calomyscus',
'Caluromys',
'Caluromysiops',
'Calyptophractus',
'Canis',
'Cannomys',
'Cansumys',
'Caperea',
'Capreolus',
'Capricornis',
'Caprolagus',
'Capromys',
'Caracal',
'Cardiocranius',
'Cardioderma',
'Carollia',
'Carpitalpa',
'Carpomys',
'Carterodon',
'Caryomys',
'Casinycteris',
'Catagonus',
'Catopuma',
'Cavia',
'Cebuella',
'Cebus',
'Centronycteris',
'Cephalopachus',
'Cephalophus',
'Cephalorhynchus',
'Ceratotherium',
'Cercartetus',
'Cercocebus',
'Cercopithecus',
'Cerdocyon',
'Cerradomys',
'Cervus',
'Chacodelphys',
'Chaetocauda',
'Chaetodipus',
'Chaetomys',
'Chaetophractus',
'Chalinolobus',
'Cheirogaleus',
'Cheiromeles',
'Chelemys',
'Chibchanomys',
'Chilomys',
'Chilonatalus',
'Chimarrogale',
'Chinchilla',
'Chinchillula',
'Chionomys',
'Chiroderma',
'Chiromyscus',
'Chironax',
'Chironectes',
'Chiropodomys',
'Chiropotes',
'Chiruromys',
'Chlamyphorus',
'Chlorocebus',
'Chlorotalpa',
'Chodsigoa',
'Choeroniscus',
'Choeronycteris',
'Choeropsis',
'Choloepus',
'Chrotogale',
'Chrotomys',
'Chrotopterus',
'Chrysochloris',
'Chrysocyon',
'Chrysospalax',
'Cistugo',
'Civettictis',
'Cloeotis',
'Clyomys',
'Coccymys',
'Coelops',
'Coendou',
'Coleura',
'Colobus',
'Colomys',
'Condylura',
'Conepatus',
'Congosorex',
'Conilurus',
'Connochaetes',
'Cormura',
'Corynorhinus',
'Craseonycteris',
'Crateromys',
'Cratogeomys',
'Cremnomys',
'Cricetomys',
'Cricetulus',
'Cricetus',
'Crocidura',
'Crocuta',
'Crossarchus',
'Crossomys',
'Crunomys',
'Cryptochloris',
'Cryptomys',
'Cryptonanus',
'Cryptoprocta',
'Cryptotis',
'Ctenodactylus',
'Ctenomys',
'Cuon',
'Cuscomys',
'Cynictis',
'Cynocephalus',
'Cynogale',
'Cynomops',
'Cynomys',
'Cynopterus',
'Cyttarops',
'Dacnomys',
'Dactylomys',
'Dactylopsila',
'Damaliscus',
'Dasycercus',
'Dasykaluta',
'Dasymys',
'Dasyprocta',
'Dasypus',
'Dasyuroides',
'Dasyurus',
'Daubentonia',
'Delanymys',
'Delomys',
'Delphinapterus',
'Delphinus',
'Deltamys',
'Dendrogale',
'Dendrohyrax',
'Dendrolagus',
'Dendromus',
'Dendroprionomys',
'Deomys',
'Dephomys',
'Desmalopex',
'Desmana',
'Desmodilliscus',
'Desmodillus',
'Desmodus',
'Desmomys',
'Diaemus',
'Dicerorhinus',
'Diceros',
'Diclidurus',
'Dicrostonyx',
'Didelphis',
'Dinaromys',
'Dinomys',
'Diomys',
'Diphylla',
'Diplogale',
'Diplomesodon',
'Diplomys',
'Diplothrix',
'Dipodillus',
'Dipodomys',
'Dipus',
'Distoechurus',
'Dobsonia',
'Dolichotis',
'Dologale',
'Dorcatragus',
'Dorcopsis',
'Dorcopsulus',
'Dremomys',
'Dromiciops',
'Drymoreomys',
'Dryomys',
'Dugong',
'Dyacopterus',
'Dymecodon',
'Echimys',
'Echinoprocta',
'Echinosorex',
'Echiothrix',
'Echymipera',
'Ectophylla',
'Eidolon',
'Elaphodus',
'Elaphurus',
'Elephantulus',
'Elephas',
'Eligmodontia',
'Eliomys',
'Eliurus',
'Ellobius',
'Emballonura',
'Enchisthenes',
'Eoglaucomys',
'Eolagurus',
'Eonycteris',
'Eospalax',
'Eothenomys',
'Eozapus',
'Episoriculus',
'Epixerus',
'Epomophorus',
'Epomops',
'Eptesicus',
'Eremitalpa',
'Eremodipus',
'Eremoryzomys',
'Erethizon',
'Erignathus',
'Erinaceus',
'Eropeplus',
'Erophylla',
'Erythrocebus',
'Eschrichtius',
'Eubalaena',
'Euchoreutes',
'Euderma',
'Eudiscopus',
'Eudorcas',
'Eulemur',
'Eumetopias',
'Eumops',
'Euneomys',
'Euoticus',
'Eupetaurus',
'Euphractus',
'Eupleres',
'Euroscaptor',
'Euryoryzomys',
'Euryzygomatomys',
'Eutamias',
'Exilisciurus',
'Falsistrellus',
'Felis',
'Felovia',
'Feresa',
'Feroculus',
'Fukomys',
'Funambulus',
'Funisciurus',
'Furipterus',
'Galemys',
'Galenomys',
'Galeopterus',
'Galerella',
'Galictis',
'Galidia',
'Galidictis',
'Gazella',
'Genetta',
'Geocapromys',
'Geogale',
'Geomys',
'Georychus',
'Geoxus',
'Gerbilliscus',
'Gerbillurus',
'Gerbillus',
'Giraffa',
'Glaucomys',
'Glauconycteris',
'Glironia',
'Glirulus',
'Glischropus',
'Globicephala',
'Glossophaga',
'Glyphonycteris',
'Glyphotes',
'Golunda',
'Gorilla',
'Gracilinanus',
'Grammomys',
'Graomys',
'Graphiurus',
'Gulo',
'Gymnobelideus',
'Gymnuromys',
'Habromys',
'Hadromys',
'Haeromys',
'Halichoerus',
'Handleyomys',
'Hapalemur',
'Hapalomys',
'Haplonycteris',
'Harpiocephalus',
'Harpiola',
'Harpyionycteris',
'Heimyscus',
'Helarctos',
'Heliophobius',
'Heliosciurus',
'Helogale',
'Hemibelideus',
'Hemicentetes',
'Hemiechinus',
'Hemigalus',
'Hemitragus',
'Herpestes',
'Hesperoptenus',
'Heterocephalus',
'Heterohyrax',
'Heteromys',
'Hippocamelus',
'Hippopotamus',
'Hipposideros',
'Hippotragus',
'Histiotus',
'Histriophoca',
'Hodomys',
'Holochilus',
'Homo',
'Hoolock',
'Hoplomys',
'Hybomys',
'Hydrictis',
'Hydrochoerus',
'Hydromys',
'Hydropotes',
'Hydrurga',
'Hyemoschus',
'Hyladelphys',
'Hylaeamys',
'Hylobates',
'Hylochoerus',
'Hylomyscus',
'Hylonycteris',
'Hylopetes',
'Hyomys',
'Hyosciurus',
'Hyperacrius',
'Hyperoodon',
'Hypogeomys',
'Hypsignathus',
'Hypsiprymnodon',
'Hypsugo',
'Ictonyx',
'Idionycteris',
'Idiurus',
'Indopacetus',
'Indri',
'Inia',
'Iomys',
'Irenomys',
'Isoodon',
'Isothrix',
'Isthmomys',
'Juliomys',
'Juscelinomys',
'Kadarsanomys',
'Kannabateomys',
'Kerivoula',
'Kerodon',
'Kogia',
'Komodomys',
'Kunsia',
'La Plata Dolphin',
'Laephotis',
'Lagenodelphis',
'Lagenorhynchus',
'Lagidium',
'Lagorchestes',
'Lagostomus',
'Lagostrophus',
'Lagothrix',
'Lamottemys',
'Lampronycteris',
'Laonastes',
'Lariscus',
'Lasionycteris',
'Lasiopodomys',
'Lasiorhinus',
'Lasiurus',
'Latidens',
'Lavia',
'Leggadina',
'Leimacomys',
'Lemmiscus',
'Lemmus',
'Lemniscomys',
'Lemur',
'Lenomys',
'Lenothrix',
'Lenoxus',
'Leontopithecus',
'Leopardus',
'Leopoldamys',
'Lepilemur',
'Leporillus',
'Leptailurus',
'Leptomys',
'Leptonychotes',
'Leptonycteris',
'Lepus',
'Lestodelphys',
'Lestoros',
'Liberiictis',
'Lichonycteris',
'Limnogale',
'Limnomys',
'Liomys',
'Lionycteris',
'Lipotes',
'Lissodelphis',
'Lissonycteris',
'Litocranius',
'Lobodon',
'Lonchophylla',
'Lonchorhina',
'Lonchothrix',
'Lontra',
'Lophiomys',
'Lophocebus',
'Lophostoma',
'Lophuromys',
'Lorentzimys',
'Loxodonta',
'Loxodontomys',
'Lundomys',
'Lutra',
'Lutreolina',
'Lutrogale',
'Lycalopex',
'Lyncodon',
'Lynx',
'Macroderma',
'Macrogalidia',
'Macroglossus',
'Macrophyllum',
'Macropus',
'Macroscelides',
'Macrotarsomys',
'Macrotis',
'Macrotus',
'Macruromys',
'Madoqua',
'Madromys',
'Makalata',
'Malacomys',
'Mallomys',
'Mammelomys',
'Mandrillus',
'Manis',
'Margaretamys',
'Marmosa',
'Marmosops',
'Marmota',
'Martes',
'Massoutiera',
'Mastacomys',
'Mastomys',
'Maxomys',
'Megadendromus',
'Megaderma',
'Megadontomys',
'Megaerops',
'Megaloglossus',
'Megaptera',
'Megasorex',
'Melanomys',
'Melasmothrix',
'Mellivora',
'Melogale',
'Melomys',
'Melonycteris',
'Melursus',
'Menetes',
'Mesechinus',
'Mesembriomys',
'Mesocapromys',
'Mesocricetus',
'Mesomys',
'Mesophylla',
'Mesoplodon',
'Metachirus',
'Micaelamys',
'Microakodontomys',
'Microcavia',
'Microcebus',
'Microdillus',
'Microdipodops',
'Microgale',
'Microhydromys',
'Micromurexia',
'Micromys',
'Micronycteris',
'Microperoryctes',
'Micropotamogale',
'Micropteropus',
'Microryzomys',
'Microsciurus',
'Microtus',
'Millardia',
'Mimetillus',
'Mimon',
'Mindomys',
'Miniopterus',
'Miopithecus',
'Mirimiri',
'Mirounga',
'Mirzamys',
'Mogera',
'Molossops',
'Monachus',
'Monodelphis',
'Monodon',
'Monophyllus',
'Monticolomys',
'Mops',
'Mormoops',
'Mormopterus',
'Moschiola',
'Moschus',
'Mosia',
'Mungos',
'Mungotictis',
'Muntiacus',
'Murexechinus',
'Murexia',
'Muriculus',
'Murina',
'Muscardinus',
'Musonycteris',
'Musseromys',
'Mustela',
'Mydaus',
'Mylomys',
'Myocastor',
'Myodes',
'Myoictis',
'Myomimus',
'Myomyscus',
'Myonycteris',
'Myoprocta',
'Myopterus',
'Myopus',
'Myosciurus',
'Myosorex',
'Myospalax',
'Myotis',
'Myotomys',
'Myrmecobius',
'Myrmecophaga',
'Mysateles',
'Mystacina',
'Mystromys',
'Myzopoda',
'Naemorhedus',
'Nandinia',
'Nanger',
'Nannosciurus',
'Nanonycteris',
'Napaeozapus',
'Nasua',
'Nasuella',
'Natalus',
'Neacomys',
'Neamblysomus',
'Necromys',
'Nectogale',
'Nectomys',
'Neodon',
'Neofelis',
'Neofiber',
'Neohylomys',
'Neomys',
'Neonycteris',
'Neophascogale',
'Neophoca',
'Neophocaena',
'Neopteryx',
'Neoromicia',
'Neotamias',
'Neotetracus',
'Neotoma',
'Neotomodon',
'Neotomys',
'Neotragus',
'Neovison',
'Nephelomys',
'Nesokia',
'Nesolagus',
'Nesomys',
'Nesoromys',
'Nesoryzomys',
'Neurotrichus',
'Neusticomys',
'Nilgiritragus',
'Nilopegamys',
'Ningaui',
'Niumbaha',
'Niviventer',
'Noctilio',
'Nomascus',
'Notiomys',
'Notiosorex',
'Notocitellus',
'Notomys',
'Notopteris',
'Notoryctes',
'Nyala',
'Nyctalus',
'Nyctereutes',
'Nycteris',
'Nycticebus',
'Nycticeinops',
'Nycticeius',
'Nyctiellus',
'Nyctinomops',
'Nyctomys',
'Nyctophilus',
'Ochotona',
'Ochrotomys',
'Octodon',
'Octodontomys',
'Octomys',
'Odobenus',
'Odocoileus',
'Oecomys',
'Oenomys',
'Okapia',
'Olallamys',
'Oligoryzomys',
'Ommatophoca',
'Ondatra',
'Onychogalea',
'Onychomys',
'Orcaella',
'Orcinus',
'Oreamnos',
'Oreonax',
'Oreoryzomys',
'Oreotragus',
'Ornithorhynchus',
'Orthogeomys',
'Oryctolagus',
'Oryx',
'Oryzomys',
'Oryzorictes',
'Osgoodomys',
'Otaria',
'Otocyon',
'Otolemur',
'Otomops',
'Otomys',
'Otonycteris',
'Otonyctomys',
'Otopteropus',
'Otospermophilus',
'Ototylomys',
'Ourebia',
'Ovibos',
'Ovis',
'Oxymycterus',
'Ozotoceros',
'Pachyuromys',
'Pagophilus',
'Paguma',
'Palawanomys',
'Panthera',
'Pantholops',
'Papagomys',
'Papio',
'Pappogeomys',
'Paracoelops',
'Paracrocidura',
'Paracynictis',
'Paradipus',
'Paradoxurus',
'Paraechinus',
'Parahydromys',
'Paraleptomys',
'Paralomys',
'Paramelomys',
'Paramurexia',
'Parantechinus',
'Paranyctimene',
'Parascalops',
'Parascaptor',
'Parastrellus',
'Paratriaenops',
'Paraxerus',
'Pardofelis',
'Parotomys',
'Paruromys',
'Pattonomys',
'Paucidentomys',
'Paulamys',
'Pearsonomys',
'Pecari',
'Pectinator',
'Pedetes',
'Pelomys',
'Pentalagus',
'Penthetor',
'Peponocephala',
'Perameles',
'Perimyotis',
'Perodicticus',
'Perognathus',
'Peromyscus',
'Peropteryx',
'Peroryctes',
'Petaurillus',
'Petaurista',
'Petauroides',
'Petaurus',
'Petinomys',
'Petrodromus',
'Petrogale',
'Petromus',
'Petromyscus',
'Petropseudes',
'Phacochoerus',
'Phaenomys',
'Phaiomys',
'Phalanger',
'Phaner',
'Pharotis',
'Phascogale',
'Phascolarctos',
'Phascolosorex',
'Phascomurexia',
'Phenacomys',
'Philantomba',
'Phloeomys',
'Phoca',
'Phocarctos',
'Phocoena',
'Phocoenoides',
'Phodopus',
'Phoniscus',
'Phylloderma',
'Phyllomys',
'Phyllonycteris',
'Phyllops',
'Phyllostomus',
'Phyllotis',
'Physeter',
'Piliocolobus',
'Pipanacoctomys',
'Pipistrellus',
'Pithecheir',
'Pithecheirops',
'Pithecia',
'Plagiodontia',
'Planigale',
'Platacanthomys',
'Platalina',
'Platanista',
'Platymops',
'Platyrrhinus',
'Plecotus',
'Plerotes',
'Podogymnura',
'Podomys',
'Podoxymys',
'Poecilogale',
'Poelagus',
'Poephagus',
'Pogonomelomys',
'Pogonomys',
'Poliocitellus',
'Porcula',
'Potamochoerus',
'Potamogale',
'Potorous',
'Potos',
'Praomys',
'Presbytis',
'Priodontes',
'Prionailurus',
'Prionodon',
'Prionomys',
'Procapra',
'Procavia',
'Procolobus',
'Proechimys',
'Proedromys',
'Profelis',
'Prolemur',
'Prometheomys',
'Promops',
'Pronolagus',
'Propithecus',
'Prosciurillus',
'Proteles',
'Protochromys',
'Protoxerus',
'Psammomys',
'Pseudantechinus',
'Pseudocheirus',
'Pseudochirops',
'Pseudochirulus',
'Pseudohydromys',
'Pseudois',
'Pseudomys',
'Pseudopotto',
'Pseudorca',
'Pseudoryx',
'Pseudoryzomys',
'Ptenochirus',
'Pteralopex',
'Pteromys',
'Pteromyscus',
'Pteronotus',
'Pteronura',
'Pteropus',
'Ptilocercus',
'Punomys',
'Pusa',
'Pygathrix',
'Pygeretmus',
'Pygoderma',
'Raphicerus',
'Rattus',
'Ratufa',
'Redunca',
'Reithrodon',
'Reithrodontomys',
'Rhabdomys',
'Rhagomys',
'Rheithrosciurus',
'Rheomys',
'Rhinoceros',
'Rhinolophus',
'Rhinonicteris',
'Rhinophylla',
'Rhinopithecus',
'Rhinopoma',
'Rhinosciurus',
'Rhipidomys',
'Rhizomys',
'Rhogeessa',
'Rhombomys',
'Rhynchocyon',
'Rhynchogale',
'Rhyncholestes',
'Rhynchomeles',
'Rhynchomys',
'Rhynchonycteris',
'Romerolagus',
'Rousettus',
'Rubrisciurus',
'Rungwecebus',
'Rupicapra',
'Ruwenzorisorex',
'Saccolaimus',
'Saccopteryx',
'Saccostomus',
'Saguinus',
'Saiga',
'Saimiri',
'Salanoia',
'Salinoctomys',
'Salinomys',
'Salpingotulus',
'Salpingotus',
'Sarcophilus',
'Sauromys',
'Saxatilomys',
'Scalopus',
'Scapanulus',
'Scapanus',
'Scapteromys',
'Scaptochirus',
'Scaptonyx',
'Sciurillus',
'Sciurotamias',
'Sciurus',
'Scleronycteris',
'Scolomys',
'Scoteanax',
'Scotinomys',
'Scotoecus',
'Scotomanes',
'Scotonycteris',
'Scotophilus',
'Scotorepens',
'Scotozous',
'Scuirocheirus',
'Scutisorex',
'Sekeetamys',
'Selevinia',
'Semnopithecus',
'Setifer',
'Setonix',
'Sicista',
'Sigmodon',
'Sigmodontomys',
'Simias',
'Sminthopsis',
'Solenodon',
'Solisorex',
'Solomys',
'Sommeromys',
'Sooretamys',
'Sorex',
'Soriculus',
'Sotalia',
'Spalacopus',
'Spalax',
'Speothos',
'Spermophilopsis',
'Spermophilus',
'Sphaerias',
'Sphaeronycteris',
'Sphiggurus',
'Spilocuscus',
'Spilogale',
'Srilankamys',
'Steatomys',
'Stenella',
'Stenocephalemys',
'Stenoderma',
'Stochomys',
'Strigocuscus',
'Sturnira',
'Styloctenium',
'Stylodipus',
'Suncus',
'Sundamys',
'Sundasciurus',
'Surdisorex',
'Syconycteris',
'Sylvicapra',
'Sylvilagus',
'Sylvisorex',
'Symphalangus',
'Synaptomys',
'Syncerus',
'Syntheosciurus',
'Tachyglossus',
'Tachyoryctes',
'Tadarida',
'Taeromys',
'Tamias',
'Tamiasciurus',
'Tamiops',
'Tapecomys',
'Taphozous',
'Tapirus',
'Tarsipes',
'Tarsius',
'Tarsomys',
'Tasmacetus',
'Tateomys',
'Tatera',
'Taterillus',
'Taurotragus',
'Taxidea',
'Tayassu',
'Tetracerus',
'Thallomys',
'Thalpomys',
'Thamnomys',
'Thaptomys',
'Theropithecus',
'Thomasomys',
'Thomomys',
'Thoopterus',
'Thrichomys',
'Thryonomys',
'Thylamys',
'Thylogale',
'Thyroptera',
'Tlacuatzin',
'Tokudaia',
'Tolypeutes',
'Tomopeas',
'Tonatia',
'Tonkinomys',
'Toromys',
'Trachops',
'Trachypithecus',
'Tragelaphus',
'Tragulus',
'Transandinomys',
'Tremarctos',
'Triaenops',
'Trichechus',
'Trichosurus',
'Trichys',
'Trinomys',
'Trinycteris',
'Trogopterus',
'Tryphomys',
'Tscherskia',
'Tursiops',
'Tylomys',
'Tylonycteris',
'Tympanoctomys',
'Typhlomys',
'Uranomys',
'Urocyon',
'Uroderma',
'Urogale',
'Uromys',
'Uropsilus',
'Urotrichus',
'Vampyressa',
'Vampyrodes',
'Vampyrum',
'Vandeleuria',
'Varecia',
'Vernaya',
'Vespadelus',
'Vespertilio',
'Vicugna',
'Viverra',
'Viverricula',
'Voalavo',
'Volemys',
'Vombatus',
'Vormela',
'Vulpes',
'Waiomys',
'Wallabia',
'Wiedomys',
'Wilfredomys',
'Wrinkle-faced Bat',
'Wyulda',
'Xenogale',
'Xenomys',
'Xenuromys',
'Xeromys',
'Xeronycteris',
'Xerospermophilus',
'Xerus',
'Zaedyus',
'Zaglossus',
'Zalophus',
'Zapus',
'Zelotomys',
'Ziphius',
'Zygodontomys',
'Zygogeomys',
'Zyzomys',
]
first_names = [
'Abigail',
'Alexander',
'Alexis',
'Amanda',
'Amy',
'Andrew',
'Angela',
'Ashley',
'Ava',
'Barbara',
'Betty',
'Brittany',
'Carol',
'Charles',
'Christopher',
'Daniel',
'David',
'Deborah',
'Debra',
'Donna',
'Dorothy',
'Emily',
'Emma',
'Ethan',
'Hannah',
'Heather',
'Helen',
'Isabella',
'Jacob',
'James',
'Jason',
'Jayden',
'Jennifer',
'Jessica',
'Joan',
'John',
'Joseph',
'Joshua',
'Judith',
'Karen',
'Kimberly',
'Liam',
'Linda',
'Lisa',
'Madison',
'Margaret',
'Mary',
'Mason',
'Matthew',
'Melissa',
'Michael',
'Michelle',
'Nicholas',
'Noah',
'Olivia',
'Patricia',
'Richard',
'Robert',
'Ruth',
'Samantha',
'Sandra',
'Sarah',
'Shirley',
'Sophia',
'Susan',
'Tyler',
'William',
]
last_names = [
'Abbott',
'Acevedo',
'Acosta',
'Adams',
'Adkins',
'Aguilar',
'Aguirre',
'Albert',
'Alexander',
'Alford',
'Allen',
'Allison',
'Alston',
'Alvarado',
'Alvarez',
'Anderson',
'Andrews',
'Anthony',
'Armstrong',
'Arnold',
'Ashley',
'Atkins',
'Atkinson',
'Austin',
'Avery',
'Avila',
'Ayala',
'Ayers',
'Bailey',
'Baird',
'Baker',
'Baldwin',
'Ball',
'Ballard',
'Banks',
'Barber',
'Barker',
'Barlow',
'Barnes',
'Barnett',
'Barr',
'Barrera',
'Barrett',
'Barron',
'Barry',
'Bartlett',
'Barton',
'Bass',
'Bates',
'Battle',
'Bauer',
'Baxter',
'Beach',
'Bean',
'Beard',
'Beasley',
'Beck',
'Becker',
'Bell',
'Bender',
'Benjamin',
'Bennett',
'Benson',
'Bentley',
'Benton',
'Berg',
'Berger',
'Bernard',
'Berry',
'Best',
'Bird',
'Bishop',
'Black',
'Blackburn',
'Blackwell',
'Blair',
'Blake',
'Blanchard',
'Blankenship',
'Blevins',
'Bolton',
'Bond',
'Bonner',
'Booker',
'Boone',
'Booth',
'Bowen',
'Bowers',
'Bowman',
'Boyd',
'Boyer',
'Boyle',
'Bradford',
'Bradley',
'Bradshaw',
'Brady',
'Branch',
'Bray',
'Brennan',
'Brewer',
'Bridges',
'Briggs',
'Bright',
'Britt',
'Brock',
'Brooks',
'Brown',
'Browning',
'Bruce',
'Bryan',
'Bryant',
'Buchanan',
'Buck',
'Buckley',
'Buckner',
'Bullock',
'Burch',
'Burgess',
'Burke',
'Burks',
'Burnett',
'Burns',
'Burris',
'Burt',
'Burton',
'Bush',
'Butler',
'Byers',
'Byrd',
'Cabrera',
'Cain',
'Calderon',
'Caldwell',
'Calhoun',
'Callahan',
'Camacho',
'Cameron',
'Campbell',
'Campos',
'Cannon',
'Cantrell',
'Cantu',
'Cardenas',
'Carey',
'Carlson',
'Carney',
'Carpenter',
'Carr',
'Carrillo',
'Carroll',
'Carson',
'Carter',
'Carver',
'Case',
'Casey',
'Cash',
'Castaneda',
'Castillo',
'Castro',
'Cervantes',
'Chambers',
'Chan',
'Chandler',
'Chaney',
'Chang',
'Chapman',
'Charles',
'Chase',
'Chavez',
'Chen',
'Cherry',
'Christensen',
'Christian',
'Church',
'Clark',
'Clarke',
'Clay',
'Clayton',
'Clements',
'Clemons',
'Cleveland',
'Cline',
'Cobb',
'Cochran',
'Coffey',
'Cohen',
'Cole',
'Coleman',
'Collier',
'Collins',
'Colon',
'Combs',
'Compton',
'Conley',
'Conner',
'Conrad',
'Contreras',
'Conway',
'Cook',
'Cooke',
'Cooley',
'Cooper',
'Copeland',
'Cortez',
'Cote',
'Cotton',
'Cox',
'Craft',
'Craig',
'Crane',
'Crawford',
'Crosby',
'Cross',
'Cruz',
'Cummings',
'Cunningham',
'Curry',
'Curtis',
'Dale',
'Dalton',
'Daniel',
'Daniels',
'Daugherty',
'Davenport',
'David',
'Davidson',
'Davis',
'Dawson',
'Day',
'Dean',
'Decker',
'Dejesus',
'Delacruz',
'Delaney',
'Deleon',
'Delgado',
'Dennis',
'Diaz',
'Dickerson',
'Dickson',
'Dillard',
'Dillon',
'Dixon',
'Dodson',
'Dominguez',
'Donaldson',
'Donovan',
'Dorsey',
'Dotson',
'Douglas',
'Downs',
'Doyle',
'Drake',
'Dudley',
'Duffy',
'Duke',
'Duncan',
'Dunlap',
'Dunn',
'Duran',
'Durham',
'Dyer',
'Eaton',
'Edwards',
'Elliott',
'Ellis',
'Ellison',
'Emerson',
'England',
'English',
'Erickson',
'Espinoza',
'Estes',
'Estrada',
'Evans',
'Everett',
'Ewing',
'Farley',
'Farmer',
'Farrell',
'Faulkner',
'Ferguson',
'Fernandez',
'Ferrell',
'Fields',
'Figueroa',
'Finch',
'Finley',
'Fischer',
'Fisher',
'Fitzgerald',
'Fitzpatrick',
'Fleming',
'Fletcher',
'Flores',
'Flowers',
'Floyd',
'Flynn',
'Foley',
'Forbes',
'Ford',
'Foreman',
'Foster',
'Fowler',
'Fox',
'Francis',
'Franco',
'Frank',
'Franklin',
'Franks',
'Frazier',
'Frederick',
'Freeman',
'French',
'Frost',
'Fry',
'Frye',
'Fuentes',
'Fuller',
'Fulton',
'Gaines',
'Gallagher',
'Gallegos',
'Galloway',
'Gamble',
'Garcia',
'Gardner',
'Garner',
'Garrett',
'Garrison',
'Garza',
'Gates',
'Gay',
'Gentry',
'George',
'Gibbs',
'Gibson',
'Gilbert',
'Giles',
'Gill',
'Gillespie',
'Gilliam',
'Gilmore',
'Glass',
'Glenn',
'Glover',
'Goff',
'Golden',
'Gomez',
'Gonzales',
'Gonzalez',
'Good',
'Goodman',
'Goodwin',
'Gordon',
'Gould',
'Graham',
'Grant',
'Graves',
'Gray',
'Green',
'Greene',
'Greer',
'Gregory',
'Griffin',
'Griffith',
'Grimes',
'Gross',
'Guerra',
'Guerrero',
'Guthrie',
'Gutierrez',
'Guy',
'Guzman',
'Hahn',
'Hale',
'Haley',
'Hall',
'Hamilton',
'Hammond',
'Hampton',
'Hancock',
'Haney',
'Hansen',
'Hanson',
'Hardin',
'Harding',
'Hardy',
'Harmon',
'Harper',
'Harrell',
'Harrington',
'Harris',
'Harrison',
'Hart',
'Hartman',
'Harvey',
'Hatfield',
'Hawkins',
'Hayden',
'Hayes',
'Haynes',
'Hays',
'Head',
'Heath',
'Hebert',
'Henderson',
'Hendricks',
'Hendrix',
'Henry',
'Hensley',
'Henson',
'Herman',
'Hernandez',
'Herrera',
'Herring',
'Hess',
'Hester',
'Hewitt',
'Hickman',
'Hicks',
'Higgins',
'Hill',
'Hines',
'Hinton',
'Hobbs',
'Hodge',
'Hodges',
'Hoffman',
'Hogan',
'Holcomb',
'Holden',
'Holder',
'Holland',
'Holloway',
'Holman',
'Holmes',
'Holt',
'Hood',
'Hooper',
'Hoover',
'Hopkins',
'Hopper',
'Horn',
'Horne',
'Horton',
'House',
'Houston',
'Howard',
'Howe',
'Howell',
'Hubbard',
'Huber',
'Hudson',
'Huff',
'Huffman',
'Hughes',
'Hull',
'Humphrey',
'Hunt',
'Hunter',
'Hurley',
'Hurst',
'Hutchinson',
'Hyde',
'Ingram',
'Irwin',
'Jackson',
'Jacobs',
'Jacobson',
'James',
'Jarvis',
'Jefferson',
'Jenkins',
'Jennings',
'Jensen',
'Jimenez',
'Johns',
'Johnson',
'Johnston',
'Jones',
'Jordan',
'Joseph',
'Joyce',
'Joyner',
'Juarez',
'Justice',
'Kane',
'Kaufman',
'Keith',
'Keller',
'Kelley',
'Kelly',
'Kemp',
'Kennedy',
'Kent',
'Kerr',
'Key',
'Kidd',
'Kim',
'King',
'Kinney',
'Kirby',
'Kirk',
'Kirkland',
'Klein',
'Kline',
'Knapp',
'Knight',
'Knowles',
'Knox',
'Koch',
'Kramer',
'Lamb',
'Lambert',
'Lancaster',
'Landry',
'Lane',
'Lang',
'Langley',
'Lara',
'Larsen',
'Larson',
'Lawrence',
'Lawson',
'Le',
'Leach',
'Leblanc',
'Lee',
'Leon',
'Leonard',
'Lester',
'Levine',
'Levy',
'Lewis',
'Lindsay',
'Lindsey',
'Little',
'Livingston',
'Lloyd',
'Logan',
'Long',
'Lopez',
'Lott',
'Love',
'Lowe',
'Lowery',
'Lucas',
'Luna',
'Lynch',
'Lynn',
'Lyons',
'Macdonald',
'Macias',
'Mack',
'Madden',
'Maddox',
'Maldonado',
'Malone',
'Mann',
'Manning',
'Marks',
'Marquez',
'Marsh',
'Marshall',
'Martin',
'Martinez',
'Mason',
'Massey',
'Mathews',
'Mathis',
'Matthews',
'Maxwell',
'May',
'Mayer',
'Maynard',
'Mayo',
'Mays',
'Mcbride',
'Mccall',
'Mccarthy',
'Mccarty',
'Mcclain',
'Mcclure',
'Mcconnell',
'Mccormick',
'Mccoy',
'Mccray',
'Mccullough',
'Mcdaniel',
'Mcdonald',
'Mcdowell',
'Mcfadden',
'Mcfarland',
'Mcgee',
'Mcgowan',
'Mcguire',
'Mcintosh',
'Mcintyre',
'Mckay',
'Mckee',
'Mckenzie',
'Mckinney',
'Mcknight',
'Mclaughlin',
'Mclean',
'Mcleod',
'Mcmahon',
'Mcmillan',
'Mcneil',
'Mcpherson',
'Meadows',
'Medina',
'Mejia',
'Melendez',
'Melton',
'Mendez',
'Mendoza',
'Mercado',
'Mercer',
'Merrill',
'Merritt',
'Meyer',
'Meyers',
'Michael',
'Middleton',
'Miles',
'Miller',
'Mills',
'Miranda',
'Mitchell',
'Molina',
'Monroe',
'Montgomery',
'Montoya',
'Moody',
'Moon',
'Mooney',
'Moore',
'Morales',
'Moran',
'Moreno',
'Morgan',
'Morin',
'Morris',
'Morrison',
'Morrow',
'Morse',
'Morton',
'Moses',
'Mosley',
'Moss',
'Mueller',
'Mullen',
'Mullins',
'Munoz',
'Murphy',
'Murray',
'Myers',
'Nash',
'Navarro',
'Neal',
'Nelson',
'Newman',
'Newton',
'Nguyen',
'Nichols',
'Nicholson',
'Nielsen',
'Nieves',
'Nixon',
'Noble',
'Noel',
'Nolan',
'Norman',
'Norris',
'Norton',
'Nunez',
'Obrien',
'Ochoa',
'Oconnor',
'Odom',
'Odonnell',
'Oliver',
'Olsen',
'Olson',
'Oneal',
'Oneil',
'Oneill',
'Orr',
'Ortega',
'Ortiz',
'Osborn',
'Osborne',
'Owen',
'Owens',
'Pace',
'Pacheco',
'Padilla',
'Page',
'Palmer',
'Park',
'Parker',
'Parks',
'Parrish',
'Parsons',
'Pate',
'Patel',
'Patrick',
'Patterson',
'Patton',
'Paul',
'Payne',
'Pearson',
'Peck',
'Pena',
'Pennington',
'Perez',
'Perkins',
'Perry',
'Peters',
'Petersen',
'Peterson',
'Petty',
'Phelps',
'Phillips',
'Pickett',
'Pierce',
'Pittman',
'Pitts',
'Pollard',
'Poole',
'Pope',
'Porter',
'Potter',
'Potts',
'Powell',
'Powers',
'Pratt',
'Preston',
'Price',
'Prince',
'Pruitt',
'Puckett',
'Pugh',
'Quinn',
'Ramirez',
'Ramos',
'Ramsey',
'Randall',
'Randolph',
'Rasmussen',
'Ratliff',
'Ray',
'Raymond',
'Reed',
'Reese',
'Reeves',
'Reid',
'Reilly',
'Reyes',
'Reynolds',
'Rhodes',
'Rice',
'Rich',
'Richard',
'Richards',
'Richardson',
'Richmond',
'Riddle',
'Riggs',
'Riley',
'Rios',
'Rivas',
'Rivera',
'Rivers',
'Roach',
'Robbins',
'Roberson',
'Roberts',
'Robertson',
'Robinson',
'Robles',
'Rocha',
'Rodgers',
'Rodriguez',
'Rodriquez',
'Rogers',
'Rojas',
'Rollins',
'Roman',
'Romero',
'Rosa',
'Rosales',
'Rosario',
'Rose',
'Ross',
'Roth',
'Rowe',
'Rowland',
'Roy',
'Ruiz',
'Rush',
'Russell',
'Russo',
'Rutledge',
'Ryan',
'Salas',
'Salazar',
'Salinas',
'Sampson',
'Sanchez',
'Sanders',
'Sandoval',
'Sanford',
'Santana',
'Santiago',
'Santos',
'Sargent',
'Saunders',
'Savage',
'Sawyer',
'Schmidt',
'Schneider',
'Schroeder',
'Schultz',
'Schwartz',
'Scott',
'Sears',
'Sellers',
'Serrano',
'Sexton',
'Shaffer',
'Shannon',
'Sharp',
'Sharpe',
'Shaw',
'Shelton',
'Shepard',
'Shepherd',
'Sheppard',
'Sherman',
'Shields',
'Short',
'Silva',
'Simmons',
'Simon',
'Simpson',
'Sims',
'Singleton',
'Skinner',
'Slater',
'Sloan',
'Small',
'Smith',
'Snider',
'Snow',
'Snyder',
'Solis',
'Solomon',
'Sosa',
'Soto',
'Sparks',
'Spears',
'Spence',
'Spencer',
'Stafford',
'Stanley',
'Stanton',
'Stark',
'Steele',
'Stein',
'Stephens',
'Stephenson',
'Stevens',
'Stevenson',
'Stewart',
'Stokes',
'Stone',
'Stout',
'Strickland',
'Strong',
'Stuart',
'Suarez',
'Sullivan',
'Summers',
'Sutton',
'Swanson',
'Sweeney',
'Sweet',
'Sykes',
'Talley',
'Tanner',
'Tate',
'Taylor',
'Terrell',
'Terry',
'Thomas',
'Thompson',
'Thornton',
'Tillman',
'Todd',
'Torres',
'Townsend',
'Tran',
'Travis',
'Trevino',
'Trujillo',
'Tucker',
'Turner',
'Tyler',
'Tyson',
'Underwood',
'Valdez',
'Valencia',
'Valentine',
'Valenzuela',
'Vance',
'Vang',
'Vargas',
'Vasquez',
'Vaughan',
'Vaughn',
'Vazquez',
'Vega',
'Velasquez',
'Velazquez',
'Velez',
'Villarreal',
'Vincent',
'Vinson',
'Wade',
'Wagner',
'Walker',
'Wall',
'Wallace',
'Waller',
'Walls',
'Walsh',
'Walter',
'Walters',
'Walton',
'Ward',
'Ware',
'Warner',
'Warren',
'Washington',
'Waters',
'Watkins',
'Watson',
'Watts',
'Weaver',
'Webb',
'Weber',
'Webster',
'Weeks',
'Weiss',
'Welch',
'Wells',
'West',
'Wheeler',
'Whitaker',
'White',
'Whitehead',
'Whitfield',
'Whitley',
'Whitney',
'Wiggins',
'Wilcox',
'Wilder',
'Wiley',
'Wilkerson',
'Wilkins',
'Wilkinson',
'William',
'Williams',
'Williamson',
'Willis',
'Wilson',
'Winters',
'Wise',
'Witt',
'Wolf',
'Wolfe',
'Wong',
'Wood',
'Woodard',
'Woods',
'Woodward',
'Wooten',
'Workman',
'Wright',
'Wyatt',
'Wynn',
'Yang',
'Yates',
'York',
'Young',
'Zamora',
'Zimmerman',
]
chinese_last_names = [
'李',
'王',
'张',
'刘',
'陈',
'杨',
'赵',
'黄',
'周',
'吴',
'徐',
'孙',
'胡',
'朱',
'高',
'林',
'何',
'郭',
'马',
'罗',
'梁',
'宋',
'郑',
'谢',
'韩',
'唐',
'冯',
'于',
'董',
'萧',
'程',
'曹',
'袁',
'邓',
'许',
'傅',
'沈',
'曾',
'彭',
'吕',
'苏',
'卢',
'蒋',
'蔡',
'贾',
'丁',
'魏',
'薛',
'叶',
'阎',
'余',
'潘',
'杜',
'戴',
'夏',
'钟',
'汪',
'田',
'任',
'姜',
'范',
'方',
'石',
'姚',
'谭',
'盛',
'邹',
'熊',
'金',
'陆',
'郝',
'孔',
'白',
'崔',
'康',
'毛',
'邱',
'秦',
'江',
'史',
'顾',
'侯',
'邵',
'孟',
'龙',
'万',
'段',
'章',
'钱',
'汤',
'尹',
'黎',
'易',
'常',
'武',
'乔',
'贺',
'赖',
'龚',
'文',
]
chinese_first_names = [
'佩',
'佳',
'俊',
'偉',
'冠',
'凱',
'君',
'哲',
'嘉',
'如',
'婷',
'子',
'宇',
'安',
'宏',
'宜',
'家',
'庭',
'廷',
'建',
'彥',
'志',
'怡',
'惠',
'慧',
'文',
'明',
'智',
'柏',
'欣',
'涵',
'淑',
'玉',
'玲',
'瑋',
'瑜',
'秀',
'穎',
'維',
'美',
'翔',
'育',
'芳',
'華',
'萱',
'豪',
'軒',
'銘',
'雅',
'雯',
]
def animal():
return random.choice(animals)
def english_name():
return "{} {}".format(random.choice(first_names), random.choice(last_names))
def chinese_name():
return "{} {}".format(random.choice(chinese_first_names), random.choice(chinese_last_names))
| jachin/randomium | randomium/__init__.py | Python | mit | 37,956 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from mock import MagicMock
from django.conf import settings
from draalcore.auth.templatetags.tags import social_auth
from draalcore.test_utils.basetest import BaseTest
class TemplateTagsTestCase(BaseTest):
def test_social_auth(self):
"""Test user's social auth status"""
# No social auth in use
user = MagicMock(password='pw')
self.assertTrue(social_auth(user))
# Social auth in use
user.password = settings.SOCIAL_AUTH_USER_PASSWORD
self.assertFalse(social_auth(user))
| jojanper/draalcore | draalcore/auth/tests/test_templatetags.py | Python | mit | 582 |
#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2021 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This file is part of Psi4.
#
# Psi4 is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, version 3.
#
# Psi4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with Psi4; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
"""
| Database (Hobza) of interaction energies for bimolecular complexes.
| Geometries from <Reference>.
| Reference interaction energies from Rezac and Hobza, JCTC (in press).
- **cp** ``'off'`` || ``'on'``
- **rlxd** ``'off'`` || ``'on'``
- **benchmark**
- ``'<benchmark_name>'`` <Reference>.
- |dl| ``'<default_benchmark_name>'`` |dr| <Reference>.
- **subset**
- ``'small'`` <members_description>
- ``'large'`` <members_description>
- ``'<subset>'`` <members_description>
"""
import re
import qcdb
# <<< A24 Database Module >>>
dbse = 'A24'
# <<< Database Members >>>
HRXN = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24]
HRXN_SM = []
HRXN_LG = []
# <<< Chemical Systems Involved >>>
RXNM = {} # reaction matrix of reagent contributions per reaction
ACTV = {} # order of active reagents per reaction
ACTV_CP = {} # order of active reagents per counterpoise-corrected reaction
ACTV_SA = {} # order of active reagents for non-supermolecular calculations
for rxn in HRXN:
RXNM[ '%s-%s' % (dbse, rxn)] = {'%s-%s-dimer' % (dbse, rxn) : +1,
'%s-%s-monoA-CP' % (dbse, rxn) : -1,
'%s-%s-monoB-CP' % (dbse, rxn) : -1,
'%s-%s-monoA-unCP' % (dbse, rxn) : -1,
'%s-%s-monoB-unCP' % (dbse, rxn) : -1 }
ACTV_SA['%s-%s' % (dbse, rxn)] = ['%s-%s-dimer' % (dbse, rxn) ]
ACTV_CP['%s-%s' % (dbse, rxn)] = ['%s-%s-dimer' % (dbse, rxn),
'%s-%s-monoA-CP' % (dbse, rxn),
'%s-%s-monoB-CP' % (dbse, rxn) ]
ACTV[ '%s-%s' % (dbse, rxn)] = ['%s-%s-dimer' % (dbse, rxn),
'%s-%s-monoA-unCP' % (dbse, rxn),
'%s-%s-monoB-unCP' % (dbse, rxn) ]
ACTV['%s-%s' % (dbse, rxn)] = ['%s-%s-dimer' % (dbse, rxn) ]
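# NOTE: the assignment above intentionally overwrites the three-reagent ACTV
# definition, leaving only the dimer active for each reaction -- presumably
# what distinguishes this alternate ("alt") version of the A24 database.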
# <<< Reference Values [kcal/mol] from Rezac and Hobza dx.doi.org/10.1021/ct400057w >>>
BIND = {}
BIND['%s-%s' % (dbse, 1 )] = -6.524
BIND['%s-%s' % (dbse, 2 )] = -5.014
BIND['%s-%s' % (dbse, 3 )] = -4.749
BIND['%s-%s' % (dbse, 4 )] = -4.572
BIND['%s-%s' % (dbse, 5 )] = -3.157
BIND['%s-%s' % (dbse, 6 )] = -1.679
BIND['%s-%s' % (dbse, 7 )] = -0.779
BIND['%s-%s' % (dbse, 8 )] = -0.672
BIND['%s-%s' % (dbse, 9 )] = -4.474
BIND['%s-%s' % (dbse, 10 )] = -2.578
BIND['%s-%s' % (dbse, 11 )] = -1.629
BIND['%s-%s' % (dbse, 12 )] = -1.537
BIND['%s-%s' % (dbse, 13 )] = -1.389
BIND['%s-%s' % (dbse, 14 )] = -1.110
BIND['%s-%s' % (dbse, 15 )] = -0.514
BIND['%s-%s' % (dbse, 16 )] = -1.518
BIND['%s-%s' % (dbse, 17 )] = -0.837
BIND['%s-%s' % (dbse, 18 )] = -0.615
BIND['%s-%s' % (dbse, 19 )] = -0.538
BIND['%s-%s' % (dbse, 20 )] = -0.408
BIND['%s-%s' % (dbse, 21 )] = -0.370
BIND['%s-%s' % (dbse, 22 )] = 0.784
BIND['%s-%s' % (dbse, 23 )] = 0.897
BIND['%s-%s' % (dbse, 24 )] = 1.075
# <<< Comment Lines >>>
TAGL = {}
TAGL['%s-%s' % (dbse, 1)] = """ water_ammonia_Cs """
TAGL['%s-%s-dimer' % (dbse, 1)] = """Dimer from water_ammonia_Cs """
TAGL['%s-%s-monoA-CP' % (dbse, 1)] = """Monomer A water_ammonia_Cs """
TAGL['%s-%s-monoB-CP' % (dbse, 1)] = """Monomer B water_ammonia_Cs """
TAGL['%s-%s-monoA-unCP' % (dbse, 1)] = """Monomer A water_ammonia_Cs """
TAGL['%s-%s-monoB-unCP' % (dbse, 1)] = """Monomer B water_ammonia_Cs """
TAGL['%s-%s' % (dbse, 2)] = """ water_water_Cs """
TAGL['%s-%s-dimer' % (dbse, 2)] = """Dimer from water_water_Cs """
TAGL['%s-%s-monoA-CP' % (dbse, 2)] = """Monomer A from water_water_Cs """
TAGL['%s-%s-monoB-CP' % (dbse, 2)] = """Monomer B from water_water_Cs """
TAGL['%s-%s-monoA-unCP' % (dbse, 2)] = """Monomer A from water_water_Cs """
TAGL['%s-%s-monoB-unCP' % (dbse, 2)] = """Monomer B from water_water_Cs """
TAGL['%s-%s' % (dbse, 3)] = """ HCN_HCN_Cxv """
TAGL['%s-%s-dimer' % (dbse, 3)] = """Dimer from HCN_HCN_Cxv """
TAGL['%s-%s-monoA-CP' % (dbse, 3)] = """Monomer A from HCN_HCN_Cxv """
TAGL['%s-%s-monoB-CP' % (dbse, 3)] = """Monomer B from HCN_HCN_Cxv """
TAGL['%s-%s-monoA-unCP' % (dbse, 3)] = """Monomer A from HCN_HCN_Cxv """
TAGL['%s-%s-monoB-unCP' % (dbse, 3)] = """Monomer B from HCN_HCN_Cxv """
TAGL['%s-%s' % (dbse, 4)] = """ HF_HF_Cs """
TAGL['%s-%s-dimer' % (dbse, 4)] = """Dimer from HF_HF_Cs """
TAGL['%s-%s-monoA-CP' % (dbse, 4)] = """Monomer A from HF_HF_Cs """
TAGL['%s-%s-monoB-CP' % (dbse, 4)] = """Monomer B from HF_HF_Cs """
TAGL['%s-%s-monoA-unCP' % (dbse, 4)] = """Monomer A from HF_HF_Cs """
TAGL['%s-%s-monoB-unCP' % (dbse, 4)] = """Monomer B from HF_HF_Cs """
TAGL['%s-%s' % (dbse, 5)] = """ ammonia_ammonia_C2h """
TAGL['%s-%s-dimer' % (dbse, 5)] = """Dimer from ammonia_ammonia_C2h """
TAGL['%s-%s-monoA-CP' % (dbse, 5)] = """Monomer A from ammonia_ammonia_C2h """
TAGL['%s-%s-monoB-CP' % (dbse, 5)] = """Monomer B from ammonia_ammonia_C2h """
TAGL['%s-%s-monoA-unCP' % (dbse, 5)] = """Monomer A from ammonia_ammonia_C2h """
TAGL['%s-%s-monoB-unCP' % (dbse, 5)] = """Monomer B from ammonia_ammonia_C2h """
TAGL['%s-%s' % (dbse, 6)] = """ methane_HF_C3v """
TAGL['%s-%s-dimer' % (dbse, 6)] = """Dimer from methane_HF_C3v """
TAGL['%s-%s-monoA-CP' % (dbse, 6)] = """Monomer A from methane_HF_C3v """
TAGL['%s-%s-monoB-CP' % (dbse, 6)] = """Monomer B from methane_HF_C3v """
TAGL['%s-%s-monoA-unCP' % (dbse, 6)] = """Monomer A from methane_HF_C3v """
TAGL['%s-%s-monoB-unCP' % (dbse, 6)] = """Monomer B from methane_HF_C3v """
TAGL['%s-%s' % (dbse, 7)] = """ ammmonia_methane_C3v """
TAGL['%s-%s-dimer' % (dbse, 7)] = """Dimer from ammmonia_methane_C3v """
TAGL['%s-%s-monoA-CP' % (dbse, 7)] = """Monomer A from ammmonia_methane_C3v """
TAGL['%s-%s-monoB-CP' % (dbse, 7)] = """Monomer B from ammmonia_methane_C3v """
TAGL['%s-%s-monoA-unCP' % (dbse, 7)] = """Monomer A from ammmonia_methane_C3v """
TAGL['%s-%s-monoB-unCP' % (dbse, 7)] = """Monomer B from ammmonia_methane_C3v """
TAGL['%s-%s' % (dbse, 8)] = """ methane_water_Cs """
TAGL['%s-%s-dimer' % (dbse, 8)] = """Dimer from methane_water_Cs """
TAGL['%s-%s-monoA-CP' % (dbse, 8)] = """Monomer A from methane_water_Cs """
TAGL['%s-%s-monoB-CP' % (dbse, 8)] = """Monomer B from methane_water_Cs """
TAGL['%s-%s-monoA-unCP' % (dbse, 8)] = """Monomer A from methane_water_Cs """
TAGL['%s-%s-monoB-unCP' % (dbse, 8)] = """Monomer B from methane_water_Cs """
TAGL['%s-%s' % (dbse, 9)] = """ formaldehyde_formaldehyde_Cs """
TAGL['%s-%s-dimer' % (dbse, 9)] = """Dimer from formaldehyde_formaldehyde_Cs """
TAGL['%s-%s-monoA-CP' % (dbse, 9)] = """Monomer A from formaldehyde_formaldehyde_Cs """
TAGL['%s-%s-monoB-CP' % (dbse, 9)] = """Monomer B from formaldehyde_formaldehyde_Cs """
TAGL['%s-%s-monoA-unCP' % (dbse, 9)] = """Monomer A from formaldehyde_formaldehyde_Cs """
TAGL['%s-%s-monoB-unCP' % (dbse, 9)] = """Monomer B from formaldehyde_formaldehyde_Cs """
TAGL['%s-%s' % (dbse, 10)] = """ ethene_wat_Cs """
TAGL['%s-%s-dimer' % (dbse, 10)] = """Dimer from ethene_wat_Cs """
TAGL['%s-%s-monoA-CP' % (dbse, 10)] = """Monomer A from ethene_wat_Cs """
TAGL['%s-%s-monoB-CP' % (dbse, 10)] = """Monomer B from ethene_wat_Cs """
TAGL['%s-%s-monoA-unCP' % (dbse, 10)] = """Monomer A from ethene_wat_Cs """
TAGL['%s-%s-monoB-unCP' % (dbse, 10)] = """Monomer B from ethene_wat_Cs """
TAGL['%s-%s' % (dbse, 11)] = """ ethene_formaldehyde_Cs """
TAGL['%s-%s-dimer' % (dbse, 11)] = """Dimer from ethene_formaldehyde_Cs """
TAGL['%s-%s-monoA-CP' % (dbse, 11)] = """Monomer A from ethene_formaldehyde_Cs """
TAGL['%s-%s-monoB-CP' % (dbse, 11)] = """Monomer B from ethene_formaldehyde_Cs """
TAGL['%s-%s-monoA-unCP' % (dbse, 11)] = """Monomer A from ethene_formaldehyde_Cs """
TAGL['%s-%s-monoB-unCP' % (dbse, 11)] = """Monomer B from ethene_formaldehyde_Cs """
TAGL['%s-%s' % (dbse, 12)] = """ ethyne_ethyne_C2v """
TAGL['%s-%s-dimer' % (dbse, 12)] = """Dimer from ethyne_ethyne_C2v """
TAGL['%s-%s-monoA-CP' % (dbse, 12)] = """Monomer A from ethyne_ethyne_C2v """
TAGL['%s-%s-monoB-CP' % (dbse, 12)] = """Monomer B from ethyne_ethyne_C2v """
TAGL['%s-%s-monoA-unCP' % (dbse, 12)] = """Monomer A from ethyne_ethyne_C2v """
TAGL['%s-%s-monoB-unCP' % (dbse, 12)] = """Monomer B from ethyne_ethyne_C2v """
TAGL['%s-%s' % (dbse, 13)] = """ ethene_ammonia_Cs """
TAGL['%s-%s-dimer' % (dbse, 13)] = """Dimer from ethene_ammonia_Cs """
TAGL['%s-%s-monoA-CP' % (dbse, 13)] = """Monomer A from ethene_ammonia_Cs """
TAGL['%s-%s-monoB-CP' % (dbse, 13)] = """Monomer B from ethene_ammonia_Cs """
TAGL['%s-%s-monoA-unCP' % (dbse, 13)] = """Monomer A from ethene_ammonia_Cs """
TAGL['%s-%s-monoB-unCP' % (dbse, 13)] = """Monomer B from ethene_ammonia_Cs """
TAGL['%s-%s' % (dbse, 14)] = """ ethene_ethene_C2v """
TAGL['%s-%s-dimer' % (dbse, 14)] = """Dimer from ethene_ethene_C2v """
TAGL['%s-%s-monoA-CP' % (dbse, 14)] = """Monomer A from ethene_ethene_C2v """
TAGL['%s-%s-monoB-CP' % (dbse, 14)] = """Monomer B from ethene_ethene_C2v """
TAGL['%s-%s-monoA-unCP' % (dbse, 14)] = """Monomer A from ethene_ethene_C2v """
TAGL['%s-%s-monoB-unCP' % (dbse, 14)] = """Monomer B from ethene_ethene_C2v """
TAGL['%s-%s' % (dbse, 15)] = """ methane_ethene_Cs """
TAGL['%s-%s-dimer' % (dbse, 15)] = """Dimer from methane_ethene_Cs """
TAGL['%s-%s-monoA-CP' % (dbse, 15)] = """Monomer A from methane_ethene_Cs """
TAGL['%s-%s-monoB-CP' % (dbse, 15)] = """Monomer B from methane_ethene_Cs """
TAGL['%s-%s-monoA-unCP' % (dbse, 15)] = """Monomer A from methane_ethene_Cs """
TAGL['%s-%s-monoB-unCP' % (dbse, 15)] = """Monomer B from methane_ethene_Cs """
TAGL['%s-%s' % (dbse, 16)] = """ borane_methane_Cs """
TAGL['%s-%s-dimer' % (dbse, 16)] = """Dimer from borane_methane_Cs """
TAGL['%s-%s-monoA-CP' % (dbse, 16)] = """Monomer A from borane_methane_Cs """
TAGL['%s-%s-monoB-CP' % (dbse, 16)] = """Monomer B from borane_methane_Cs """
TAGL['%s-%s-monoA-unCP' % (dbse, 16)] = """Monomer A from borane_methane_Cs """
TAGL['%s-%s-monoB-unCP' % (dbse, 16)] = """Monomer B from borane_methane_Cs """
TAGL['%s-%s' % (dbse, 17)] = """ methane_ethane_Cs """
TAGL['%s-%s-dimer' % (dbse, 17)] = """Dimer from methane_ethane_Cs """
TAGL['%s-%s-monoA-CP' % (dbse, 17)] = """Monomer A from methane_ethane_Cs """
TAGL['%s-%s-monoB-CP' % (dbse, 17)] = """Monomer B from methane_ethane_Cs """
TAGL['%s-%s-monoA-unCP' % (dbse, 17)] = """Monomer A from methane_ethane_Cs """
TAGL['%s-%s-monoB-unCP' % (dbse, 17)] = """Monomer B from methane_ethane_Cs """
TAGL['%s-%s' % (dbse, 18)] = """ methane_ethane_C3 """
TAGL['%s-%s-dimer' % (dbse, 18)] = """Dimer from methane_ethane_C3 """
TAGL['%s-%s-monoA-CP' % (dbse, 18)] = """Monomer A from methane_ethane_C3 """
TAGL['%s-%s-monoB-CP' % (dbse, 18)] = """Monomer B from methane_ethane_C3 """
TAGL['%s-%s-monoA-unCP' % (dbse, 18)] = """Monomer A from methane_ethane_C3 """
TAGL['%s-%s-monoB-unCP' % (dbse, 18)] = """Monomer B from methane_ethane_C3 """
TAGL['%s-%s' % (dbse, 19)] = """ methane_methane_D3d """
TAGL['%s-%s-dimer' % (dbse, 19)] = """Dimer from methane_methane_D3d """
TAGL['%s-%s-monoA-CP' % (dbse, 19)] = """Monomer A from methane_methane_D3d """
TAGL['%s-%s-monoB-CP' % (dbse, 19)] = """Monomer B from methane_methane_D3d """
TAGL['%s-%s-monoA-unCP' % (dbse, 19)] = """Monomer A from methane_methane_D3d """
TAGL['%s-%s-monoB-unCP' % (dbse, 19)] = """Monomer B from methane_methane_D3d """
TAGL['%s-%s' % (dbse, 20)] = """ methane_Ar_C3v """
TAGL['%s-%s-dimer' % (dbse, 20)] = """Dimer from methane_Ar_C3v """
TAGL['%s-%s-monoA-CP' % (dbse, 20)] = """Monomer A from methane_Ar_C3v """
TAGL['%s-%s-monoB-CP' % (dbse, 20)] = """Monomer B from methane_Ar_C3v """
TAGL['%s-%s-monoA-unCP' % (dbse, 20)] = """Monomer A from methane_Ar_C3v """
TAGL['%s-%s-monoB-unCP' % (dbse, 20)] = """Monomer B from methane_Ar_C3v """
TAGL['%s-%s' % (dbse, 21)] = """ ethene_Ar_C2v """
TAGL['%s-%s-dimer' % (dbse, 21)] = """Dimer from ethene_Ar_C2v """
TAGL['%s-%s-monoA-CP' % (dbse, 21)] = """Monomer A from ethene_Ar_C2v """
TAGL['%s-%s-monoB-CP' % (dbse, 21)] = """Monomer B from ethene_Ar_C2v """
TAGL['%s-%s-monoA-unCP' % (dbse, 21)] = """Monomer A from ethene_Ar_C2v """
TAGL['%s-%s-monoB-unCP' % (dbse, 21)] = """Monomer B from ethene_Ar_C2v """
TAGL['%s-%s' % (dbse, 22)] = """ ethene_ethyne_C2v """
TAGL['%s-%s-dimer' % (dbse, 22)] = """Dimer from ethene_ethyne_C2v """
TAGL['%s-%s-monoA-CP' % (dbse, 22)] = """Monomer A from ethene_ethyne_C2v """
TAGL['%s-%s-monoB-CP' % (dbse, 22)] = """Monomer B from ethene_ethyne_C2v """
TAGL['%s-%s-monoA-unCP' % (dbse, 22)] = """Monomer A from ethene_ethyne_C2v """
TAGL['%s-%s-monoB-unCP' % (dbse, 22)] = """Monomer B from ethene_ethyne_C2v """
TAGL['%s-%s' % (dbse, 23)] = """ ethene_ethene_D2h """
TAGL['%s-%s-dimer' % (dbse, 23)] = """Dimer from ethene_ethene_D2h """
TAGL['%s-%s-monoA-CP' % (dbse, 23)] = """Monomer A from ethene_ethene_D2h """
TAGL['%s-%s-monoB-CP' % (dbse, 23)] = """Monomer B from ethene_ethene_D2h """
TAGL['%s-%s-monoA-unCP' % (dbse, 23)] = """Monomer A from ethene_ethene_D2h """
TAGL['%s-%s-monoB-unCP' % (dbse, 23)] = """Monomer B from ethene_ethene_D2h """
TAGL['%s-%s' % (dbse, 24)] = """ ethyne_ethyne_D2h """
TAGL['%s-%s-dimer' % (dbse, 24)] = """Dimer from ethyne_ethyne_D2h """
TAGL['%s-%s-monoA-CP' % (dbse, 24)] = """Monomer A from ethyne_ethyne_D2h """
TAGL['%s-%s-monoB-CP' % (dbse, 24)] = """Monomer B from ethyne_ethyne_D2h """
TAGL['%s-%s-monoA-unCP' % (dbse, 24)] = """Monomer A from ethyne_ethyne_D2h """
TAGL['%s-%s-monoB-unCP' % (dbse, 24)] = """Monomer B from ethyne_ethyne_D2h """
# <<< Geometry Specification Strings >>>
GEOS = {}
GEOS['%s-%s-dimer' % (dbse, '1')] = qcdb.Molecule("""
0 1
O 0.00000000 -0.05786571 -1.47979303
H 0.00000000 0.82293384 -1.85541474
H 0.00000000 0.07949567 -0.51934253
--
0 1
N 0.00000000 0.01436394 1.46454628
H 0.00000000 -0.98104857 1.65344779
H -0.81348351 0.39876776 1.92934049
H 0.81348351 0.39876776 1.92934049
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '2')] = qcdb.Molecule("""
0 1
O -0.06699914 0.00000000 1.49435474
H 0.81573427 0.00000000 1.86586639
H 0.06885510 0.00000000 0.53914277
--
0 1
O 0.06254775 0.00000000 -1.42263208
H -0.40696540 -0.76017841 -1.77174450
H -0.40696540 0.76017841 -1.77174450
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '3')] = qcdb.Molecule("""
0 1
H 0.00000000 0.00000000 3.85521306
C 0.00000000 0.00000000 2.78649976
N 0.00000000 0.00000000 1.63150791
--
0 1
H 0.00000000 0.00000000 -0.59377492
C 0.00000000 0.00000000 -1.66809824
N 0.00000000 0.00000000 -2.82525056
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '4')] = qcdb.Molecule("""
0 1
H 0.00000000 0.80267982 1.69529329
F 0.00000000 -0.04596666 1.34034818
--
0 1
H 0.00000000 -0.12040787 -0.49082840
F 0.00000000 0.00976945 -1.40424978
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '5')] = qcdb.Molecule("""
0 1
N -0.04998129 -1.58709323 0.00000000
H 0.12296265 -2.16846018 0.81105976
H 0.12296265 -2.16846018 -0.81105976
H 0.65988580 -0.86235298 0.00000000
--
0 1
N 0.04998129 1.58709323 0.00000000
H -0.12296265 2.16846018 0.81105976
H -0.65988580 0.86235298 0.00000000
H -0.12296265 2.16846018 -0.81105976
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '6')] = qcdb.Molecule("""
0 1
C 0.00000000 -0.00000000 1.77071609
H 0.51593378 -0.89362352 1.42025061
H -0.00000000 0.00000000 2.85805859
H 0.51593378 0.89362352 1.42025061
H -1.03186756 0.00000000 1.42025061
--
0 1
H -0.00000000 0.00000000 -0.54877328
F -0.00000000 0.00000000 -1.46803256
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '7')] = qcdb.Molecule("""
0 1
N -0.00000000 0.00000000 1.84833659
H 0.93730979 -0.00000000 2.23206741
H -0.46865489 -0.81173409 2.23206741
H -0.46865489 0.81173409 2.23206741
--
0 1
H 0.00000000 -0.00000000 -0.94497174
C 0.00000000 -0.00000000 -2.03363752
H 0.51251439 0.88770096 -2.40095125
H 0.51251439 -0.88770096 -2.40095125
H -1.02502878 0.00000000 -2.40095125
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '8')] = qcdb.Molecule("""
0 1
C 0.00069016 0.00000000 -1.99985520
H -0.50741740 0.88759452 -2.37290605
H 1.03052749 0.00000000 -2.35282982
H -0.01314396 0.00000000 -0.91190852
H -0.50741740 -0.88759452 -2.37290605
--
0 1
O -0.00472553 0.00000000 1.71597466
H 0.03211863 0.75755459 2.30172044
H 0.03211863 -0.75755459 2.30172044
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '9')] = qcdb.Molecule("""
0 1
C 0.00000000 0.60123980 -1.35383976
O 0.00000000 -0.59301814 -1.55209021
H 0.93542250 1.17427624 -1.26515132
H -0.93542250 1.17427624 -1.26515132
--
0 1
C 0.00000000 -0.60200476 1.55228866
O 0.00000000 0.59238638 1.35511328
H 0.00000000 -1.00937982 2.57524635
H 0.00000000 -1.32002906 0.71694997
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '10')] = qcdb.Molecule("""
0 1
C 0.01058825 -0.66806246 1.29820809
C 0.01058825 0.66806246 1.29820809
H 0.86863216 1.23267933 0.95426815
H -0.84608285 1.23258495 1.64525385
H -0.84608285 -1.23258495 1.64525385
H 0.86863216 -1.23267933 0.95426815
--
0 1
H -0.79685627 0.00000000 -2.50911038
O 0.04347445 0.00000000 -2.04834054
H -0.19067546 0.00000000 -1.11576944
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '11')] = qcdb.Molecule("""
0 1
C 0.00000000 -0.59797089 1.47742864
C 0.00000000 0.42131196 2.33957848
H 0.92113351 -1.02957102 1.10653516
H -0.92113351 -1.02957102 1.10653516
H -0.92393815 0.85124826 2.70694633
H 0.92393815 0.85124826 2.70694633
--
0 1
O 0.00000000 -0.51877334 -1.82845679
C 0.00000000 0.68616220 -1.73709412
H 0.00000000 1.33077474 -2.63186355
H 0.00000000 1.18902807 -0.75645498
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '12')] = qcdb.Molecule("""
0 1
C 0.00000000 0.60356400 -2.18173438
H 0.00000000 1.66847581 -2.18429610
C 0.00000000 -0.60356400 -2.18173438
H 0.00000000 -1.66847581 -2.18429610
--
0 1
C -0.00000000 0.00000000 1.57829513
H -0.00000000 0.00000000 0.51136193
C -0.00000000 0.00000000 2.78576543
H -0.00000000 0.00000000 3.85017859
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '13')] = qcdb.Molecule("""
0 1
C 0.00000000 -0.59662248 1.58722206
C 0.00000000 0.68258238 1.20494642
H 0.92312147 1.22423658 1.04062463
H -0.92312147 1.22423658 1.04062463
H -0.92388993 -1.13738548 1.75121281
H 0.92388993 -1.13738548 1.75121281
--
0 1
N 0.00000000 -0.00401379 -2.31096701
H -0.81122549 -0.45983060 -2.71043881
H 0.00000000 -0.22249432 -1.32128161
H 0.81122549 -0.45983060 -2.71043881
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '14')] = qcdb.Molecule("""
0 1
H 0.92444510 -1.23172221 -1.90619313
H -0.92444510 -1.23172221 -1.90619313
H -0.92444510 1.23172221 -1.90619313
H 0.92444510 1.23172221 -1.90619313
C 0.00000000 0.66728778 -1.90556520
C 0.00000000 -0.66728778 -1.90556520
--
0 1
H -0.00000000 1.23344948 2.82931792
H 0.00000000 1.22547148 0.97776199
H -0.00000000 -1.22547148 0.97776199
H -0.00000000 -1.23344948 2.82931792
C -0.00000000 -0.66711698 1.90601042
C -0.00000000 0.66711698 1.90601042
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '15')] = qcdb.Molecule("""
0 1
C 0.00000000 0.64634385 -1.60849815
C 0.00000000 -0.67914355 -1.45381675
H -0.92399961 -1.24016223 -1.38784883
H 0.92399961 -1.24016223 -1.38784883
H 0.92403607 1.20737602 -1.67357285
H -0.92403607 1.20737602 -1.67357285
--
0 1
H 0.00000000 0.08295411 1.59016711
C 0.00000000 0.02871509 2.67711785
H 0.88825459 0.52261990 3.06664029
H -0.88825459 0.52261990 3.06664029
H 0.00000000 -1.01394800 2.98955227
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '16')] = qcdb.Molecule("""
0 1
C 0.00346000 0.00000000 1.38045208
H 0.84849635 0.00000000 0.68958651
H 0.39513333 0.00000000 2.39584935
H -0.60268447 -0.88994299 1.22482674
H -0.60268447 0.88994299 1.22482674
--
0 1
B -0.00555317 0.00000000 -1.59887976
H 0.58455128 -1.03051800 -1.67949525
H 0.58455128 1.03051800 -1.67949525
H -1.18903148 0.00000000 -1.47677217
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '17')] = qcdb.Molecule("""
0 1
C 0.00000000 -0.06374421 2.42054090
H 0.00000000 1.02169396 2.34238038
H 0.88828307 -0.46131911 1.93307194
H -0.88828307 -0.46131911 1.93307194
H 0.00000000 -0.35363606 3.46945195
--
0 1
C 0.00000000 0.78133572 -1.13543912
H 0.00000000 1.37465349 -2.05114442
H -0.88043002 1.06310554 -0.55580918
C 0.00000000 -0.71332890 -1.44723686
H 0.88043002 1.06310554 -0.55580918
H 0.00000000 -1.30641812 -0.53140693
H -0.88100343 -0.99533072 -2.02587154
H 0.88100343 -0.99533072 -2.02587154
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '18')] = qcdb.Molecule("""
0 1
C -0.00000000 0.00000000 -2.85810471
H 0.39304720 -0.94712229 -2.49369739
H 0.62370837 0.81395000 -2.49369739
H -1.01675556 0.13317229 -2.49369739
H 0.00000000 -0.00000000 -3.94634214
--
0 1
C 0.00000000 -0.00000000 0.76143405
C -0.00000000 -0.00000000 2.28821715
H -0.61711193 -0.80824397 0.36571527
H -0.39140385 0.93855659 0.36571527
H 1.00851577 -0.13031262 0.36571527
H -1.00891703 0.13031295 2.68258296
H 0.39160418 -0.93890425 2.68258296
H 0.61731284 0.80859130 2.68258296
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '19')] = qcdb.Molecule("""
0 1
C -0.00000000 0.00000000 1.81901457
H 0.51274115 0.88809373 1.45476743
H 0.51274115 -0.88809373 1.45476743
H -1.02548230 0.00000000 1.45476743
H 0.00000000 -0.00000000 2.90722072
--
0 1
C 0.00000000 -0.00000000 -1.81901457
H -0.00000000 0.00000000 -2.90722072
H -0.51274115 0.88809373 -1.45476743
H -0.51274115 -0.88809373 -1.45476743
H 1.02548230 -0.00000000 -1.45476743
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '20')] = qcdb.Molecule("""
0 1
C -0.00000000 0.00000000 -2.62458428
H 0.51286762 0.88831278 -2.26110195
H 0.51286762 -0.88831278 -2.26110195
H -0.00000000 0.00000000 -3.71273928
H -1.02573525 0.00000000 -2.26110195
--
0 1
AR -0.00000000 0.00000000 1.05395172
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '21')] = qcdb.Molecule("""
0 1
C 0.00000000 0.66718073 -2.29024825
C 0.00000000 -0.66718073 -2.29024825
H -0.92400768 1.23202333 -2.28975239
H 0.92400768 1.23202333 -2.28975239
H -0.92400768 -1.23202333 -2.28975239
H 0.92400768 -1.23202333 -2.28975239
--
0 1
AR -0.00000000 0.00000000 1.60829261
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '22')] = qcdb.Molecule("""
0 1
H -0.92396100 1.23195600 -1.68478123
H 0.92396100 1.23195600 -1.68478123
H 0.92396100 -1.23195600 -1.68478123
H -0.92396100 -1.23195600 -1.68478123
C 0.00000000 0.66717600 -1.68478123
C 0.00000000 -0.66717600 -1.68478123
--
0 1
H -0.00000000 -1.66786500 1.81521877
H -0.00000000 1.66786500 1.81521877
C -0.00000000 -0.60339700 1.81521877
C -0.00000000 0.60339700 1.81521877
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '23')] = qcdb.Molecule("""
0 1
H -0.92396100 1.23195600 -1.75000000
H 0.92396100 1.23195600 -1.75000000
H 0.92396100 -1.23195600 -1.75000000
H -0.92396100 -1.23195600 -1.75000000
C 0.00000000 0.66717600 -1.75000000
C -0.00000000 -0.66717600 -1.75000000
--
0 1
H -0.92396100 1.23195600 1.75000000
H 0.92396100 1.23195600 1.75000000
H 0.92396100 -1.23195600 1.75000000
H -0.92396100 -1.23195600 1.75000000
C 0.00000000 0.66717600 1.75000000
C -0.00000000 -0.66717600 1.75000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '24')] = qcdb.Molecule("""
0 1
H -0.00000000 -1.66786500 -1.75000000
H 0.00000000 1.66786500 -1.75000000
C -0.00000000 -0.60339700 -1.75000000
C 0.00000000 0.60339700 -1.75000000
--
0 1
H -0.00000000 -1.66786500 1.75000000
H 0.00000000 1.66786500 1.75000000
C -0.00000000 -0.60339700 1.75000000
C 0.00000000 0.60339700 1.75000000
units angstrom
""")
# <<< Derived Geometry Strings >>>
for rxn in HRXN:
GEOS['%s-%s-monoA-unCP' % (dbse, rxn)] = GEOS['%s-%s-dimer' % (dbse, rxn)].extract_fragments(1)
GEOS['%s-%s-monoB-unCP' % (dbse, rxn)] = GEOS['%s-%s-dimer' % (dbse, rxn)].extract_fragments(2)
GEOS['%s-%s-monoA-CP' % (dbse, rxn)] = GEOS['%s-%s-dimer' % (dbse, rxn)].extract_fragments(1, 2)
GEOS['%s-%s-monoB-CP' % (dbse, rxn)] = GEOS['%s-%s-dimer' % (dbse, rxn)].extract_fragments(2, 1)
| ashutoshvt/psi4 | psi4/share/psi4/databases/A24alt.py | Python | lgpl-3.0 | 29,758 |
import gobject
import libqtile.hook
from libqtile.manager import Key
from libqtile.command import lazy
class Match(object):
    ''' Match for dynamic groups
    it can match by title, wm_class, role or wm_type '''
def __init__(self, title=[], wm_class=[], role=[], wm_type=[]):
self._rules = [('title', t) for t in title]
self._rules += [('wm_class', w) for w in wm_class]
self._rules += [('role', r) for r in role]
self._rules += [('wm_type', r) for r in wm_type]
def compare(self, client):
for _type, rule in self._rules:
match_func = getattr(rule, 'match', None) or\
getattr(rule, 'count')
if _type == 'title':
value = client.name
elif _type == 'wm_class':
value = client.window.get_wm_class()
if value:
value = value[1]
elif _type == 'wm_type':
value = client.window.get_wm_type()
else:
value = client.window.get_wm_window_role()
if value and match_func(value):
return True
return False
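# Usage sketch (hypothetical values): each rule may be a plain string
# (matched via str.count) or a compiled regex (matched via .match).
#   firefox = Match(wm_class=['Firefox'])
#   if firefox.compare(client):
#       ...  # e.g. send the client to a dedicated group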
def simple_key_binder(mod):
''' Bind keys to mod+group position '''
def func(dgroup):
# unbind all
for key in dgroup.keys[:]:
dgroup.qtile.unmapKey(key)
dgroup.keys.remove(key)
# keys 1 to 9 and 0
keynumbers = range(1,10) + [0]
# bind all keys
for num, group in zip(keynumbers, dgroup.qtile.groups[:10]):
name = group.name
key = Key([mod], str(num), lazy.group[name].toscreen())
key_s = Key([mod, "shift"], str(num), lazy.window.togroup(name))
dgroup.keys.append(key)
dgroup.keys.append(key_s)
dgroup.qtile.mapKey(key)
dgroup.qtile.mapKey(key_s)
return func
class DGroups(object):
''' Dynamic Groups '''
def __init__(self, qtile, groups, apps, key_binder=None, delay=1):
self.qtile = qtile
self.groups = groups
self.apps = apps
self.keys = []
self.key_binder = key_binder
self._setup_hooks()
self._setup_groups()
self.delay = delay
self.timeout = {}
def _setup_groups(self):
for name, tag in self.groups.iteritems():
if tag.get('init') == True:
self.qtile.addGroup(name)
spawn_cmd = tag.get('spawn')
if spawn_cmd:
self.qtile.cmd_spawn(spawn_cmd)
def _setup_hooks(self):
libqtile.hook.subscribe.client_new(self._add)
libqtile.hook.subscribe.client_killed(self._del)
if self.key_binder:
libqtile.hook.subscribe.addgroup(
lambda: self.key_binder(self))
libqtile.hook.subscribe.delgroup(
lambda: self.key_binder(self))
def shuffle_groups(self, lst, match):
masters = []
for client in lst:
if match.compare(client):
masters.append(client)
for master in masters:
lst.remove(master)
lst.insert(0, master)
def _add(self, client):
if client in self.timeout:
gobject.source_remove(self.timeout[client])
del(self.timeout[client])
group_set = False
intrusive = False
for app in self.apps:
# Matching Rules
if app['match'].compare(client):
if 'group' in app:
group = app['group']
self.qtile.addGroup(group)
client.togroup(group)
group_set = True
group_obj = self.qtile.groupMap[group]
group_opts = self.groups.get(group)
if group_opts:
layout = group_opts.get('layout')
master = group_opts.get('master')
if layout:
group_obj.layout = layout
if master:
group_obj.layout.shuffle(
lambda lst: self.shuffle_groups(lst, master))
if 'float' in app and app['float']:
client.floating = True
if 'intrusive' in app:
intrusive = app['intrusive']
# If app doesn't have a group
if not group_set:
current_group = self.qtile.currentGroup.name
if current_group in self.groups and\
self.groups[current_group].get('exclusive') and\
not intrusive:
wm_class = client.window.get_wm_class()
if wm_class:
group_name = wm_class[1]
else:
group_name = client.name
self.qtile.addGroup(group_name)
client.togroup(group_name)
def _del(self, client):
group = client.group
def delete_client():
# Delete group if empty and dont persist
if group and not (group.name in self.groups and\
self.groups[group.name].get('persist')) and\
len(group.windows) <= 0:
self.qtile.delGroup(group.name)
        # wait for the delay before actually deleting the group
self.timeout[client] = gobject.timeout_add_seconds(self.delay,
delete_client)
| andrelaszlo/qtile | examples/config/dgroups.py | Python | mit | 5,538 |
"""
Contains the ClientManager class that can be used to manage a set of
clients.
"""
"""
Copyright 2011-2013 Gregory Holt
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import contextlib
from six.moves import queue
class ClientManager(object):
"""
Can be used to manage a set of clients.
:param client_class: The class to create when a new client is
needed.
:param args: The args for the client constructor.
:param kwargs: The keyword args for the client constructor.
"""
def __init__(self, client_class, *args, **kwargs):
self.client_class = client_class
self.args = args
self.kwargs = kwargs
self.clients = queue.Queue()
self.client_id = 0
def get_client(self):
"""
Obtains a client for use, whether an existing unused client
or a brand new one if none are available.
"""
client = None
try:
client = self.clients.get(block=False)
except queue.Empty:
pass
if not client:
self.client_id += 1
kwargs = dict(self.kwargs)
kwargs['verbose_id'] = kwargs.get(
'verbose_id', '') + str(self.client_id)
client = self.client_class(*self.args, **kwargs)
return client
def put_client(self, client):
"""
Returns a client back into the pool for availability to
future calls to get_client. This should only be called if
get_client was used to obtain the client; with_client is a
context manager that does this for you.
"""
self.clients.put(client)
@contextlib.contextmanager
def with_client(self):
"""
A context manager that obtains a client for use, whether an
existing unused client or a brand new one if none are
available.
"""
client = self.get_client()
yield client
self.put_client(client)
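# Usage sketch (SomeClient stands in for any client class; args/kwargs are
# illustrative):
#   manager = ClientManager(SomeClient, 'endpoint', retries=3)
#   with manager.with_client() as client:
#       client.do_work()  # the client is returned to the pool on exit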
| gholt/swiftly | swiftly/client/manager.py | Python | apache-2.0 | 2,442 |
# GA hello world
# Selection using ranking with elitism
import sys
import random
import array
import copy
"""
Parameters to be tweaked
"""
elitePercent=5 # Percentage copied to next generation
selectPercent=50 # Top 50% available for selection
POPSIZE = 100 # population size
MAXITER = 1000 # maximum iterations
CROSSOVERPROB= 0.5 # Fraction of new population created by crossover breeding
MUTATEPROB = 0.5 # mutation rate of breed genes
## End of tweakable parameters
NELITE = int((POPSIZE*elitePercent)/100); # top of population survive
NSELECT = int((POPSIZE*selectPercent)/100); # how many are bred
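# With the defaults above: NELITE = int(100*5/100) = 5 individuals are copied
# unchanged each generation, and parents are drawn only from the top
# NSELECT = int(100*50/100) = 50 of the ranked population.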
secret="Hello World!"
size=len(secret);
low_char=32
hi_char=121
class Gene:
def __init__(self,string):
self.string=string
self.fitness=None
def funcPrint(self):
print self.string, self.fitness
def randomChar():
return chr(random.randint(low_char,hi_char))
def seed():
string=array.array('c')
for i in xrange(size):
string.append(randomChar())
return string
def evaluate1(string): # number correct
sum=0
for a,b in zip(string,secret):
if a == b:
sum += 1
return sum
def clone(string):
return copy.deepcopy(string)
def mate(a,b):
i=random.randint(1,size-1)
ret = a[0:i]+b[i:size]
return ret
def mutate1(a): # randomly replace a character
i=random.randint(0,size-1)
a[i]=randomChar()
def mutate2(a): # add/subtract 1 from the character code
i=random.randint(0,size-1)
ic = ord(a[i])
im=random.randint(0,1)
ic=ic + 2*im -1
if ic >hi_char:
ic=hi_char
if ic <low_char:
ic=low_char
a[i]=chr(ic)
def evaluate2(string): # sum of diff in char codes
sum=0
for a,b in zip(string,secret):
sum -= abs(ord(a)-ord(b))
return sum
# create the next generation
def newPopulation():
newpop=[]
# copy top NELITE to the new population
for m in pop[0:NELITE]:
newpop.append(m)
# create the rest by breeding/cloning+mutating from the top NSELECT
for i in range(NELITE,POPSIZE):
i1 = random.randint(0,NSELECT-1)
if random.random() < CROSSOVERPROB: # create by breeding
i2 = random.randint(0,NSELECT-1)
gene=Gene(mate(pop[i1].string,pop[i2].string))
if random.random() < MUTATEPROB:
mutate(gene.string)
else:
gene=Gene(clone(pop[i1].string)) # just clone
            mutate(gene.string) # always mutate cloned genes
newpop.append(gene)
return newpop
if __name__ == '__main__':
mutate=mutate2
evaluate=evaluate2
target_fitness=evaluate(secret)
pop=[]
for i in range(POPSIZE):
pop.append(Gene(seed()))
count=0
while count< MAXITER*POPSIZE:
for m in pop:
m.fitness=evaluate(m.string)
count += 1
pop = sorted(pop, key = lambda x:x.fitness,reverse=True)
print count,pop[0].string.tostring(),pop[0].fitness
if pop[0].fitness >= target_fitness:
break
pop = newPopulation();
| pauljohnleonard/pod-world | CI_2015/CI_examples/GAHelloWorld/02_gaHelloWorldElite.py | Python | gpl-2.0 | 3,403 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
class ReissueCertificateOrderRequest(Resource):
"""Class representing certificate reissue request.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:param location: Resource Location.
:type location: str
:ivar type: Resource type.
:vartype type: str
:param tags: Resource tags.
:type tags: dict[str, str]
:param key_size: Certificate Key Size.
:type key_size: int
:param delay_existing_revoke_in_hours: Delay in hours to revoke existing
certificate after the new certificate is issued.
:type delay_existing_revoke_in_hours: int
:param csr: Csr to be used for re-key operation.
:type csr: str
:param is_private_key_external: Should we change the ASC type (from
managed private key to external private key and vice versa).
:type is_private_key_external: bool
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'location': {'required': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'key_size': {'key': 'properties.keySize', 'type': 'int'},
'delay_existing_revoke_in_hours': {'key': 'properties.delayExistingRevokeInHours', 'type': 'int'},
'csr': {'key': 'properties.csr', 'type': 'str'},
'is_private_key_external': {'key': 'properties.isPrivateKeyExternal', 'type': 'bool'},
}
def __init__(self, location, kind=None, tags=None, key_size=None, delay_existing_revoke_in_hours=None, csr=None, is_private_key_external=None):
super(ReissueCertificateOrderRequest, self).__init__(kind=kind, location=location, tags=tags)
self.key_size = key_size
self.delay_existing_revoke_in_hours = delay_existing_revoke_in_hours
self.csr = csr
self.is_private_key_external = is_private_key_external
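# Usage sketch (field values are illustrative only):
#   request = ReissueCertificateOrderRequest(
#       location='global', key_size=2048,
#       csr='-----BEGIN CERTIFICATE REQUEST-----\n...')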
| AutorestCI/azure-sdk-for-python | azure-mgmt-web/azure/mgmt/web/models/reissue_certificate_order_request.py | Python | mit | 2,817 |
# coding: utf-8
"""
插入所有需要的库,和函数
"""
import pandas as pd
#----------------------------------------------------------------------
def klLoad(self,bars=None):
"""载入合约数据"""
bars = pd.DataFrame.from_csv('datasig.csv')
kTool = self.canvas
for sig in kTool.sigPlots:
kTool.pwKL.removeItem(kTool.sigPlots[sig])
kTool.sigData = {}
kTool.sigPlots = {}
for sig in kTool.subSigPlots:
kTool.pwOI.removeItem(kTool.subSigPlots[sig])
kTool.subSigData = {}
kTool.subSigPlots = {}
self.loadData(bars)
| moonnejs/uiKLine | func-button/klLoad.py | Python | mit | 583 |
#!/usr/bin/env python
# coding=utf-8
# Copyright [2017] [B2W Digital]
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from marvin_python_toolbox.engine_base import EngineBaseDataHandler
@pytest.fixture
def engine_action():
class EngineAction(EngineBaseDataHandler):
def execute(self, **kwargs):
return 1
return EngineAction(default_root_path="/tmp/.marvin")
class TestEngineBaseDataHandler:
def test_initial_dataset(self, engine_action):
engine_action.marvin_initial_dataset = [1]
assert engine_action.marvin_initial_dataset == engine_action._initial_dataset == [1]
def test_dataset(self, engine_action):
engine_action.marvin_dataset = [1]
assert engine_action.marvin_dataset == engine_action._dataset == [1]
| marvin-ai/marvin-python-toolbox | tests/engine_base/test_engine_base_data_handler.py | Python | apache-2.0 | 1,298 |
#! /usr/bin/env python
# encoding: utf-8
import sys
if sys.hexversion < 0x020400f0: from sets import Set as set
import os,shutil,sys,re,random,datetime
from Utils import md5
import Build,Runner,Utils,Node,Logs,Options
from Logs import debug,warn,error
from Constants import*
algotype=NORMAL
COMPILE_TEMPLATE_SHELL='''
def f(task):
env = task.env
wd = getattr(task, 'cwd', None)
p = env.get_flat
cmd = \'\'\' %s \'\'\' % s
return task.exec_command(cmd, cwd=wd)
'''
COMPILE_TEMPLATE_NOSHELL='''
def f(task):
env = task.env
wd = getattr(task, 'cwd', None)
def to_list(xx):
if isinstance(xx, str): return [xx]
return xx
lst = []
%s
lst = [x for x in lst if x]
return task.exec_command(lst, cwd=wd)
'''
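# The two templates above are formatted and exec'd at runtime to synthesize a
# per-task-class runner f(task): the shell variant interpolates a single
# command string, while the no-shell variant builds an argument list `lst`.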
file_deps=Utils.nada
class TaskManager(object):
def __init__(self):
self.groups=[]
self.tasks_done=[]
self.current_group=0
self.groups_names={}
def get_next_set(self):
ret=None
while not ret and self.current_group<len(self.groups):
ret=self.groups[self.current_group].get_next_set()
if ret:return ret
else:self.current_group+=1
return(None,None)
def add_group(self,name=None,set=True):
g=TaskGroup()
if name and name in self.groups_names:
error('add_group: name %s already present'%name)
self.groups_names[name]=g
self.groups.append(g)
if set:
self.current_group=len(self.groups)-1
def set_group(self,idx):
if isinstance(idx,str):
g=self.groups_names[idx]
for x in xrange(len(self.groups)):
if id(g)==id(self.groups[x]):
self.current_group=x
else:
self.current_group=idx
def add_task_gen(self,tgen):
if not self.groups:self.add_group()
self.groups[self.current_group].tasks_gen.append(tgen)
def add_task(self,task):
if not self.groups:self.add_group()
self.groups[self.current_group].tasks.append(task)
def total(self):
total=0
if not self.groups:return 0
for group in self.groups:
total+=len(group.tasks)
return total
def add_finished(self,tsk):
self.tasks_done.append(tsk)
bld=tsk.generator.bld
if bld.is_install:
f=None
if'install'in tsk.__dict__:
f=tsk.__dict__['install']
if f:f(tsk)
else:
tsk.install()
class TaskGroup(object):
def __init__(self):
self.tasks=[]
self.tasks_gen=[]
self.cstr_groups=Utils.DefaultDict(list)
self.cstr_order=Utils.DefaultDict(set)
self.temp_tasks=[]
self.ready=0
def reset(self):
for x in self.cstr_groups:
self.tasks+=self.cstr_groups[x]
self.tasks=self.temp_tasks+self.tasks
self.temp_tasks=[]
self.cstr_groups=Utils.DefaultDict(list)
self.cstr_order=Utils.DefaultDict(set)
self.ready=0
def prepare(self):
self.ready=1
file_deps(self.tasks)
self.make_cstr_groups()
self.extract_constraints()
def get_next_set(self):
global algotype
if algotype==NORMAL:
tasks=self.tasks_in_parallel()
maxj=MAXJOBS
elif algotype==JOBCONTROL:
(maxj,tasks)=self.tasks_by_max_jobs()
elif algotype==MAXPARALLEL:
tasks=self.tasks_with_inner_constraints()
maxj=MAXJOBS
else:
raise Utils.WafError("unknown algorithm type %s"%(algotype))
if not tasks:return()
return(maxj,tasks)
def make_cstr_groups(self):
self.cstr_groups=Utils.DefaultDict(list)
for x in self.tasks:
h=x.hash_constraints()
self.cstr_groups[h].append(x)
def set_order(self,a,b):
self.cstr_order[a].add(b)
def compare_exts(self,t1,t2):
x="ext_in"
y="ext_out"
in_=t1.attr(x,())
out_=t2.attr(y,())
for k in in_:
if k in out_:
return-1
in_=t2.attr(x,())
out_=t1.attr(y,())
for k in in_:
if k in out_:
return 1
return 0
def compare_partial(self,t1,t2):
m="after"
n="before"
name=t2.__class__.__name__
if name in Utils.to_list(t1.attr(m,())):return-1
elif name in Utils.to_list(t1.attr(n,())):return 1
name=t1.__class__.__name__
if name in Utils.to_list(t2.attr(m,())):return 1
elif name in Utils.to_list(t2.attr(n,())):return-1
return 0
def extract_constraints(self):
keys=self.cstr_groups.keys()
max=len(keys)
for i in xrange(max):
t1=self.cstr_groups[keys[i]][0]
for j in xrange(i+1,max):
t2=self.cstr_groups[keys[j]][0]
val=(self.compare_exts(t1,t2)or self.compare_partial(t1,t2))
if val>0:
self.set_order(keys[i],keys[j])
elif val<0:
self.set_order(keys[j],keys[i])
def tasks_in_parallel(self):
if not self.ready:self.prepare()
keys=self.cstr_groups.keys()
unconnected=[]
remainder=[]
for u in keys:
for k in self.cstr_order.values():
if u in k:
remainder.append(u)
break
else:
unconnected.append(u)
toreturn=[]
for y in unconnected:
toreturn.extend(self.cstr_groups[y])
for y in unconnected:
try:self.cstr_order.__delitem__(y)
except KeyError:pass
self.cstr_groups.__delitem__(y)
if not toreturn and remainder:
raise Utils.WafError("circular order constraint detected %r"%remainder)
return toreturn
def tasks_by_max_jobs(self):
if not self.ready:self.prepare()
if not self.temp_tasks:self.temp_tasks=self.tasks_in_parallel()
if not self.temp_tasks:return(None,None)
maxjobs=MAXJOBS
ret=[]
remaining=[]
for t in self.temp_tasks:
m=getattr(t,"maxjobs",getattr(self.__class__,"maxjobs",MAXJOBS))
if m>maxjobs:
remaining.append(t)
elif m<maxjobs:
remaining+=ret
ret=[t]
maxjobs=m
else:
ret.append(t)
self.temp_tasks=remaining
return(maxjobs,ret)
def tasks_with_inner_constraints(self):
if not self.ready:self.prepare()
if getattr(self,"done",None):return None
for p in self.cstr_order:
for v in self.cstr_order[p]:
for m in self.cstr_groups[p]:
for n in self.cstr_groups[v]:
n.set_run_after(m)
self.cstr_order=Utils.DefaultDict(set)
self.cstr_groups=Utils.DefaultDict(list)
self.done=1
return self.tasks[:]
class store_task_type(type):
def __init__(cls,name,bases,dict):
super(store_task_type,cls).__init__(name,bases,dict)
name=cls.__name__
if name.endswith('_task'):
name=name.replace('_task','')
TaskBase.classes[name]=cls
class TaskBase(object):
__metaclass__=store_task_type
color="GREEN"
maxjobs=MAXJOBS
classes={}
stat=None
def __init__(self,*k,**kw):
self.hasrun=NOT_RUN
try:
self.generator=kw['generator']
except KeyError:
self.generator=self
self.bld=Build.bld
if kw.get('normal',1):
self.generator.bld.task_manager.add_task(self)
def __repr__(self):
return'\n\t{task: %s %s}'%(self.__class__.__name__,str(getattr(self,"fun","")))
def __str__(self):
if hasattr(self,'fun'):
return'executing: %s\n'%self.fun.__name__
return self.__class__.__name__+'\n'
def exec_command(self,*k,**kw):
return self.generator.bld.exec_command(*k,**kw)
def runnable_status(self):
return RUN_ME
def can_retrieve_cache(self):
return False
def call_run(self):
if self.can_retrieve_cache():
return 0
return self.run()
def run(self):
if hasattr(self,'fun'):
return self.fun(self)
return 0
def post_run(self):
pass
def display(self):
col1=Logs.colors(self.color)
col2=Logs.colors.NORMAL
if Options.options.progress_bar==1:
return self.generator.bld.progress_line(self.position[0],self.position[1],col1,col2)
if Options.options.progress_bar==2:
ela=Utils.get_elapsed_time(self.generator.bld.ini)
try:
ins=','.join([n.name for n in self.inputs])
except AttributeError:
ins=''
try:
outs=','.join([n.name for n in self.outputs])
except AttributeError:
outs=''
return'|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n'%(self.position[1],self.position[0],ins,outs,ela)
total=self.position[1]
n=len(str(total))
fs='[%%%dd/%%%dd] %%s%%s%%s'%(n,n)
return fs%(self.position[0],self.position[1],col1,str(self),col2)
def attr(self,att,default=None):
ret=getattr(self,att,self)
if ret is self:return getattr(self.__class__,att,default)
return ret
def hash_constraints(self):
a=self.attr
sum=hash((self.__class__.__name__,str(a('before','')),str(a('after','')),str(a('ext_in','')),str(a('ext_out','')),self.__class__.maxjobs))
return sum
def format_error(self):
if getattr(self,"err_msg",None):
return self.err_msg
elif self.hasrun==CRASHED:
try:
return" -> task failed (err #%d): %r"%(self.err_code,self)
except AttributeError:
return" -> task failed: %r"%self
elif self.hasrun==MISSING:
return" -> missing files: %r"%self
else:
return''
def install(self):
bld=self.generator.bld
d=self.attr('install')
if self.attr('install_path'):
lst=[a.relpath_gen(bld.srcnode)for a in self.outputs]
perm=self.attr('chmod',O644)
if self.attr('src'):
lst+=[a.relpath_gen(bld.srcnode)for a in self.inputs]
if self.attr('filename'):
dir=self.install_path.rstrip(os.sep)+os.sep+self.attr('filename')
bld.install_as(dir,lst[0],self.env,perm)
else:
bld.install_files(self.install_path,lst,self.env,perm)
class Task(TaskBase):
vars=[]
def __init__(self,env,**kw):
TaskBase.__init__(self,**kw)
self.env=env
self.inputs=[]
self.outputs=[]
self.deps_nodes=[]
self.run_after=[]
def __str__(self):
env=self.env
src_str=' '.join([a.nice_path(env)for a in self.inputs])
tgt_str=' '.join([a.nice_path(env)for a in self.outputs])
if self.outputs:sep=' -> '
else:sep=''
return'%s: %s%s%s\n'%(self.__class__.__name__.replace('_task',''),src_str,sep,tgt_str)
def __repr__(self):
return"".join(['\n\t{task: ',self.__class__.__name__," ",",".join([x.name for x in self.inputs])," -> ",",".join([x.name for x in self.outputs]),'}'])
def unique_id(self):
try:
return self.uid
except AttributeError:
m=md5()
up=m.update
up(self.__class__.__name__)
up(self.env.variant())
p=None
for x in self.inputs+self.outputs:
if p!=x.parent.id:
p=x.parent.id
up(x.parent.abspath())
up(x.name)
self.uid=m.digest()
return self.uid
def set_inputs(self,inp):
if isinstance(inp,list):self.inputs+=inp
else:self.inputs.append(inp)
def set_outputs(self,out):
if isinstance(out,list):self.outputs+=out
else:self.outputs.append(out)
def set_run_after(self,task):
assert isinstance(task,TaskBase)
self.run_after.append(task)
def add_file_dependency(self,filename):
node=self.generator.bld.current.find_resource(filename)
self.deps_nodes.append(node)
def signature(self):
try:return self.cache_sig[0]
except AttributeError:pass
m=md5()
exp_sig=self.sig_explicit_deps()
m.update(exp_sig)
imp_sig=self.scan and self.sig_implicit_deps()or SIG_NIL
m.update(imp_sig)
var_sig=self.sig_vars()
m.update(var_sig)
ret=m.digest()
self.cache_sig=(ret,exp_sig,imp_sig,var_sig)
return ret
def runnable_status(self):
if self.inputs and(not self.outputs):
if not getattr(self.__class__,'quiet',None):
warn("invalid task (no inputs OR outputs): override in a Task subclass or set the attribute 'quiet' %r"%self)
for t in self.run_after:
if not t.hasrun:
return ASK_LATER
env=self.env
bld=self.generator.bld
try:
new_sig=self.signature()
except KeyError:
debug("task: something is wrong, computing the task %r signature failed"%self)
return RUN_ME
key=self.unique_id()
try:
prev_sig=bld.task_sigs[key][0]
except KeyError:
debug("task: task %r must run as it was never run before or the task code changed"%self)
return RUN_ME
try:
for node in self.outputs:
variant=node.variant(env)
if bld.node_sigs[variant][node.id]!=new_sig:
return RUN_ME
except KeyError:
debug("task: task %r must run as the output nodes do not exist"%self)
return RUN_ME
if Logs.verbose:self.debug_why(bld.task_sigs[key])
if new_sig!=prev_sig:
return RUN_ME
return SKIP_ME
def post_run(self):
bld=self.generator.bld
env=self.env
sig=self.signature()
cnt=0
variant=env.variant()
for node in self.outputs:
try:
os.stat(node.abspath(env))
except OSError:
				self.hasrun=MISSING
self.err_msg='-> missing file: %r'%node.abspath(env)
raise Utils.WafError
bld.node_sigs[variant][node.id]=sig
if Options.cache_global:
ssig=sig.encode('hex')
dest=os.path.join(Options.cache_global,'%s_%d_%s'%(ssig,cnt,node.name))
try:shutil.copy2(node.abspath(env),dest)
except IOError:warn('Could not write the file to the cache')
cnt+=1
bld.task_sigs[self.unique_id()]=self.cache_sig
def can_retrieve_cache(self):
if not Options.cache_global:return None
if Options.options.nocache:return None
if not self.outputs:return None
env=self.env
sig=self.signature()
cnt=0
for node in self.outputs:
variant=node.variant(env)
ssig=sig.encode('hex')
orig=os.path.join(Options.cache_global,'%s_%d_%s'%(ssig,cnt,node.name))
try:
shutil.copy2(orig,node.abspath(env))
os.utime(orig,None)
except(OSError,IOError):
debug('task: failed retrieving file')
return None
else:
cnt+=1
for node in self.outputs:
self.generator.bld.node_sigs[variant][node.id]=sig
self.generator.bld.printout('restoring from cache %r\n'%node.bldpath(env))
return 1
def debug_why(self,old_sigs):
new_sigs=self.cache_sig
def v(x):
return x.encode('hex')
debug("Task %r"%self)
msgs=['Task must run','* Source file or manual dependency','* Implicit dependency','* Environment variable']
tmp='task: -> %s: %s %s'
for x in xrange(len(msgs)):
if(new_sigs[x]!=old_sigs[x]):
debug(tmp%(msgs[x],v(old_sigs[x]),v(new_sigs[x])))
def sig_explicit_deps(self):
bld=self.generator.bld
m=md5()
for x in self.inputs+getattr(self,'dep_nodes',[]):
if not x.parent.id in bld.cache_scanned_folders:
bld.rescan(x.parent)
variant=x.variant(self.env)
m.update(bld.node_sigs[variant][x.id])
if bld.deps_man:
additional_deps=bld.deps_man
for x in self.inputs+self.outputs:
try:
d=additional_deps[x.id]
except KeyError:
continue
for v in d:
if isinstance(v,Node.Node):
bld.rescan(v.parent)
variant=v.variant(self.env)
try:
v=bld.node_sigs[variant][v.id]
except KeyError:
v=''
elif hasattr(v,'__call__'):
v=v()
m.update(v)
return m.digest()
def sig_vars(self):
m=md5()
bld=self.generator.bld
env=self.env
act_sig=bld.hash_env_vars(env,self.__class__.vars)
m.update(act_sig)
dep_vars=getattr(self,'dep_vars',None)
if dep_vars:
m.update(bld.hash_env_vars(env,dep_vars))
return m.digest()
scan=None
def sig_implicit_deps(self):
bld=self.generator.bld
key=self.unique_id()
prev_sigs=bld.task_sigs.get(key,())
if prev_sigs:
try:
if prev_sigs[2]==self.compute_sig_implicit_deps():
return prev_sigs[2]
except(KeyError,OSError):
pass
(nodes,names)=self.scan()
if Logs.verbose:
debug('deps: scanner for %s returned %s %s'%(str(self),str(nodes),str(names)))
bld.node_deps[key]=nodes
bld.raw_deps[key]=names
sig=self.compute_sig_implicit_deps()
return sig
def compute_sig_implicit_deps(self):
m=md5()
upd=m.update
bld=self.generator.bld
tstamp=bld.node_sigs
env=self.env
for k in bld.node_deps.get(self.unique_id(),[]):
if not k.parent.id in bld.cache_scanned_folders:
bld.rescan(k.parent)
if k.id&3==2:
upd(tstamp[0][k.id])
else:
upd(tstamp[env.variant()][k.id])
return m.digest()
def funex(c):
dc={}
exec(c,dc)
return dc['f']
reg_act=re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})",re.M)
def compile_fun_shell(name,line):
extr=[]
def repl(match):
g=match.group
if g('dollar'):return"$"
elif g('backslash'):return'\\\\'
elif g('subst'):extr.append((g('var'),g('code')));return"%s"
return None
line=reg_act.sub(repl,line)
parm=[]
dvars=[]
app=parm.append
for(var,meth)in extr:
if var=='SRC':
if meth:app('task.inputs%s'%meth)
else:app('" ".join([a.srcpath(env) for a in task.inputs])')
elif var=='TGT':
if meth:app('task.outputs%s'%meth)
else:app('" ".join([a.bldpath(env) for a in task.outputs])')
else:
if not var in dvars:dvars.append(var)
app("p('%s')"%var)
if parm:parm="%% (%s) "%(',\n\t\t'.join(parm))
else:parm=''
c=COMPILE_TEMPLATE_SHELL%(line,parm)
debug('action: %s'%c)
return(funex(c),dvars)
def compile_fun_noshell(name,line):
extr=[]
def repl(match):
g=match.group
if g('dollar'):return"$"
elif g('subst'):extr.append((g('var'),g('code')));return"<<|@|>>"
return None
line2=reg_act.sub(repl,line)
params=line2.split('<<|@|>>')
buf=[]
dvars=[]
app=buf.append
for x in xrange(len(extr)):
params[x]=params[x].strip()
if params[x]:
app("lst.extend(%r)"%params[x].split())
(var,meth)=extr[x]
if var=='SRC':
if meth:app('lst.append(task.inputs%s)'%meth)
else:app("lst.extend([a.srcpath(env) for a in task.inputs])")
elif var=='TGT':
if meth:app('lst.append(task.outputs%s)'%meth)
else:app("lst.extend([a.bldpath(env) for a in task.outputs])")
else:
app('lst.extend(to_list(env[%r]))'%var)
if not var in dvars:dvars.append(var)
if extr:
if params[-1]:
app("lst.extend(%r)"%params[-1].split())
fun=COMPILE_TEMPLATE_NOSHELL%"\n\t".join(buf)
debug('action: %s'%fun)
return(funex(fun),dvars)
def compile_fun(name,line,shell=None):
if line.find('<')>0 or line.find('>')>0 or line.find('&&')>0:
shell=True
if shell is None:
if sys.platform=='win32':
shell=False
else:
shell=True
if shell:
return compile_fun_shell(name,line)
else:
return compile_fun_noshell(name,line)
def simple_task_type(name,line,color='GREEN',vars=[],ext_in=[],ext_out=[],before=[],after=[],shell=None):
(fun,dvars)=compile_fun(name,line,shell)
fun.code=line
return task_type_from_func(name,fun,vars or dvars,color,ext_in,ext_out,before,after)
def task_type_from_func(name,func,vars=[],color='GREEN',ext_in=[],ext_out=[],before=[],after=[]):
params={'run':func,'vars':vars,'color':color,'name':name,'ext_in':Utils.to_list(ext_in),'ext_out':Utils.to_list(ext_out),'before':Utils.to_list(before),'after':Utils.to_list(after),}
cls=type(Task)(name,(Task,),params)
TaskBase.classes[name]=cls
return cls
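# Illustrative sketch (not part of the original module; the task name and rule
# string below are hypothetical): simple_task_type() compiles a rule string
# into a new Task subclass, expanding ${SRC}/${TGT} to the task's input/output
# node paths and recording every other ${VAR} as an env dependency in vars.
#
#   copy_cls=simple_task_type('copy','${COPY} ${SRC} ${TGT}',color='BLUE')
#   # equivalent to writing a run() method by hand via task_type_from_func()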
def always_run(cls):
old=cls.runnable_status
def always(self):
old(self)
return RUN_ME
cls.runnable_status=always
def update_outputs(cls):
old_post_run=cls.post_run
def post_run(self):
old_post_run(self)
bld=self.outputs[0].__class__.bld
bld.node_sigs[self.env.variant()][self.outputs[0].id]=Utils.h_file(self.outputs[0].abspath(self.env))
cls.post_run=post_run
def extract_outputs(tasks):
v={}
for x in tasks:
try:
(ins,outs)=v[x.env.variant()]
except KeyError:
ins={}
outs={}
v[x.env.variant()]=(ins,outs)
for a in getattr(x,'inputs',[]):
try:ins[a.id].append(x)
except KeyError:ins[a.id]=[x]
for a in getattr(x,'outputs',[]):
try:outs[a.id].append(x)
except KeyError:outs[a.id]=[x]
for(ins,outs)in v.values():
links=set(ins.iterkeys()).intersection(outs.iterkeys())
for k in links:
for a in ins[k]:
for b in outs[k]:
a.set_run_after(b)
def extract_deps(tasks):
extract_outputs(tasks)
out_to_task={}
for x in tasks:
v=x.env.variant()
try:
lst=x.outputs
except AttributeError:
pass
else:
for node in lst:
out_to_task[(v,node.id)]=x
dep_to_task={}
for x in tasks:
try:
x.signature()
except:
pass
		variant=x.env.variant()
		key=x.unique_id()
		for k in x.generator.bld.node_deps.get(key,[]):
			try:dep_to_task[(variant,k.id)].append(x)
			except KeyError:dep_to_task[(variant,k.id)]=[x]
deps=set(dep_to_task.keys()).intersection(set(out_to_task.keys()))
for idx in deps:
for k in dep_to_task[idx]:
k.set_run_after(out_to_task[idx])
for x in tasks:
try:
delattr(x,'cache_sig')
except AttributeError:
pass
| micove/libdesktop-agnostic | wafadmin/Task.py | Python | lgpl-2.1 | 19,543 |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Collection of useful coders.
Only those coders listed in __all__ are part of the public API of this module.
"""
from __future__ import absolute_import
import base64
from builtins import object
import google.protobuf
from google.protobuf import wrappers_pb2
from apache_beam.coders import coder_impl
from apache_beam.portability import common_urns
from apache_beam.portability import python_urns
from apache_beam.portability.api import beam_runner_api_pb2
from apache_beam.utils import proto_utils
# This is for py2/3 compatibility. cPickle was renamed pickle in python 3.
try:
import cPickle as pickle # Python 2
except ImportError:
import pickle # Python 3
# pylint: disable=wrong-import-order, wrong-import-position, ungrouped-imports
try:
from .stream import get_varint_size
except ImportError:
from .slow_stream import get_varint_size
# pylint: enable=wrong-import-order, wrong-import-position, ungrouped-imports
# pylint: disable=wrong-import-order, wrong-import-position
# Avoid dependencies on the full SDK.
try:
# Import dill from the pickler module to make sure our monkey-patching of dill
# occurs.
from apache_beam.internal.pickler import dill
except ImportError:
# We fall back to using the stock dill library in tests that don't use the
# full Python SDK.
import dill
__all__ = ['Coder',
'BytesCoder', 'DillCoder', 'FastPrimitivesCoder', 'FloatCoder',
'IterableCoder', 'PickleCoder', 'ProtoCoder', 'SingletonCoder',
'StrUtf8Coder', 'TimestampCoder', 'TupleCoder',
'TupleSequenceCoder', 'VarIntCoder', 'WindowedValueCoder']
def serialize_coder(coder):
from apache_beam.internal import pickler
return '%s$%s' % (coder.__class__.__name__, pickler.dumps(coder))
def deserialize_coder(serialized):
from apache_beam.internal import pickler
return pickler.loads(serialized.split('$', 1)[1])
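# Illustrative sketch (assumption, not in the original source): the two helpers
# above are inverses, so any coder should survive a round trip through the
# "<coder_name>$<pickled_data>" wire format used in cloud job descriptions.
#
#   # BytesCoder is defined further down in this module
#   assert deserialize_coder(serialize_coder(BytesCoder())) == BytesCoder()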
# pylint: enable=wrong-import-order, wrong-import-position
class Coder(object):
"""Base class for coders."""
def encode(self, value):
"""Encodes the given object into a byte string."""
raise NotImplementedError('Encode not implemented: %s.' % self)
def decode(self, encoded):
"""Decodes the given byte string into the corresponding object."""
raise NotImplementedError('Decode not implemented: %s.' % self)
def is_deterministic(self):
"""Whether this coder is guaranteed to encode values deterministically.
A deterministic coder is required for key coders in GroupByKey operations
to produce consistent results.
For example, note that the default coder, the PickleCoder, is not
    deterministic: the ordering of pickled entries in maps may vary across
executions since there is no defined order, and such a coder is not in
general suitable for usage as a key coder in GroupByKey operations, since
each instance of the same key may be encoded differently.
Returns:
Whether coder is deterministic.
"""
return False
def as_deterministic_coder(self, step_label, error_message=None):
"""Returns a deterministic version of self, if possible.
Otherwise raises a value error.
"""
if self.is_deterministic():
return self
else:
      raise ValueError(
          error_message or "'%s' cannot be made deterministic." % self)
def estimate_size(self, value):
"""Estimates the encoded size of the given value, in bytes.
Dataflow estimates the encoded size of a PCollection processed in a pipeline
step by using the estimated size of a random sample of elements in that
PCollection.
The default implementation encodes the given value and returns its byte
size. If a coder can provide a fast estimate of the encoded size of a value
(e.g., if the encoding has a fixed size), it can provide its estimate here
to improve performance.
Arguments:
value: the value whose encoded size is to be estimated.
Returns:
The estimated encoded size of the given value.
"""
return len(self.encode(value))
# ===========================================================================
# Methods below are internal SDK details that don't need to be modified for
# user-defined coders.
# ===========================================================================
def _create_impl(self):
"""Creates a CoderImpl to do the actual encoding and decoding.
"""
return coder_impl.CallbackCoderImpl(self.encode, self.decode,
self.estimate_size)
def get_impl(self):
"""For internal use only; no backwards-compatibility guarantees.
Returns the CoderImpl backing this Coder.
"""
if not hasattr(self, '_impl'):
self._impl = self._create_impl()
assert isinstance(self._impl, coder_impl.CoderImpl)
return self._impl
def __getstate__(self):
return self._dict_without_impl()
def _dict_without_impl(self):
if hasattr(self, '_impl'):
d = dict(self.__dict__)
del d['_impl']
return d
return self.__dict__
@classmethod
def from_type_hint(cls, unused_typehint, unused_registry):
# If not overridden, just construct the coder without arguments.
return cls()
def is_kv_coder(self):
return False
def key_coder(self):
if self.is_kv_coder():
raise NotImplementedError('key_coder: %s' % self)
else:
raise ValueError('Not a KV coder: %s.' % self)
def value_coder(self):
if self.is_kv_coder():
raise NotImplementedError('value_coder: %s' % self)
else:
raise ValueError('Not a KV coder: %s.' % self)
def _get_component_coders(self):
"""For internal use only; no backwards-compatibility guarantees.
Returns the internal component coders of this coder."""
# This is an internal detail of the Coder API and does not need to be
# refined in user-defined Coders.
return []
def as_cloud_object(self):
"""For internal use only; no backwards-compatibility guarantees.
Returns Google Cloud Dataflow API description of this coder."""
# This is an internal detail of the Coder API and does not need to be
# refined in user-defined Coders.
value = {
# We pass coders in the form "<coder_name>$<pickled_data>" to make the
# job description JSON more readable. Data before the $ is ignored by
# the worker.
'@type': serialize_coder(self),
'component_encodings': list(
component.as_cloud_object()
for component in self._get_component_coders()
),
}
return value
def __repr__(self):
return self.__class__.__name__
# pylint: disable=protected-access
def __eq__(self, other):
return (self.__class__ == other.__class__
and self._dict_without_impl() == other._dict_without_impl())
def __hash__(self):
return hash((self.__class__,) +
tuple(sorted(self._dict_without_impl().items())))
# pylint: enable=protected-access
_known_urns = {}
@classmethod
def register_urn(cls, urn, parameter_type, fn=None):
"""Registers a urn with a constructor.
For example, if 'beam:fn:foo' had parameter type FooPayload, one could
    write `RunnerApiFn.register_urn('beam:fn:foo', FooPayload, foo_from_proto)`
where foo_from_proto took as arguments a FooPayload and a PipelineContext.
This function can also be used as a decorator rather than passing the
callable in as the final parameter.
A corresponding to_runner_api_parameter method would be expected that
returns the tuple ('beam:fn:foo', FooPayload)
"""
def register(fn):
cls._known_urns[urn] = parameter_type, fn
return staticmethod(fn)
if fn:
# Used as a statement.
register(fn)
else:
# Used as a decorator.
return register
def to_runner_api(self, context):
urn, typed_param, components = self.to_runner_api_parameter(context)
return beam_runner_api_pb2.Coder(
spec=beam_runner_api_pb2.SdkFunctionSpec(
environment_id=(
context.default_environment_id() if context else None),
spec=beam_runner_api_pb2.FunctionSpec(
urn=urn,
payload=typed_param.SerializeToString()
if typed_param is not None else None)),
component_coder_ids=[context.coders.get_id(c) for c in components])
@classmethod
def from_runner_api(cls, coder_proto, context):
"""Converts from an SdkFunctionSpec to a Fn object.
Prefer registering a urn with its parameter type and constructor.
"""
parameter_type, constructor = cls._known_urns[coder_proto.spec.spec.urn]
return constructor(
proto_utils.parse_Bytes(coder_proto.spec.spec.payload, parameter_type),
[context.coders.get_by_id(c) for c in coder_proto.component_coder_ids],
context)
def to_runner_api_parameter(self, context):
return (
python_urns.PICKLED_CODER,
wrappers_pb2.BytesValue(value=serialize_coder(self)),
())
@staticmethod
def register_structured_urn(urn, cls):
"""Register a coder that's completely defined by its urn and its
component(s), if any, which are passed to construct the instance.
"""
cls.to_runner_api_parameter = (
lambda self, unused_context: (urn, None, self._get_component_coders()))
# pylint: disable=unused-variable
@Coder.register_urn(urn, None)
def from_runner_api_parameter(unused_payload, components, unused_context):
if components:
return cls(*components)
else:
return cls()
@Coder.register_urn(
python_urns.PICKLED_CODER, google.protobuf.wrappers_pb2.BytesValue)
def _pickle_from_runner_api_parameter(payload, components, context):
return deserialize_coder(payload.value)
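# Illustrative sketch (hypothetical class, not part of the Beam API): a minimal
# user-defined Coder only needs encode/decode, and should declare determinism
# truthfully so it can be accepted as a key coder in GroupByKey operations.
#
#   class Utf16Coder(Coder):
#     def encode(self, value):
#       return value.encode('utf-16-be')
#     def decode(self, encoded):
#       return encoded.decode('utf-16-be')
#     def is_deterministic(self):
#       return True  # one fixed byte sequence per value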
class StrUtf8Coder(Coder):
"""A coder used for reading and writing strings as UTF-8."""
def encode(self, value):
return value.encode('utf-8')
def decode(self, value):
return value.decode('utf-8')
def is_deterministic(self):
return True
class ToStringCoder(Coder):
"""A default string coder used if no sink coder is specified."""
def encode(self, value):
try: # Python 2
if isinstance(value, unicode): # pylint: disable=unicode-builtin
return value.encode('utf-8')
except NameError: # Python 3
pass
return str(value)
def decode(self, _):
raise NotImplementedError('ToStringCoder cannot be used for decoding.')
def is_deterministic(self):
return True
class FastCoder(Coder):
"""Coder subclass used when a (faster) CoderImpl is supplied directly.
The Coder class defines _create_impl in terms of encode() and decode();
this class inverts that by defining encode() and decode() in terms of
_create_impl().
"""
def encode(self, value):
"""Encodes the given object into a byte string."""
return self.get_impl().encode(value)
def decode(self, encoded):
"""Decodes the given byte string into the corresponding object."""
return self.get_impl().decode(encoded)
def estimate_size(self, value):
return self.get_impl().estimate_size(value)
def _create_impl(self):
raise NotImplementedError
class BytesCoder(FastCoder):
"""Byte string coder."""
def _create_impl(self):
return coder_impl.BytesCoderImpl()
def is_deterministic(self):
return True
def as_cloud_object(self):
return {
'@type': 'kind:bytes',
}
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return hash(type(self))
Coder.register_structured_urn(common_urns.coders.BYTES.urn, BytesCoder)
class VarIntCoder(FastCoder):
"""Variable-length integer coder."""
def _create_impl(self):
return coder_impl.VarIntCoderImpl()
def is_deterministic(self):
return True
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return hash(type(self))
Coder.register_structured_urn(common_urns.coders.VARINT.urn, VarIntCoder)
class FloatCoder(FastCoder):
"""A coder used for floating-point values."""
def _create_impl(self):
return coder_impl.FloatCoderImpl()
def is_deterministic(self):
return True
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return hash(type(self))
class TimestampCoder(FastCoder):
"""A coder used for timeutil.Timestamp values."""
def _create_impl(self):
return coder_impl.TimestampCoderImpl()
def is_deterministic(self):
return True
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return hash(type(self))
class SingletonCoder(FastCoder):
"""A coder that always encodes exactly one value."""
def __init__(self, value):
self._value = value
def _create_impl(self):
return coder_impl.SingletonCoderImpl(self._value)
def is_deterministic(self):
return True
def __eq__(self, other):
return type(self) == type(other) and self._value == other._value
def __hash__(self):
return hash(self._value)
def maybe_dill_dumps(o):
"""Pickle using cPickle or the Dill pickler as a fallback."""
# We need to use the dill pickler for objects of certain custom classes,
# including, for example, ones that contain lambdas.
try:
return pickle.dumps(o, pickle.HIGHEST_PROTOCOL)
except Exception: # pylint: disable=broad-except
return dill.dumps(o)
def maybe_dill_loads(o):
"""Unpickle using cPickle or the Dill pickler as a fallback."""
try:
return pickle.loads(o)
except Exception: # pylint: disable=broad-except
return dill.loads(o)
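# Illustrative sketch (assumption): the pair above round-trips values the stock
# pickler rejects, such as lambdas, by falling back to dill.
#
#   fn = lambda x: x + 1  # plain pickle raises PicklingError for lambdas
#   assert maybe_dill_loads(maybe_dill_dumps(fn))(41) == 42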
class _PickleCoderBase(FastCoder):
"""Base class for pickling coders."""
def is_deterministic(self):
# Note that the default coder, the PickleCoder, is not deterministic (for
    # example, the ordering of pickled entries in maps may vary across
# executions), and so is not in general suitable for usage as a key coder in
# GroupByKey operations.
return False
def as_cloud_object(self, is_pair_like=True):
value = super(_PickleCoderBase, self).as_cloud_object()
# We currently use this coder in places where we cannot infer the coder to
# use for the value type in a more granular way. In places where the
# service expects a pair, it checks for the "is_pair_like" key, in which
# case we would fail without the hack below.
if is_pair_like:
value['is_pair_like'] = True
value['component_encodings'] = [
self.as_cloud_object(is_pair_like=False),
self.as_cloud_object(is_pair_like=False)
]
return value
# We allow .key_coder() and .value_coder() to be called on PickleCoder since
# we can't always infer the return values of lambdas in ParDo operations, the
  # result of which may be used in a GroupByKey.
def is_kv_coder(self):
return True
def key_coder(self):
return self
def value_coder(self):
return self
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return hash(type(self))
class PickleCoder(_PickleCoderBase):
"""Coder using Python's pickle functionality."""
def _create_impl(self):
dumps = pickle.dumps
HIGHEST_PROTOCOL = pickle.HIGHEST_PROTOCOL
return coder_impl.CallbackCoderImpl(
lambda x: dumps(x, HIGHEST_PROTOCOL), pickle.loads)
def as_deterministic_coder(self, step_label, error_message=None):
return DeterministicFastPrimitivesCoder(self, step_label)
class DillCoder(_PickleCoderBase):
"""Coder using dill's pickle functionality."""
def _create_impl(self):
return coder_impl.CallbackCoderImpl(maybe_dill_dumps, maybe_dill_loads)
class DeterministicFastPrimitivesCoder(FastCoder):
"""Throws runtime errors when encoding non-deterministic values."""
def __init__(self, coder, step_label):
self._underlying_coder = coder
self._step_label = step_label
def _create_impl(self):
return coder_impl.DeterministicFastPrimitivesCoderImpl(
self._underlying_coder.get_impl(), self._step_label)
def is_deterministic(self):
return True
def is_kv_coder(self):
return True
def key_coder(self):
return self
def value_coder(self):
return self
class FastPrimitivesCoder(FastCoder):
"""Encodes simple primitives (e.g. str, int) efficiently.
For unknown types, falls back to another coder (e.g. PickleCoder).
"""
def __init__(self, fallback_coder=PickleCoder()):
self._fallback_coder = fallback_coder
def _create_impl(self):
return coder_impl.FastPrimitivesCoderImpl(
self._fallback_coder.get_impl())
def is_deterministic(self):
return self._fallback_coder.is_deterministic()
def as_deterministic_coder(self, step_label, error_message=None):
if self.is_deterministic():
return self
else:
return DeterministicFastPrimitivesCoder(self, step_label)
def as_cloud_object(self, is_pair_like=True):
value = super(FastCoder, self).as_cloud_object()
# We currently use this coder in places where we cannot infer the coder to
# use for the value type in a more granular way. In places where the
# service expects a pair, it checks for the "is_pair_like" key, in which
# case we would fail without the hack below.
if is_pair_like:
value['is_pair_like'] = True
value['component_encodings'] = [
self.as_cloud_object(is_pair_like=False),
self.as_cloud_object(is_pair_like=False)
]
return value
# We allow .key_coder() and .value_coder() to be called on FastPrimitivesCoder
# since we can't always infer the return values of lambdas in ParDo
  # operations, the result of which may be used in a GroupByKey.
def is_kv_coder(self):
return True
def key_coder(self):
return self
def value_coder(self):
return self
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return hash(type(self))
class Base64PickleCoder(Coder):
"""Coder of objects by Python pickle, then base64 encoding."""
# TODO(robertwb): Do base64 encoding where it's needed (e.g. in json) rather
# than via a special Coder.
def encode(self, value):
return base64.b64encode(pickle.dumps(value, pickle.HIGHEST_PROTOCOL))
def decode(self, encoded):
return pickle.loads(base64.b64decode(encoded))
def is_deterministic(self):
# Note that the Base64PickleCoder is not deterministic. See the
# corresponding comments for PickleCoder above.
return False
# We allow .key_coder() and .value_coder() to be called on Base64PickleCoder
# since we can't always infer the return values of lambdas in ParDo
  # operations, the result of which may be used in a GroupByKey.
#
# TODO(ccy): this is currently only used for KV values from Create transforms.
# Investigate a way to unify this with PickleCoder.
def is_kv_coder(self):
return True
def key_coder(self):
return self
def value_coder(self):
return self
class ProtoCoder(FastCoder):
"""A Coder for Google Protocol Buffers.
It supports both Protocol Buffers syntax versions 2 and 3. However,
the runtime version of the python protobuf library must exactly match the
version of the protoc compiler what was used to generate the protobuf
messages.
ProtoCoder is registered in the global CoderRegistry as the default coder for
any protobuf Message object.
"""
def __init__(self, proto_message_type):
self.proto_message_type = proto_message_type
def _create_impl(self):
return coder_impl.ProtoCoderImpl(self.proto_message_type)
def is_deterministic(self):
# TODO(vikasrk): A proto message can be deterministic if it does not contain
# a Map.
return False
def __eq__(self, other):
return (type(self) == type(other)
and self.proto_message_type == other.proto_message_type)
def __hash__(self):
return hash(self.proto_message_type)
@staticmethod
def from_type_hint(typehint, unused_registry):
if issubclass(typehint, google.protobuf.message.Message):
return ProtoCoder(typehint)
else:
raise ValueError(('Expected a subclass of google.protobuf.message.Message'
', but got a %s' % typehint))
class TupleCoder(FastCoder):
"""Coder of tuple objects."""
def __init__(self, components):
self._coders = tuple(components)
def _create_impl(self):
return coder_impl.TupleCoderImpl([c.get_impl() for c in self._coders])
def is_deterministic(self):
return all(c.is_deterministic() for c in self._coders)
def as_deterministic_coder(self, step_label, error_message=None):
if self.is_deterministic():
return self
else:
return TupleCoder([c.as_deterministic_coder(step_label, error_message)
for c in self._coders])
@staticmethod
def from_type_hint(typehint, registry):
return TupleCoder([registry.get_coder(t) for t in typehint.tuple_types])
def as_cloud_object(self):
if self.is_kv_coder():
return {
'@type': 'kind:pair',
'is_pair_like': True,
'component_encodings': list(
component.as_cloud_object()
for component in self._get_component_coders()
),
}
return super(TupleCoder, self).as_cloud_object()
def _get_component_coders(self):
return self.coders()
def coders(self):
return self._coders
def is_kv_coder(self):
return len(self._coders) == 2
def key_coder(self):
if len(self._coders) != 2:
raise ValueError('TupleCoder does not have exactly 2 components.')
return self._coders[0]
def value_coder(self):
if len(self._coders) != 2:
raise ValueError('TupleCoder does not have exactly 2 components.')
return self._coders[1]
def __repr__(self):
return 'TupleCoder[%s]' % ', '.join(str(c) for c in self._coders)
def __eq__(self, other):
return (type(self) == type(other)
            and self._coders == other._coders)
def __hash__(self):
return hash(self._coders)
def to_runner_api_parameter(self, context):
if self.is_kv_coder():
return common_urns.coders.KV.urn, None, self.coders()
else:
return super(TupleCoder, self).to_runner_api_parameter(context)
@Coder.register_urn(common_urns.coders.KV.urn, None)
def from_runner_api_parameter(unused_payload, components, unused_context):
return TupleCoder(components)
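# Illustrative sketch (hypothetical component choice): a two-component
# TupleCoder behaves as a KV coder, exposing its parts for GroupByKey.
#
#   kv = TupleCoder([VarIntCoder(), BytesCoder()])
#   assert kv.is_kv_coder()
#   assert kv.key_coder() == VarIntCoder()
#   assert kv.value_coder() == BytesCoder()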
class TupleSequenceCoder(FastCoder):
"""Coder of homogeneous tuple objects."""
def __init__(self, elem_coder):
self._elem_coder = elem_coder
def _create_impl(self):
return coder_impl.TupleSequenceCoderImpl(self._elem_coder.get_impl())
def is_deterministic(self):
return self._elem_coder.is_deterministic()
def as_deterministic_coder(self, step_label, error_message=None):
if self.is_deterministic():
return self
else:
return TupleSequenceCoder(
self._elem_coder.as_deterministic_coder(step_label, error_message))
@staticmethod
def from_type_hint(typehint, registry):
return TupleSequenceCoder(registry.get_coder(typehint.inner_type))
def _get_component_coders(self):
return (self._elem_coder,)
def __repr__(self):
return 'TupleSequenceCoder[%r]' % self._elem_coder
def __eq__(self, other):
return (type(self) == type(other)
            and self._elem_coder == other._elem_coder)
def __hash__(self):
return hash((type(self), self._elem_coder))
class IterableCoder(FastCoder):
"""Coder of iterables of homogeneous objects."""
def __init__(self, elem_coder):
self._elem_coder = elem_coder
def _create_impl(self):
return coder_impl.IterableCoderImpl(self._elem_coder.get_impl())
def is_deterministic(self):
return self._elem_coder.is_deterministic()
def as_deterministic_coder(self, step_label, error_message=None):
if self.is_deterministic():
return self
else:
return IterableCoder(
self._elem_coder.as_deterministic_coder(step_label, error_message))
def as_cloud_object(self):
return {
'@type': 'kind:stream',
'is_stream_like': True,
'component_encodings': [self._elem_coder.as_cloud_object()],
}
def value_coder(self):
return self._elem_coder
@staticmethod
def from_type_hint(typehint, registry):
return IterableCoder(registry.get_coder(typehint.inner_type))
def _get_component_coders(self):
return (self._elem_coder,)
def __repr__(self):
return 'IterableCoder[%r]' % self._elem_coder
def __eq__(self, other):
return (type(self) == type(other)
            and self._elem_coder == other._elem_coder)
def __hash__(self):
return hash((type(self), self._elem_coder))
Coder.register_structured_urn(common_urns.coders.ITERABLE.urn, IterableCoder)
class GlobalWindowCoder(SingletonCoder):
"""Coder for global windows."""
def __init__(self):
from apache_beam.transforms import window
super(GlobalWindowCoder, self).__init__(window.GlobalWindow())
def as_cloud_object(self):
return {
'@type': 'kind:global_window',
}
Coder.register_structured_urn(
common_urns.coders.GLOBAL_WINDOW.urn, GlobalWindowCoder)
class IntervalWindowCoder(FastCoder):
"""Coder for an window defined by a start timestamp and a duration."""
def _create_impl(self):
return coder_impl.IntervalWindowCoderImpl()
def is_deterministic(self):
return True
def as_cloud_object(self):
return {
'@type': 'kind:interval_window',
}
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return hash(type(self))
Coder.register_structured_urn(
common_urns.coders.INTERVAL_WINDOW.urn, IntervalWindowCoder)
class WindowedValueCoder(FastCoder):
"""Coder for windowed values."""
def __init__(self, wrapped_value_coder, window_coder=None):
if not window_coder:
window_coder = PickleCoder()
self.wrapped_value_coder = wrapped_value_coder
self.timestamp_coder = TimestampCoder()
self.window_coder = window_coder
def _create_impl(self):
return coder_impl.WindowedValueCoderImpl(
self.wrapped_value_coder.get_impl(),
self.timestamp_coder.get_impl(),
self.window_coder.get_impl())
def is_deterministic(self):
return all(c.is_deterministic() for c in [self.wrapped_value_coder,
self.timestamp_coder,
self.window_coder])
def as_cloud_object(self):
return {
'@type': 'kind:windowed_value',
'is_wrapper': True,
'component_encodings': [
component.as_cloud_object()
for component in self._get_component_coders()],
}
def _get_component_coders(self):
return [self.wrapped_value_coder, self.window_coder]
def is_kv_coder(self):
return self.wrapped_value_coder.is_kv_coder()
def key_coder(self):
return self.wrapped_value_coder.key_coder()
def value_coder(self):
return self.wrapped_value_coder.value_coder()
def __repr__(self):
return 'WindowedValueCoder[%s]' % self.wrapped_value_coder
def __eq__(self, other):
return (type(self) == type(other)
and self.wrapped_value_coder == other.wrapped_value_coder
and self.timestamp_coder == other.timestamp_coder
and self.window_coder == other.window_coder)
def __hash__(self):
return hash(
(self.wrapped_value_coder, self.timestamp_coder, self.window_coder))
Coder.register_structured_urn(
common_urns.coders.WINDOWED_VALUE.urn, WindowedValueCoder)
class LengthPrefixCoder(FastCoder):
"""For internal use only; no backwards-compatibility guarantees.
Coder which prefixes the length of the encoded object in the stream."""
def __init__(self, value_coder):
self._value_coder = value_coder
def _create_impl(self):
return coder_impl.LengthPrefixCoderImpl(self._value_coder)
def is_deterministic(self):
return self._value_coder.is_deterministic()
def estimate_size(self, value):
value_size = self._value_coder.estimate_size(value)
return get_varint_size(value_size) + value_size
def value_coder(self):
return self._value_coder
def as_cloud_object(self):
return {
'@type': 'kind:length_prefix',
'component_encodings': [self._value_coder.as_cloud_object()],
}
def _get_component_coders(self):
return (self._value_coder,)
def __repr__(self):
return 'LengthPrefixCoder[%r]' % self._value_coder
def __eq__(self, other):
return (type(self) == type(other)
and self._value_coder == other._value_coder)
def __hash__(self):
return hash((type(self), self._value_coder))
Coder.register_structured_urn(
common_urns.coders.LENGTH_PREFIX.urn, LengthPrefixCoder)
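# Illustrative sketch (hypothetical sizes; assumes the bytes coder estimates
# the raw payload length): the prefix is a varint, so the estimate is the
# varint width of the length plus the payload itself.
#
#   lp = LengthPrefixCoder(BytesCoder())
#   # 300 bytes needs a 2-byte varint prefix, giving 2 + 300 = 302
#   assert lp.estimate_size(b'x' * 300) == 302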
| tgroh/incubator-beam | sdks/python/apache_beam/coders/coders.py | Python | apache-2.0 | 29,611 |
# Copyright (C) 2012 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import struct
import socket
from . import packet_base
from . import packet_utils
from . import icmp
from . import udp
from . import tcp
from ryu.ofproto import inet
class ipv4(packet_base.PacketBase):
_PACK_STR = '!BBHHHBBHII'
_MIN_LEN = struct.calcsize(_PACK_STR)
def __init__(self, version, header_length, tos, total_length,
identification, flags, offset, ttl, proto, csum,
src, dst, option=None):
super(ipv4, self).__init__()
self.version = version
self.header_length = header_length
self.tos = tos
self.total_length = total_length
self.identification = identification
self.flags = flags
self.offset = offset
self.ttl = ttl
self.proto = proto
self.csum = csum
self.src = src
self.dst = dst
self.length = header_length * 4
self.option = option
@classmethod
def parser(cls, buf):
(version, tos, total_length, identification, flags, ttl, proto, csum,
src, dst) = struct.unpack_from(cls._PACK_STR, buf)
header_length = version & 0xf
version = version >> 4
offset = flags & ((1 << 13) - 1)
flags = flags >> 13
msg = cls(version, header_length, tos, total_length, identification,
flags, offset, ttl, proto, csum, src, dst)
if msg.length > ipv4._MIN_LEN:
msg.option = buf[ipv4._MIN_LEN:msg.length]
return msg, ipv4.get_packet_type(proto)
def serialize(self, payload, prev):
hdr = bytearray(self.header_length * 4)
version = self.version << 4 | self.header_length
flags = self.flags << 13 | self.offset
if self.total_length == 0:
self.total_length = self.header_length * 4 + len(payload)
struct.pack_into(ipv4._PACK_STR, hdr, 0, version, self.tos,
self.total_length, self.identification, flags,
self.ttl, self.proto, 0, self.src, self.dst)
if self.option:
assert (self.length - ipv4._MIN_LEN) >= len(self.option)
hdr[ipv4._MIN_LEN:ipv4._MIN_LEN + len(self.option)] = self.option
self.csum = socket.htons(packet_utils.checksum(hdr))
struct.pack_into('!H', hdr, 10, self.csum)
return hdr
ipv4.register_packet_type(icmp.icmp, inet.IPPROTO_ICMP)
ipv4.register_packet_type(tcp.tcp, inet.IPPROTO_TCP)
ipv4.register_packet_type(udp.udp, inet.IPPROTO_UDP)
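# Illustrative sketch (hypothetical field values): this implementation takes
# addresses as plain 32-bit integers and fills in total_length and the
# checksum during serialize() when they are given as zero.
#
#   ip=ipv4(version=4,header_length=5,tos=0,total_length=0,identification=0,
#           flags=0,offset=0,ttl=64,proto=inet.IPPROTO_UDP,csum=0,
#           src=0xc0a80101,dst=0xc0a80102) # 192.168.1.1 -> 192.168.1.2
#   hdr=ip.serialize(payload='',prev=None) # 20-byte header, ip.csum now set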
| 09zwcbupt/ryu | ryu/lib/packet/ipv4.py | Python | apache-2.0 | 3,105 |
#!/usr/bin/env python3
# vim:fileencoding=utf-8:ts=8:et:sw=4:sts=4:tw=79
"""
papatcher.py: simple python PA patcher
Copyright (c) 2014 Pyrus <[email protected]>
See the file LICENSE for copying permission.
"""
from argparse import ArgumentParser
from concurrent import futures
from contextlib import contextmanager
from getpass import getpass
from gzip import decompress
from hashlib import sha1
from http.client import OK as HTTP_OK, HTTPSConnection
from json import dumps, loads
from operator import itemgetter
from os import cpu_count, environ
from pathlib import Path
from ssl import create_default_context
from signal import signal, SIGINT
from stat import S_IEXEC
from urllib.error import URLError
from urllib.request import urlopen
import atexit
import sys
import pycurl
CPU_COUNT = cpu_count()
UBERNET_HOST = "uberent.com"
# set up paths according to XDG basedir spec
if "XDG_DATA_HOME" in environ:
DATA_HOME = Path(environ["XDG_DATA_HOME"])
else:
DATA_HOME = Path(environ["HOME"], ".local", "share")
if "XDG_CACHE_HOME" in environ:
CACHE_HOME = Path(environ["XDG_CACHE_HOME"])
else:
CACHE_HOME = Path(environ["HOME"], ".cache")
GAME_ROOT = DATA_HOME / "Planetary Annihilation"
CACHE_DIR = CACHE_HOME / "Planetary Annihilation"
class Cursor(object):
@staticmethod
def hide():
"""Hide the cursor using ANSI escape codes."""
sys.stdout.write("\033[?25l")
sys.stdout.flush()
@staticmethod
def show():
"""Show the cursor using ANSI escape codes."""
sys.stdout.write("\033[?25h")
sys.stdout.flush()
@contextmanager
def shown():
"""Show the cursor within a context."""
Cursor.show()
yield
Cursor.hide()
class ProgressMeter(object):
def __init__(self):
self.last_fraction = None
def display_progress(self, download_total, downloaded,
upload_total, uploaded):
if not int(download_total):
return
fraction = (downloaded / download_total) if downloaded else 0
# display progress only if it has advanced by at least 1 percent
if self.last_fraction and abs(self.last_fraction - fraction) < 0.01:
return
self.last_fraction = fraction
print("* Progress: {0: >4.0%} of {1} bytes.".format(
fraction, int(download_total)), end="\r")
class PAPatcher(object):
"""
PA Patcher class.
Logs in to UberNet, retrieves stream information and downloads patches.
"""
def __init__(self, ubername, password, threads, ratelimit):
"""
Initialize the patcher with UberNet credentials. They will be used to
login, check for and retrieve patches.
"""
self.credentials = dumps({"TitleId": 4,
"AuthMethod": "UberCredentials",
"UberName": ubername,
"Password": password})
ssl_context = create_default_context()
self.connection = HTTPSConnection(UBERNET_HOST,
context=ssl_context)
self.threads = threads
self.ratelimit = ratelimit
def login(self):
"""
Login to UberNet and store a session ticket if successful.
"""
# return immediately if we already have a session ticket
if hasattr(self, "_session"):
return True
# otherwise request a new one
headers = {"Content-Type": "application/json;charset=utf-8"}
self.connection.request("POST", "/GC/Authenticate", headers=headers,
body=self.credentials)
response = self.connection.getresponse()
        if response.status != HTTP_OK:
print("! Encountered an error: {0} {1}.".format(response.status,
response.reason))
return False
# get and parse response data
raw_data = response.read()
result = loads(raw_data.decode("utf-8"))
if "SessionTicket" not in result:
print("! Result doesn't contain a session ticket.")
return False
self._session = result["SessionTicket"]
print("* Got Session Ticket: {0}.".format(self._session))
return True
def get_streams(self):
"""
Request and return a list of streams we can download from UberNet.
"""
# we can't continue without a session ticket
if not hasattr(self, "_session"):
return None
headers = {"X-Authorization": self._session}
# we no longer need the session ticket
del self._session
self.connection.request("GET", "/Launcher/ListStreams?Platform=Linux",
headers=headers)
response = self.connection.getresponse()
        if response.status != HTTP_OK:
print("! Encountered an error: {0} {1}.".format(response.status,
response.reason))
return None
# get and parse response data
raw_data = response.read()
result = loads(raw_data.decode("utf-8"))
self._streams = {stream["StreamName"]: stream
for stream in result["Streams"]}
return self._streams
def get_manifest(self, stream, full):
if not hasattr(self, "_streams") or stream not in self._streams:
return False
self._stream = self._streams[stream]
# we no longer need all streams
del self._streams
print("* Downloading manifest from {0}/{1}/{2}.".format(
self._stream["DownloadUrl"],
self._stream["TitleFolder"],
self._stream["ManifestName"]))
# we still need to add the AuthSuffix for the download to work
manifest_url = "{0}/{1}/{2}{3}".format(
self._stream["DownloadUrl"],
self._stream["TitleFolder"],
self._stream["ManifestName"],
self._stream["AuthSuffix"])
try:
with urlopen(manifest_url) as response:
manifest_raw = decompress(response.read())
self._manifest = loads(manifest_raw.decode("utf-8"))
return self._verify_manifest(full)
except URLError as err:
print("! Could not retrieve manifest: {0}.".format(err.reason))
return False
def _verify_manifest(self, full):
if not hasattr(self, "_stream") or not hasattr(self, "_manifest"):
return False
# clean up cache in the process
cache_dir = CACHE_DIR / self._stream["StreamName"]
print("* Verifying contents of cache folder {0}.".format(
str(cache_dir)))
if cache_dir.exists():
bundle_names = [bundle["checksum"]
for bundle in self._manifest["bundles"]]
old_bundles = 0
for cache_file in cache_dir.iterdir():
if full or cache_file.name not in bundle_names:
cache_file.unlink()
old_bundles += 1
if old_bundles:
print("* Purged {0} old bundle(s).".format(old_bundles))
# verify bundles in parallel
with futures.ThreadPoolExecutor(max_workers=self.threads) as executor:
# this list will contain the bundles we actually need to download
self._bundles = list()
bundle_futures = [executor.submit(self._verify_bundle, bundle)
for bundle in self._manifest["bundles"]]
for completed in futures.as_completed(bundle_futures):
if not completed.result():
# cancel waiting futures
for future in bundle_futures:
future.cancel()
return False
print("* Need to get {0} bundle(s).".format(len(self._bundles)))
# if we get here there, all bundles were verified
# we no longer need the manifest
del self._manifest
return True
def _verify_bundle(self, bundle):
if not hasattr(self, "_stream") or not hasattr(self, "_bundles"):
return False
bundle_checksum = bundle["checksum"]
cache_file = CACHE_DIR / self._stream["StreamName"] / bundle_checksum
# if we don't have that file we need to download it
if not cache_file.exists():
self._bundles.append(bundle)
return True
# if we have it, make sure the checksum is correct
with cache_file.open("rb") as cache_fp:
sha = sha1()
sha.update(cache_fp.read())
checksum = sha.hexdigest()
if checksum != bundle_checksum:
self._bundles.append(bundle)
return True
# we have that file and checksums match, nothing to do
return True
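    # Illustrative sketch (assumption, not in the original file): for very
    # large bundles, the whole-file read above could be replaced by chunked
    # hashing to keep memory usage flat:
    #
    #   sha = sha1()
    #   with cache_file.open("rb") as cache_fp:
    #       for chunk in iter(lambda: cache_fp.read(1 << 20), b""):
    #           sha.update(chunk)
    #   checksum = sha.hexdigest()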
def patch(self):
if not hasattr(self, "_bundles"):
return False
with futures.ThreadPoolExecutor(max_workers=self.threads) as executor:
bundle_futures = list()
# download bundles sorted by size
self._bundles.sort(key=lambda bundle: int(bundle["size"]),
reverse=True)
for bundle in self._bundles:
bundle_checksum = bundle["checksum"]
print("* Downloading bundle {0}.".format(bundle_checksum))
if not self._download_bundle(bundle):
return False
# bundle was downloaded, start extraction in parallel
print("* Extracting bundle {0}.".format(bundle_checksum))
bundle_future = executor.submit(self._extract_bundle, bundle)
bundle_futures.append(bundle_future)
for completed in futures.as_completed(bundle_futures):
if not completed.result():
# cancel waiting futures
for future in bundle_futures:
future.cancel()
return False
# if we're here everything has been downloaded and extracted
return True
def _download_bundle(self, bundle):
if not hasattr(self, "_stream"):
return False
bundle_checksum = bundle["checksum"]
cache_base = CACHE_DIR / self._stream["StreamName"]
# make sure that path exists
if not cache_base.exists():
cache_base.mkdir(parents=True)
cache_file = cache_base / bundle_checksum
# remove the file first if it already exists
if cache_file.exists():
cache_file.unlink()
bundle_url = "{0}/{1}/hashed/{2}{3}".format(
self._stream["DownloadUrl"],
self._stream["TitleFolder"],
bundle_checksum,
self._stream["AuthSuffix"])
with cache_file.open("x+b") as cache_fp:
curl = pycurl.Curl()
curl.setopt(pycurl.URL, bundle_url)
curl.setopt(pycurl.FOLLOWLOCATION, 1)
curl.setopt(pycurl.MAXREDIRS, 5)
curl.setopt(pycurl.CONNECTTIMEOUT, 30)
curl.setopt(pycurl.NOSIGNAL, 1)
curl.setopt(pycurl.MAX_RECV_SPEED_LARGE, self.ratelimit)
curl.setopt(pycurl.WRITEDATA, cache_fp)
curl.setopt(pycurl.NOPROGRESS, 0)
progress_meter = ProgressMeter()
curl.setopt(pycurl.PROGRESSFUNCTION,
progress_meter.display_progress)
try:
curl.perform()
            except pycurl.error:
print("! Downloading bundle {0} failed!".format(
bundle_checksum))
return False
finally:
curl.close()
# verify checksum
cache_fp.seek(0)
sha = sha1()
sha.update(cache_fp.read())
checksum = sha.hexdigest()
if checksum != bundle_checksum:
print("! Checksums don't match. Expected {0}, got {1}.".format(
bundle_checksum, checksum))
return False
# everything worked out OK
return True
def _extract_bundle(self, bundle):
if not hasattr(self, "_stream"):
return False
bundle_checksum = bundle["checksum"]
cache_file = CACHE_DIR / self._stream["StreamName"] / bundle_checksum
# open cache file with gzip
with cache_file.open("rb") as cache_fp:
game_base = GAME_ROOT / self._stream["StreamName"]
# get entries sorted by offset
entries = sorted(bundle["entries"], key=itemgetter("offset"))
for entry in entries:
entry_file = game_base / entry["filename"][1:]
# make sure that path exists
if not entry_file.parent.exists():
entry_file.parent.mkdir(parents=True)
entry_offset = int(entry["offset"])
cache_fp.seek(entry_offset)
# remove the file first if it already exists
if entry_file.exists():
entry_file.unlink()
with entry_file.open("xb") as entry_fp:
# data might be compressed further, check sizeZ for that
if entry["sizeZ"] != "0":
entry_size = int(entry["sizeZ"])
raw_data = cache_fp.read(entry_size)
entry_fp.write(decompress(raw_data))
else:
entry_size = int(entry["size"])
entry_fp.write(cache_fp.read(entry_size))
# set executable
if "executable" in entry:
entry_file.chmod(entry_file.stat().st_mode | S_IEXEC)
return True
if __name__ == "__main__":
Cursor.hide()
atexit.register(Cursor.show)
signal(SIGINT, lambda sig, frame: sys.exit(SIGINT))
print("Python PA Patcher\n"
"=================")
arg_parser = ArgumentParser()
arg_parser.add_argument("-u", "--ubername",
action="store", type=str,
help="UberName used for login.")
arg_parser.add_argument("-p", "--password",
action="store", type=str,
help="Password used for login.")
arg_parser.add_argument("-s", "--stream",
action="store", type=str,
help="Stream being downloaded.")
arg_parser.add_argument("-f", "--full",
action="store_true",
help="Patch even unchanged files.")
arg_parser.add_argument("-t", "--threads",
action="store", type=int,
default=CPU_COUNT,
help="Number of threads used.")
arg_parser.add_argument("-r", "--ratelimit",
action="store", type=int,
default=0,
help="Limit downloads to bytes/sec.")
arg_parser.add_argument("--unattended",
action="store_true",
help="Don't ask any questions. If you use this "
"option, --ubername, --password and --stream "
"are mandatory")
arguments = arg_parser.parse_args()
unattended = arguments.unattended
if (unattended and not (arguments.ubername and
arguments.password and
arguments.stream)):
print("! For unattended mode you need to use "
"--ubername, --password and --stream. "
"Exiting...")
sys.exit(-1)
with Cursor.shown():
ubername = arguments.ubername or input("? UberName: ")
password = arguments.password or getpass("? Password: ")
print("* Creating patcher...")
patcher = PAPatcher(ubername, password,
arguments.threads, arguments.ratelimit)
print("* Logging in to UberNet...")
if not patcher.login():
print("! Login failed. Exiting...")
sys.exit(-1)
print("* Requesting streams...")
streams = patcher.get_streams()
if not streams:
print("! Could not acquire streams. Exiting...")
sys.exit(-1)
stream = arguments.stream
if not stream or stream not in streams:
if unattended:
print("! Invalid Stream. "
"For a selection of streams use interactive mode. "
"Exiting...")
sys.exit(-1)
with Cursor.shown():
while True:
print("* Available streams: {0}.".format(
", ".join(streams.keys())))
stream = input("? Select stream: ")
if stream in streams:
break
print("! Invalid Stream.")
print("* Downloading manifest for stream '{0}'...".format(stream))
if not patcher.get_manifest(stream, arguments.full):
print("! Could not download manifest. Exiting...")
sys.exit(-1)
print("* Patching installation for stream '{0}'...".format(stream))
if not patcher.patch():
print("! Could not patch stream. Exiting...")
sys.exit(-1)
print("* Successfully updated stream '{0}'.".format(stream))
sys.exit(0)
| pa-pyrus/papatcher | papatcher.py | Python | mit | 17,605 |
import sys
import argparse
from svtools.external_cmd import ExternalCmd
class BedpeSort(ExternalCmd):
def __init__(self):
super(BedpeSort, self).__init__('bedpesort', 'bin/bedpesort')
def description():
return 'sort a BEDPE file'
def epilog():
return 'To read in stdin and output to a file, use /dev/stdin or - as the first positional argument.'
def add_arguments_to_parser(parser):
parser.add_argument('input', metavar='<BEDPE file>', nargs='?', help='BEDPE file to sort')
parser.add_argument('output', metavar='<output file>', nargs='?', help='output file to write to')
parser.set_defaults(entry_point=run_from_args)
def command_parser():
parser = argparse.ArgumentParser(description=description())
add_arguments_to_parser(parser)
return parser
def run_from_args(args):
opts = list()
if args.input:
opts.append(args.input)
if args.output:
opts.append(args.output)
sort_cmd_runner = BedpeSort()
sort_cmd_runner.run_cmd_with_options(opts)
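# Illustrative sketch (hypothetical file names; the exact CLI entry point
# depends on how svtools is installed): both positional arguments are optional
# and are passed straight through to the wrapped bedpesort binary.
#
#   $ svtools bedpesort input.bedpe sorted.bedpe
#   $ cat input.bedpe | svtools bedpesort - sorted.bedpe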
if __name__ == "__main__":
parser = command_parser()
args = parser.parse_args()
sys.exit(args.entry_point(args))
| hall-lab/svtools | svtools/bedpesort.py | Python | mit | 1,152 |
#! /usr/bin/env python
"""
Copyright 2010-2019 University Of Southern California
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import division, print_function
# Import Python modules
import os
import unittest
# Import Broadband modules
import seqnum
import bband_utils
import cmp_bbp
from install_cfg import InstallCfg
from irikura_gen_srf import IrikuraGenSrf
class TestGenSRF(unittest.TestCase):
"""
Acceptance Test for irikura_gen_srf.py
"""
def setUp(self):
self.install = InstallCfg()
self.srcfile = "whittier_v12_11_0_fs.src"
self.outsrf = "whittier_v12_11_0_fs.srf"
self.velmodel = "nr02-vs500.fk1d"
self.sim_id = int(seqnum.get_seq_num())
indir = os.path.join(self.install.A_IN_DATA_DIR, str(self.sim_id))
tmpdir = os.path.join(self.install.A_TMP_DATA_DIR, str(self.sim_id))
outdir = os.path.join(self.install.A_OUT_DATA_DIR, str(self.sim_id))
logdir = os.path.join(self.install.A_OUT_LOG_DIR, str(self.sim_id))
refdir = os.path.join(self.install.A_TEST_REF_DIR, "irikura")
# Create all directories
bband_utils.mkdirs([indir, tmpdir, outdir, logdir],
print_cmd=False)
# Copy input files
cmd = "cp %s %s" % (os.path.join(refdir, self.velmodel), indir)
bband_utils.runprog(cmd, print_cmd=False)
cmd = "cp %s %s" % (os.path.join(refdir, self.srcfile), indir)
bband_utils.runprog(cmd, print_cmd=False)
os.chdir(tmpdir)
def tearDown(self):
os.chdir(self.install.A_TEST_DIR)
def test_gensrf(self):
"""
Test Irikura rupture generator
"""
a_ref_dir = os.path.join(self.install.A_TEST_REF_DIR, "irikura")
a_res_dir = os.path.join(self.install.A_OUT_DATA_DIR, str(self.sim_id))
# Run rupture generator
gen_srf = IrikuraGenSrf(self.velmodel, self.srcfile,
self.outsrf, "LABasin500",
sim_id=self.sim_id)
gen_srf.run()
#
# Check results
#
a_ref_file = os.path.join(a_ref_dir, self.outsrf)
a_newfile = os.path.join(a_res_dir, self.outsrf)
errmsg = ("Output file %s does not match reference file %s" %
(a_newfile, a_ref_file))
        self.assertEqual(cmp_bbp.cmp_srf(a_ref_file, a_newfile,
                                         tolerance=0.0011), 0, errmsg)
if __name__ == '__main__':
SUITE = unittest.TestLoader().loadTestsFromTestCase(TestGenSRF)
unittest.TextTestRunner(verbosity=2).run(SUITE)
| SCECcode/BBP | bbp/tests/test_gensrf.py | Python | apache-2.0 | 3,101 |
#!/usr/bin/env python
# Author: Alex Li
import multiprocessing
import sys, os, time
import db_connector, logger, MultiRunCounter
# ----------------Use Django Mysql model-------------
cur_dir = os.path.dirname(os.path.abspath(__file__))
script = 'python %s/run_command4.py' % cur_dir
try:
if sys.argv[1] == '-h':
print '''\n\033[32;1mUsage: python multiprocessing_runCMD.py track_num 'ip_list' cmd run_user\033[0m
Example: python multiprocessing_runCMD.py 34 '192.168.2.13 202.106.0.23 10.0.0.2' 'df -h' alex \n
--auto : auto add the track_mark
./multiprocessing_runCMD2.py --auto '192.168.91.171 192.168.10.43 192.168.10.160 192.168.91.171' 'df -h' alex'''
sys.exit()
except IndexError:
print "argument error,try -h for help"
sys.exit()
try:
if sys.argv[1] == "--auto":
track_num = MultiRunCounter.AddNumber()
else:
track_num = sys.argv[1]
except IndexError:
print "argument error,try -h for help"
sys.exit()
if __name__ == "__main__":
run_user = sys.argv[4]
raw_ip_list = sys.argv[2].split()
remove_duplicate_ip = set(raw_ip_list)
ip_list = list(remove_duplicate_ip)
cmd = sys.argv[3]
# batch run process
logger.RecordLogSummary('CREATE', 'BatchRunCommand', track_num, run_user, cmd, len(ip_list),
'/tmp/opt_%s.log' % track_num)
result = []
def run(host):
task = '''%s %s '%s' %s %s''' % (script, host, cmd, run_user, track_num)
os.system(task)
if len(ip_list) < 50:
thread_num = len(ip_list)
else:
thread_num = 30
pool = multiprocessing.Pool(processes=thread_num)
for ip in ip_list:
result.append(pool.apply_async(run, (ip,)))
# time.sleep(5)
# pool.terminate()
pool.close()
pool.join()
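    # All workers have finished after join(); get() re-raises any exception
    # a worker hit so failures are not silently swallowed.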
for res in result:
res.get(timeout=5)
| Hpower96/Power | backend/multiprocessing_runCMD2.py | Python | gpl-3.0 | 1,849 |
from django.db.models import Manager
from django.contrib.contenttypes.models import ContentType
from .models import Attachment
class ResourceManager(Manager):
def __init__(self, resource_type):
        # Manager.__init__() takes no positional arguments; the original
        # passed `self` a second time, which raises a TypeError.
        super().__init__()
self.resource_type = resource_type
def get_queryset(self):
ct = ContentType.objects.get_for_model(self.model)
        # Filter on the type stored in __init__ (the original passed the
        # literal string 'resource_type') and use the conventional
        # contenttypes field name `object_id` (was 'objects_id').
        resource_ids = Attachment.objects.filter(
            content_type=ct, previews__preview_type=self.resource_type
        ).values_list('object_id', flat=True).distinct()
return super().get_queryset().filter(id__in=resource_ids)
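# Illustrative usage on a hypothetical model (not part of this module):
#
#   class Video(models.Model):
#       title = models.CharField(max_length=100)
#       objects = models.Manager()
#       resources = ResourceManager('video')
#
# Video.resources.all() would then yield only the videos that have an
# attachment preview of type 'video'.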
| vladimiroff/humble-media | humblemedia/resources/managers.py | Python | mit | 604 |
"""
Support for MQTT JSON lights.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/light.mqtt_json/
"""
import logging
import json
import voluptuous as vol
import homeassistant.components.mqtt as mqtt
from homeassistant.components.light import (
ATTR_BRIGHTNESS, ATTR_RGB_COLOR, ATTR_TRANSITION, PLATFORM_SCHEMA,
ATTR_FLASH, FLASH_LONG, FLASH_SHORT, SUPPORT_BRIGHTNESS, SUPPORT_FLASH,
SUPPORT_RGB_COLOR, SUPPORT_TRANSITION, Light)
from homeassistant.const import (
CONF_NAME, CONF_OPTIMISTIC, CONF_BRIGHTNESS, CONF_RGB)
from homeassistant.components.mqtt import (
CONF_STATE_TOPIC, CONF_COMMAND_TOPIC, CONF_QOS, CONF_RETAIN)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DOMAIN = 'mqtt_json'
DEPENDENCIES = ['mqtt']
DEFAULT_NAME = 'MQTT JSON Light'
DEFAULT_OPTIMISTIC = False
DEFAULT_BRIGHTNESS = False
DEFAULT_RGB = False
DEFAULT_FLASH_TIME_SHORT = 2
DEFAULT_FLASH_TIME_LONG = 10
CONF_FLASH_TIME_SHORT = 'flash_time_short'
CONF_FLASH_TIME_LONG = 'flash_time_long'
SUPPORT_MQTT_JSON = (SUPPORT_BRIGHTNESS | SUPPORT_FLASH | SUPPORT_RGB_COLOR |
SUPPORT_TRANSITION)
# Stealing some of these from the base MQTT configs.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_QOS, default=mqtt.DEFAULT_QOS):
vol.All(vol.Coerce(int), vol.In([0, 1, 2])),
vol.Required(CONF_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_RETAIN, default=mqtt.DEFAULT_RETAIN): cv.boolean,
vol.Optional(CONF_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_BRIGHTNESS, default=DEFAULT_BRIGHTNESS): cv.boolean,
vol.Optional(CONF_RGB, default=DEFAULT_RGB): cv.boolean,
vol.Optional(CONF_FLASH_TIME_SHORT, default=DEFAULT_FLASH_TIME_SHORT):
cv.positive_int,
vol.Optional(CONF_FLASH_TIME_LONG, default=DEFAULT_FLASH_TIME_LONG):
cv.positive_int
})
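# Illustrative configuration.yaml entry for this platform (topic names and
# values are examples only, not defaults taken from the schema above):
#
#   light:
#     - platform: mqtt_json
#       name: "Kitchen"
#       state_topic: "home/kitchen/light"
#       command_topic: "home/kitchen/light/set"
#       brightness: true
#       rgb: true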
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup a MQTT JSON Light."""
add_devices([MqttJson(
hass,
config.get(CONF_NAME),
{
key: config.get(key) for key in (
CONF_STATE_TOPIC,
CONF_COMMAND_TOPIC
)
},
config.get(CONF_QOS),
config.get(CONF_RETAIN),
config.get(CONF_OPTIMISTIC),
config.get(CONF_BRIGHTNESS),
config.get(CONF_RGB),
{
key: config.get(key) for key in (
CONF_FLASH_TIME_SHORT,
CONF_FLASH_TIME_LONG
)
}
)])
class MqttJson(Light):
"""Representation of a MQTT JSON light."""
def __init__(self, hass, name, topic, qos, retain,
optimistic, brightness, rgb, flash_times):
"""Initialize MQTT JSON light."""
self._hass = hass
self._name = name
self._topic = topic
self._qos = qos
self._retain = retain
self._optimistic = optimistic or topic[CONF_STATE_TOPIC] is None
self._state = False
if brightness:
self._brightness = 255
else:
self._brightness = None
if rgb:
self._rgb = [0, 0, 0]
else:
self._rgb = None
self._flash_times = flash_times
def state_received(topic, payload, qos):
"""A new MQTT message has been received."""
values = json.loads(payload)
if values['state'] == 'ON':
self._state = True
elif values['state'] == 'OFF':
self._state = False
if self._rgb is not None:
try:
red = int(values['color']['r'])
green = int(values['color']['g'])
blue = int(values['color']['b'])
self._rgb = [red, green, blue]
except KeyError:
pass
except ValueError:
_LOGGER.warning("Invalid color value received")
if self._brightness is not None:
try:
self._brightness = int(values['brightness'])
except KeyError:
pass
except ValueError:
_LOGGER.warning('Invalid brightness value received')
self.update_ha_state()
if self._topic[CONF_STATE_TOPIC] is not None:
mqtt.subscribe(self._hass, self._topic[CONF_STATE_TOPIC],
state_received, self._qos)
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return self._brightness
@property
def rgb_color(self):
"""Return the RGB color value."""
return self._rgb
@property
def should_poll(self):
"""No polling needed for a MQTT light."""
return False
@property
def name(self):
"""Return the name of the device if any."""
return self._name
@property
def is_on(self):
"""Return true if device is on."""
return self._state
@property
def assumed_state(self):
"""Return true if we do optimistic updates."""
return self._optimistic
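    # Presumable intent of the otherwise-unused module-level
    # SUPPORT_MQTT_JSON constant; without this property Home Assistant
    # assumes the light supports no optional features.
    @property
    def supported_features(self):
        """Flag supported features."""
        return SUPPORT_MQTT_JSON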
def turn_on(self, **kwargs):
"""Turn the device on."""
should_update = False
message = {'state': 'ON'}
if ATTR_RGB_COLOR in kwargs:
message['color'] = {
'r': kwargs[ATTR_RGB_COLOR][0],
'g': kwargs[ATTR_RGB_COLOR][1],
'b': kwargs[ATTR_RGB_COLOR][2]
}
if self._optimistic:
self._rgb = kwargs[ATTR_RGB_COLOR]
should_update = True
if ATTR_FLASH in kwargs:
flash = kwargs.get(ATTR_FLASH)
if flash == FLASH_LONG:
message['flash'] = self._flash_times[CONF_FLASH_TIME_LONG]
elif flash == FLASH_SHORT:
message['flash'] = self._flash_times[CONF_FLASH_TIME_SHORT]
if ATTR_TRANSITION in kwargs:
message['transition'] = kwargs[ATTR_TRANSITION]
if ATTR_BRIGHTNESS in kwargs:
message['brightness'] = int(kwargs[ATTR_BRIGHTNESS])
if self._optimistic:
self._brightness = kwargs[ATTR_BRIGHTNESS]
should_update = True
mqtt.publish(self._hass, self._topic[CONF_COMMAND_TOPIC],
json.dumps(message), self._qos, self._retain)
if self._optimistic:
# Optimistically assume that the light has changed state.
self._state = True
should_update = True
if should_update:
self.update_ha_state()
def turn_off(self, **kwargs):
"""Turn the device off."""
message = {'state': 'OFF'}
if ATTR_TRANSITION in kwargs:
message['transition'] = kwargs[ATTR_TRANSITION]
mqtt.publish(self._hass, self._topic[CONF_COMMAND_TOPIC],
json.dumps(message), self._qos, self._retain)
if self._optimistic:
# Optimistically assume that the light has changed state.
self._state = False
self.update_ha_state()
| srcLurker/home-assistant | homeassistant/components/light/mqtt_json.py | Python | mit | 7,405 |
"""
WSGI config for news_site project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "news_site.settings")
application = get_wsgi_application()
| maistrovas/News_app | news_site/wsgi.py | Python | mit | 395 |
# fr5969 model parameters
reg_size = 16
reg_bits = 20
mem_bits = 8
word_bits = 2 * mem_bits
ram_size = 2048
ram_start = 0x1c00
fram_size = 64512
fram_start = 0x4400
ivec_start = 0xff90
ivec_count = 56
lower_start = fram_start
lower_size = 0xbb80
upper_start = 0x10000
upper_size = 0x4000
resetvec = 0xfffe
reg_bitmask = (2 ** reg_bits) - 1
mem_bitmask = (2 ** mem_bits) - 1
import msp_base as base
import utils
def iotrace_init():
trace = []
iotrace_next(trace)
return trace
def iotrace_next(trace):
trace.append({'r':{'reg':[], 'mem':[]},
'w':{'reg':[], 'mem':[]}})
def iotrace_append(trace, rw, regmem, addr, value):
trace[-1][rw][regmem].append((addr, value))
def invoke_mmio(addr, v, handlers):
if addr in handlers:
return handlers[addr](v)
else:
raise base.ExecuteError('Unmapped address: {:05x}'.format(addr))
def mk_readreg(regs, trace = None):
if trace is None:
def readreg(r):
return regs[r]
else:
def readreg(r):
v = regs[r]
iotrace_append(trace, 'r', 'reg', r, v)
return v
return readreg
def mk_writereg(regs, trace = None):
if trace is None:
def writereg(r, regval):
assert isinstance(regval, int) and 0 <= regval and regval < 2**reg_bits
if r != 3:
regs[r] = regval
return
else:
def writereg(r, regval):
assert isinstance(regval, int) and 0 <= regval and regval < 2**reg_bits
iotrace_append(trace, 'w', 'reg', r, regval)
if r != 3:
regs[r] = regval
return
return writereg
def mk_read8(ram, fram, handlers, trace = None):
if trace is None:
def read8(addr):
if ram_start <= addr and addr < ram_start + ram_size:
v = ram[addr - ram_start]
elif fram_start <= addr and addr < fram_start + fram_size:
v = fram[addr - fram_start]
else:
v = invoke_mmio(addr, None, handlers)
#print('read {:05x} == {:02x}, notrace'.format(addr, v))
return v
else:
def read8(addr):
if ram_start <= addr and addr < ram_start + ram_size:
v = ram[addr - ram_start]
elif fram_start <= addr and addr < fram_start + fram_size:
v = fram[addr - fram_start]
else:
v = invoke_mmio(addr, None, handlers)
iotrace_append(trace, 'r', 'mem', addr, v)
#print('read {:05x} == {:02x}, trace'.format(addr, v))
return v
return read8
def mk_write8(ram, fram, handlers, trace = None):
if trace is None:
def write8(addr, byte):
#print('write {:05x} <- {:02x}, notrace'.format(addr, byte))
assert isinstance(byte, int) and 0 <= byte and byte < 2**mem_bits
if ram_start <= addr and addr < ram_start + ram_size:
ram[addr - ram_start] = byte
elif fram_start <= addr and addr < fram_start + fram_size:
fram[addr - fram_start] = byte
else:
invoke_mmio(addr, byte, handlers)
return
else:
def write8(addr, byte):
#print('write {:05x} <- {:02x}, trace'.format(addr, byte))
assert isinstance(byte, int) and 0 <= byte and byte < 2**mem_bits
iotrace_append(trace, 'w', 'mem', addr, byte)
if ram_start <= addr and addr < ram_start + ram_size:
ram[addr - ram_start] = byte
elif fram_start <= addr and addr < fram_start + fram_size:
fram[addr - fram_start] = byte
else:
invoke_mmio(addr, byte, handlers)
return
return write8
def mk_read16(read8):
# little endian
def read16(addr):
lo_bits = read8(addr)
hi_bits = read8(addr+1)
return lo_bits | (hi_bits << 8)
return read16
def mk_write16(write8):
# little endian
def write16(addr, word):
write8(addr, word & 0xff)
write8(addr+1, (word >> 8) & 0xff)
return
return write16
class Model(object):
def __init__(self, trace = None):
self.regs = [0 for _ in range(reg_size)]
self.ram = [(0xff if i % 2 == 0 else 0x3f) for i in range(ram_size)]
self.fram = [0xff for _ in range(fram_size)]
self.mmio_read = {}
self.mmio_write = {}
self.readreg = mk_readreg(self.regs, trace=trace)
self.writereg = mk_writereg(self.regs, trace=trace)
self.read8 = mk_read8(self.ram, self.fram, self.mmio_read, trace=trace)
self.write8 = mk_write8(self.ram, self.fram, self.mmio_write, trace=trace)
def set_mmio_read_handler(self, addr, handler):
assert (isinstance(addr, int) and not ((ram_start <= addr and addr < ram_start+ram_size)
or (fram_start <= addr and addr < fram_start+fram_size)))
self.mmio_read[addr] = handler
def set_mmio_write_handler(self, addr, handler):
assert (isinstance(addr, int) and not ((ram_start <= addr and addr < ram_start+ram_size)
or (fram_start <= addr and addr < fram_start+fram_size)))
self.mmio_write[addr] = handler
def mmio_handle_default(self, addr, initial_value = 0):
buf = [initial_value]
def read_handler(v):
return buf[0]
def write_handler(v):
buf[0] = v
return
self.set_mmio_read_handler(addr, read_handler)
self.set_mmio_write_handler(addr, write_handler)
def dump(self, check=True):
print(repr(self))
print('-- registers --')
if check:
regdump = utils.describe_regs(self.regs)
assert(self.regs == utils.parse_regs(regdump))
print(regdump)
print('-- ram --')
ramidump = utils.describe_interesting_memory(self.ram, ram_start, fill=[0xff, 0x3f])
if check:
ramdump = utils.describe_memory(self.ram, ram_start)
assert(ramidump == utils.summarize_interesting(ramdump, fill=[0xff, 0x3f]))
assert(self.ram == utils.parse_memory(ramdump))
print(ramidump)
print('-- fram --')
framidump = utils.describe_interesting_memory(self.fram, fram_start, fill=[0xff])
if check:
framdump = utils.describe_memory(self.fram, fram_start)
assert(framidump == utils.summarize_interesting(framdump, fill=[0xff]))
assert(self.fram == utils.parse_memory(framdump))
print(framidump)
def segments(self):
return (utils.interesting_regions(self.ram, ram_start, fill=[0xff, 0x3f], align=8) +
utils.interesting_regions(self.fram, fram_start, fill=[0xff], align=8))
def entry(self):
return mk_read16(self.read8)(resetvec)
def registers(self):
return [self.readreg(i) for i in range(len(self.regs))]
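if __name__ == '__main__':
    # Illustrative usage sketch (not part of the original module): build a
    # traced model, map a default MMIO register at a hypothetical address
    # outside RAM/FRAM, and exercise the 8/16-bit accessors.
    trace = iotrace_init()
    model = Model(trace=trace)
    model.mmio_handle_default(0x01fe)  # hypothetical MMIO address
    read16 = mk_read16(model.read8)
    write16 = mk_write16(model.write8)
    write16(ram_start, 0xbeef)
    assert read16(ram_start) == 0xbeef
    model.writereg(4, 0x12345 & reg_bitmask)
    assert model.readreg(4) == 0x12345 & reg_bitmask
    print('reset vector entry point: {:05x}'.format(model.entry()))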
| billzorn/msp-pymodel | lib/msp_fr5969_model.py | Python | mit | 7,045 |
"""
mediatum - a multimedia content repository
Copyright (C) 2007 Arne Seifert <[email protected]>
Copyright (C) 2007 Matthias Kramm <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from web.admin.adminutils import adminNavigation
def validate(req, op):
v = {}
v["navigation"] = adminNavigation()
return req.getTAL("/web/admin/modules/default.html", v, macro="view")
| mediatum/mediatum | web/admin/modules/default.py | Python | gpl-3.0 | 983 |
from __future__ import print_function
from pprint import pprint as pp
from yamicache import Cache
c = Cache(hashing=False)
@c.cached()
def function1(argument, power=4, addition=0, division=2):
return argument ** power + addition / division
def test_main():
"""use default args"""
# `function1` uses default arguments. These calls are all equivalent, so
# there should only be 1 item in cache.
function1(1)
function1(1, 4)
function1(1, 4, 0)
function1(1, 4, addition=0, division=2)
assert len(c) == 1
pp(c._data_store)
def main():
test_main()
if __name__ == "__main__":
main()
| mtik00/yamicache | tests/test_default_args.py | Python | mit | 638 |
import sys
import glob
import pkgutil
import os
import fnmatch
from setuptools import setup
from pycmm.settings import DNASEQ_SLURM_MONITOR_PIPELINE_BIN
from pycmm.settings import DUMMY_TABLE_ANNOVAR_BIN
from pycmm.settings import MUTREP_SLURM_MONITOR_PIPELINE_BIN
from pycmm.settings import MUTREP_FAMILY_REPORT_BIN
from pycmm.settings import MUTREP_SUMMARY_REPORT_BIN
from pycmm.settings import MUTREPDB_SEQ_REPORT_BIN
from pycmm.settings import PLINK_SLURM_MONITOR_PIPELINE_BIN
from pycmm.settings import PLINK_HAP_ASSOCS_REPORT_BIN
from pycmm.settings import PLINK_MERGE_HAP_ASSOCS_BIN
from pycmm.settings import DBMS_EXECUTE_DB_JOBS_BIN
def opj(*args):
path = os.path.join(*args)
return os.path.normpath(path)
def find_data_files(srcdir, *wildcards, **kw):
# get a list of all files under the srcdir matching wildcards,
# returned in a format to be used for install_data
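    # NOTE: os.path.walk() below exists only in Python 2 (it was removed in
    # Python 3), so this setup script assumes a Python 2 interpreter.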
def walk_helper(arg, dirname, files):
if '.svn' in dirname:
return
names = []
lst, wildcards = arg
for wc in wildcards:
wc_name = opj(dirname, wc)
for f in files:
filename = opj(dirname, f)
if fnmatch.fnmatch(filename, wc_name) and not os.path.isdir(filename):
names.append(filename)
if names:
lst.append( (dirname, names ) )
file_list = []
recursive = kw.get('recursive', True)
if recursive:
os.path.walk(srcdir, walk_helper, (file_list, wildcards))
else:
walk_helper((file_list, wildcards),
srcdir,
[os.path.basename(f) for f in glob.glob(opj(srcdir, '*'))])
return file_list
#csv_files = find_data_files('data/', '*.csv')
all_data_files = find_data_files('data/', '*.*')
#all_data_files = find_data_files('script/', '*.*')
setup(
name='pyCMM',
version='0.0.1',
author='Jessada Thutkawkorapin',
author_email='[email protected]',
packages=['pycmm',
'pycmm.app',
'pycmm.utils',
'pycmm.cmmlib',
'pycmm.flow',
'pycmm.proc',
'pycmm.proc.db',
'pycmm.proc.mutrep',
],
scripts=['bin/'+DNASEQ_SLURM_MONITOR_PIPELINE_BIN,
'bin/pyCMM-dnaseq-pipeline',
'bin/pyCMM-dnaseq-create-job-setup-file',
'bin/pyCMM-cmmdb-cal-mut-stat',
'bin/pyCMM-cmmdb-vcf-AF-to-annovar',
'bin/pyCMM-cmmdb-table-annovar',
'bin/pyCMM-cmmdb-create-job-setup-file',
'bin/'+DUMMY_TABLE_ANNOVAR_BIN,
'bin/'+MUTREP_SLURM_MONITOR_PIPELINE_BIN,
'bin/pyCMM-mutrep-pipeline',
'bin/pyCMM-mutrep-mutation-reports',
'bin/'+MUTREP_FAMILY_REPORT_BIN,
'bin/'+MUTREP_SUMMARY_REPORT_BIN,
'bin/pyCMM-mutrep-create-job-setup-file',
'bin/pyCMM-mutrepdb-create-job-setup-file',
'bin/'+MUTREPDB_SEQ_REPORT_BIN,
'bin/pyCMM-mutrepdb-controller',
'bin/pyCMM-plink-create-job-setup-file',
'bin/pyCMM-plink-pipeline',
'bin/'+PLINK_SLURM_MONITOR_PIPELINE_BIN,
'bin/'+PLINK_HAP_ASSOCS_REPORT_BIN,
'bin/'+PLINK_MERGE_HAP_ASSOCS_BIN,
'bin/pyCMM-dbms-controller',
'bin/pyCMM-dbms-create-job-setup-file',
'bin/'+DBMS_EXECUTE_DB_JOBS_BIN,
],
package=['pyCMM'],
# package_data={'': ['data/CBV/*.cbv']
# },
data_files=all_data_files,
url='http://pypi.python.org/pypi/pyCMM/',
license='LICENSE.txt',
description='Python packages for my sequencing data analysis at Center of Molecular Medicine, Karolinska Institute, Stockholm, Sweden',
long_description=open('README.md').read(),
install_requires=[
"pysam >= 0.7",
"pyvcf >= 0.6.0",
"pyaml >= 15.5.7",
"openpyxl >= 2.3.3",
"xlsxwriter >= 0.5.3",
],
)
| jessada/pyCMM | setup.py | Python | gpl-2.0 | 4,017 |
""" Fixtures for pyCurrentCost project """ | linkdd/phase-currentcost | tests/fixtures/__init__.py | Python | mit | 42 |
# Copyright 2015 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A Client for interacting with the Resource Manager API."""
import six
from google.api_core import page_iterator
from google.cloud.client import Client as BaseClient
from google.cloud.resource_manager._http import Connection
from google.cloud.resource_manager.project import Project
class Client(BaseClient):
"""Client to bundle configuration needed for API requests.
See
https://cloud.google.com/resource-manager/reference/rest/
for more information on this API.
Automatically get credentials::
>>> from google.cloud import resource_manager
>>> client = resource_manager.Client()
:type credentials: :class:`~google.auth.credentials.Credentials`
:param credentials: (Optional) The OAuth2 Credentials to use for this
client. If not passed (and if no ``_http`` object is
passed), falls back to the default inferred from the
environment.
:type _http: :class:`~requests.Session`
:param _http: (Optional) HTTP object to make requests. Can be any object
that defines ``request()`` with the same interface as
:meth:`requests.Session.request`. If not passed, an
``_http`` object is created that is bound to the
``credentials`` for the current object.
This parameter should be considered private, and could
change in the future.
"""
SCOPE = ('https://www.googleapis.com/auth/cloud-platform',)
"""The scopes required for authenticating as a Resouce Manager consumer."""
def __init__(self, credentials=None, _http=None):
super(Client, self).__init__(
credentials=credentials, _http=_http)
self._connection = Connection(self)
def new_project(self, project_id, name=None, labels=None):
"""Create a project bound to the current client.
Use :meth:`Project.reload() \
<google.cloud.resource_manager.project.Project.reload>` to retrieve
project metadata after creating a
:class:`~google.cloud.resource_manager.project.Project` instance.
.. note:
This does not make an API call.
:type project_id: str
:param project_id: The ID for this project.
:type name: str
:param name: The display name of the project.
:type labels: dict
:param labels: A list of labels associated with the project.
:rtype: :class:`~google.cloud.resource_manager.project.Project`
:returns: A new instance of a
:class:`~google.cloud.resource_manager.project.Project`
**without** any metadata loaded.
"""
return Project(project_id=project_id,
client=self, name=name, labels=labels)
def fetch_project(self, project_id):
"""Fetch an existing project and it's relevant metadata by ID.
.. note::
If the project does not exist, this will raise a
:class:`NotFound <google.cloud.exceptions.NotFound>` error.
:type project_id: str
:param project_id: The ID for this project.
:rtype: :class:`~google.cloud.resource_manager.project.Project`
:returns: A :class:`~google.cloud.resource_manager.project.Project`
with metadata fetched from the API.
"""
project = self.new_project(project_id)
project.reload()
return project
def list_projects(self, filter_params=None, page_size=None):
"""List the projects visible to this client.
Example::
>>> from google.cloud import resource_manager
>>> client = resource_manager.Client()
>>> for project in client.list_projects():
... print(project.project_id)
List all projects with label ``'environment'`` set to ``'prod'``
(filtering by labels)::
>>> from google.cloud import resource_manager
>>> client = resource_manager.Client()
>>> env_filter = {'labels.environment': 'prod'}
>>> for project in client.list_projects(env_filter):
... print(project.project_id)
See
https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects/list
Complete filtering example::
>>> project_filter = { # Return projects with...
... 'name': 'My Project', # name set to 'My Project'.
... 'id': 'my-project-id', # id set to 'my-project-id'.
... 'labels.stage': 'prod', # the label 'stage' set to 'prod'
... 'labels.color': '*' # a label 'color' set to anything.
... }
>>> client.list_projects(project_filter)
:type filter_params: dict
:param filter_params: (Optional) A dictionary of filter options where
each key is a property to filter on, and each
value is the (case-insensitive) value to check
(or the glob ``*`` to check for existence of the
property). See the example above for more
details.
:type page_size: int
:param page_size: (Optional) Maximum number of projects to return in a
single page. If not passed, defaults to a value set
by the API.
:rtype: :class:`~google.api_core.page_iterator.Iterator`
:returns: Iterator of all
:class:`~google.cloud.resource_manager.project.Project`.
that the current user has access to.
"""
extra_params = {}
if page_size is not None:
extra_params['pageSize'] = page_size
if filter_params is not None:
extra_params['filter'] = [
'{}:{}'.format(key, value)
for key, value in six.iteritems(filter_params)
]
return page_iterator.HTTPIterator(
client=self,
api_request=self._connection.api_request,
path='/projects',
item_to_value=_item_to_project,
items_key='projects',
extra_params=extra_params)
def _item_to_project(iterator, resource):
"""Convert a JSON project to the native object.
:type iterator: :class:`~google.api_core.page_iterator.Iterator`
:param iterator: The iterator that has retrieved the item.
:type resource: dict
:param resource: A resource to be converted to a project.
:rtype: :class:`.Project`
:returns: The next project in the page.
"""
return Project.from_api_repr(resource, client=iterator.client)
| jonparrott/google-cloud-python | resource_manager/google/cloud/resource_manager/client.py | Python | apache-2.0 | 7,343 |
import os
import psutil
COEFFICIENT = 2 ** 20
def get_other_ram() -> float:
    """RAM used by other processes, in MiB."""
    return get_ram_used() - get_process_ram()
def get_total_ram() -> float:
    """Total system RAM, in MiB."""
    mem = psutil.virtual_memory()
    return mem.total / COEFFICIENT
def get_process_ram() -> float:
    """RAM (RSS) used by this process, in MiB."""
    process = psutil.Process(os.getpid())
    return process.memory_info()[0] / COEFFICIENT
def get_ram_used() -> float:
    """RAM used by all processes, in MiB."""
    mem = psutil.virtual_memory()
    # The original indexed mem[4], which is the `free` field of
    # psutil.virtual_memory(); the named attribute reports used memory.
    return mem.used / COEFFICIENT
def get_cpu() -> list:
    """Get per-core CPU usage percentages."""
    # percpu=True returns one value per core, matching the `list` annotation;
    # the bare call returns a single overall float.
    return psutil.cpu_percent(percpu=True)
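if __name__ == "__main__":
    # Illustrative smoke test (not part of the original module): print each
    # metric so the helpers can be sanity-checked from the command line.
    print("total RAM (MiB):", get_total_ram())
    print("used RAM (MiB):", get_ram_used())
    print("this process (MiB):", get_process_ram())
    print("other processes (MiB):", get_other_ram())
    print("per-core CPU %:", get_cpu())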
| timlyo/personalWebsite | website/system.py | Python | apache-2.0 | 595 |