code stringlengths 3-1.05M | repo_name stringlengths 5-104 | path stringlengths 4-251 | language stringclasses 1 value | license stringclasses 15 values | size int64 3-1.05M
---|---|---|---|---|---
# -*- encoding: utf-8 -*-
"""
This class has been adapted from Twython. Thanks to Erik Scheffers.
"""
import urllib
import urlparse
import inspect
import oauth2 as oauth
from settings import REQUEST_TOKEN_URL
from settings import ACCESS_TOKEN_URL
from settings import AUTHORIZE_URL
from settings import AUTHENTICATE_URL
# Detect if oauth2 supports the callback_url argument to request
OAUTH_LIB_SUPPORTS_CALLBACK = 'callback_url'\
in inspect.getargspec(oauth.Client.request).args
class AuthError(AttributeError):
"""
Raised when you try to access a protected resource and it fails due\
to some issue with your authentication.
"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return repr(self.msg)
class Twitter(object):
def __init__(self, twitter_token=None, twitter_secret=None,
oauth_token=None, oauth_token_secret=None, headers=None,
callback_url=None):
# Needed for hitting that there API.
self.request_token_url = REQUEST_TOKEN_URL
self.access_token_url = ACCESS_TOKEN_URL
self.authorize_url = AUTHORIZE_URL
self.authenticate_url = AUTHENTICATE_URL
self.twitter_token = twitter_token
self.twitter_secret = twitter_secret
self.oauth_token = oauth_token
self.oauth_secret = oauth_token_secret
self.callback_url = callback_url
        # If there are headers, set them, otherwise be an embarrassing parent
        # for their own good.
self.headers = headers
if self.headers is None:
self.headers = {'User-agent': 'Django-Twitter Library V1.0'}
consumer = None
token = None
if self.twitter_token is not None and self.twitter_secret is not None:
consumer = oauth.Consumer(self.twitter_token, self.twitter_secret)
if self.oauth_token is not None and self.oauth_secret is not None:
token = oauth.Token(oauth_token, oauth_token_secret)
# Filter down through the possibilities here - if they have a token, \
# if they're first stage, etc.
if consumer is not None and token is not None:
self.client = oauth.Client(consumer, token)
elif consumer is not None:
self.client = oauth.Client(consumer)
def get_authentication_tokens(self):
"""
        get_authentication_tokens(self)
Returns an authorization URL for a user to hit.
"""
callback_url = self.callback_url or 'oob'
request_args = {}
if OAUTH_LIB_SUPPORTS_CALLBACK:
request_args['callback_url'] = callback_url
resp, content = self.client.request(self.request_token_url,
"GET", **request_args)
if resp['status'] != '200':
raise AuthError("Seems something couldn't be verified "\
"withyour OAuth junk. Error: %s, Message: %s" \
% (resp['status'], content))
request_tokens = dict(urlparse.parse_qsl(content))
oauth_callback_confirmed = request_tokens\
.get('oauth_callback_confirmed') == 'true'
if not OAUTH_LIB_SUPPORTS_CALLBACK and callback_url != 'oob'\
and oauth_callback_confirmed:
import warnings
warnings.warn("oauth2 library doesn't support OAuth 1.0a"\
" type callback, but remote requires it")
oauth_callback_confirmed = False
auth_url_params = {'oauth_token': request_tokens['oauth_token']}
# Use old-style callback argument
if callback_url != 'oob' and not oauth_callback_confirmed:
auth_url_params['oauth_callback'] = callback_url
request_tokens['auth_url'] = self.authenticate_url + '?'\
+ urllib.urlencode(auth_url_params)
return request_tokens
def get_authorized_tokens(self):
"""
get_authorized_tokens
Returns authorized tokens after they go through the auth_url phase.
"""
resp, content = self.client.request(self.access_token_url, "GET")
return dict(urlparse.parse_qsl(content))
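
# A minimal sketch of the three-legged OAuth dance with this class (the
# consumer key/secret and callback are placeholders, not part of this module):
#
#     twitter = Twitter(twitter_token='<consumer key>',
#                       twitter_secret='<consumer secret>',
#                       callback_url='http://example.com/callback')
#     tokens = twitter.get_authentication_tokens()
#     # redirect the user to tokens['auth_url']; once they authorize:
#     authed = Twitter(twitter_token='<consumer key>',
#                      twitter_secret='<consumer secret>',
#                      oauth_token=tokens['oauth_token'],
#                      oauth_token_secret=tokens['oauth_token_secret'])
#     access_tokens = authed.get_authorized_tokens()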
| ahmontero/django-twitter | twitter/toauth.py | Python | mit | 4,149 |
"""Tests for certbot.configuration."""
import os
import unittest
import mock
from certbot import errors
class NamespaceConfigTest(unittest.TestCase):
"""Tests for certbot.configuration.NamespaceConfig."""
def setUp(self):
self.namespace = mock.MagicMock(
config_dir='/tmp/config', work_dir='/tmp/foo',
logs_dir="/tmp/bar", foo='bar',
server='https://acme-server.org:443/new',
tls_sni_01_port=1234, http01_port=4321)
from certbot.configuration import NamespaceConfig
self.config = NamespaceConfig(self.namespace)
def test_init_same_ports(self):
self.namespace.tls_sni_01_port = 4321
from certbot.configuration import NamespaceConfig
self.assertRaises(errors.Error, NamespaceConfig, self.namespace)
def test_proxy_getattr(self):
self.assertEqual(self.config.foo, 'bar')
self.assertEqual(self.config.work_dir, '/tmp/foo')
def test_server_path(self):
self.assertEqual(['acme-server.org:443', 'new'],
self.config.server_path.split(os.path.sep))
self.namespace.server = ('http://user:[email protected]:443'
'/p/a/t/h;parameters?query#fragment')
self.assertEqual(['user:[email protected]:443', 'p', 'a', 't', 'h'],
self.config.server_path.split(os.path.sep))
@mock.patch('certbot.configuration.constants')
def test_dynamic_dirs(self, constants):
constants.ACCOUNTS_DIR = 'acc'
constants.BACKUP_DIR = 'backups'
constants.CSR_DIR = 'csr'
constants.IN_PROGRESS_DIR = '../p'
constants.KEY_DIR = 'keys'
constants.TEMP_CHECKPOINT_DIR = 't'
self.assertEqual(
self.config.accounts_dir, '/tmp/config/acc/acme-server.org:443/new')
self.assertEqual(self.config.backup_dir, '/tmp/foo/backups')
self.assertEqual(self.config.csr_dir, '/tmp/config/csr')
self.assertEqual(self.config.in_progress_dir, '/tmp/foo/../p')
self.assertEqual(self.config.key_dir, '/tmp/config/keys')
self.assertEqual(self.config.temp_checkpoint_dir, '/tmp/foo/t')
def test_absolute_paths(self):
from certbot.configuration import NamespaceConfig
config_base = "foo"
work_base = "bar"
logs_base = "baz"
server = "mock.server"
mock_namespace = mock.MagicMock(spec=['config_dir', 'work_dir',
'logs_dir', 'http01_port',
'tls_sni_01_port',
'domains', 'server'])
mock_namespace.config_dir = config_base
mock_namespace.work_dir = work_base
mock_namespace.logs_dir = logs_base
mock_namespace.server = server
config = NamespaceConfig(mock_namespace)
self.assertTrue(os.path.isabs(config.config_dir))
self.assertEqual(config.config_dir,
os.path.join(os.getcwd(), config_base))
self.assertTrue(os.path.isabs(config.work_dir))
self.assertEqual(config.work_dir,
os.path.join(os.getcwd(), work_base))
self.assertTrue(os.path.isabs(config.logs_dir))
self.assertEqual(config.logs_dir,
os.path.join(os.getcwd(), logs_base))
self.assertTrue(os.path.isabs(config.accounts_dir))
self.assertTrue(os.path.isabs(config.backup_dir))
self.assertTrue(os.path.isabs(config.csr_dir))
self.assertTrue(os.path.isabs(config.in_progress_dir))
self.assertTrue(os.path.isabs(config.key_dir))
self.assertTrue(os.path.isabs(config.temp_checkpoint_dir))
@mock.patch('certbot.configuration.constants')
def test_renewal_dynamic_dirs(self, constants):
constants.ARCHIVE_DIR = 'a'
constants.LIVE_DIR = 'l'
constants.RENEWAL_CONFIGS_DIR = 'renewal_configs'
self.assertEqual(self.config.default_archive_dir, '/tmp/config/a')
self.assertEqual(self.config.live_dir, '/tmp/config/l')
self.assertEqual(
self.config.renewal_configs_dir, '/tmp/config/renewal_configs')
def test_renewal_absolute_paths(self):
from certbot.configuration import NamespaceConfig
config_base = "foo"
work_base = "bar"
logs_base = "baz"
mock_namespace = mock.MagicMock(spec=['config_dir', 'work_dir',
'logs_dir', 'http01_port',
'tls_sni_01_port',
'domains', 'server'])
mock_namespace.config_dir = config_base
mock_namespace.work_dir = work_base
mock_namespace.logs_dir = logs_base
config = NamespaceConfig(mock_namespace)
self.assertTrue(os.path.isabs(config.default_archive_dir))
self.assertTrue(os.path.isabs(config.live_dir))
self.assertTrue(os.path.isabs(config.renewal_configs_dir))
def test_get_and_set_attr(self):
self.config.foo = 42
self.assertEqual(self.config.namespace.foo, 42)
self.config.namespace.bar = 1337
self.assertEqual(self.config.bar, 1337)
if __name__ == '__main__':
unittest.main() # pragma: no cover
| nohona/cron-crm | usr/local/certbot/certbot/tests/configuration_test.py | Python | gpl-3.0 | 5,331 |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .manager import Manager
class VanillaManager(Manager):
"""
"""
def __init__(self, monolithe_config):
"""
"""
super(VanillaManager, self).__init__(monolithe_config=monolithe_config, target_name='VanillaWriter')
def execute(self, output_path):
"""
"""
klass = self.get_managed_class()
if klass:
writer = klass(monolithe_config=self.monolithe_config, output_path=output_path)
writer.perform()
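
# A minimal usage sketch (monolithe_config and the output path are
# placeholders, not defined in this module):
#
#     manager = VanillaManager(monolithe_config=monolithe_config)
#     manager.execute(output_path='./codegen')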
| nuagenetworks/monolithe | monolithe/generators/managers/vanillamanager.py | Python | bsd-3-clause | 2,093 |
from django.template import Context
from django.template.loader import get_template
def get_header_navbar(acc_type, name, title):
    header = get_template("header.html").render(
        Context({
            'type': acc_type,
            'name': name,
            'title': title,
            'loggedIn': True}))
    navbar = get_template("navbar.html").render(
        Context({
            'loggedIn': True,
            'type': acc_type,
            'name': name}))
    return {'header': header, 'navbar': navbar}
| varun-verma11/CodeDrill | djangoSRV/Views/utils.py | Python | bsd-2-clause | 586 |
# Copyright 2013 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
from oslo_serialization import jsonutils as json
from oslo_utils import timeutils
from glance.api.v2 import tasks
from glance.tests.integration.v2 import base
TENANT1 = '6838eb7b-6ded-434a-882c-b344c77fe8df'
TENANT2 = '2c014f32-55eb-467d-8fcb-4bd706012f81'
TENANT3 = '5a3e60e8-cfa9-4a9e-a90a-62b42cea92b8'
TENANT4 = 'c6c87f25-8a94-47ed-8c83-053c25f42df4'
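# NOTE: with the 'fakeauth' api/registry flavor set up in TestTasksApi below,
# the X-Auth-Token value is presumably parsed as '<user>:<tenant>:<roles>'
# rather than being validated as a real Keystone token.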
def minimal_task_headers(owner='tenant1'):
headers = {
'X-Auth-Token': 'user1:%s:admin' % owner,
'Content-Type': 'application/json',
}
return headers
def _new_task_fixture(**kwargs):
task_data = {
"type": "import",
"input": {
"import_from": "http://example.com",
"import_from_format": "qcow2",
"image_properties": {
'disk_format': 'vhd',
'container_format': 'ovf'
}
}
}
task_data.update(kwargs)
return task_data
class TestTasksApi(base.ApiTest):
def __init__(self, *args, **kwargs):
super(TestTasksApi, self).__init__(*args, **kwargs)
self.api_flavor = 'fakeauth'
self.registry_flavor = 'fakeauth'
def _wait_on_task_execution(self):
"""Wait until all the tasks have finished execution and are in
state of success or failure.
"""
start = timeutils.utcnow()
# wait for maximum of 5 seconds
while timeutils.delta_seconds(start, timeutils.utcnow()) < 5:
wait = False
# Verify that no task is in status of pending or processing
path = "/v2/tasks"
res, content = self.http.request(path, 'GET',
headers=minimal_task_headers())
content_dict = json.loads(content)
self.assertEqual(200, res.status)
res_tasks = content_dict['tasks']
if len(res_tasks) != 0:
for task in res_tasks:
if task['status'] in ('pending', 'processing'):
wait = True
break
if wait:
time.sleep(0.05)
continue
else:
break
def _post_new_task(self, **kwargs):
task_owner = kwargs.get('owner')
headers = minimal_task_headers(task_owner)
task_data = _new_task_fixture()
task_data['input']['import_from'] = "http://example.com"
body_content = json.dumps(task_data)
path = "/v2/tasks"
response, content = self.http.request(path, 'POST',
headers=headers,
body=body_content)
self.assertEqual(201, response.status)
task = json.loads(content)
task_id = task['id']
self.assertIsNotNone(task_id)
self.assertEqual(task_owner, task['owner'])
self.assertEqual(task_data['type'], task['type'])
self.assertEqual(task_data['input'], task['input'])
return task, task_data
def test_all_task_api(self):
# 0. GET /tasks
# Verify no tasks
path = "/v2/tasks"
response, content = self.http.request(path, 'GET',
headers=minimal_task_headers())
content_dict = json.loads(content)
self.assertEqual(200, response.status)
self.assertFalse(content_dict['tasks'])
# 1. GET /tasks/{task_id}
# Verify non-existent task
task_id = 'NON_EXISTENT_TASK'
path = "/v2/tasks/%s" % task_id
response, content = self.http.request(path, 'GET',
headers=minimal_task_headers())
self.assertEqual(404, response.status)
# 2. POST /tasks
# Create a new task
task_owner = 'tenant1'
data, req_input = self._post_new_task(owner=task_owner)
# 3. GET /tasks/{task_id}
# Get an existing task
task_id = data['id']
path = "/v2/tasks/%s" % task_id
response, content = self.http.request(path, 'GET',
headers=minimal_task_headers())
self.assertEqual(200, response.status)
# NOTE(sabari): wait for all task executions to finish before checking
# task status.
self._wait_on_task_execution()
# 4. GET /tasks
# Get all tasks (not deleted)
path = "/v2/tasks"
response, content = self.http.request(path, 'GET',
headers=minimal_task_headers())
self.assertEqual(200, response.status)
self.assertIsNotNone(content)
data = json.loads(content)
self.assertIsNotNone(data)
self.assertEqual(1, len(data['tasks']))
# NOTE(venkatesh) find a way to get expected_keys from tasks controller
expected_keys = set(['id', 'expires_at', 'type', 'owner', 'status',
'created_at', 'updated_at', 'self', 'schema'])
task = data['tasks'][0]
self.assertEqual(expected_keys, set(task.keys()))
self.assertEqual(req_input['type'], task['type'])
self.assertEqual(task_owner, task['owner'])
self.assertEqual('success', task['status'])
self.assertIsNotNone(task['created_at'])
self.assertIsNotNone(task['updated_at'])
def test_task_schema_api(self):
# 0. GET /schemas/task
# Verify schema for task
path = "/v2/schemas/task"
response, content = self.http.request(path, 'GET',
headers=minimal_task_headers())
self.assertEqual(200, response.status)
schema = tasks.get_task_schema()
expected_schema = schema.minimal()
data = json.loads(content)
self.assertIsNotNone(data)
self.assertEqual(expected_schema, data)
# 1. GET /schemas/tasks
# Verify schema for tasks
path = "/v2/schemas/tasks"
response, content = self.http.request(path, 'GET',
headers=minimal_task_headers())
self.assertEqual(200, response.status)
schema = tasks.get_collection_schema()
expected_schema = schema.minimal()
data = json.loads(content)
self.assertIsNotNone(data)
self.assertEqual(expected_schema, data)
# NOTE(nikhil): wait for all task executions to finish before exiting
# else there is a risk of running into deadlock
self._wait_on_task_execution()
def test_create_new_task(self):
# 0. POST /tasks
# Create a new task with valid input and type
task_data = _new_task_fixture()
task_owner = 'tenant1'
body_content = json.dumps(task_data)
path = "/v2/tasks"
response, content = self.http.request(
path, 'POST', headers=minimal_task_headers(task_owner),
body=body_content)
self.assertEqual(201, response.status)
data = json.loads(content)
task_id = data['id']
self.assertIsNotNone(task_id)
self.assertEqual(task_owner, data['owner'])
self.assertEqual(task_data['type'], data['type'])
self.assertEqual(task_data['input'], data['input'])
# 1. POST /tasks
# Create a new task with invalid type
# Expect BadRequest(400) Error as response
task_data = _new_task_fixture(type='invalid')
task_owner = 'tenant1'
body_content = json.dumps(task_data)
path = "/v2/tasks"
response, content = self.http.request(
path, 'POST', headers=minimal_task_headers(task_owner),
body=body_content)
self.assertEqual(400, response.status)
        # 2. POST /tasks
# Create a new task with invalid input for type 'import'
# Expect BadRequest(400) Error as response
task_data = _new_task_fixture(task_input='{something: invalid}')
task_owner = 'tenant1'
body_content = json.dumps(task_data)
path = "/v2/tasks"
response, content = self.http.request(
path, 'POST', headers=minimal_task_headers(task_owner),
body=body_content)
self.assertEqual(400, response.status)
# NOTE(nikhil): wait for all task executions to finish before exiting
# else there is a risk of running into deadlock
self._wait_on_task_execution()
def test_tasks_with_filter(self):
# 0. GET /v2/tasks
# Verify no tasks
path = "/v2/tasks"
response, content = self.http.request(path, 'GET',
headers=minimal_task_headers())
self.assertEqual(200, response.status)
content_dict = json.loads(content)
self.assertFalse(content_dict['tasks'])
task_ids = []
# 1. Make 2 POST requests on /tasks with various attributes
task_owner = TENANT1
data, req_input1 = self._post_new_task(owner=task_owner)
task_ids.append(data['id'])
task_owner = TENANT2
data, req_input2 = self._post_new_task(owner=task_owner)
task_ids.append(data['id'])
# 2. GET /tasks
# Verify two import tasks
path = "/v2/tasks"
response, content = self.http.request(path, 'GET',
headers=minimal_task_headers())
self.assertEqual(200, response.status)
content_dict = json.loads(content)
self.assertEqual(2, len(content_dict['tasks']))
# 3. GET /tasks with owner filter
# Verify correct task returned with owner
params = "owner=%s" % TENANT1
path = "/v2/tasks?%s" % params
response, content = self.http.request(path, 'GET',
headers=minimal_task_headers())
self.assertEqual(200, response.status)
content_dict = json.loads(content)
self.assertEqual(1, len(content_dict['tasks']))
self.assertEqual(TENANT1, content_dict['tasks'][0]['owner'])
# Check the same for different owner.
params = "owner=%s" % TENANT2
path = "/v2/tasks?%s" % params
response, content = self.http.request(path, 'GET',
headers=minimal_task_headers())
self.assertEqual(200, response.status)
content_dict = json.loads(content)
self.assertEqual(1, len(content_dict['tasks']))
self.assertEqual(TENANT2, content_dict['tasks'][0]['owner'])
# 4. GET /tasks with type filter
# Verify correct task returned with type
params = "type=import"
path = "/v2/tasks?%s" % params
response, content = self.http.request(path, 'GET',
headers=minimal_task_headers())
self.assertEqual(200, response.status)
content_dict = json.loads(content)
self.assertEqual(2, len(content_dict['tasks']))
actual_task_ids = [task['id'] for task in content_dict['tasks']]
self.assertEqual(set(task_ids), set(actual_task_ids))
# NOTE(nikhil): wait for all task executions to finish before exiting
# else there is a risk of running into deadlock
self._wait_on_task_execution()
def test_limited_tasks(self):
"""
Ensure marker and limit query params work
"""
# 0. GET /tasks
# Verify no tasks
path = "/v2/tasks"
response, content = self.http.request(path, 'GET',
headers=minimal_task_headers())
self.assertEqual(200, response.status)
tasks = json.loads(content)
self.assertFalse(tasks['tasks'])
task_ids = []
# 1. POST /tasks with three tasks with various attributes
task, _ = self._post_new_task(owner=TENANT1)
task_ids.append(task['id'])
task, _ = self._post_new_task(owner=TENANT2)
task_ids.append(task['id'])
task, _ = self._post_new_task(owner=TENANT3)
task_ids.append(task['id'])
# 2. GET /tasks
# Verify 3 tasks are returned
path = "/v2/tasks"
response, content = self.http.request(path, 'GET',
headers=minimal_task_headers())
self.assertEqual(200, response.status)
tasks = json.loads(content)['tasks']
self.assertEqual(3, len(tasks))
# 3. GET /tasks with limit of 2
# Verify only two tasks were returned
params = "limit=2"
path = "/v2/tasks?%s" % params
response, content = self.http.request(path, 'GET',
headers=minimal_task_headers())
self.assertEqual(200, response.status)
actual_tasks = json.loads(content)['tasks']
self.assertEqual(2, len(actual_tasks))
self.assertEqual(tasks[0]['id'], actual_tasks[0]['id'])
self.assertEqual(tasks[1]['id'], actual_tasks[1]['id'])
# 4. GET /tasks with marker
# Verify only two tasks were returned
params = "marker=%s" % tasks[0]['id']
path = "/v2/tasks?%s" % params
response, content = self.http.request(path, 'GET',
headers=minimal_task_headers())
self.assertEqual(200, response.status)
actual_tasks = json.loads(content)['tasks']
self.assertEqual(2, len(actual_tasks))
self.assertEqual(tasks[1]['id'], actual_tasks[0]['id'])
self.assertEqual(tasks[2]['id'], actual_tasks[1]['id'])
# 5. GET /tasks with marker and limit
# Verify only one task was returned with the correct id
params = "limit=1&marker=%s" % tasks[1]['id']
path = "/v2/tasks?%s" % params
response, content = self.http.request(path, 'GET',
headers=minimal_task_headers())
self.assertEqual(200, response.status)
actual_tasks = json.loads(content)['tasks']
self.assertEqual(1, len(actual_tasks))
self.assertEqual(tasks[2]['id'], actual_tasks[0]['id'])
# NOTE(nikhil): wait for all task executions to finish before exiting
# else there is a risk of running into deadlock
self._wait_on_task_execution()
def test_ordered_tasks(self):
# 0. GET /tasks
# Verify no tasks
path = "/v2/tasks"
response, content = self.http.request(path, 'GET',
headers=minimal_task_headers())
self.assertEqual(200, response.status)
tasks = json.loads(content)
self.assertFalse(tasks['tasks'])
task_ids = []
# 1. POST /tasks with three tasks with various attributes
task, _ = self._post_new_task(owner=TENANT1)
task_ids.append(task['id'])
task, _ = self._post_new_task(owner=TENANT2)
task_ids.append(task['id'])
task, _ = self._post_new_task(owner=TENANT3)
task_ids.append(task['id'])
        # 2. GET /tasks with no query params
        # Verify three tasks returned, sorted by created_at desc
path = "/v2/tasks"
response, content = self.http.request(path, 'GET',
headers=minimal_task_headers())
self.assertEqual(200, response.status)
actual_tasks = json.loads(content)['tasks']
self.assertEqual(3, len(actual_tasks))
self.assertEqual(task_ids[2], actual_tasks[0]['id'])
self.assertEqual(task_ids[1], actual_tasks[1]['id'])
self.assertEqual(task_ids[0], actual_tasks[2]['id'])
# 3. GET /tasks sorted by owner asc
params = 'sort_key=owner&sort_dir=asc'
path = '/v2/tasks?%s' % params
response, content = self.http.request(path, 'GET',
headers=minimal_task_headers())
self.assertEqual(200, response.status)
expected_task_owners = [TENANT1, TENANT2, TENANT3]
expected_task_owners.sort()
actual_tasks = json.loads(content)['tasks']
self.assertEqual(3, len(actual_tasks))
self.assertEqual(expected_task_owners,
[t['owner'] for t in actual_tasks])
# 4. GET /tasks sorted by owner desc with a marker
params = 'sort_key=owner&sort_dir=desc&marker=%s' % task_ids[0]
path = '/v2/tasks?%s' % params
response, content = self.http.request(path, 'GET',
headers=minimal_task_headers())
self.assertEqual(200, response.status)
actual_tasks = json.loads(content)['tasks']
self.assertEqual(2, len(actual_tasks))
self.assertEqual(task_ids[2], actual_tasks[0]['id'])
self.assertEqual(task_ids[1], actual_tasks[1]['id'])
self.assertEqual(TENANT3, actual_tasks[0]['owner'])
self.assertEqual(TENANT2, actual_tasks[1]['owner'])
# 5. GET /tasks sorted by owner asc with a marker
params = 'sort_key=owner&sort_dir=asc&marker=%s' % task_ids[0]
path = '/v2/tasks?%s' % params
response, content = self.http.request(path, 'GET',
headers=minimal_task_headers())
self.assertEqual(200, response.status)
actual_tasks = json.loads(content)['tasks']
self.assertEqual(0, len(actual_tasks))
# NOTE(nikhil): wait for all task executions to finish before exiting
# else there is a risk of running into deadlock
self._wait_on_task_execution()
def test_delete_task(self):
# 0. POST /tasks
# Create a new task with valid input and type
task_data = _new_task_fixture()
task_owner = 'tenant1'
body_content = json.dumps(task_data)
path = "/v2/tasks"
response, content = self.http.request(
path, 'POST', headers=minimal_task_headers(task_owner),
body=body_content)
self.assertEqual(201, response.status)
data = json.loads(content)
task_id = data['id']
# 1. DELETE on /tasks/{task_id}
# Attempt to delete a task
path = "/v2/tasks/%s" % task_id
response, content = self.http.request(path,
'DELETE',
headers=minimal_task_headers())
self.assertEqual(405, response.status)
self.assertEqual('GET', response.webob_resp.headers.get('Allow'))
self.assertEqual(('GET',), response.webob_resp.allow)
self.assertEqual(('GET',), response.allow)
# 2. GET /tasks/{task_id}
# Ensure that methods mentioned in the Allow header work
path = "/v2/tasks/%s" % task_id
response, content = self.http.request(path,
'GET',
headers=minimal_task_headers())
self.assertEqual(200, response.status)
self.assertIsNotNone(content)
# NOTE(nikhil): wait for all task executions to finish before exiting
# else there is a risk of running into deadlock
self._wait_on_task_execution()
| saeki-masaki/glance | glance/tests/integration/v2/test_tasks_api.py | Python | apache-2.0 | 19,998 |
# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:[email protected]
#
# This file is part of logilab-common.
#
# logilab-common is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 2.1 of the License, or (at your option) any
# later version.
#
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
"""Prioritized tasks queue
:organization: Logilab
"""
__docformat__ = "restructuredtext en"
from bisect import insort_left
from Queue import Queue
LOW = 0
MEDIUM = 10
HIGH = 100
REVERSE_PRIORITY = {
0: 'LOW',
10: 'MEDIUM',
100: 'HIGH'
}
class PrioritizedTasksQueue(Queue):
def _init(self, maxsize):
"""Initialize the queue representation"""
self.maxsize = maxsize
# ordered list of task, from the lowest to the highest priority
self.queue = []
def _put(self, item):
"""Put a new item in the queue"""
for i, task in enumerate(self.queue):
# equivalent task
if task == item:
# if new task has a higher priority, remove the one already
# queued so the new priority will be considered
if task < item:
item.merge(task)
del self.queue[i]
break
# else keep it so current order is kept
task.merge(item)
return
insort_left(self.queue, item)
def _get(self):
"""Get an item from the queue"""
return self.queue.pop()
def __iter__(self):
return iter(self.queue)
def remove(self, tid):
"""remove a specific task from the queue"""
# XXX acquire lock
for i, task in enumerate(self):
if task.id == tid:
self.queue.pop(i)
return
        raise ValueError('no task of id %s in queue' % tid)
class Task(object):
def __init__(self, tid, priority=LOW):
# task id
self.id = tid
# task priority
self.priority = priority
def __repr__(self):
return '<Task %s @%#x>' % (self.id, id(self))
def __cmp__(self, other):
return cmp(self.priority, other.priority)
def __eq__(self, other):
return self.id == other.id
def merge(self, other):
pass
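
# A minimal usage sketch (Python 2, matching this module): tasks with equal
# ids merge, and get() returns the highest-priority task first.
if __name__ == '__main__':
    queue = PrioritizedTasksQueue()
    queue.put(Task('backup', LOW))
    queue.put(Task('index', HIGH))
    queue.put(Task('mail', MEDIUM))
    print REVERSE_PRIORITY[queue.get().priority]  # HIGH
    print REVERSE_PRIORITY[queue.get().priority]  # MEDIUM
    queue.remove('backup')  # drop the remaining task by id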
| dbbhattacharya/kitsune | vendor/packages/logilab-common/tasksqueue.py | Python | bsd-3-clause | 2,818 |
# -*- coding: utf-8 -*-
# YAFF is yet another force-field code.
# Copyright (C) 2011 Toon Verstraelen <[email protected]>,
# Louis Vanduyfhuys <[email protected]>, Center for Molecular Modeling
# (CMM), Ghent University, Ghent, Belgium; all rights reserved unless otherwise
# stated.
#
# This file is part of YAFF.
#
# YAFF is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# YAFF is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
from __future__ import division
import tempfile, shutil, os

import numpy as np
from yaff import *
from molmod.test.common import tmpdir
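# Note: blav() is presumably the block-averaging (Flyvbjerg-Petersen) estimate
# of the error on the mean of a time-correlated signal, with the second return
# value, sinef, the corresponding statistical inefficiency.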
def test_blav():
# generate a time-correlated random signal
n = 50000
eps0 = 30.0/n
eps1 = 1.0
y = np.sin(np.random.normal(0, eps0, n).cumsum() + np.random.normal(0, eps1, n))
# create a temporary directory to write the plot to
with tmpdir(__name__, 'test_blav') as dn:
fn_png = '%s/blav.png' % dn
error, sinef = blav(y, 100, fn_png)
assert os.path.isfile(fn_png)
| molmod/yaff | yaff/analysis/test/test_blav.py | Python | gpl-3.0 | 1,509 |
#
# rtlsdr_scan
#
# http://eartoearoak.com/software/rtlsdr-scanner
#
# Copyright 2012 - 2015 Al Brown
#
# A frequency scanning GUI for the OsmoSDR rtl-sdr library at
# http://sdr.osmocom.org/trac/wiki/rtl-sdr
#
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import copy
import math
import os
import re
import threading
from matplotlib import cm
import matplotlib
import matplotlib.animation
from matplotlib.backends.backend_wxagg import \
FigureCanvasWxAgg as FigureCanvas
from matplotlib.colorbar import ColorbarBase
from matplotlib.colors import Normalize
from matplotlib.dates import num2epoch
from matplotlib.lines import Line2D
from matplotlib.ticker import AutoMinorLocator, ScalarFormatter
import wx
import wx.grid as wxGrid
from rtlsdr_scanner.constants import Display
from rtlsdr_scanner.misc import format_precision
from rtlsdr_scanner.plot_3d import Plotter3d
from rtlsdr_scanner.plot_controls import MouseZoom, MouseSelect
from rtlsdr_scanner.plot_line import Plotter
from rtlsdr_scanner.plot_preview import PlotterPreview
from rtlsdr_scanner.plot_spect import Spectrogram
from rtlsdr_scanner.plot_status import PlotterStatus
from rtlsdr_scanner.plot_time import PlotterTime
from rtlsdr_scanner.spectrum import split_spectrum_sort, Measure, reduce_points
from rtlsdr_scanner.toolbars import NavigationToolbar, NavigationToolbarCompare
from rtlsdr_scanner.utils_mpl import find_artists
from rtlsdr_scanner.utils_wx import close_modeless
from rtlsdr_scanner.widgets import GridToolTips, CheckBoxCellRenderer
class PanelGraph(wx.Panel):
def __init__(self, panel, notify, settings, status, remoteControl):
self.panel = panel
self.notify = notify
self.plot = None
self.settings = settings
self.status = status
self.remoteControl = remoteControl
self.spectrum = None
self.isLimited = None
self.limit = None
self.extent = None
self.annotate = None
self.isDrawing = False
self.toolTip = wx.ToolTip('')
self.mouseSelect = None
self.mouseZoom = None
self.measureTable = None
self.background = None
self.selectStart = None
self.selectEnd = None
self.menuClearSelect = []
self.measure = None
self.show = None
self.doDraw = False
wx.Panel.__init__(self, panel)
self.figure = matplotlib.figure.Figure(facecolor='white')
self.figure.set_size_inches(0, 0)
self.canvas = FigureCanvas(self, -1, self.figure)
self.canvas.SetToolTip(self.toolTip)
self.measureTable = PanelMeasure(self, settings)
self.toolbar = NavigationToolbar(self.canvas, self, settings,
self.__hide_overlay)
self.toolbar.Realize()
vbox = wx.BoxSizer(wx.VERTICAL)
vbox.Add(self.canvas, 1, wx.EXPAND)
vbox.Add(self.measureTable, 0, wx.EXPAND)
vbox.Add(self.toolbar, 0, wx.EXPAND)
self.SetSizer(vbox)
vbox.Fit(self)
self.create_plot()
self.canvas.mpl_connect('button_press_event', self.__on_press)
self.canvas.mpl_connect('figure_enter_event', self.__on_enter)
self.canvas.mpl_connect('axes_leave_event', self.__on_leave)
self.canvas.mpl_connect('motion_notify_event', self.__on_motion)
self.canvas.mpl_connect('draw_event', self.__on_draw)
        self.Bind(wx.EVT_IDLE, self.__on_idle)
self.Bind(wx.EVT_SIZE, self.__on_size)
self.timer = wx.Timer(self)
self.Bind(wx.EVT_TIMER, self.__on_timer, self.timer)
def __set_fonts(self):
axes = self.plot.get_axes()
if axes is not None:
axes.xaxis.label.set_size('small')
axes.yaxis.label.set_size('small')
if self.settings.display == Display.SURFACE:
axes.zaxis.label.set_size('small')
axes.tick_params(axis='both', which='major', labelsize='small')
axes = self.plot.get_axes_bar()
if axes is not None:
axes.tick_params(axis='both', which='major', labelsize='small')
def __enable_menu(self, state):
for menu in self.menuClearSelect:
menu.Enable(state)
def __on_press(self, event):
if self.settings.clickTune and matplotlib.__version__ >= '1.2' and event.dblclick:
frequency = int(event.xdata * 1e6)
self.remoteControl.tune(frequency)
elif isinstance(self.plot, PlotterPreview):
self.plot.to_front()
def __on_enter(self, _event):
self.toolTip.Enable(False)
def __on_leave(self, _event):
self.toolTip.Enable(True)
self.status.set_info('', level=None)
def __on_motion(self, event):
axes = self.plot.get_axes()
axesBar = self.plot.get_axes_bar()
xpos = event.xdata
ypos = event.ydata
text = ""
if (xpos is None or ypos is None or
self.spectrum is None or event.inaxes is None):
spectrum = None
elif event.inaxes == axesBar:
spectrum = None
level = self.plot.get_bar().norm.inverse(ypos)
text = "{}".format(format_precision(self.settings,
level=level))
elif self.settings.display == Display.PLOT:
timeStamp = max(self.spectrum)
spectrum = self.spectrum[timeStamp]
elif self.settings.display == Display.SPECT:
timeStamp = num2epoch(ypos)
if timeStamp in self.spectrum:
spectrum = self.spectrum[timeStamp]
else:
nearest = min(self.spectrum.keys(),
key=lambda k: abs(k - timeStamp))
spectrum = self.spectrum[nearest]
elif self.settings.display == Display.SURFACE:
spectrum = None
coords = axes.format_coord(event.xdata,
event.ydata)
match = re.match('x=([-|0-9|\.]+).*y=([0-9|\:]+).*z=([-|0-9|\.]+)',
coords)
if match is not None and match.lastindex == 3:
freq = float(match.group(1))
level = float(match.group(3))
text = "{}, {}".format(*format_precision(self.settings,
freq, level))
else:
spectrum = None
if spectrum is not None and len(spectrum) > 0:
x = min(spectrum.keys(), key=lambda freq: abs(freq - xpos))
if min(spectrum.keys(), key=float) <= xpos <= max(spectrum.keys(),
key=float):
y = spectrum[x]
text = "{}, {}".format(*format_precision(self.settings, x, y))
else:
text = format_precision(self.settings, xpos)
markers = find_artists(self.figure, 'peak')
markers.extend(find_artists(self.figure, 'peakThres'))
hit = False
for marker in markers:
if isinstance(marker, Line2D):
location = marker.get_path().vertices[0]
markX, markY = axes.transData.transform(location)
dist = abs(math.hypot(event.x - markX, event.y - markY))
if dist <= 5:
if self.settings.display == Display.PLOT:
tip = "{}, {}".format(*format_precision(self.settings,
location[0],
location[1]))
else:
tip = "{}".format(format_precision(self.settings,
location[0]))
self.toolTip.SetTip(tip)
hit = True
break
self.toolTip.Enable(hit)
self.status.set_info(text, level=None)
def __on_size(self, event):
ppi = wx.ScreenDC().GetPPI()
size = [float(v) for v in self.canvas.GetSize()]
width = size[0] / ppi[0]
height = size[1] / ppi[1]
self.figure.set_figwidth(width)
self.figure.set_figheight(height)
self.figure.set_dpi(ppi[0])
event.Skip()
def __on_draw(self, _event):
axes = self.plot.get_axes()
if axes is not None:
self.background = self.canvas.copy_from_bbox(axes.bbox)
self.__draw_overlay()
def __on_idle(self, _event):
if self.doDraw and self.plot.get_plot_thread() is None:
self.__hide_overlay()
self.doDraw = False
if os.name == 'nt':
threading.Thread(target=self.__draw_canvas, name='Draw').start()
else:
self.__draw_canvas()
def __on_timer(self, _event):
self.timer.Stop()
self.set_plot(None, None, None, None, self.annotate)
def __draw_canvas(self):
try:
self.isDrawing = True
self.canvas.draw()
except wx.PyDeadObjectError:
pass
self.isDrawing = False
wx.CallAfter(self.status.set_busy, False)
def __draw_overlay(self):
if self.background is not None:
self.canvas.restore_region(self.background)
self.__draw_select()
self.draw_measure()
axes = self.plot.get_axes()
if axes is not None:
self.canvas.blit(axes.bbox)
def __draw_select(self):
if self.selectStart is not None and self.selectEnd is not None:
self.mouseSelect.draw(self.selectStart, self.selectEnd)
def __hide_overlay(self):
if self.plot is not None:
self.plot.hide_measure()
self.__hide_select()
def __hide_select(self):
if self.mouseSelect is not None:
self.mouseSelect.hide()
def create_plot(self):
if self.plot is not None:
self.plot.close()
self.toolbar.set_auto(True)
if self.settings.display == Display.PLOT:
self.plot = Plotter(self.notify, self.figure, self.settings)
elif self.settings.display == Display.SPECT:
self.plot = Spectrogram(self.notify, self.figure, self.settings)
elif self.settings.display == Display.SURFACE:
self.plot = Plotter3d(self.notify, self.figure, self.settings)
elif self.settings.display == Display.STATUS:
self.plot = PlotterStatus(self.notify, self.figure, self.settings)
elif self.settings.display == Display.TIMELINE:
self.plot = PlotterTime(self.notify, self.figure, self.settings)
elif self.settings.display == Display.PREVIEW:
self.plot = PlotterPreview(self.notify, self.figure, self.settings)
self.plot.set_window(self)
self.__set_fonts()
self.toolbar.set_plot(self.plot)
self.toolbar.set_type(self.settings.display)
self.measureTable.set_type(self.settings.display)
self.set_plot_title()
self.figure.subplots_adjust(top=0.85)
self.redraw_plot()
self.plot.scale_plot(True)
self.mouseZoom = MouseZoom(self.toolbar, plot=self.plot,
callbackHide=self.__hide_overlay)
self.mouseSelect = MouseSelect(self.plot, self.on_select,
self.on_selected)
self.measureTable.show(self.settings.showMeasure)
self.panel.SetFocus()
def on_select(self):
self.hide_measure()
def on_selected(self, start, end):
self.__enable_menu(True)
self.selectStart = start
self.selectEnd = end
self.measureTable.set_selected(self.spectrum, start, end)
def add_menu_clear_select(self, menu):
self.menuClearSelect.append(menu)
menu.Enable(False)
def draw(self):
self.doDraw = True
def show_measure_table(self, show):
self.measureTable.show(show)
self.Layout()
def set_plot(self, spectrum, isLimited, limit, extent, annotate=False):
if spectrum is not None and extent is not None:
if isLimited is not None and limit is not None:
self.spectrum = copy.copy(spectrum)
self.extent = extent
self.annotate = annotate
self.isLimited = isLimited
self.limit = limit
if self.plot.get_plot_thread() is None and not self.isDrawing:
self.timer.Stop()
self.measureTable.set_selected(self.spectrum, self.selectStart,
self.selectEnd)
if isLimited:
self.spectrum = reduce_points(spectrum, limit)
self.status.set_busy(True)
self.plot.set_plot(self.spectrum, self.extent, annotate)
if self.settings.display == Display.PREVIEW:
self.status.set_busy(False)
else:
self.timer.Start(200, oneShot=True)
def set_plot_title(self):
if len(self.settings.devicesRtl) > 0:
gain = self.settings.devicesRtl[self.settings.indexRtl].gain
else:
gain = 0
self.plot.set_title("Frequency Spectrogram\n{} - {} MHz,"
" gain = {}dB".format(self.settings.start,
self.settings.stop, gain))
def redraw_plot(self):
if self.spectrum is not None:
self.set_plot(self.spectrum,
self.settings.pointsLimit,
self.settings.pointsMax,
self.extent, self.settings.annotate)
def set_grid(self, on):
self.plot.set_grid(on)
def set_selected(self, start, end):
self.selectStart = start
self.selectEnd = end
self.__draw_select()
def hide_toolbar(self, hide):
self.toolbar.Show(not hide)
def hide_measure(self):
if self.plot is not None:
self.plot.hide_measure()
def draw_measure(self):
if self.measure is not None and self.measure.is_valid():
self.plot.draw_measure(self.measure, self.show)
def update_measure(self, measure=None, show=None):
if not measure and not show:
self.measureTable.update_measure()
else:
self.measure = measure
self.show = show
self.__draw_overlay()
def get_figure(self):
return self.figure
def get_axes(self):
return self.plot.get_axes()
def get_canvas(self):
return self.canvas
def get_toolbar(self):
return self.toolbar
def get_mouse_select(self):
return self.mouseSelect
def scale_plot(self, force=False):
self.plot.scale_plot(force)
def clear_plots(self):
self.plot.clear_plots()
self.spectrum = None
self.doDraw = True
def clear_selection(self):
self.measure = None
self.measureTable.clear_measurement()
self.selectStart = None
self.selectEnd = None
self.mouseSelect.clear()
self.__enable_menu(False)
def close(self):
self.plot.close()
close_modeless()
class PanelGraphCompare(wx.Panel):
def __init__(self, parent, callback):
self.callback = callback
self.spectrum1 = None
self.spectrum2 = None
self.spectrumDiff = None
self.mouseZoom = None
formatter = ScalarFormatter(useOffset=False)
wx.Panel.__init__(self, parent)
figure = matplotlib.figure.Figure(facecolor='white')
figure.set_size_inches(8, 4.5)
if matplotlib.__version__ >= '1.2':
figure.set_tight_layout(True)
self.axesScan = figure.add_subplot(111)
self.axesScan.xaxis.set_minor_locator(AutoMinorLocator(10))
self.axesScan.yaxis.set_minor_locator(AutoMinorLocator(10))
self.axesScan.xaxis.set_major_formatter(formatter)
self.axesScan.yaxis.set_major_formatter(formatter)
self.axesDiff = self.axesScan.twinx()
self.axesDiff.yaxis.set_minor_locator(AutoMinorLocator(10))
self.plotScan1, = self.axesScan.plot([], [], 'b-',
linewidth=0.4)
self.plotScan2, = self.axesScan.plot([], [], 'g-',
linewidth=0.4)
self.plotDiff, = self.axesDiff.plot([], [], 'r-', linewidth=0.4)
self.axesScan.set_ylim(auto=True)
self.axesDiff.set_ylim(auto=True)
self.axesScan.set_title("Level Comparison")
self.axesScan.set_xlabel("Frequency (MHz)")
self.axesScan.set_ylabel('Level (dB/Hz)')
self.axesDiff.set_ylabel('Difference (dB/Hz)')
self.canvas = FigureCanvas(self, -1, figure)
self.set_grid(True)
self.textIntersect = wx.StaticText(self, label="Intersections: ")
toolbar = NavigationToolbarCompare(self)
toolbar.Realize()
self.mouseZoom = MouseZoom(toolbar, figure=figure)
vbox = wx.BoxSizer(wx.VERTICAL)
vbox.Add(self.canvas, 1, wx.LEFT | wx.TOP | wx.GROW)
vbox.Add(self.textIntersect, 0, wx.EXPAND | wx.ALL, border=5)
vbox.Add(toolbar, 0, wx.EXPAND)
self.SetSizer(vbox)
vbox.Fit(self)
self.canvas.mpl_connect('motion_notify_event', self.__on_motion)
self.canvas.mpl_connect('axes_leave_event', self.__on_leave)
def __on_motion(self, event):
xpos = event.xdata
ypos = event.ydata
if xpos is None or ypos is None:
return
locs = dict.fromkeys(['x1', 'y1', 'x2', 'y2', 'x3', 'y3'], None)
if self.spectrum1 is not None and len(self.spectrum1) > 0:
locs['x1'] = min(self.spectrum1.keys(),
key=lambda freq: abs(freq - xpos))
locs['y1'] = self.spectrum1[locs['x1']]
if self.spectrum2 is not None and len(self.spectrum2) > 0:
locs['x2'] = min(self.spectrum2.keys(),
key=lambda freq: abs(freq - xpos))
locs['y2'] = self.spectrum2[locs['x2']]
if self.spectrumDiff is not None and len(self.spectrumDiff) > 0:
locs['x3'] = min(self.spectrumDiff.keys(),
key=lambda freq: abs(freq - xpos))
locs['y3'] = self.spectrumDiff[locs['x3']]
self.callback(locs)
def __on_leave(self, event):
self.callback(None)
def __relim(self):
self.axesScan.relim()
self.axesDiff.relim()
def __plot_diff(self):
diff = {}
intersections = 0
if self.spectrum1 is not None and self.spectrum2 is not None:
set1 = set(self.spectrum1)
set2 = set(self.spectrum2)
intersect = set1.intersection(set2)
intersections = len(intersect)
for freq in intersect:
diff[freq] = self.spectrum1[freq] - self.spectrum2[freq]
freqs, powers = split_spectrum_sort(diff)
self.plotDiff.set_xdata(freqs)
self.plotDiff.set_ydata(powers)
elif self.spectrum1 is None:
freqs, powers = split_spectrum_sort(self.spectrum2)
intersections = len(freqs)
self.plotDiff.set_xdata(freqs)
self.plotDiff.set_ydata([0] * intersections)
else:
freqs, powers = split_spectrum_sort(self.spectrum1)
intersections = len(freqs)
self.plotDiff.set_xdata(freqs)
self.plotDiff.set_ydata([0] * intersections)
self.spectrumDiff = diff
self.textIntersect.SetLabel('Intersections: {}'.format(intersections))
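
    # Worked example of __plot_diff: with spectrum1 = {100.0: -20.0, 100.1: -21.0}
    # and spectrum2 = {100.0: -25.0, 100.2: -30.0}, only 100.0 MHz is common, so
    # the difference trace holds {100.0: 5.0} and the label reads 'Intersections: 1'.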
def get_canvas(self):
return self.canvas
def show_plot1(self, enable):
self.plotScan1.set_visible(enable)
self.canvas.draw()
def show_plot2(self, enable):
self.plotScan2.set_visible(enable)
self.canvas.draw()
def show_plotdiff(self, enable):
self.plotDiff.set_visible(enable)
self.canvas.draw()
def set_spectrum1(self, spectrum):
timeStamp = max(spectrum)
self.spectrum1 = spectrum[timeStamp]
freqs, powers = split_spectrum_sort(self.spectrum1)
self.plotScan1.set_xdata(freqs)
self.plotScan1.set_ydata(powers)
self.__plot_diff()
self.__relim()
self.autoscale()
def set_spectrum2(self, spectrum):
timeStamp = max(spectrum)
self.spectrum2 = spectrum[timeStamp]
freqs, powers = split_spectrum_sort(self.spectrum2)
self.plotScan2.set_xdata(freqs)
self.plotScan2.set_ydata(powers)
self.__plot_diff()
self.__relim()
self.autoscale()
def set_grid(self, grid):
self.axesScan.grid(grid)
self.canvas.draw()
def autoscale(self):
self.axesScan.autoscale_view()
self.axesDiff.autoscale_view()
self.canvas.draw()
class PanelColourBar(wx.Panel):
def __init__(self, parent, colourMap):
wx.Panel.__init__(self, parent)
dpi = wx.ScreenDC().GetPPI()[0]
figure = matplotlib.figure.Figure(facecolor='white', dpi=dpi)
figure.set_size_inches(200.0 / dpi, 25.0 / dpi)
self.canvas = FigureCanvas(self, -1, figure)
axes = figure.add_subplot(111)
figure.subplots_adjust(0, 0, 1, 1)
norm = Normalize(vmin=0, vmax=1)
self.bar = ColorbarBase(axes, norm=norm, orientation='horizontal',
cmap=cm.get_cmap(colourMap))
axes.xaxis.set_visible(False)
def set_map(self, colourMap):
self.bar.set_cmap(colourMap)
self.bar.changed()
self.bar.draw_all()
self.canvas.draw()
class PanelLine(wx.Panel):
def __init__(self, parent, colour):
self.colour = colour
wx.Panel.__init__(self, parent)
self.Bind(wx.EVT_PAINT, self.__on_paint)
def __on_paint(self, _event):
dc = wx.BufferedPaintDC(self)
width, height = self.GetClientSize()
if not width or not height:
return
pen = wx.Pen(self.colour, 2)
dc.SetPen(pen)
colourBack = self.GetBackgroundColour()
brush = wx.Brush(colourBack, wx.SOLID)
dc.SetBackground(brush)
dc.Clear()
dc.DrawLine(0, height / 2., width, height / 2.)
class PanelMeasure(wx.Panel):
def __init__(self, graph, settings):
wx.Panel.__init__(self, graph)
self.spectrum = None
self.graph = graph
self.settings = settings
self.measure = None
self.checked = {Measure.MIN: None,
Measure.MAX: None,
Measure.AVG: None,
Measure.GMEAN: None,
Measure.HBW: None,
Measure.OBW: None}
self.selected = None
self.SetBackgroundColour('white')
self.grid = wxGrid.Grid(self)
self.grid.CreateGrid(3, 19)
self.grid.EnableEditing(True)
self.grid.EnableDragGridSize(False)
self.grid.SetColLabelSize(1)
self.grid.SetRowLabelSize(1)
self.grid.SetColMinimalAcceptableWidth(1)
self.grid.SetColSize(2, 1)
self.grid.SetColSize(7, 1)
self.grid.SetColSize(11, 1)
self.grid.SetColSize(15, 1)
self.grid.SetMargins(0, wx.SystemSettings_GetMetric(wx.SYS_HSCROLL_Y))
for x in range(self.grid.GetNumberRows()):
self.grid.SetRowLabelValue(x, '')
for y in range(self.grid.GetNumberCols()):
self.grid.SetColLabelValue(y, '')
for row in range(self.grid.GetNumberRows()):
for col in range(self.grid.GetNumberCols()):
self.grid.SetReadOnly(row, col, True)
self.locsDesc = {'F Start': (0, 0),
'F End': (1, 0),
'F Delta': (2, 0),
'P Min': (0, 4),
'P Max': (1, 4),
'P Delta': (2, 4),
'Mean': (0, 9),
'GMean': (1, 9),
'Flatness': (2, 9),
'-3dB Start': (0, 13),
'-3dB End': (1, 13),
'-3dB Delta': (2, 13),
'OBW Start': (0, 17),
'OBW End': (1, 17),
'OBW Delta': (2, 17)}
self.__set_descs()
self.locsCheck = {Measure.MIN: (0, 3), Measure.MAX: (1, 3),
Measure.AVG: (0, 8), Measure.GMEAN: (1, 8),
Measure.HBW: (0, 12),
Measure.OBW: (0, 16)}
self.__set_check_editor()
self.locsFreq = [(0, 1), (1, 1)]
self.__set_freq_editor()
colour = self.grid.GetBackgroundColour()
self.grid.SetCellTextColour(2, 3, colour)
self.grid.SetCellTextColour(2, 8, colour)
self.grid.SetCellTextColour(1, 12, colour)
self.grid.SetCellTextColour(2, 12, colour)
self.grid.SetCellTextColour(1, 16, colour)
self.grid.SetCellTextColour(2, 16, colour)
self.__clear_checks()
self.locsMeasure = {'start': (0, 1), 'end': (1, 1), 'deltaF': (2, 1),
'minFP': (0, 5), 'maxFP': (1, 5), 'deltaFP': (2, 5),
'minP': (0, 6), 'maxP': (1, 6), 'deltaP': (2, 6),
'avg': (0, 10), 'gmean': (1, 10), 'flat': (2, 10),
'hbwstart': (0, 14), 'hbwend': (1, 14), 'hbwdelta': (2, 14),
'obwstart': (0, 18), 'obwend': (1, 18), 'obwdelta': (2, 18)}
fontCell = self.grid.GetDefaultCellFont()
fontSize = fontCell.GetPointSize()
fontStyle = fontCell.GetStyle()
fontWeight = fontCell.GetWeight()
font = wx.Font(fontSize, wx.FONTFAMILY_MODERN, fontStyle,
fontWeight)
dc = wx.WindowDC(self.grid)
dc.SetFont(font)
widthMHz = dc.GetTextExtent('###.######')[0] * 1.2
widthdB = dc.GetTextExtent('-##.##')[0] * 1.2
for _desc, (_row, col) in self.locsDesc.iteritems():
self.grid.AutoSizeColumn(col)
for col in [1, 5, 14, 18]:
self.grid.SetColSize(col, widthMHz)
for row in xrange(self.grid.GetNumberRows()):
self.grid.SetCellFont(row, col, font)
for col in [6, 10]:
self.grid.SetColSize(col, widthdB)
for row in xrange(self.grid.GetNumberRows()):
self.grid.SetCellFont(row, col, font)
for _desc, (_row, col) in self.locsCheck.iteritems():
self.grid.AutoSizeColumn(col)
toolTips = {}
toolTips[self.locsMeasure['start']] = 'Selection start (MHz)'
toolTips[self.locsMeasure['end']] = 'Selection end (MHz)'
toolTips[self.locsMeasure['deltaF']] = 'Selection bandwidth (MHz)'
toolTips[self.locsMeasure['minFP']] = 'Minimum power location (MHz)'
toolTips[self.locsMeasure['maxFP']] = 'Maximum power location (MHz)'
toolTips[self.locsMeasure['deltaFP']] = 'Power location difference (MHz)'
toolTips[self.locsMeasure['minP']] = 'Minimum power (dB)'
toolTips[self.locsMeasure['maxP']] = 'Maximum power (dB)'
toolTips[self.locsMeasure['deltaP']] = 'Power difference (dB)'
toolTips[self.locsMeasure['avg']] = 'Mean power (dB)'
toolTips[self.locsMeasure['gmean']] = 'Geometric mean power (dB)'
toolTips[self.locsMeasure['flat']] = 'Spectral flatness'
toolTips[self.locsMeasure['hbwstart']] = '-3db start location (MHz)'
toolTips[self.locsMeasure['hbwend']] = '-3db end location (MHz)'
toolTips[self.locsMeasure['hbwdelta']] = '-3db bandwidth (MHz)'
toolTips[self.locsMeasure['obwstart']] = '99% start location (MHz)'
toolTips[self.locsMeasure['obwend']] = '99% end location (MHz)'
toolTips[self.locsMeasure['obwdelta']] = '99% bandwidth (MHz)'
self.toolTips = GridToolTips(self.grid, toolTips)
self.popupMenu = wx.Menu()
self.popupMenuCopy = self.popupMenu.Append(wx.ID_ANY, "&Copy",
"Copy entry")
self.Bind(wx.EVT_MENU, self.__on_copy, self.popupMenuCopy)
self.Bind(wxGrid.EVT_GRID_CELL_RIGHT_CLICK, self.__on_popup_menu)
self.Bind(wxGrid.EVT_GRID_CELL_LEFT_CLICK, self.__on_cell_click)
if wx.VERSION >= (3, 0, 0, 0):
self.Bind(wxGrid.EVT_GRID_CELL_CHANGED, self.__on_cell_change)
box = wx.BoxSizer(wx.VERTICAL)
box.Add(self.grid, 0, wx.EXPAND | wx.TOP | wx.LEFT | wx.RIGHT,
border=10)
self.SetSizer(box)
def __set_descs(self):
font = self.grid.GetCellFont(0, 0)
font.SetWeight(wx.BOLD)
for desc, (row, col) in self.locsDesc.iteritems():
self.grid.SetCellValue(row, col, desc)
self.grid.SetCellFont(row, col, font)
def __set_check_editor(self):
for _desc, (row, col) in self.locsCheck.iteritems():
self.grid.SetCellEditor(row, col, wxGrid.GridCellBoolEditor())
self.grid.SetCellAlignment(row, col, wx.ALIGN_RIGHT, wx.ALIGN_CENTRE)
self.grid.SetCellRenderer(row, col, CheckBoxCellRenderer(self))
def __set_freq_editor(self):
for (row, col) in self.locsFreq:
self.grid.SetReadOnly(row, col, False)
self.grid.SetCellAlignment(row, col, wx.ALIGN_RIGHT, wx.ALIGN_CENTRE)
self.grid.SetCellEditor(row, col, wxGrid.GridCellFloatEditor(precision=4))
def __set_check_value(self, cell, value):
(row, col) = self.locsCheck[cell]
self.grid.SetCellValue(row, col, value)
def __set_measure_value(self, cell, value):
(row, col) = self.locsMeasure[cell]
try:
self.grid.SetCellValue(row, col, value)
except TypeError:
pass
def __set_check_enable(self, cell, enable):
(row, col) = self.locsCheck[cell]
renderer = self.grid.GetCellRenderer(row, col)
renderer.Enable(not enable)
def __get_checks(self):
checks = {}
for cell in self.checked:
if self.checked[cell] == '1':
checks[cell] = True
else:
checks[cell] = False
return checks
def __update_checks(self):
for cell in self.checked:
self.__set_check_value(cell, self.checked[cell])
def __clear_checks(self):
for cell in self.checked:
self.checked[cell] = '0'
self.__update_checks()
def __on_cell_click(self, event):
self.grid.ClearSelection()
row = event.GetRow()
col = event.GetCol()
if (row, col) in self.locsCheck.values():
if self.grid.GetCellRenderer(row, col).enabled and self.measure is not None:
check = self.grid.GetCellValue(row, col)
if check == '1':
check = '0'
else:
check = '1'
self.grid.SetCellValue(row, col, check)
for control, (r, c) in self.locsCheck.iteritems():
if (r, c) == (row, col):
self.checked[control] = check
if self.selected is None:
self.selected = self.locsMeasure['start']
row = self.selected[0]
col = self.selected[1]
self.grid.SetGridCursor(row, col)
self.update_measure()
elif (row, col) in self.locsMeasure.itervalues():
self.selected = (row, col)
self.grid.SetGridCursor(row, col)
elif self.selected is None:
self.selected = self.locsMeasure['start']
row = self.selected[0]
col = self.selected[1]
self.grid.SetGridCursor(row, col)
def __on_cell_change(self, event):
row = event.GetRow()
col = event.GetCol()
if (row, col) in self.locsFreq:
start = None
end = None
try:
start = float(self.grid.GetCellValue(self.locsFreq[0][0], self.locsFreq[0][1]))
except ValueError:
pass
try:
end = float(self.grid.GetCellValue(self.locsFreq[1][0], self.locsFreq[1][1]))
except ValueError:
pass
if start is None and end is None:
return
elif start is None and end is not None:
start = end - 1
elif start is not None and end is None:
end = start + 1
if start > end:
swap = start
start = end
end = swap
self.graph.set_selected(start, end)
self.set_selected(self.spectrum, start, end)
def __on_popup_menu(self, _event):
if self.selected:
self.popupMenuCopy.Enable(True)
else:
self.popupMenuCopy.Enable(False)
self.PopupMenu(self.popupMenu)
def __on_copy(self, _event):
value = self.grid.GetCellValue(self.selected[0], self.selected[1])
clip = wx.TextDataObject(value)
wx.TheClipboard.Open()
wx.TheClipboard.SetData(clip)
wx.TheClipboard.Close()
def update_measure(self):
show = self.__get_checks()
self.graph.update_measure(self.measure, show)
def clear_measurement(self):
for control in self.locsMeasure:
self.__set_measure_value(control, "")
self.__clear_checks()
self.update_measure()
self.measure = None
def set_selected(self, spectrum, start, end):
self.spectrum = spectrum
if start is None:
return
self.measure = Measure(spectrum, start, end)
if not self.measure.is_valid():
self.clear_measurement()
return
minF, maxF = self.measure.get_f()
minP = self.measure.get_min_p()
maxP = self.measure.get_max_p()
avgP = self.measure.get_avg_p()
gMeanP = self.measure.get_gmean_p()
flatness = self.measure.get_flatness()
hbw = self.measure.get_hpw()
obw = self.measure.get_obw()
self.__set_measure_value('start',
format_precision(self.settings,
minF,
units=False))
self.__set_measure_value('end',
format_precision(self.settings,
maxF,
units=False))
self.__set_measure_value('deltaF',
format_precision(self.settings,
maxF - minF,
units=False))
self.__set_measure_value('minFP',
format_precision(self.settings,
minP[0],
units=False))
self.__set_measure_value('maxFP',
format_precision(self.settings,
maxP[0],
units=False))
self.__set_measure_value('deltaFP',
format_precision(self.settings,
maxP[0] - minP[0],
units=False))
self.__set_measure_value('minP',
format_precision(self.settings,
level=minP[1],
units=False))
self.__set_measure_value('maxP',
format_precision(self.settings,
level=maxP[1],
units=False))
self.__set_measure_value('deltaP',
format_precision(self.settings,
level=maxP[1] - minP[1],
units=False))
self.__set_measure_value('avg',
format_precision(self.settings,
level=avgP,
units=False))
self.__set_measure_value('gmean',
format_precision(self.settings,
level=gMeanP,
units=False))
self.__set_measure_value('flat',
"{0:.4f}".format(flatness))
if hbw[0] is not None:
text = format_precision(self.settings, hbw[0], units=False)
else:
text = ''
self.__set_measure_value('hbwstart', text)
if hbw[1] is not None:
text = format_precision(self.settings, hbw[1], units=False)
else:
text = ''
self.__set_measure_value('hbwend', text)
if hbw[0] is not None and hbw[1] is not None:
text = format_precision(self.settings, hbw[1] - hbw[0], units=False)
else:
text = ''
self.__set_measure_value('hbwdelta', text)
if obw[0] is not None:
text = format_precision(self.settings, obw[0], units=False)
else:
text = ''
self.__set_measure_value('obwstart', text)
if obw[1] is not None:
            text = format_precision(self.settings, obw[1], units=False)
else:
text = ''
self.__set_measure_value('obwend', text)
if obw[0] is not None and obw[1] is not None:
            text = format_precision(self.settings, obw[1] - obw[0],
                                    units=False)
else:
text = ''
self.__set_measure_value('obwdelta', text)
self.update_measure()
def show(self, show):
if show:
self.Show()
else:
self.Hide()
self.Layout()
def set_type(self, display):
for cell in self.locsCheck:
self.__set_check_enable(cell, True)
if display == Display.PLOT:
for cell in self.locsCheck:
self.__set_check_enable(cell, False)
elif display == Display.SPECT:
self.__set_check_enable(Measure.HBW, False)
self.__set_check_enable(Measure.OBW, False)
self.grid.Refresh()
if __name__ == '__main__':
print 'Please run rtlsdr_scan.py'
exit(1)
| EarToEarOak/RTLSDR-Scanner | rtlsdr_scanner/panels.py | Python | gpl-3.0 | 39,759 |
import re
string1 = "1C|2C++|3Java|4C#|5Python|6JavaScript"
string2 = "100000001"
string3 = "AC35B9V2L$40K43ll6af52hf7r"
string4 = "life is short, i use python, i love python"
# re.match tries to match from the start of the string; if the beginning does not match it returns None, and only the first match is returned
print(re.match("...", string1))
print(re.match("...", string1).group(0))
print(re.match("\d", string2)) # 1
print(re.match("\d", string2).group(0)) # 1
print(re.match("\w\w(\d\d)\w", string3).group(0)) # AC35B
print(re.match("\w\w(\d\d)\w", string3).group(1)) # 35
| Ztiany/CodeRepository | Python/Python3-Base/08_Regular/Match.py | Python | apache-2.0 | 572 |
from lms.envs.devstack_with_worker import *
| Stanford-Online/edx-platform | openedx/stanford/lms/envs/devstack_with_worker.py | Python | agpl-3.0 | 44 |
# Copyright 2013 - Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from oslo_config import fixture as config
from solum.api import auth
from solum.common import context
from solum.tests import base
from solum.tests import fakes
@mock.patch('solum.api.auth.AuthProtocolWrapper',
new_callable=fakes.FakeAuthProtocol)
class TestAuth(base.BaseTestCase):
def setUp(self):
super(TestAuth, self).setUp()
self.CONF = self.useFixture(config.Config())
self.app = fakes.FakeApp()
def test_check_auth_option_enabled(self, mock_auth):
self.CONF.config(auth_protocol="http",
auth_version="v2.0",
www_authenticate_uri=None,
group=auth.OPT_GROUP_NAME)
self.CONF.config(enable_authentication=True)
result = auth.install(self.app, self.CONF.conf)
self.assertIsInstance(result, fakes.FakeAuthProtocol)
def test_check_auth_option_disabled(self, mock_auth):
self.CONF.config(auth_protocol="http",
auth_version="v2.0",
www_authenticate_uri=None,
group=auth.OPT_GROUP_NAME)
self.CONF.config(enable_authentication=False)
result = auth.install(self.app, self.CONF.conf)
self.assertIsInstance(result, fakes.FakeApp)
def test_auth_hook_before_method(self, mock_cls):
state = mock.Mock(request=fakes.FakePecanRequest())
hook = auth.ContextHook()
hook.before(state)
ctx = state.request.security_context
self.assertIsInstance(ctx, context.RequestContext)
self.assertEqual(ctx.auth_token,
fakes.fakeAuthTokenHeaders['X-Auth-Token'])
self.assertEqual(ctx.tenant,
fakes.fakeAuthTokenHeaders['X-Project-Id'])
self.assertEqual(ctx.user,
fakes.fakeAuthTokenHeaders['X-User-Id'])
self.assertEqual(ctx.roles,
[u'admin', u'ResellerAdmin', u'_member_'])
self.assertEqual(ctx.user_name,
fakes.fakeAuthTokenHeaders['X-User-Name'])
self.assertEqual(ctx.domain,
fakes.fakeAuthTokenHeaders['X-Domain-Name'])
self.assertEqual(ctx.project_domain,
fakes.fakeAuthTokenHeaders['X-Project-Domain-Id'])
self.assertEqual(ctx.user_domain,
fakes.fakeAuthTokenHeaders['X-User-Domain-Id'])
self.assertIsNone(ctx.auth_token_info)
def test_auth_hook_before_method_auth_info(self, mock_cls):
state = mock.Mock(request=fakes.FakePecanRequest())
state.request.environ['keystone.token_info'] = 'assert_this'
hook = auth.ContextHook()
hook.before(state)
ctx = state.request.security_context
self.assertIsInstance(ctx, context.RequestContext)
self.assertEqual(fakes.fakeAuthTokenHeaders['X-Auth-Token'],
ctx.auth_token)
self.assertEqual('assert_this', ctx.auth_token_info)
| openstack/solum | solum/tests/api/test_auth.py | Python | apache-2.0 | 3,629 |
import warnings
from wtforms import form
from wtforms.ext.i18n.utils import get_translations
translations_cache = {}
class Form(form.Form):
"""
Base form for a simple localized WTForms form.
**NOTE** this class is now un-necessary as the i18n features have
been moved into the core of WTForms, and will be removed in WTForms 3.0.
This will use the stdlib gettext library to retrieve an appropriate
translations object for the language, by default using the locale
information from the environment.
If the LANGUAGES class variable is overridden and set to a sequence of
strings, this will be a list of languages by priority to use instead, e.g::
LANGUAGES = ['en_GB', 'en']
One can also provide the languages by passing `LANGUAGES=` to the
constructor of the form.
Translations objects are cached to prevent having to get a new one for the
    same languages on every instantiation.
"""
LANGUAGES = None
def __init__(self, *args, **kwargs):
warnings.warn('i18n is now in core, wtforms.ext.i18n will be removed in WTForms 3.0', DeprecationWarning)
if 'LANGUAGES' in kwargs:
self.LANGUAGES = kwargs.pop('LANGUAGES')
super(Form, self).__init__(*args, **kwargs)
def _get_translations(self):
languages = tuple(self.LANGUAGES) if self.LANGUAGES else (self.meta.locales or None)
if languages not in translations_cache:
translations_cache[languages] = get_translations(languages)
return translations_cache[languages]
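# Hedged usage sketch (not part of the original module; the field name is
# illustrative):
#
#     from wtforms import StringField
#
#     class GermanForm(Form):
#         LANGUAGES = ['de_DE', 'de']
#         name = StringField('Name')
#
#     form = GermanForm()  # fetches and caches a German translations object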
| Lessig2016/teams | wtforms/ext/i18n/form.py | Python | agpl-3.0 | 1,560 |
# -*- coding: utf-8 -*-
import os
import collections
from six.moves import cPickle
import numpy as np
import re
import itertools
class TextLoader():
def __init__(self, data_dir, batch_size, seq_length):
self.data_dir = data_dir
self.batch_size = batch_size
self.seq_length = seq_length
input_file = os.path.join(data_dir, "input.txt")
vocab_file = os.path.join(data_dir, "vocab.pkl")
tensor_file = os.path.join(data_dir, "data.npy")
        # Let's not read vocab and data from file; we may change them.
if True or not (os.path.exists(vocab_file) and os.path.exists(tensor_file)):
print("reading text file")
self.preprocess(input_file, vocab_file, tensor_file)
else:
print("loading preprocessed files")
self.load_preprocessed(vocab_file, tensor_file)
self.create_batches()
self.reset_batch_pointer()
def clean_str(self, string):
"""
Tokenization/string cleaning for all datasets except for SST.
Original taken from https://github.com/yoonkim/CNN_sentence/blob/master/process_data
"""
string = re.sub(r"[^가-힣A-Za-z0-9(),!?\'\`]", " ", string)
string = re.sub(r"\'s", " \'s", string)
string = re.sub(r"\'ve", " \'ve", string)
string = re.sub(r"n\'t", " n\'t", string)
string = re.sub(r"\'re", " \'re", string)
string = re.sub(r"\'d", " \'d", string)
string = re.sub(r"\'ll", " \'ll", string)
string = re.sub(r",", " , ", string)
string = re.sub(r"!", " ! ", string)
string = re.sub(r"\(", " \( ", string)
string = re.sub(r"\)", " \) ", string)
string = re.sub(r"\?", " \? ", string)
string = re.sub(r"\s{2,}", " ", string)
return string.strip().lower()
def build_vocab(self, sentences):
"""
Builds a vocabulary mapping from word to index based on the sentences.
Returns vocabulary mapping and inverse vocabulary mapping.
"""
# Build vocabulary
word_counts = collections.Counter(sentences)
# Mapping from index to word
vocabulary_inv = [x[0] for x in word_counts.most_common()]
vocabulary_inv = list(sorted(vocabulary_inv))
# Mapping from word to index
vocabulary = {x: i for i, x in enumerate(vocabulary_inv)}
return [vocabulary, vocabulary_inv]
def preprocess(self, input_file, vocab_file, tensor_file):
with open(input_file, "r") as f:
data = f.read()
# Optional text cleaning or make them lower case, etc.
#data = self.clean_str(data)
x_text = data.split()
self.vocab, self.words = self.build_vocab(x_text)
self.vocab_size = len(self.words)
with open(vocab_file, 'wb') as f:
cPickle.dump(self.words, f)
        # Same operation as: [self.vocab[word] for word in x_text]
        # Word indices form our basic data
self.tensor = np.array(list(map(self.vocab.get, x_text)))
# Save the data to data.npy
np.save(tensor_file, self.tensor)
def load_preprocessed(self, vocab_file, tensor_file):
with open(vocab_file, 'rb') as f:
self.words = cPickle.load(f)
self.vocab_size = len(self.words)
self.vocab = dict(zip(self.words, range(len(self.words))))
self.tensor = np.load(tensor_file)
self.num_batches = int(self.tensor.size / (self.batch_size *
self.seq_length))
def create_batches(self):
self.num_batches = int(self.tensor.size / (self.batch_size *
self.seq_length))
        if self.num_batches == 0:
            assert False, "Not enough data. Make seq_length and batch_size smaller."
self.tensor = self.tensor[:self.num_batches * self.batch_size * self.seq_length]
xdata = self.tensor
ydata = np.copy(self.tensor)
ydata[:-1] = xdata[1:]
ydata[-1] = xdata[0]
self.x_batches = np.split(xdata.reshape(self.batch_size, -1), self.num_batches, 1)
self.y_batches = np.split(ydata.reshape(self.batch_size, -1), self.num_batches, 1)
def next_batch(self):
x, y = self.x_batches[self.pointer], self.y_batches[self.pointer]
self.pointer += 1
return x, y
def reset_batch_pointer(self):
self.pointer = 0
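# Hedged usage sketch (paths and sizes illustrative; expects
# <data_dir>/input.txt to exist):
#
#     loader = TextLoader('data/tinyshakespeare', batch_size=50, seq_length=50)
#     for _ in range(loader.num_batches):
#         x, y = loader.next_batch()  # each of shape (batch_size, seq_length)
#     loader.reset_batch_pointer()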
| bahmanh/word-rnn-tensorflow | utils.py | Python | mit | 4,469 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on 2017-8-30
@author: generated by @lolobosse script
'''
LOCALE = [
["μόλις τώρα", "σε λίγο"],
["%s δευτερόλεπτα πριν", "σε %s δευτερόλεπτα"],
["1 λεπτό πριν", "σε 1 λεπτό"],
["%s λεπτά πριν", "σε %s λεπτά"],
["1 ώρα πριν", "σε 1 ώρα"],
["%s ώρες πριν", "σε %s ώρες"],
["1 μέρα πριν", "σε 1 μέρα"],
["%s μέρες πριν", "σε %s μέρες"],
["1 εβδομάδα πριν", "σε 1 εβδομάδα"],
["%s εβδομάδες πριν", "σε %s εβδομάδες"],
["1 μήνα πριν", "σε 1 μήνα"],
["%s μήνες πριν", "σε %s μήνες"],
["1 χρόνο πριν", "σε 1 χρόνο"],
["%s χρόνια πριν", "σε %s χρόνια"]
]
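# Usage note (hedged): timeago selects this table when called as
# timeago.format(date, now, locale='el'), using the [past, future]
# template pair for each time bucket.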
| Vagab0nd/SiCKRAGE | lib3/timeago/locales/el.py | Python | gpl-3.0 | 912 |
#coding=utf-8
# I don't have redis available, so this just follows the example code from the official site
import os,re,redis
path = os.path.split(os.path.realpath(__file__))[0]+"/"
f = open(path+"code.txt","r")
A = f.read()
arr = re.split("\s+",A)
r = redis.Redis(host='localhost', port=6379, db=0)
for i in range(len(arr)):
if i:
r.set(str(i),arr[i])
r.save() | luoxufeiyan/python | NKUCodingCat/0003/0003.py | Python | mit | 326 |
# -*- coding: utf-8 -*-
#
# fgmm documentation build configuration file, created by
# sphinx-quickstart on Wed Dec 1 16:26:50 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
rep_path,_ = os.path.split(os.path.dirname(__file__))
sys.path.append(os.path.join(rep_path, 'breathe'))
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc','breathe']
breathe_projects = {"fgmm":os.path.join(rep_path,"doc","xml")}
breathe_default_project = "fgmm"
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'fgmm'
copyright = u'2010, Florent D\'halluin'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '.1'
# The full version, including alpha/beta/rc tags.
release = '.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'haiku'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'fgmmdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'fgmm.tex', u'fgmm Documentation',
u'Florent D\'halluin', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'fgmm', u'fgmm Documentation',
[u'Florent D\'halluin'], 1)
]
| flo-dhalluin/fgmm | docsrc/conf.py | Python | lgpl-3.0 | 7,192 |
import random
from compositecore import Leaf
import spawner
from messenger import msg
class RemoveEntityOnDeath(Leaf):
"""
Will remove the parent from the dungeon when parent Entity dies.
"""
def __init__(self):
super(RemoveEntityOnDeath, self).__init__()
self.component_type = "remove_on_death"
def after_tick(self, time):
if self.parent.health.is_dead():
self.parent.dungeon_level.value
self.parent.mover.try_remove_from_dungeon()
class PrintDeathMessageOnDeath(Leaf):
"""
Will print death message when parent Entity dies.
"""
def __init__(self):
super(PrintDeathMessageOnDeath, self).__init__()
self.component_type = "print_death_message_on_death"
def on_tick(self, time):
if self.parent.health.is_dead():
msg.send_visual_message(self.parent.entity_messages.death, self.parent.position.value)
class LeaveCorpseOnDeath(Leaf):
"""
    Will leave a corpse behind when the parent Entity dies.
"""
def __init__(self):
super(LeaveCorpseOnDeath, self).__init__()
self.component_type = "leave_corpse_on_death"
def on_tick(self, time):
if self.parent.health.is_dead():
spawner.spawn_corpse_of_entity(self.parent)
class LeaveCorpseTurnIntoEntityOnDeath(Leaf):
"""
    Will leave a corpse that may turn into another entity when the parent Entity dies.
"""
def __init__(self, entity_factory, fail_chance):
super(LeaveCorpseTurnIntoEntityOnDeath, self).__init__()
self.component_type = "leave_corpse_on_death"
self.entity_factory = entity_factory
self.fail_chance = fail_chance
def on_tick(self, time):
if self.parent.health.is_dead():
if random.uniform(0, 1) < self.fail_chance:
spawner.spawn_corpse_turn_into_entity(self.parent, self.entity_factory)
else:
spawner.spawn_corpse_of_entity(self.parent)
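# Hedged usage sketch (assumes the composite entity API exposes a
# set_child-style attach method, as used elsewhere in this codebase):
#
#     monster.set_child(PrintDeathMessageOnDeath())
#     monster.set_child(LeaveCorpseOnDeath())
#     monster.set_child(RemoveEntityOnDeath())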
| co/TheLastRogue | ondeath.py | Python | bsd-2-clause | 1,983 |
#!/usr/bin/env python
import math
## functions ##
## classes ##
class GenomicDirections():
''' Class to represent genomic directions
'''
UPSTREAM="upstream"
DOWNSTREAM="downstream"
class ModBlatHit(object):
''' Class for the modified blat/psl format alignment hit (cf. QT).
Argument: String in modblat format
'''
def __init__(self, s):
# split and tokenize input
fields = s.strip().split()
numFields = len(fields)
matches, mismatches, repmatches, countN, qgapcount, qgapbases, \
tgapcount, tgapbases, strand, qname, qsize, qstart, qend, \
tname, tsize, tstart, tend, blockcount, blocksizes, qstarts, \
tstarts = fields[0:21]
self.matches = int(matches)
self.mismatches = int(mismatches)
self.repmatches = int(repmatches)
self.countN = int(countN)
self.qgapcount = int(qgapcount)
self.qgapbases = int(qgapbases)
self.tgapcount = int(tgapcount)
self.tgapbases = int(tgapbases)
self.strand = strand
self.qname = qname
self.qsize = int(qsize)
self.qstart = int(qstart)
self.qend = int(qend)
self.tname = tname
self.tsize = int(tsize)
self.tstart = int(tstart)
self.tend = int(tend)
self.blockcount = int(blockcount)
self.blocksizes = [int(x) for x in blocksizes.split(',')[0:-1]]
self.qstarts = [int(x) for x in qstarts.split(',')[0:-1]]
self.tstarts = [int(x) for x in tstarts.strip().split(',')[0:-1]]
def computeGenomicSequenceBedItem(self, ltr_size, transcript_size):
''' compute start, end by strand
set and return bed item
'''
if self.strand == '+':
chrom = self.tname
chromStart = self.computeGenomicSequenceCoord(ltr_size, self.tstart, 'upstream')
chromEnd = self.computeGenomicSequenceCoord(transcript_size, self.tstart, 'downstream')
score = 0
strand = '+'
bi = BedItem([chrom, chromStart, chromEnd])
length = bi.chromEnd - bi.chromStart + 1
name = " ; ".join([self.tname, self.qname, str(bi.chromStart) + ":" + str(bi.chromEnd), str(length)])
bi.set_name(name)
bi.set_score(score)
bi.set_strand(strand)
return bi
elif self.strand == '-':
chrom = self.tname
chromStart = self.computeGenomicSequenceCoord(transcript_size, self.tend, 'upstream')
chromEnd = self.computeGenomicSequenceCoord(ltr_size, self.tend, 'downstream')
score = 0
strand = '-'
bi = BedItem([chrom, chromStart, chromEnd])
length = bi.chromEnd - bi.chromStart + 1
name = " ; ".join([self.tname, self.qname, str(bi.chromStart) + ":" + str(bi.chromEnd), str(length), 'rc'])
bi.set_name(name)
bi.set_score(score)
bi.set_strand(strand)
return bi
else:
pass
def computeGenomicSequenceCoord(self, frag_size, ref_position, genomic_direction):
''' compute genomic sequence coordinate given the genomic fragment size to extract, the reference position to start from
and the direction
'''
assert genomic_direction in set([GenomicDirections.UPSTREAM, GenomicDirections.DOWNSTREAM])
if genomic_direction == GenomicDirections.UPSTREAM:
target_upstream_length = ref_position
if target_upstream_length > frag_size:
genomicCoord = ref_position - frag_size
else:
genomicCoord = 0
elif genomic_direction == GenomicDirections.DOWNSTREAM:
target_downstream_length = self.tsize - ref_position
if target_downstream_length > frag_size:
genomicCoord = ref_position + frag_size
else:
genomicCoord = self.tsize - 1
return genomicCoord
class ModBlat(object):
''' Class for representing the modified blat/psl file alignments.
        Argument: path to the modblat file
'''
def __init__(self, s):
self.filename = s
self.hits = self.__load()
def __load(self):
modblathits = []
fp = open(self.filename)
with fp as f:
# skip header line
next(f)
for line in f:
hit = ModBlatHit(line)
modblathits.append(hit)
return modblathits
class BedItem():
''' Class for representing a bed item
'''
def __init__(self, a):
chrom, chromStart, chromEnd = a[0:3]
self.chrom = chrom
if int(chromStart) == 0:
self.chromStart = int(chromStart)
else:
self.chromStart = int(chromStart) - 1
if int(chromEnd) == 0:
self.chromEnd = int(chromEnd)
else:
self.chromEnd = int(chromEnd) - 1
def set_name(self, s):
self.name = s
def set_score(self, i):
self.score = int(i)
def set_strand(self, s):
self.strand = s
def totuple(self):
t = (self.chrom,
self.chromStart,
self.chromEnd,
self.name,
self.score,
self.strand)
return t
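# Hedged usage sketch (file name and fragment sizes illustrative):
#
#     for hit in ModBlat('hits.modblat').hits:
#         bed = hit.computeGenomicSequenceBedItem(ltr_size=600, transcript_size=8000)
#         print('\t'.join(str(v) for v in bed.totuple()))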
| jos4uke/getSeqFlankBlatHit | getSeqFlankBlatHitLib.py | Python | gpl-2.0 | 5,395 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for the VFS data stream interface."""
import unittest
from dfvfs.vfs import data_stream
from tests import test_lib as shared_test_lib
class DataStreamTest(shared_test_lib.BaseTestCase):
"""Tests the VFS data stream interface."""
def testName(self):
"""Test the name property."""
test_data_stream = data_stream.DataStream(None)
self.assertEqual(test_data_stream.name, '')
def testGetExtents(self):
"""Test the GetExtents function."""
test_data_stream = data_stream.DataStream(None)
extents = test_data_stream.GetExtents()
self.assertEqual(extents, [])
def testIsDefault(self):
"""Test the IsDefault function."""
test_data_stream = data_stream.DataStream(None)
result = test_data_stream.IsDefault()
self.assertTrue(result)
if __name__ == '__main__':
unittest.main()
| joachimmetz/dfvfs | tests/vfs/data_stream.py | Python | apache-2.0 | 883 |
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 09 14:51:02 2015
@author: Methinee
"""
import pandas as pd
import numpy as np
from collections import defaultdict
from astropy.table import Table, Column
df = pd.read_csv('../data/CS_table_No2_No4_new.csv',delimiter=";", skip_blank_lines = True,
error_bad_lines=False)
headers=list(df.columns.values)
subjects = {'courseId':[]}
students = {'studentId':[]}
years = [52,53,54,55,56]
semester = [1,2]
key_sub = defaultdict(list)
key_std = defaultdict(list)
key=[]
countSub = 0
countStd = 0
#Create dictionary of list subjects
for sub in df[headers[4]]:
if sub not in subjects['courseId']:
subjects['courseId'].append(sub)
countSub = countSub+1
for i, keyCol in enumerate(subjects['courseId'], start=1):
    key_sub[i] = keyCol
#print subjects["courseId"]
#print "number of subjects are ",countSub
print "-----------------------------------------------"
print key_sub
print "-----------------------------------------------"
#Create dictionary of list students
for std in df[headers[0]]:
if std not in students['studentId']:
students['studentId'].append(std)
countStd = countStd+1
# for keyRow in students['studentId']:
# for y in years:
# students['studentId'].append(y)
#print students['studentId']
#print "number of students are ",countStd
print "-----------------------------------------------"
# Create table: rows are studentId+year+semester, columns are subject keys
column = key_sub
t = Table(column , names=(subjects['courseId']))
firstCol = students
t = Table(firstCol, names=(firstCol))
print t
"""table_No2_No4_out = pd.DataFrame(subjects)
writer = pd.ExcelWriter("table_No2_No4_fomat.xlsx")
table_No2_No4_out.to_excel(writer,"grade")
writer.save()"""
| wasit7/book_pae | pae/forcast/src/csv/CS_table_No2_No4.py | Python | mit | 1,865 |
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Options/ListOption.py 5023 2010/06/14 22:05:46 scons"
__doc__ = """Place-holder for the old SCons.Options module hierarchy
This is for backwards compatibility. The new equivalent is the Variables/
class hierarchy. These will have deprecation warnings added (some day),
and will then be removed entirely (some day).
"""
import SCons.Variables
import SCons.Warnings
warned = False
def ListOption(*args, **kw):
global warned
if not warned:
msg = "The ListOption() function is deprecated; use the ListVariable() function instead."
SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg)
warned = True
return SCons.Variables.ListVariable(*args, **kw)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| faarwa/EngSocP5 | zxing/cpp/scons/scons-local-2.0.0.final.0/SCons/Options/ListOption.py | Python | gpl-3.0 | 2,001 |
from . import paste
from . import detach
| uclouvain/OSIS-Louvain | program_management/forms/tree/__init__.py | Python | agpl-3.0 | 41 |
'''
Created on Dec 13, 2015
@author: Shannon Litwin
'''
import Adafruit_BBIO.GPIO as GPIO
import Adafruit_BBIO.PWM as PWM
import Lib_LCD as LCD
import Lib_Main as BBB
import sys
import signal
import time
leftForward = "P8_46"
leftBackward = "P8_45"
rightForward = "P9_14"
rightBackward = "P9_16"
def Control_C_Exit(signal, frame):
GPIO.cleanup()
PWM.cleanup()
print("\nProgram halted! Exiting program!")
sys.exit()
signal.signal(signal.SIGINT, Control_C_Exit) # For cleaning up mid run
'''Keep to show Dr. Berry'''
LCD.init()
time.sleep(1)
LCD.backlight("on")
time.sleep(2)
LCD.backlight("off")
time.sleep(1)
line_message = "Hi Dr. Berry."
LCD.write_line(line_message)
time.sleep(5)
LCD.cursor_home()
long_message = "This is 35 chars and needs 2 lines."
LCD.write_screen(long_message)
time.sleep(5)
LCD.cursor_home()
long_message = "Which is fine because the screen can hold up to 80 characters."
LCD.write_screen(long_message)
time.sleep(5)
LCD.cursor_home()
long_message = "However, if the message is too long it will truncate. That is why you cannot read this entire message."
LCD.write_screen(long_message)
time.sleep(5)
LCD.clear()
m1 = "It works 1"
m2 = "It works 2"
m3 = "It works 3"
m4 = "It works 4"
time.sleep(1)
LCD.goto_line(4)
LCD.write_line(m4)
time.sleep(1)
LCD.goto_line(3)
LCD.write_line(m3)
time.sleep(1)
LCD.goto_line(2)
LCD.write_line(m2)
time.sleep(1)
LCD.goto_line(1)
LCD.write_line(m1)
LCD.clear()
#pause with while loop example
#start = time.time()
#end = time.time()
#while((end - start) < 3):
# end = time.time()
BBB.cleanup_all()
| ValRose/Rose_Bone | PythonLibraries/lcd_demo.py | Python | mit | 1,617 |
from . import AbstractEngine
class HybridEngine(AbstractEngine):
def __init__(self, name, taxonomy, components, settings = {}):
super(HybridEngine, self).__init__(name, taxonomy, settings)
self.components = components
def get_components(self):
return self.components
def recommend(self, body):
raise NotImplementedError('this method must be overridden and implemented')
| halk/recowise | core/engine/hybrid.py | Python | mit | 417 |
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
"""
Robustness check for Agent-Based Models
(conceivably other models as well)
across the whole of the multidimensional
parameter space.
Author: Pawel Fiedor ([email protected])
Co-Pierre Georg ([email protected])
Version: 0.2
Date of last update: 19-11-2015 (Cape Town)
"""
if __name__ == '__main__':
import sys
from src.goodness import Goodness
args = sys.argv
if len(args) != 2:
print "Usage: ./calc_goodness config_file_path.xml"
sys.exit()
goodness = Goodness()
goodness.do_run(args[1])
| cogeorg/BlackRhino | examples/withclearing/abm_template/calc_goodness.py | Python | gpl-3.0 | 595 |
class CyCyError(Exception):
"""
Base class for non-runtime internal errors.
"""
def rstr(self):
name = self.__class__.__name__
return "%s\n%s\n\n%s" % (name, "-" * len(name), self.__str__())
| Magnetic/cycy | cycy/exceptions.py | Python | mit | 225 |
#!/usr/bin/env python3
import torch
from .. import settings
def lanczos_tridiag(
matmul_closure,
max_iter,
dtype,
device,
matrix_shape,
batch_shape=torch.Size(),
init_vecs=None,
num_init_vecs=1,
tol=1e-5,
):
"""
"""
# Determine batch mode
multiple_init_vecs = False
if not callable(matmul_closure):
raise RuntimeError(
"matmul_closure should be a function callable object that multiples a (Lazy)Tensor "
"by a vector. Got a {} instead.".format(matmul_closure.__class__.__name__)
)
    # Get initial probe vectors - and create them if not supplied
if init_vecs is None:
init_vecs = torch.randn(matrix_shape[-1], num_init_vecs, dtype=dtype, device=device)
init_vecs = init_vecs.expand(*batch_shape, matrix_shape[-1], num_init_vecs)
else:
if settings.debug.on():
if dtype != init_vecs.dtype:
raise RuntimeError(
"Supplied dtype {} and init_vecs.dtype {} do not agree!".format(dtype, init_vecs.dtype)
)
if device != init_vecs.device:
raise RuntimeError(
"Supplied device {} and init_vecs.device {} do not agree!".format(device, init_vecs.device)
)
if batch_shape != init_vecs.shape[:-2]:
raise RuntimeError(
"batch_shape {} and init_vecs.shape {} do not agree!".format(batch_shape, init_vecs.shape)
)
if matrix_shape[-1] != init_vecs.size(-2):
raise RuntimeError(
"matrix_shape {} and init_vecs.shape {} do not agree!".format(matrix_shape, init_vecs.shape)
)
num_init_vecs = init_vecs.size(-1)
# Define some constants
num_iter = min(max_iter, matrix_shape[-1])
dim_dimension = -2
    # Create storage for q_mat, alpha, and beta
# q_mat - batch version of Q - orthogonal matrix of decomp
# alpha - batch version main diagonal of T
# beta - batch version of off diagonal of T
q_mat = torch.zeros(num_iter, *batch_shape, matrix_shape[-1], num_init_vecs, dtype=dtype, device=device)
t_mat = torch.zeros(num_iter, num_iter, *batch_shape, num_init_vecs, dtype=dtype, device=device)
# Begin algorithm
# Initial Q vector: q_0_vec
q_0_vec = init_vecs / torch.norm(init_vecs, 2, dim=dim_dimension).unsqueeze(dim_dimension)
q_mat[0].copy_(q_0_vec)
# Initial alpha value: alpha_0
r_vec = matmul_closure(q_0_vec)
alpha_0 = q_0_vec.mul(r_vec).sum(dim_dimension)
# Initial beta value: beta_0
r_vec.sub_(alpha_0.unsqueeze(dim_dimension).mul(q_0_vec))
beta_0 = torch.norm(r_vec, 2, dim=dim_dimension)
# Copy over alpha_0 and beta_0 to t_mat
t_mat[0, 0].copy_(alpha_0)
t_mat[0, 1].copy_(beta_0)
t_mat[1, 0].copy_(beta_0)
# Compute the first new vector
q_mat[1].copy_(r_vec.div_(beta_0.unsqueeze(dim_dimension)))
# Now we start the iteration
for k in range(1, num_iter):
# Get previous values
q_prev_vec = q_mat[k - 1]
q_curr_vec = q_mat[k]
beta_prev = t_mat[k, k - 1].unsqueeze(dim_dimension)
# Compute next alpha value
r_vec = matmul_closure(q_curr_vec) - q_prev_vec.mul(beta_prev)
alpha_curr = q_curr_vec.mul(r_vec).sum(dim_dimension, keepdim=True)
# Copy over to t_mat
t_mat[k, k].copy_(alpha_curr.squeeze(dim_dimension))
# Copy over alpha_curr, beta_curr to t_mat
if (k + 1) < num_iter:
# Compute next residual value
r_vec.sub_(alpha_curr.mul(q_curr_vec))
# Full reorthogonalization: r <- r - Q (Q^T r)
correction = r_vec.unsqueeze(0).mul(q_mat[: k + 1]).sum(dim_dimension, keepdim=True)
correction = q_mat[: k + 1].mul(correction).sum(0)
r_vec.sub_(correction)
r_vec_norm = torch.norm(r_vec, 2, dim=dim_dimension, keepdim=True)
r_vec.div_(r_vec_norm)
# Get next beta value
beta_curr = r_vec_norm.squeeze_(dim_dimension)
# Update t_mat with new beta value
t_mat[k, k + 1].copy_(beta_curr)
t_mat[k + 1, k].copy_(beta_curr)
            # Run more reorthogonalization if necessary
inner_products = q_mat[: k + 1].mul(r_vec.unsqueeze(0)).sum(dim_dimension)
could_reorthogonalize = False
for _ in range(10):
if not torch.sum(inner_products > tol):
could_reorthogonalize = True
break
correction = r_vec.unsqueeze(0).mul(q_mat[: k + 1]).sum(dim_dimension, keepdim=True)
correction = q_mat[: k + 1].mul(correction).sum(0)
r_vec.sub_(correction)
r_vec_norm = torch.norm(r_vec, 2, dim=dim_dimension, keepdim=True)
r_vec.div_(r_vec_norm)
inner_products = q_mat[: k + 1].mul(r_vec.unsqueeze(0)).sum(dim_dimension)
# Update q_mat with new q value
q_mat[k + 1].copy_(r_vec)
if torch.sum(beta_curr.abs() > 1e-6) == 0 or not could_reorthogonalize:
break
    # Now let's transpose q_mat, t_mat into the correct shape
num_iter = k + 1
# num_init_vecs x batch_shape x matrix_shape[-1] x num_iter
q_mat = q_mat[: num_iter + 1].permute(-1, *range(1, 1 + len(batch_shape)), -2, 0).contiguous()
# num_init_vecs x batch_shape x num_iter x num_iter
t_mat = t_mat[: num_iter + 1, : num_iter + 1].permute(-1, *range(2, 2 + len(batch_shape)), 0, 1).contiguous()
# If we weren't in batch mode, remove batch dimension
if not multiple_init_vecs:
q_mat.squeeze_(0)
t_mat.squeeze_(0)
# We're done!
return q_mat, t_mat
def lanczos_tridiag_to_diag(t_mat):
"""
Given a num_init_vecs x num_batch x k x k tridiagonal matrix t_mat,
returns a num_init_vecs x num_batch x k set of eigenvalues
and a num_init_vecs x num_batch x k x k set of eigenvectors.
TODO: make the eigenvalue computations done in batch mode.
"""
orig_device = t_mat.device
if t_mat.size(-1) < 32:
retr = torch.symeig(t_mat.cpu(), eigenvectors=True)
else:
retr = torch.symeig(t_mat, eigenvectors=True)
evals, evecs = retr
mask = evals.ge(0)
evecs = evecs * mask.type_as(evecs).unsqueeze(-2)
evals = evals.masked_fill_(~mask, 1)
return evals.to(orig_device), evecs.to(orig_device)
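# Hedged usage sketch (sizes illustrative; assumes a symmetric positive
# definite matrix so the closure below is a valid symmetric matmul):
#
#     import torch
#     mat = torch.randn(100, 100)
#     mat = mat @ mat.t() + 100 * torch.eye(100)
#     q_mat, t_mat = lanczos_tridiag(
#         lambda v: mat @ v, max_iter=20, dtype=mat.dtype, device=mat.device,
#         matrix_shape=mat.shape)
#     evals, evecs = lanczos_tridiag_to_diag(t_mat)  # extreme eigenpair estimates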
| jrg365/gpytorch | gpytorch/utils/lanczos.py | Python | mit | 6,520 |
#!/usr/bin/env python
'Conjugate-gradient method'
## Copyright (C) 2008 University of Texas at Austin
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import rsf.api as rsf
def conjgrad(oper,shape,eps,d,p0,niter):
'Conjugate-gradient algorithm for shaping regularization'
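    # Shaping regularization (Fomel, 2007): x = S[p] is the model obtained by
    # shaping the preconditioned variable p, r = L[x] - d is the data residual,
    # and eps trades off data fitting against shaping. gp/gx/gr are gradients,
    # sp/sx/sr the conjugate search directions.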
p = p0
x = shape(adj=0)[p]
r = oper(adj=0)[x]-d
for iter in range(niter):
gx = oper(adj=1)[r]-x*eps
gp = shape(adj=1)[gx]+p*eps
gx = shape(adj=0)[gp]
gr = oper(adj=0)[gx]
gn = gp.dot(gp)
print "iter %d: %g" % (iter+1,gn)
if 0==iter:
sp = gp
sx = gx
sr = gr
else:
beta = gn/gnp
sp = gp + sp*beta
sx = gx + sx*beta
sr = gr + sr*beta
gnp = gn
alpha = sr.dot(sr)+eps*(sp.dot(sp)-sx.dot(sx))
alpha = -gn/alpha
p = p + sp*alpha
x = x + sx*alpha
r = r + sr*alpha
return x
if __name__ == "__main__":
# test matrix and data
matrix = rsf.File([[1,1,1,0],
[1,2,0,0],
[1,3,1,0],
[1,4,0,1],
[1,5,1,1]])
y = rsf.File([3,3,5,7,9])
x0 = rsf.File([0,0,0,0])
# matrix multiplication operator
matmult = rsf.matmult(mat=matrix)
copy = rsf.cp(x=1)
x = conjgrad(matmult,copy,1,y,x0,6)
y2 = matmult[x]
print x[:]
print y2[:]
| TobbeTripitaka/src | user/fomels/shaping.py | Python | gpl-2.0 | 2,149 |
__all__ = ["orderly","combin","graph","draw"]
| rmanders/unlabeled-graphical-enumeration | graphs/__init__.py | Python | mit | 46 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Belgium - Accounting',
'version': '1.1',
'category': 'Localization/Account Charts',
'description': """
This is the base module to manage the accounting chart for Belgium in OpenERP.
==============================================================================
After installing this module, the Configuration wizard for accounting is launched.
* We have the account templates which can be helpful to generate Charts of Accounts.
* On that particular wizard, you will be asked to pass the name of the company,
the chart template to follow, the no. of digits to generate, the code for your
account and bank account, currency to create journals.
Thus, the pure copy of Chart Template is generated.
Wizards provided by this module:
--------------------------------
    * Partner VAT Intra: Lists the partners with their related VAT and invoiced
      amounts, and prepares an XML file.
**Path to access :** Invoicing/Reporting/Legal Reports/Belgium Statements/Partner VAT Intra
* Periodical VAT Declaration: Prepares an XML file for Vat Declaration of
the Main company of the User currently Logged in.
**Path to access :** Invoicing/Reporting/Legal Reports/Belgium Statements/Periodical VAT Declaration
* Annual Listing Of VAT-Subjected Customers: Prepares an XML file for Vat
Declaration of the Main company of the User currently Logged in Based on
Fiscal year.
**Path to access :** Invoicing/Reporting/Legal Reports/Belgium Statements/Annual Listing Of VAT-Subjected Customers
""",
'author': 'Noviat & OpenERP SA',
'depends': [
'account',
'base_vat',
'base_iban',
'l10n_multilang',
],
'data': [
'account_chart_template.xml',
'account_pcmn_belgium.xml',
'account_tax_template.xml',
'l10n_be_sequence.xml',
'fiscal_templates.xml',
'account_fiscal_position_tax_template.xml',
'account_chart_template.yml',
'security/ir.model.access.csv',
'menuitem.xml'
],
'demo': [
'demo/l10n_be_demo.yml',
'../account/demo/account_bank_statement.yml',
'../account/demo/account_invoice_demo.yml',
],
'test': [
],
'installable': True,
'website': 'https://www.odoo.com/page/accounting',
'post_init_hook': 'load_translations',
}
| minhphung171093/GreenERP | openerp/addons/l10n_be/__openerp__.py | Python | gpl-3.0 | 2,511 |
from . import base
class TestTlsHttpClientWithProxy(base.TestHttpClient):
with_proxy = True
with_tls = True
| ymero/pulsar | tests/http/tunnel.py | Python | bsd-3-clause | 118 |
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This test suite verifies comm_power capability."""
import logging
from typing import Type
from gazoo_device.tests.functional_tests.utils import gdm_test_base
from mobly import asserts
_ON = "on"
_OFF = "off"
class FakeManagerEthernetSwitch():
"""Mock of ManagedEthernetSwitch."""
def __init__(self):
self.port_modes = {}
def get_port_status(self, port):
"""Returns the status of the ethernet port."""
return self.port_modes.setdefault(port, _OFF)
def turn_on_port(self, port):
"""Turns the ethernet port on."""
self.port_modes[port] = _ON
def turn_off_port(self, port):
"""Turns the ethernet port off."""
self.port_modes[port] = _OFF
class CommPowerTestSuite(gdm_test_base.GDMTestBase):
"""Functional test suite for the comm_power capability."""
@classmethod
def is_applicable_to(cls, device_type: str,
device_class: Type[gdm_test_base.DeviceType],
device_name: str) -> bool:
"""Determine if this test suite can run on the given device."""
if not device_class.has_capabilities(["comm_power"]):
return False
props = ["comm_power.hub_name", "comm_power.port_number"]
return cls.check_properties_set(device_name, props)
@classmethod
def requires_pairing(cls) -> bool:
"""Returns True if the device must be paired to run this test suite."""
return False
def setup_test(self):
"""Called at the beginning of each test."""
super().setup_test()
if self.device.comm_power.hub_type == "ethernet_switch":
self.device.ethernet_switch = FakeManagerEthernetSwitch()
def test_comm_power_on_and_off(self):
"""Verifies comm_power methods on and off work."""
original_mode = self.device.comm_power.port_mode
# pylint: disable=protected-access
if self.device.comm_power._power_and_data_share_cable:
expected_mode_off = "charge"
else:
expected_mode_off = "off"
# pylint: enable=protected-access
try:
self.device.comm_power.off()
asserts.assert_equal(
self.device.comm_power.port_mode, expected_mode_off,
f"{self.device.name} port {self.device.comm_power.port_number} "
f"should have been set to {expected_mode_off}")
self.device.comm_power.on()
asserts.assert_in(
self.device.comm_power.port_mode, ["sync", "on"],
f"{self.device.name} port {self.device.comm_power.port_number} "
"should have been set to 'on' or 'sync' but is "
f"{self.device.comm_power.port_mode}")
finally:
if self.device.comm_power.port_mode != original_mode:
logging.info(
"Restoring device communication power back to its "
"original mode %r", original_mode)
if original_mode in ["off", "charge"]:
self.device.comm_power.off()
else:
self.device.comm_power.on()
if __name__ == "__main__":
gdm_test_base.main()
| google/gazoo-device | gazoo_device/tests/functional_tests/comm_power_test_suite.py | Python | apache-2.0 | 3,518 |
from sample_code_file_maps import *
from jinja2 import Environment, FileSystemLoader
regiones = [
','.join(['chr15', str(48791193-3000), str(48791193+3000)]),
','.join(['chr2', str(21231387-3000), str(21231387+3000)]),
','.join(['chr2', str(21233999-3000), str(21233999+3000)]),
]
env = Environment(loader=FileSystemLoader('.'))
template = env.get_template('snap.tt')
for region in regiones:
    (chrom, start, end) = region.split(',')
    for sample in ethnicity_code:
        # 'with' ensures each generated script file is closed promptly
        with open("evidence_%s_%s.sh" % (sample, region.replace(',', '_')), 'w') as script:
            script.write(template.render(
                region=region,
                evidence=evidence[ethnicity_code[sample]] % chrom,
                output="/export/home/rgarcia/non-coding-NGS/igv_snapshots/sam/%s_%s.sam" % (sample, region.replace(',', '_'))))
| CSB-IG/non-coding-NGS | igv_snapshots/genera_scripts.py | Python | gpl-3.0 | 877 |
"""
Copyright 2013 Steven Diamond
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Constants for operators.
PLUS = "+"
MINUS = "-"
MUL = "*"
# Prefix for default named variables.
VAR_PREFIX = "var"
# Prefix for default named parameters.
PARAM_PREFIX = "param"
# Used to trick Numpy so cvxpy can overload ==.
NP_EQUAL_STR = "equal"
# Constraint types
EQ_CONSTR = "=="
INEQ_CONSTR = "<="
# Solver Constants
OPTIMAL = "optimal"
OPTIMAL_INACCURATE = "optimal_inaccurate"
INFEASIBLE = "infeasible"
INFEASIBLE_INACCURATE = "infeasible_inaccurate"
UNBOUNDED = "unbounded"
UNBOUNDED_INACCURATE = "unbounded_inaccurate"
USER_LIMIT = "user_limit"
SOLVER_ERROR = "solver_error"
# Statuses that indicate a solution was found.
SOLUTION_PRESENT = [OPTIMAL, OPTIMAL_INACCURATE]
# Statuses that indicate the problem is infeasible or unbounded.
INF_OR_UNB = [INFEASIBLE, INFEASIBLE_INACCURATE,
UNBOUNDED, UNBOUNDED_INACCURATE]
# Statuses that indicate an error.
ERROR = [USER_LIMIT, SOLVER_ERROR]
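# Typical client-side check (hedged illustration; use()/handle_infeasibility()
# are placeholders, not cvxpy API):
#
#     prob.solve()
#     if prob.status in SOLUTION_PRESENT:
#         use(prob.value)
#     elif prob.status in INF_OR_UNB:
#         handle_infeasibility()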
# Solver names.
CVXOPT = "CVXOPT"
GLPK = "GLPK"
GLPK_MI = "GLPK_MI"
CBC = "CBC"
CPLEX = "CPLEX"
ECOS = "ECOS"
SCS = "SCS"
DIFFCP = "DIFFCP"
SUPER_SCS = "SUPER_SCS"
GUROBI = "GUROBI"
OSQP = "OSQP"
CPLEX = "CPLEX"
MOSEK = "MOSEK"
XPRESS = "XPRESS"
NAG = "NAG"
SOLVERS = [ECOS, CVXOPT, GLPK,
GLPK_MI, SCS, GUROBI, OSQP, CPLEX,
MOSEK, CBC, XPRESS, SUPER_SCS, NAG]
# Xpress-specific items
XPRESS_IIS = "XPRESS_IIS"
XPRESS_TROW = "XPRESS_TROW"
# Parameterized problem.
PARAM_PROB = "param_prob"
# Parallel (meta) solver.
PARALLEL = "parallel"
# Robust CVXOPT LDL KKT solver.
ROBUST_KKTSOLVER = "robust"
# Map of constraint types.
# TODO(akshayka): These should be defined in a solver module.
EQ, LEQ, SOC, SOC_EW, PSD, EXP, BOOL, INT = range(8)
# Keys in the dictionary of cone dimensions.
# TODO(akshayka): These should be defined in a solver module.
EQ_DIM = "f"
LEQ_DIM = "l"
SOC_DIM = "q"
PSD_DIM = "s"
EXP_DIM = "ep"
# Keys for non-convex constraints.
BOOL_IDS = "bool_ids"
BOOL_IDX = "bool_idx"
INT_IDS = "int_ids"
INT_IDX = "int_idx"
# Keys for results_dict.
STATUS = "status"
VALUE = "value"
OBJ_OFFSET = "obj_offset"
PRIMAL = "primal"
EQ_DUAL = "eq_dual"
INEQ_DUAL = "ineq_dual"
SOLVE_TIME = "solve_time" # in seconds
SETUP_TIME = "setup_time" # in seconds
NUM_ITERS = "num_iters" # number of iterations
# Keys for problem data dict.
C = "c"
OFFSET = "offset"
P = "P"
Q = "q"
A = "A"
B = "b"
G = "G"
H = "h"
F = "F"
DIMS = "dims"
BOOL_IDX = "bool_vars_idx"
INT_IDX = "int_vars_idx"
# Keys for curvature and sign.
CONSTANT = "CONSTANT"
AFFINE = "AFFINE"
CONVEX = "CONVEX"
CONCAVE = "CONCAVE"
QUASILINEAR = "QUASILINEAR"
QUASICONVEX = "QUASICONVEX"
QUASICONCAVE = "QUASICONCAVE"
LOG_LOG_CONSTANT = "LOG-LOG CONSTANT"
LOG_LOG_AFFINE = "LOG-LOG AFFINE"
LOG_LOG_CONVEX = "LOG-LOG CONVEX"
LOG_LOG_CONCAVE = "LOG-LOG CONCAVE"
ZERO = "ZERO"
NONNEG = "NONNEGATIVE"
NONPOS = "NONPOSITIVE"
UNKNOWN = "UNKNOWN"
# Numerical tolerances
EIGVAL_TOL = 1e-10
PSD_NSD_PROJECTION_TOL = 1e-8
GENERAL_PROJECTION_TOL = 1e-10
SPARSE_PROJECTION_TOL = 1e-10
| SteveDiamond/cvxpy | cvxpy/settings.py | Python | gpl-3.0 | 3,552 |
# -*- coding: utf-8 -*-
import functools
import httplib as http
from furl import furl
from flask import request
from framework import status
from framework.auth import Auth, cas
from framework.flask import redirect # VOL-aware redirect
from framework.exceptions import HTTPError
from framework.auth.decorators import collect_auth
from framework.database import get_or_http_error
from osf.models import AbstractNode
from website import settings, language
from website.util import web_url_for
_load_node_or_fail = lambda pk: get_or_http_error(AbstractNode, pk)
def _kwargs_to_nodes(kwargs):
"""Retrieve project and component objects from keyword arguments.
:param dict kwargs: Dictionary of keyword arguments
:return: Tuple of parent and node
"""
node = kwargs.get('node') or kwargs.get('project')
parent = kwargs.get('parent')
if node:
return parent, node
pid = kwargs.get('pid')
nid = kwargs.get('nid')
if pid and nid:
node = _load_node_or_fail(nid)
parent = _load_node_or_fail(pid)
elif pid and not nid:
node = _load_node_or_fail(pid)
elif nid and not pid:
node = _load_node_or_fail(nid)
elif not pid and not nid:
raise HTTPError(
http.NOT_FOUND,
data={
'message_short': 'Node not found',
'message_long': 'No Node with that primary key could be found',
}
)
return parent, node
def _inject_nodes(kwargs):
kwargs['parent'], kwargs['node'] = _kwargs_to_nodes(kwargs)
def must_not_be_rejected(func):
"""Ensures approval/disapproval requests can't reach Sanctions that have
already been rejected.
"""
@functools.wraps(func)
def wrapped(*args, **kwargs):
node = get_or_http_error(AbstractNode, kwargs.get('nid', kwargs.get('pid')), allow_deleted=True)
if node.sanction and node.sanction.is_rejected:
raise HTTPError(http.GONE, data=dict(
message_long='This registration has been rejected'
))
return func(*args, **kwargs)
return wrapped
def must_be_valid_project(func=None, retractions_valid=False, quickfiles_valid=False):
""" Ensures permissions to retractions are never implicitly granted. """
# TODO: Check private link
def must_be_valid_project_inner(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
_inject_nodes(kwargs)
if getattr(kwargs['node'], 'is_collection', True) or (getattr(kwargs['node'], 'is_quickfiles', True) and not quickfiles_valid):
raise HTTPError(
http.NOT_FOUND
)
if not retractions_valid and getattr(kwargs['node'].retraction, 'is_retracted', False):
raise HTTPError(
http.BAD_REQUEST,
data=dict(message_long='Viewing withdrawn registrations is not permitted')
)
else:
return func(*args, **kwargs)
return wrapped
if func:
return must_be_valid_project_inner(func)
return must_be_valid_project_inner
def must_be_public_registration(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
_inject_nodes(kwargs)
node = kwargs['node']
if not node.is_public or not node.is_registration:
raise HTTPError(
http.BAD_REQUEST,
data=dict(message_long='Must be a public registration to view')
)
return func(*args, **kwargs)
return wrapped
def must_not_be_retracted_registration(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
_inject_nodes(kwargs)
node = kwargs['node']
if node.is_retracted:
return redirect(
web_url_for('resolve_guid', guid=node._id)
)
return func(*args, **kwargs)
return wrapped
def must_not_be_registration(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
_inject_nodes(kwargs)
node = kwargs['node']
if node.is_registration and not node.archiving:
raise HTTPError(
http.BAD_REQUEST,
data={
'message_short': 'Registrations cannot be changed',
'message_long': "The operation you're trying to do cannot be applied to registered projects, which are not allowed to be changed",
}
)
return func(*args, **kwargs)
return wrapped
def must_be_registration(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
_inject_nodes(kwargs)
node = kwargs['node']
if not node.is_registration:
raise HTTPError(
http.BAD_REQUEST,
data={
'message_short': 'Registered Nodes only',
'message_long': 'This view is restricted to registered Nodes only',
}
)
return func(*args, **kwargs)
return wrapped
def check_can_access(node, user, key=None, api_node=None):
"""View helper that returns whether a given user can access a node.
If ``user`` is None, returns False.
:rtype: boolean
:raises: HTTPError (403) if user cannot access the node
"""
if user is None:
return False
if not node.can_view(Auth(user=user)) and api_node != node:
if key in node.private_link_keys_deleted:
status.push_status_message('The view-only links you used are expired.', trust=False)
raise HTTPError(
http.FORBIDDEN,
data={'message_long': ('User has restricted access to this page. If this should not '
'have occurred and the issue persists, ' + language.SUPPORT_LINK)}
)
return True
def check_key_expired(key, node, url):
"""check if key expired if is return url with args so it will push status message
else return url
:param str key: the private link key passed in
:param Node node: the node object wants to access
:param str url: the url redirect to
:return: url with pushed message added if key expired else just url
"""
if key in node.private_link_keys_deleted:
url = furl(url).add({'status': 'expired'}).url
return url
def _must_be_contributor_factory(include_public, include_view_only_anon=True):
"""Decorator factory for authorization wrappers. Decorators verify whether
the current user is a contributor on the current project, or optionally
whether the current project is public.
:param bool include_public: Check whether current project is public
:param bool include_view_only_anon: Checks view_only anonymized links
:return: Authorization decorator
"""
def wrapper(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
response = None
_inject_nodes(kwargs)
node = kwargs['node']
kwargs['auth'] = Auth.from_kwargs(request.args.to_dict(), kwargs)
user = kwargs['auth'].user
key = request.args.get('view_only', '').strip('/')
            # If no user is logged in, a view-only key may still grant access
kwargs['auth'].private_key = key
if not include_view_only_anon:
from osf.models import PrivateLink
try:
link_anon = PrivateLink.objects.filter(key=key).values_list('anonymous', flat=True).get()
except PrivateLink.DoesNotExist:
link_anon = None
if not node.is_public or not include_public:
if not include_view_only_anon and link_anon:
if not check_can_access(node=node, user=user):
raise HTTPError(http.UNAUTHORIZED)
elif key not in node.private_link_keys_active:
if not check_can_access(node=node, user=user, key=key):
redirect_url = check_key_expired(key=key, node=node, url=request.url)
if request.headers.get('Content-Type') == 'application/json':
raise HTTPError(http.UNAUTHORIZED)
else:
response = redirect(cas.get_login_url(redirect_url))
return response or func(*args, **kwargs)
return wrapped
return wrapper
# Create authorization decorators
must_be_contributor = _must_be_contributor_factory(False)
must_be_contributor_or_public = _must_be_contributor_factory(True)
must_be_contributor_or_public_but_not_anonymized = _must_be_contributor_factory(include_public=True, include_view_only_anon=False)
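# Hedged usage sketch (the view function below is illustrative, not part of
# this module). The decorators inject `auth` and `node` into kwargs:
#     @must_be_valid_project
#     @must_be_contributor_or_public
#     def project_summary(auth, node, **kwargs):
#         return {'title': node.title, 'user': auth.user._id}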
def must_have_addon(addon_name, model):
"""Decorator factory that ensures that a given addon has been added to
the target node. The decorated function will throw a 404 if the required
addon is not found. Must be applied after a decorator that adds `node` and
`project` to the target function's keyword arguments, such as
    `must_be_contributor`.
:param str addon_name: Name of addon
:param str model: Name of model
:returns: Decorator function
"""
def wrapper(func):
@functools.wraps(func)
@collect_auth
def wrapped(*args, **kwargs):
if model == 'node':
_inject_nodes(kwargs)
owner = kwargs['node']
elif model == 'user':
auth = kwargs.get('auth')
owner = auth.user if auth else None
if owner is None:
raise HTTPError(http.UNAUTHORIZED)
else:
raise HTTPError(http.BAD_REQUEST)
addon = owner.get_addon(addon_name)
if addon is None:
raise HTTPError(http.BAD_REQUEST)
kwargs['{0}_addon'.format(model)] = addon
return func(*args, **kwargs)
return wrapped
return wrapper
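# Hedged usage sketch (addon and view names are illustrative). The addon
# object is injected into kwargs as '<model>_addon':
#     @must_be_valid_project
#     @must_have_addon('github', 'node')
#     def github_settings(node_addon, **kwargs):
#         return {'configured': node_addon.complete}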
def must_be_addon_authorizer(addon_name):
"""
:param str addon_name: Name of addon
:returns: Decorator function
"""
def wrapper(func):
@functools.wraps(func)
@collect_auth
def wrapped(*args, **kwargs):
node_addon = kwargs.get('node_addon')
if not node_addon:
_inject_nodes(kwargs)
node = kwargs['node']
node_addon = node.get_addon(addon_name)
if not node_addon:
raise HTTPError(http.BAD_REQUEST)
if not node_addon.user_settings:
raise HTTPError(http.BAD_REQUEST)
auth = kwargs.get('auth')
user = kwargs.get('user') or (auth.user if auth else None)
if node_addon.user_settings.owner != user:
raise HTTPError(http.FORBIDDEN)
return func(*args, **kwargs)
return wrapped
return wrapper
def must_have_permission(permission):
"""Decorator factory for checking permissions. Checks that user is logged
in and has necessary permissions for node. Node must be passed in keyword
arguments to view function.
    :param str permission: The permission required to access the view
:returns: Decorator function for checking permissions
:raises: HTTPError(http.UNAUTHORIZED) if not logged in
:raises: HTTPError(http.FORBIDDEN) if missing permissions
"""
def wrapper(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
# Ensure `project` and `node` kwargs
_inject_nodes(kwargs)
node = kwargs['node']
kwargs['auth'] = Auth.from_kwargs(request.args.to_dict(), kwargs)
user = kwargs['auth'].user
# User must be logged in
if user is None:
raise HTTPError(http.UNAUTHORIZED)
# User must have permissions
if not node.has_permission(user, permission):
raise HTTPError(http.FORBIDDEN)
# Call view function
return func(*args, **kwargs)
# Return decorated function
return wrapped
# Return decorator
return wrapper
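# Hedged usage sketch ('write' mirrors the permission string used elsewhere
# in this module; the view body is illustrative):
#     @must_have_permission('write')
#     def edit_node(auth, node, **kwargs):
#         node.set_title(request.json['title'], auth=auth)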
def must_have_write_permission_or_public_wiki(func):
""" Checks if user has write permission or wiki is public and publicly editable. """
@functools.wraps(func)
def wrapped(*args, **kwargs):
# Ensure `project` and `node` kwargs
_inject_nodes(kwargs)
wiki = kwargs['node'].get_addon('wiki')
if wiki and wiki.is_publicly_editable:
return func(*args, **kwargs)
else:
return must_have_permission('write')(func)(*args, **kwargs)
# Return decorated function
return wrapped
def http_error_if_disk_saving_mode(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
_inject_nodes(kwargs)
node = kwargs['node']
if settings.DISK_SAVING_MODE:
raise HTTPError(
http.METHOD_NOT_ALLOWED,
redirect_url=node.url
)
return func(*args, **kwargs)
return wrapper
| TomBaxter/osf.io | website/project/decorators.py | Python | apache-2.0 | 13,204 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
DOCUMENTATION = """
---
module: kube
short_description: Manage Kubernetes Cluster
description:
- Create, replace, remove, and stop resources within a Kubernetes Cluster
version_added: "2.0"
options:
name:
required: false
default: null
description:
      - The name associated with the resource
filename:
required: false
default: null
description:
- The path and filename of the resource(s) definition file(s).
- To operate on several files this can accept a comma separated list of files or a list of files.
aliases: [ 'files', 'file', 'filenames' ]
kubectl:
required: false
default: null
description:
- The path to the kubectl bin
namespace:
required: false
default: null
description:
- The namespace associated with the resource(s)
resource:
required: false
default: null
description:
      - The resource to perform an action on, e.g. pods (po), replicationControllers (rc), services (svc).
label:
required: false
default: null
description:
- The labels used to filter specific resources.
server:
required: false
default: null
description:
- The url for the API server that commands are executed against.
force:
required: false
default: false
description:
      - A flag indicating whether to force delete, replace, or stop.
all:
required: false
default: false
description:
      - A flag to indicate delete all, stop all, or check all namespaces when testing existence.
log_level:
required: false
default: 0
description:
- Indicates the level of verbosity of logging by kubectl.
state:
required: false
choices: ['present', 'absent', 'latest', 'reloaded', 'stopped']
default: present
description:
- present handles checking existence or creating if definition file provided,
absent handles deleting resource(s) based on other options,
latest handles creating or updating based on existence,
reloaded handles updating resource(s) definition using definition file,
stopped handles stopping resource(s) based on other options.
requirements:
- kubectl
author: "Kenny Jones (@kenjones-cisco)"
"""
EXAMPLES = """
- name: test nginx is present
kube: name=nginx resource=rc state=present
- name: test nginx is stopped
kube: name=nginx resource=rc state=stopped
- name: test nginx is absent
kube: name=nginx resource=rc state=absent
- name: test nginx is present
kube: filename=/tmp/nginx.yml
- name: test nginx and postgresql are present
kube: files=/tmp/nginx.yml,/tmp/postgresql.yml
- name: test nginx and postgresql are present
kube:
files:
- /tmp/nginx.yml
- /tmp/postgresql.yml
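# illustrative only: the namespace value below is an assumption
- name: test nginx is absent from the staging namespace
  kube:
    name: nginx
    resource: rc
    namespace: staging
    state: absent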
"""
class KubeManager(object):
def __init__(self, module):
self.module = module
self.kubectl = module.params.get('kubectl')
if self.kubectl is None:
self.kubectl = module.get_bin_path('kubectl', True)
self.base_cmd = [self.kubectl]
if module.params.get('server'):
self.base_cmd.append('--server=' + module.params.get('server'))
if module.params.get('log_level'):
self.base_cmd.append('--v=' + str(module.params.get('log_level')))
if module.params.get('namespace'):
self.base_cmd.append('--namespace=' + module.params.get('namespace'))
self.all = module.params.get('all')
self.force = module.params.get('force')
self.name = module.params.get('name')
self.filename = [f.strip() for f in module.params.get('filename') or []]
self.resource = module.params.get('resource')
self.label = module.params.get('label')
def _execute(self, cmd):
args = self.base_cmd + cmd
try:
rc, out, err = self.module.run_command(args)
if rc != 0:
self.module.fail_json(
msg='error running kubectl (%s) command (rc=%d), out=\'%s\', err=\'%s\'' % (' '.join(args), rc, out, err))
except Exception as exc:
self.module.fail_json(
msg='error running kubectl (%s) command: %s' % (' '.join(args), str(exc)))
return out.splitlines()
def _execute_nofail(self, cmd):
args = self.base_cmd + cmd
rc, out, err = self.module.run_command(args)
if rc != 0:
return None
return out.splitlines()
def create(self, check=True, force=True):
if check and self.exists():
return []
cmd = ['apply']
if force:
cmd.append('--force')
if not self.filename:
self.module.fail_json(msg='filename required to create')
cmd.append('--filename=' + ','.join(self.filename))
return self._execute(cmd)
def replace(self, force=True):
cmd = ['apply']
if force:
cmd.append('--force')
if not self.filename:
self.module.fail_json(msg='filename required to reload')
cmd.append('--filename=' + ','.join(self.filename))
return self._execute(cmd)
def delete(self):
if not self.force and not self.exists():
return []
cmd = ['delete']
if self.filename:
cmd.append('--filename=' + ','.join(self.filename))
else:
if not self.resource:
self.module.fail_json(msg='resource required to delete without filename')
cmd.append(self.resource)
if self.name:
cmd.append(self.name)
if self.label:
cmd.append('--selector=' + self.label)
if self.all:
cmd.append('--all')
if self.force:
cmd.append('--ignore-not-found')
return self._execute(cmd)
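    # Hedged example of a generated delete command (values illustrative):
    #   kubectl --namespace=kube-system delete rc nginx --ignore-not-found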
def exists(self):
cmd = ['get']
if self.filename:
cmd.append('--filename=' + ','.join(self.filename))
else:
if not self.resource:
self.module.fail_json(msg='resource required without filename')
cmd.append(self.resource)
if self.name:
cmd.append(self.name)
if self.label:
cmd.append('--selector=' + self.label)
if self.all:
cmd.append('--all-namespaces')
cmd.append('--no-headers')
result = self._execute_nofail(cmd)
if not result:
return False
return True
# TODO: This is currently unused, perhaps convert to 'scale' with a replicas param?
def stop(self):
if not self.force and not self.exists():
return []
cmd = ['stop']
if self.filename:
cmd.append('--filename=' + ','.join(self.filename))
else:
if not self.resource:
self.module.fail_json(msg='resource required to stop without filename')
cmd.append(self.resource)
if self.name:
cmd.append(self.name)
if self.label:
cmd.append('--selector=' + self.label)
if self.all:
cmd.append('--all')
if self.force:
cmd.append('--ignore-not-found')
return self._execute(cmd)
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(),
filename=dict(type='list', aliases=['files', 'file', 'filenames']),
namespace=dict(),
resource=dict(),
label=dict(),
server=dict(),
kubectl=dict(),
force=dict(default=False, type='bool'),
all=dict(default=False, type='bool'),
log_level=dict(default=0, type='int'),
state=dict(default='present', choices=['present', 'absent', 'latest', 'reloaded', 'stopped']),
),
        # Note: upstream listed 'list' here, which is not a declared
        # parameter, so the constraint was never enforced; 'name' is the
        # declared option that conflicts with 'filename'.
        mutually_exclusive=[['filename', 'name']]
)
changed = False
manager = KubeManager(module)
state = module.params.get('state')
if state == 'present':
result = manager.create(check=False)
elif state == 'absent':
result = manager.delete()
elif state == 'reloaded':
result = manager.replace()
elif state == 'stopped':
result = manager.stop()
elif state == 'latest':
result = manager.replace()
else:
module.fail_json(msg='Unrecognized state %s.' % state)
    # Best-effort 'changed' signal: treat any kubectl output as evidence an
    # action was taken (an approximation; 'apply' may also report unchanged
    # resources).
    changed = bool(result)
    module.exit_json(changed=changed,
                     msg='success: %s' % (' '.join(result))
                     )
from ansible.module_utils.basic import * # noqa
if __name__ == '__main__':
main()
| insequent/kargo | library/kube.py | Python | apache-2.0 | 8,694 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010 Vauxoo C.A. (http://openerp.com.ve/) All Rights Reserved.
# Javier Duran <[email protected]>
#
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import trial_cost
| 3dfxsoftware/cbss-addons | report_profit/report/__init__.py | Python | gpl-2.0 | 1,424 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class WorkflowRunActionsOperations(object):
"""WorkflowRunActionsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.logic.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
resource_group_name, # type: str
workflow_name, # type: str
run_name, # type: str
top=None, # type: Optional[int]
filter=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.WorkflowRunActionListResult"]
"""Gets a list of workflow run actions.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param workflow_name: The workflow name.
:type workflow_name: str
:param run_name: The workflow run name.
:type run_name: str
:param top: The number of items to be included in the result.
:type top: int
:param filter: The filter to apply on the operation. Options for filters include: Status.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either WorkflowRunActionListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.logic.models.WorkflowRunActionListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkflowRunActionListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-05-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'workflowName': self._serialize.url("workflow_name", workflow_name, 'str'),
'runName': self._serialize.url("run_name", run_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query("top", top, 'int')
if filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('WorkflowRunActionListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/workflows/{workflowName}/runs/{runName}/actions'} # type: ignore
def get(
self,
resource_group_name, # type: str
workflow_name, # type: str
run_name, # type: str
action_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.WorkflowRunAction"
"""Gets a workflow run action.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param workflow_name: The workflow name.
:type workflow_name: str
:param run_name: The workflow run name.
:type run_name: str
:param action_name: The workflow action name.
:type action_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: WorkflowRunAction, or the result of cls(response)
:rtype: ~azure.mgmt.logic.models.WorkflowRunAction
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkflowRunAction"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-05-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'workflowName': self._serialize.url("workflow_name", workflow_name, 'str'),
'runName': self._serialize.url("run_name", run_name, 'str'),
'actionName': self._serialize.url("action_name", action_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('WorkflowRunAction', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/workflows/{workflowName}/runs/{runName}/actions/{actionName}'} # type: ignore
def list_expression_traces(
self,
resource_group_name, # type: str
workflow_name, # type: str
run_name, # type: str
action_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.ExpressionTraces"]
"""Lists a workflow run expression trace.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param workflow_name: The workflow name.
:type workflow_name: str
:param run_name: The workflow run name.
:type run_name: str
:param action_name: The workflow action name.
:type action_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ExpressionTraces or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.logic.models.ExpressionTraces]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressionTraces"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-05-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_expression_traces.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'workflowName': self._serialize.url("workflow_name", workflow_name, 'str'),
'runName': self._serialize.url("run_name", run_name, 'str'),
'actionName': self._serialize.url("action_name", action_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.post(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ExpressionTraces', pipeline_response)
list_of_elem = deserialized.inputs
if cls:
list_of_elem = cls(list_of_elem)
return None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_expression_traces.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/workflows/{workflowName}/runs/{runName}/actions/{actionName}/listExpressionTraces'} # type: ignore
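# Hedged usage sketch (not part of the generated file; resource names are
# placeholders). These operations are normally reached through
# LogicManagementClient rather than constructed directly:
#     from azure.identity import DefaultAzureCredential
#     from azure.mgmt.logic import LogicManagementClient
#     client = LogicManagementClient(DefaultAzureCredential(), '<subscription-id>')
#     for action in client.workflow_run_actions.list('my-rg', 'my-workflow', 'my-run'):
#         print(action.name, action.status)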
| Azure/azure-sdk-for-python | sdk/logic/azure-mgmt-logic/azure/mgmt/logic/operations/_workflow_run_actions_operations.py | Python | mit | 13,765 |
"""Constants for the Minecraft Server integration."""
ATTR_PLAYERS_LIST = "players_list"
DEFAULT_HOST = "localhost"
DEFAULT_NAME = "Minecraft Server"
DEFAULT_PORT = 25565
DOMAIN = "minecraft_server"
ICON_LATENCY_TIME = "mdi:signal"
ICON_PLAYERS_MAX = "mdi:account-multiple"
ICON_PLAYERS_ONLINE = "mdi:account-multiple"
ICON_PROTOCOL_VERSION = "mdi:numeric"
ICON_STATUS = "mdi:lan"
ICON_VERSION = "mdi:numeric"
KEY_SERVERS = "servers"
MANUFACTURER = "Mojang AB"
NAME_LATENCY_TIME = "Latency Time"
NAME_PLAYERS_MAX = "Players Max"
NAME_PLAYERS_ONLINE = "Players Online"
NAME_PROTOCOL_VERSION = "Protocol Version"
NAME_STATUS = "Status"
NAME_VERSION = "Version"
SCAN_INTERVAL = 60
SIGNAL_NAME_PREFIX = f"signal_{DOMAIN}"
UNIT_LATENCY_TIME = "ms"
UNIT_PLAYERS_MAX = "players"
UNIT_PLAYERS_ONLINE = "players"
UNIT_PROTOCOL_VERSION = None
UNIT_VERSION = None
| postlund/home-assistant | homeassistant/components/minecraft_server/const.py | Python | apache-2.0 | 863 |
"""
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
import pytest
from byceps.services.user import command_service as user_command_service
from byceps.services.user import event_service
from byceps.services.user import service as user_service
@pytest.fixture(scope='module')
def admin_user(make_user):
return make_user('SuspensionAdmin')
@pytest.fixture
def cheater(make_user):
return make_user('Cheater')
@pytest.fixture
def remorseful_user(make_user):
return make_user('TemporaryNuisance')
def test_suspend(admin_app, cheater, admin_user):
user_id = cheater.id
reason = 'User has been caught cheating.'
user_before = user_service.get_user(user_id)
assert not user_before.suspended
events_before = event_service.get_events_for_user(user_before.id)
assert len(events_before) == 0
# -------------------------------- #
user_command_service.suspend_account(user_id, admin_user.id, reason)
# -------------------------------- #
user_after = user_service.get_user(user_id)
assert user_after.suspended
events_after = event_service.get_events_for_user(user_after.id)
assert len(events_after) == 1
suspended_event = events_after[0]
assert suspended_event.event_type == 'user-suspended'
assert suspended_event.data == {
'initiator_id': str(admin_user.id),
'reason': reason,
}
def test_unsuspend(admin_app, remorseful_user, admin_user):
user_id = remorseful_user.id
user_command_service.suspend_account(user_id, admin_user.id, 'Annoying')
reason = 'User showed penitence. Drop the ban.'
user_before = user_service.get_user(user_id)
assert user_before.suspended
events_before = event_service.get_events_for_user(user_before.id)
assert len(events_before) == 1
# -------------------------------- #
user_command_service.unsuspend_account(user_id, admin_user.id, reason)
# -------------------------------- #
user_after = user_service.get_user(user_id)
assert not user_after.suspended
events_after = event_service.get_events_for_user(user_after.id)
assert len(events_after) == 2
unsuspended_event = events_after[1]
assert unsuspended_event.event_type == 'user-unsuspended'
assert unsuspended_event.data == {
'initiator_id': str(admin_user.id),
'reason': reason,
}
| homeworkprod/byceps | tests/integration/services/user/test_suspend_unsuspend.py | Python | bsd-3-clause | 2,416 |
""" EC2Endpoint class is the implementation of the EC2 interface to
a cloud endpoint
"""
import os
import json
import boto3
from DIRAC import gLogger, S_OK, S_ERROR
from DIRAC.Core.Utilities.File import makeGuid
from VMDIRAC.Resources.Cloud.Endpoint import Endpoint
__RCSID__ = '$Id$'
class EC2Endpoint( Endpoint ):
  def __init__( self, parameters = None ):
    """
    """
    # Avoid sharing a mutable default argument across instances
    if parameters is None:
      parameters = {}
    Endpoint.__init__( self, parameters = parameters )
# logger
self.log = gLogger.getSubLogger( 'EC2Endpoint' )
self.valid = False
result = self.initialize()
if result['OK']:
self.log.debug( 'EC2Endpoint created and validated' )
self.valid = True
else:
self.log.error( result['Message'] )
def initialize( self ):
availableParams = {
'RegionName': 'region_name',
'AccessKey': 'aws_access_key_id',
'SecretKey': 'aws_secret_access_key',
'EndpointUrl': 'endpoint_url', # EndpointUrl is optional
}
connDict = {}
for var in availableParams:
if var in self.parameters:
connDict[ availableParams[ var ] ] = self.parameters[ var ]
try:
self.__ec2 = boto3.resource( 'ec2', **connDict )
    except Exception as e:
errorStatus = "Can't connect to EC2: " + str(e)
return S_ERROR( errorStatus )
result = self.__loadInstanceType()
if not result['OK']:
return result
result = self.__checkConnection()
return result
def __loadInstanceType( self ):
currentDir = os.path.dirname( os.path.abspath( __file__ ) )
instanceTypeFile = os.path.join( currentDir, 'ec2_instance_type.json' )
try:
with open( instanceTypeFile, 'r' ) as f:
self.__instanceTypeInfo = json.load( f )
    except Exception as e:
errmsg = "Exception loading EC2 instance type info: %s" % e
self.log.error( errmsg )
return S_ERROR( errmsg )
return S_OK()
def __checkConnection( self ):
"""
Checks connection status by trying to list the images.
:return: S_OK | S_ERROR
"""
try:
self.__ec2.images.filter( Owners = ['self'] )
    except Exception as e:
return S_ERROR( e )
return S_OK()
def createInstances( self, vmsToSubmit ):
outputDict = {}
for nvm in xrange( vmsToSubmit ):
instanceID = makeGuid()[:8]
result = self.createInstance( instanceID )
if result['OK']:
ec2Id, nodeDict = result['Value']
self.log.debug( 'Created VM instance %s/%s' % ( ec2Id, instanceID ) )
outputDict[ec2Id] = nodeDict
else:
self.log.error( 'Create EC2 instance error:', result['Message'] )
break
return S_OK( outputDict )
def createInstance( self, instanceID = '' ):
if not instanceID:
instanceID = makeGuid()[:8]
self.parameters['VMUUID'] = instanceID
self.parameters['VMType'] = self.parameters.get( 'CEType', 'EC2' )
createNodeDict = {}
# Image
if not "ImageID" in self.parameters and 'ImageName' in self.parameters:
try:
images = self.__ec2.images.filter( Filters = [{'Name': 'name', 'Values': [self.parameters['ImageName']]}] )
imageId = None
for image in images:
imageId = image.id
break
except Exception as e:
self.log.error( "Exception when get ID from image name %s:" % self.parameters['ImageName'], e )
return S_ERROR( "Failed to get image for Name %s" % self.parameters['ImageName'] )
if imageId is None:
return S_ERROR( "Image name %s not found" % self.parameters['ImageName'] )
elif "ImageID" in self.parameters:
try:
self.__ec2.images.filter( ImageIds = [self.parameters['ImageID']] )
except Exception as e:
return S_ERROR( "Failed to get image for ID %s" % self.parameters['ImageID'] )
imageId = self.parameters['ImageID']
else:
return S_ERROR( 'No image specified' )
createNodeDict['ImageId'] = imageId
# Instance type
if 'FlavorName' not in self.parameters:
return S_ERROR( 'No flavor specified' )
instanceType = self.parameters['FlavorName']
createNodeDict['InstanceType'] = instanceType
# User data
result = self._createUserDataScript()
if not result['OK']:
return result
createNodeDict['UserData'] = str( result['Value'] )
# Other params
for param in [ 'KeyName', 'SubnetId', 'EbsOptimized' ]:
if param in self.parameters:
createNodeDict[param] = self.parameters[param]
self.log.info( "Creating node:" )
for key, value in createNodeDict.items():
self.log.verbose( "%s: %s" % ( key, value ) )
# Create the VM instance now
try:
instances = self.__ec2.create_instances( MinCount = 1, MaxCount = 1, **createNodeDict )
except Exception as e:
errmsg = 'Exception in ec2 create_instances: %s' % e
self.log.error( errmsg )
return S_ERROR( errmsg )
if len(instances) < 1:
errmsg = 'ec2 create_instances failed to create any VM'
self.log.error( errmsg )
return S_ERROR( errmsg )
# Create the name in tags
ec2Id = instances[0].id
tags = [{ 'Key': 'Name', 'Value': 'DIRAC_%s' % instanceID }]
try:
self.__ec2.create_tags( Resources = [ec2Id], Tags = tags )
except Exception as e:
errmsg = 'Exception setup name for %s: %s' % ( ec2Id, e )
self.log.error( errmsg )
return S_ERROR( errmsg )
# Properties of the instance
nodeDict = {}
# nodeDict['PublicIP'] = publicIP
nodeDict['InstanceID'] = instanceID
if instanceType in self.__instanceTypeInfo:
nodeDict['NumberOfCPUs'] = self.__instanceTypeInfo[instanceType]['vCPU']
nodeDict['RAM'] = self.__instanceTypeInfo[instanceType]['Memory']
else:
nodeDict['NumberOfCPUs'] = 1
return S_OK( ( ec2Id, nodeDict ) )
def stopVM( self, nodeID, publicIP = '' ):
"""
Given the node ID it gets the node details, which are used to destroy the
node making use of the libcloud.openstack driver. If three is any public IP
( floating IP ) assigned, frees it as well.
:Parameters:
**uniqueId** - `string`
openstack node id ( not uuid ! )
**public_ip** - `string`
public IP assigned to the node if any
:return: S_OK | S_ERROR
"""
try:
self.__ec2.Instance( nodeID ).terminate()
except Exception as e:
errmsg = 'Exception terminate instance %s: %s' % ( nodeID, e )
self.log.error( errmsg )
return S_ERROR( errmsg )
return S_OK()
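# Hedged usage sketch (all parameter values are placeholders):
#   params = { 'RegionName': 'us-west-2',
#              'AccessKey': '<aws-access-key>',
#              'SecretKey': '<aws-secret-key>',
#              'ImageName': 'dirac-worker-image',
#              'FlavorName': 'm4.large',
#              'KeyName': 'dirac-keypair' }
#   endpoint = EC2Endpoint( params )
#   if endpoint.valid:
#     result = endpoint.createInstances( 2 )
#     # result['Value'] maps EC2 instance ids to node property dicts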
| xianghuzhao/VMDIRAC | VMDIRAC/Resources/Cloud/EC2Endpoint.py | Python | gpl-3.0 | 6,505 |
#!/usr/bin/python
# $Id$
# vim:ft=python:sw=4:sta:et
#
# test_cronwatch.py - Unit tests for cronwatch
# Copyright (C) 2011 David Lowry < wdlowry at gmail dot com >
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import unittest
import os
import re
from tempfile import NamedTemporaryFile, TemporaryFile, mkdtemp, mkstemp
from StringIO import StringIO
from test_base import *
from validate import VdtTypeError, VdtValueError
from configobj import get_extra_values
from getpass import getuser
from datetime import datetime
import cronwatch
###############################################################################
# Cronwatch functionality tests
###############################################################################
class TestCommandLine(TestBase):
'''Test the command line functionality
Note: The command line options will not be tested. See the comment for
main().'''
def test_empty_command_line(self):
'''Should raise an error if the executable is missing from the command
line'''
self.assertRaisesError(cronwatch.Error,
'missing command line argument: executable',
cronwatch.main, ['cronwatch'])
class TestRun(TestBase):
'''Test the run() function'''
def test_run_error(self):
'''Should throw an exception when there's an error running the
executable'''
self.assertRaisesError(cronwatch.Error,
'could not run missing: [Errno 2] No such file or directory',
cronwatch.run, ['missing'])
def test_simple_output(self):
'''Should return the output'''
(o, r) = cronwatch.run(['./test_script.sh', 'simple'])
self.assertEquals(10, r)
o = o.read()
self.assertEquals('stdout\nstderr\nstdout again\n', o)
def test_stdin(self):
'''Should close stdin just to be safe'''
# This will hang if something is not done about stdin
(o, r) = cronwatch.run(['./test_script.sh', 'read'])
self.assertEquals(0, r)
o = o.read()
self.assertEquals('\n', o)
def test_timeout(self):
'''Should timeout and terminate the process'''
(o, r) = cronwatch.run(['./test_script.sh', 'timeout'], 0)
self.assertEquals(-1, r)
o = o.read()
self.assertEquals('', o)
class TestLineSearch(TestBase):
def test_match(self):
'''Should tell if a list of regular expressions matches a line and
which ones'''
r = cronwatch.line_search('test', [re.compile('t'),
re.compile('e'),
re.compile('1')])
self.assertEquals((True, ['t', 'e']), r)
def test_no_match(self):
'''Should return an empty list if there were no matches'''
r = cronwatch.line_search('test', [re.compile('1'),
re.compile('2'),
re.compile('3')])
self.assertEquals((False, []), r)
def test_all(self):
'''Should return a list of just the ones that were found'''
r = cronwatch.line_search('test', [re.compile('t'),
re.compile('e'),
re.compile('s')], find_all = True)
self.assertEquals((True, ['t', 'e', 's']), r)
r = cronwatch.line_search('test', [re.compile('t'),
re.compile('2'),
re.compile('3')], find_all = True)
self.assertEquals((False, ['t']), r)
class TestIsReadableFile(TestBase):
def test_file(self):
'''Should return a filename'''
self.assertEquals('test_file.txt',
cronwatch.is_readable_file('test_file.txt'))
def test_no_file(self):
'''Should raise a validation error'''
self.assertRaisesError(cronwatch.VdtValueMsgError,
"""could not read file: [Errno 2] No such file or directory: 'not_a_file.txt'""",
cronwatch.is_readable_file, 'not_a_file.txt')
class TestIsRegex(TestBase):
def test_not_string(self):
'''Should raise VdtTypeError if it's not a string'''
self.assertRaises(VdtTypeError, cronwatch.is_regex, 1)
def test_bad_regex(self):
'''Should raise VdtValueError if it's not a valid regex'''
self.assertRaisesError(cronwatch.VdtValueMsgError,
'''invalid regular expression: (: unbalanced parenthesis''',
cronwatch.is_regex, '(')
def test_return(self):
'''Should return a valid regular expression object'''
self.assertTrue(cronwatch.is_regex('reg').match)
class TestIsRegexList(TestBase):
def test_not_list(self):
'''Should raise VdtTypeError if not a list'''
self.assertRaises(VdtTypeError, cronwatch.is_regex_list, 'reg')
def test_regex(self):
'''Should return a list of regex objects'''
self.assertTrue(cronwatch.is_regex_list(['reg', 'reg'])[0].match)
class TestForceRegexList(TestBase):
def test_list(self):
'''Should return a list of regex objects if already in list form'''
self.assertTrue(cronwatch.force_regex_list(['reg', 'reg'])[0].match)
def test_not_list(self):
'''Should create a list if not a list already'''
self.assertTrue(cronwatch.force_regex_list('reg')[0].match)
class TestForceIntList(TestBase):
def test_list(self):
'''Should return a list of integers if already in list form'''
self.assertEquals([1, 2], cronwatch.force_int_list(['1', '2']))
def test_not_list(self):
'''Should create a list if not a list already'''
self.assertEquals([1], cronwatch.force_int_list(1))
class TestReadConfig(TestBase):
'''Test the read_config() function'''
def setUp(self):
self.old_config = cronwatch.CONFIGFILE
cronwatch.CONFIGFILE = 'this_is_not_a_file.forsure'
def tearDown(self):
cronwatch.CONFIGFILE = self.old_config
def config(self, text):
'''Create a NamedTemporaryFile and return the object'''
cf = NamedTemporaryFile()
cf.write(text)
cf.seek(0)
return cf
def test_defaults(self):
'''Should set defaults if no config is found'''
cf = self.config('[test]');
c = cronwatch.read_config(cf.name)
for s in ['test', '_default_']:
self.assertEquals([], c[s]['required'])
self.assertEquals(None, c[s]['whitelist'])
self.assertEquals([], c[s]['blacklist'])
self.assertEquals([0], c[s]['exit_codes'])
self.assertEquals(None, c[s]['preamble_file'])
self.assertEquals(None, c[s]['email_to'])
self.assertEquals(None, c[s]['email_from'])
self.assertEquals(102400, c[s]['email_maxsize'])
self.assertEquals(False, c[s]['email_success'])
self.assertEquals('/usr/lib/sendmail', c[s]['email_sendmail'])
self.assertEquals(None, c[s]['logfile'])
self.assertEquals([], get_extra_values(c))
def test_parse_error(self):
'''Should raise an error when the config file is bad'''
cf = self.config('[test')
self.assertRaisesError(cronwatch.Error,
'could not read %s: Invalid line at line "1".' % cf.name,
cronwatch.read_config, cf.name)
def test_extra_settings(self):
'''Should fail if there are extra configuration settings'''
cf = self.config('''[test]
a=1
b=2''')
self.assertRaisesError(cronwatch.Error,
'unknown setting in configuration: a',
cronwatch.read_config, cf.name)
def test_validation_error(self):
'''Should raise an Exception with a helpful error message'''
cf = self.config('[test]\nrequired = (')
self.assertRaisesError(cronwatch.Error,
'configuration error for test.required: ' +
'invalid regular expression: (: unbalanced parenthesis',
cronwatch.read_config, cf.name)
def test_regexes(self):
'''Should verify and normalize the regular expresions'''
for r in ['required', 'whitelist', 'blacklist']:
cf = self.config('[test]\n%s = val' % r)
c = cronwatch.read_config(cf.name)
self.assertTrue(c['test'][r][0].match)
cf = self.config('[test]\n%s = (')
self.assertRaises(cronwatch.Error, cronwatch.read_config, cf.name)
def test_exit_codes(self):
'''Should verify and normalize the exit codes'''
cf = self.config('[test]\nexit_codes = 1')
c = cronwatch.read_config(cf.name)
self.assertEquals([1], c['test']['exit_codes'])
cf = self.config('[test]\nexit_codes = 1, 2')
c = cronwatch.read_config(cf.name)
self.assertEquals([1, 2], c['test']['exit_codes'])
cf = self.config('[test]\nexit_codes = a')
self.assertRaises(cronwatch.Error, cronwatch.read_config, cf.name)
def test_preamble_file(self):
'''Should verify the preamble_file'''
cf = self.config('[test1]\npreamble_file = test_file.txt')
c = cronwatch.read_config(cf.name)
self.assertEquals('test_file.txt', c['test1']['preamble_file'])
cf = self.config('[test1]\npreamble_file = not_a_file.txt')
self.assertRaises(cronwatch.Error, cronwatch.read_config, cf.name)
def test_emails(self):
'''Should verify and normalize the email addresses'''
cf = self.config('[test]\nemail_to = default\nemail_from = [email protected]')
c = cronwatch.read_config(cf.name)
self.assertEquals('default', c['test']['email_to'])
self.assertEquals('[email protected]', c['test']['email_from'])
cf = self.config('[test]\nemail_to = me,too')
self.assertRaises(cronwatch.Error, cronwatch.read_config, cf.name)
def test_email_maxsize(self):
'''Should verify and normalize the email maximum size'''
cf = self.config('[test]\nemail_maxsize = -1')
c = cronwatch.read_config(cf.name)
self.assertEquals(-1, c['test']['email_maxsize'])
cf = self.config('[test]\nemail_maxsize = -2')
self.assertRaises(cronwatch.Error, cronwatch.read_config, cf.name)
def test_email_success(self):
'''Should verify and normalize the email_sucess parameter'''
cf = self.config('[test1]\nemail_success = on\n' +
'[test2]\nemail_success = off')
c = cronwatch.read_config(cf.name)
self.assertEquals(True, c['test1']['email_success'])
self.assertEquals(False, c['test2']['email_success'])
cf = self.config('[test]\nemail_success = 1, 2')
self.assertRaises(cronwatch.Error, cronwatch.read_config, cf.name)
def test_paths(self):
'''Should verify the path variables get set'''
cf = self.config('[test]\nemail_sendmail = /l/sendmail -t"s 1"\n' +
'logfile = file%var%')
c = cronwatch.read_config(cf.name)
self.assertEquals('/l/sendmail -t"s 1"', c['test']['email_sendmail'])
self.assertEquals('file%var%', c['test']['logfile'])
def test_default_configfile(self):
'''Should read the main configuration file if it exists'''
cf = self.config('[test]\nexit_codes = 1')
cronwatch.CONFIGFILE = cf.name
c = cronwatch.read_config()
self.assertEquals([1], c['test']['exit_codes'])
def test_configfile_command_line(self):
'''Should read an alternate config file'''
        cf = self.config('[test]\nexit_codes = 2')
        cronwatch.CONFIGFILE = cf.name
        cf2 = self.config('[test]\nexit_codes = 1')
c = cronwatch.read_config(config_file = cf2.name)
self.assertEquals([1], c['test']['exit_codes'])
def test_require_configfile(self):
'''Should raise an exception if the config file doesn't exist'''
self.assertRaisesError(cronwatch.Error,
'Config file not found: "this_is_not_a_file.forsure".',
cronwatch.read_config, 'this_is_not_a_file.forsure')
class TestCallSendmail(TestBase):
'''Test the call_sendmail() function'''
def setUp(self):
self.tempdir = mkdtemp()
self.register_cleanup(self.tempdir)
def test_simple(self):
'''Should run sendmail and pass the file in as input'''
out = os.path.join(self.tempdir, 'sendmailoutput')
cronwatch.call_sendmail(['./test_script.sh', 'sendmail', out], 'output')
o = open(out).read()
self.assertEquals('output', o)
def test_sendmail_error_running(self):
'''Should raise an exception when sendmail can't be run'''
self.assertRaisesError(cronwatch.Error,
'could not run sendmail: ./this_is_not_a_script.forsure: ' +
'[Errno 2] No such file or directory',
cronwatch.call_sendmail, ['./this_is_not_a_script.forsure'],
'output')
def test_sendmail_exitcode(self):
'''Should raise an exception if there's a non-standard exit code'''
self.assertRaisesError(cronwatch.Error,
'sendmail returned exit code 10: ' +
'stdout\nstderr\nstdout again\n',
cronwatch.call_sendmail, ['./test_script.sh', 'simple'],
'outputtmp')
class TestSendMail(TestBase):
def call_sendmail(self, *args):
self.args = args
def setUp(self):
self.args = None
self.old_call_sendmail = cronwatch.call_sendmail
cronwatch.call_sendmail = self.call_sendmail
def tearDown(self):
cronwatch.call_sendmail = self.old_call_sendmail
def test_call_sendmail(self):
'''Should parse the sendmail command line and pass it to
call_sendmail'''
cronwatch.send_mail('''/usr/bin/sendmail 'this is a "test"' "*"''',
'subject', 'text', 'to')
self.assertEquals(self.args[0], ['/usr/bin/sendmail',
'this is a "test"', '*', 'to'])
def test_formatted_mail(self):
'''Should prepare an e-mail message'''
cronwatch.send_mail('sendmail', 'my subject',
'e-mail body\nmore text', '[email protected]',
'[email protected]')
lines = self.args[1].split('\n')
self.assertEquals('Content-Type: text/plain; charset="us-ascii"',
lines[0])
self.assertEquals('To: [email protected]', lines[3])
self.assertEquals('From: [email protected]', lines[4])
self.assertEquals('Subject: my subject', lines[5])
self.assertEquals('', lines[6])
self.assertEquals('e-mail body', lines[7])
self.assertEquals('more text', lines[8])
def test_auto_from(self):
'''Should auto generate the from address'''
cronwatch.send_mail('sendmail', 'subject', 'text', 'to')
lines = self.args[1].split('\n')
self.assertEquals('From: %s' % get_user_hostname(), lines[4])
def test_auto_to(self):
'''Should auto generate the to address'''
cronwatch.send_mail('sendmail', 'subject', 'text')
lines = self.args[1].split('\n')
self.assertEquals('To: %s' % getuser(), lines[3])
def test_html(self):
'''Should create a html part'''
cronwatch.send_mail('sendmail', 'subject', 'text', html = 'html')
lines = self.args[1].split('\n')
while lines[0].find('Content-Type') == -1: lines.pop(0)
self.assertEquals('Content-Type: multipart/alternative',
lines[0].split(';')[0])
lines.pop(0)
while lines[0].find('Content-Type') == -1: lines.pop(0)
self.assertEquals('Content-Type: text/plain; charset="us-ascii"',
lines[0])
lines.pop(0)
while lines[0].find('Content-Type') == -1: lines.pop(0)
self.assertEquals('Content-Type: text/html; charset="us-ascii"',
lines[0])
class TestGetNow(TestBase):
def test_get_now(self):
'''Should return a formatted string for right now'''
# I'm not sure this is always going to work
self.assertEquals(datetime.now().strftime('%c'), cronwatch.get_now())
class TestWatch(TestBase):
'''Test the watch() function'''
def setUp(self):
self.time = 0
self.old_config = cronwatch.CONFIGFILE
cronwatch.CONFIGFILE = 'this_is_not_a_file.forsure'
self.old_send_mail = cronwatch.send_mail
cronwatch.send_mail = self.send_mail
self.old_get_now = cronwatch.get_now
cronwatch.get_now = self.get_now
def tearDown(self):
cronwatch.CONFIGFILE = self.old_config
cronwatch.send_mail = self.old_send_mail
cronwatch.get_now = self.old_get_now
def send_mail(self, sendmail, subject, text, to_addr = None,
from_addr = None, html = None):
self.send = True
self.send_sendmail = sendmail
self.send_to = to_addr
self.send_subject = subject
self.send_text = text.split('\n')
self.send_text_raw = text
self.send_from = from_addr
def get_now(self):
t = self.time
self.time += 1
return 'time%i' % t
def watch(self, conf, cmd, *args, **kwargs):
self.send = False
cf = NamedTemporaryFile()
cf.write('[job]\n%s' % conf)
cf.seek(0)
tf = NamedTemporaryFile()
self.cmd_line = ['./test_script.sh', cmd, tf.name] + list(args)
tag = 'job'
if kwargs.has_key('tag'): tag = kwargs['tag']
force = False
if kwargs.has_key('force_blacklist'): force = kwargs['force_blacklist']
cronwatch.watch(self.cmd_line, config = cf.name, tag = tag,
force_blacklist = force)
self.cmd_line = ' '.join(self.cmd_line)
return tf.read()
def test_no_output(self):
'''Should run the executable with arguments and just quit'''
o = self.watch('', 'quiet', 'arg')
self.assertEquals('quiet arg\n', o)
self.assertFalse(self.send)
def test_email_success(self):
'''Should send an e-mail if the email_success flag is set'''
self.watch('email_success = on', 'quiet', 'arg')
self.assertTrue(self.send)
def test_no_tag(self):
'''Should use the default section if the tag doesn't exist in the
config'''
self.watch('[_default_]\nemail_success = on\n',
'quiet', 'arg', tag = 'doesnotexist')
self.assertTrue(self.send)
def test_auto_tag(self):
'''Should figure out the tag from the script name by default'''
self.watch('[test_script.sh]\nemail_success = on\n',
'quiet', 'arg', tag = None)
self.assertTrue(self.send)
def test_email_subject(self):
'''Should set the e-mail subject'''
self.watch('email_success = on', 'quiet', 'arg')
self.assertEquals('cronwatch <%s> %s' %
(get_user_hostname(), self.cmd_line),
self.send_subject)
def test_email_to(self):
'''Should set the e-mail to address'''
self.watch('email_success = on', 'quiet', 'arg')
self.assertEquals(None, self.send_to)
self.watch('email_success = on\nemail_to = testuser', 'quiet', 'arg')
self.assertEquals('testuser', self.send_to)
def test_email_from(self):
'''Should set the e-mail from address'''
self.watch('email_success = on', 'quiet', 'arg')
self.assertEquals(None, self.send_from)
self.watch('email_success = on\nemail_from = testuser', 'quiet', 'arg')
self.assertEquals('testuser', self.send_from)
def test_email_sendmail(self):
'''Should set the sendmail path'''
self.watch('email_success = on', 'quiet', 'arg')
self.assertEquals('/usr/lib/sendmail', self.send_sendmail)
self.watch('email_success = on\nemail_sendmail = sm', 'quiet', 'arg')
self.assertEquals('sm', self.send_sendmail)
def test_email_body(self):
'''Should format the body correctly'''
self.watch('email_success = on', 'quiet', 'arg')
self.assertEquals('The following command line executed successfully:',
self.send_text[0])
self.assertEquals(self.cmd_line, self.send_text[1])
self.assertEquals('', self.send_text[2])
self.assertEquals('Started execution at: time0', self.send_text[3])
self.assertEquals('Finished execution at: time1', self.send_text[4])
self.assertEquals('Exit code: 0', self.send_text[5])
self.assertEquals('', self.send_text[6])
self.assertEquals('Output:', self.send_text[7])
self.assertEquals(' No output', self.send_text[8])
def test_email_output(self):
'''Should append the output to the end of the file'''
self.watch('email_success = on', 'out', 'a', 'b')
self.assertEquals('Output:', self.send_text[7])
self.assertEquals(' a', self.send_text[8])
self.assertEquals(' b', self.send_text[9])
self.assertEquals('[EOF]', self.send_text[10])
def test_preamble_text(self):
'''Should add the pramble to the output'''
self.watch('email_success = on\npreamble_file = test_file.txt', 'out',
'a', 'b')
self.assertEquals('', self.send_text[6])
self.assertEquals('This is sample text.', self.send_text[7])
self.assertEquals('', self.send_text[8])
self.assertEquals('Output:', self.send_text[9])
def test_email_maxsize(self):
'''Should truncate the e-mail output if it's too big'''
self.watch('email_success = on\nemail_maxsize = -1', 'out', 'a' * 4097)
self.assertEquals(' ' + 'a' * 4097, self.send_text[8])
self.assertEquals('[EOF]', self.send_text[9])
self.watch('email_success = on\nemail_maxsize = -1', 'out', 'line1')
size = len(self.send_text_raw) - len('[EOF]')
self.watch('email_success = on\nemail_maxsize = %i' % size,
'out', 'line1')
self.assertEquals(' line1', self.send_text[8])
self.assertEquals('[EOF]', self.send_text[9])
self.watch('email_success = on\nemail_maxsize = %i' % (size - 1),
'out', 'line1')
self.assertEquals(' line1', self.send_text[8])
self.assertEquals('[Output truncated]', self.send_text[9])
def test_email_error(self):
'''Should change the status line if there were errors in execution'''
self.watch('exit_codes = 1, 2', 'exit', '3')
self.assertEquals('The following command line executed with errors:',
self.send_text[0])
def test_exit_codes(self):
'''Should send a mail if the exit code doesn't match'''
self.watch('exit_codes = 1, 2', 'exit', '1')
self.assertFalse(self.send)
self.watch('exit_codes = 1, 2', 'exit', '2')
self.assertFalse(self.send)
self.watch('exit_codes = 1, 2', 'exit', '3')
self.assertEquals(' * Exit code (3) is not a valid exit code',
self.send_text[8])
def test_required(self):
'''Should search for required output'''
self.watch('required = req, line', 'out', 'line1', 'req', 'line3')
self.assertFalse(self.send)
self.watch('required = req, more', 'out', 'line1', 'line2', 'line3')
self.assertEquals(' * Required output missing (more)',
self.send_text[8])
self.assertEquals(' * Required output missing (req)',
self.send_text[9])
def test_whitelist(self):
'''Should cause an error if there is non-whitelist output'''
self.watch('whitelist = white, bright',
'out', 'whitelight', 'brightlight', 'whitebright')
self.assertFalse(self.send)
self.watch('whitelist = white, bright',
'out', 'whitelight', 'black', 'whitebright')
self.assertEquals(' * Output not matched by whitelist ' +
'(denoted by "*" in output)', self.send_text[8])
self.assertEquals(' whitelight', self.send_text[12])
self.assertEquals('* black', self.send_text[13])
self.assertEquals(' whitebright', self.send_text[14])
self.assertEquals('[EOF]', self.send_text[15])
def test_blacklist(self):
'''Should cause an error if there is blacklist output'''
self.watch('blacklist = black, dark', 'out', 'line1', 'line2', 'line3')
self.assertFalse(self.send)
self.watch('blacklist = black, dark', 'out', 'black', 'dark', 'line3')
self.assertEquals(' * Output matched by blacklist (black) ' +
'(denoted by "!" in output)',
self.send_text[8])
self.assertEquals(' * Output matched by blacklist (dark) ' +
'(denoted by "!" in output)',
self.send_text[9])
self.assertEquals('! black', self.send_text[13])
self.assertEquals('! dark', self.send_text[14])
self.assertEquals(' line3', self.send_text[15])
def test_default_blacklist(self):
'''Should create a blacklist if none of the regex options are
specified'''
self.watch('', 'out', 'line1', 'line2', force_blacklist = True)
self.assertEquals(' * Output matched by blacklist (.*) ' +
'(denoted by "!" in output)',
self.send_text[8])
self.assertEquals('! line1', self.send_text[12])
self.assertEquals('! line2', self.send_text[13])
def test_logfile(self):
'''Should open and append to a log file'''
logfile = NamedTemporaryFile()
logfile.write('line1\n')
logfile.seek(0)
self.watch(('logfile = %s\nemail_maxsize = 1\n' +
'preamble_file = test_file.txt') % logfile.name,
'out', 'line1', 'line2')
o = logfile.read().split('\n')
self.assertEquals('line1', o[0])
self.assertEquals('The following command line executed successfully:',
o[1])
self.assertEquals(self.cmd_line, o[2])
self.assertEquals('', o[3])
self.assertEquals('Started execution at: time0', o[4])
self.assertEquals('Finished execution at: time1', o[5])
self.assertEquals('Exit code: 0', o[6])
self.assertEquals('', o[7])
self.assertEquals('This is sample text.', o[8])
self.assertEquals('', o[9])
self.assertEquals('Output:', o[10])
self.assertEquals(' line1', o[11])
self.assertEquals(' line2', o[12])
self.assertEquals('[EOF]', o[13])
self.assertEquals('', o[14])
self.assertEquals('', o[15])
def test_logfile_empty_output(self):
'''Should open and write to a log file even if there is no output'''
logfile = NamedTemporaryFile()
self.watch('logfile = %s\nemail_maxsize = 1' % logfile.name, 'out')
o = logfile.read().split('\n')
self.assertEquals('The following command line executed successfully:',
o[0])
self.assertEquals(self.cmd_line, o[1])
self.assertEquals('', o[2])
self.assertEquals('Started execution at: time0', o[3])
self.assertEquals('Finished execution at: time1', o[4])
self.assertEquals('Exit code: 0', o[5])
self.assertEquals('', o[6])
self.assertEquals('Output:', o[7])
self.assertEquals(' No output', o[8])
self.assertEquals('', o[9])
self.assertEquals('', o[10])
def test_logfile_name(self):
'''Should format the name of the logfile correctly'''
d = mkdtemp()
self.register_cleanup(d)
logfile = os.path.join(d, 'job_%Y')
self.watch('logfile = %s\nemail_maxsize = 1' % logfile,
'out', 'line1', 'line2')
logfile = os.path.join(d, datetime.now().strftime('job_%Y'))
o = open(logfile).read().split('\n')
self.assertEquals(' line1', o[8])
if __name__ == '__main__':
unittest.main()
| wdlowry/cronwatch | test_cronwatch.py | Python | gpl-3.0 | 29,395 |
class Manipulator(object):
    KINDS = ()
    SEPARATOR = "$"
def __call__(self, txt, **kwargs):
data = {}
        parts = txt.split(self.SEPARATOR)
        for kind, part in zip(self.KINDS, parts):
            data[kind] = part
        for info, new_value in kwargs.items():
            kind, position = info.rsplit("_", 1)
            part = data[kind]
            if position == "start":
                part = new_value + part[1:]
            elif position == "mid":
                mid = int(len(part) / 2)
                part = part[:mid] + new_value + part[mid + 1:]
            elif position == "end":
                part = part[:-1] + new_value
            else:
                raise AssertionError
            data[kind] = part
return "$".join([data[kind] for kind in self.KINDS])
class CryptManipulator(Manipulator):
KINDS = ("algorithm", "iterations", "salt", "hash", "data")
xor_crypt_manipulator = CryptManipulator()
class SecurePassManipulator(Manipulator):
KINDS = ("pbkdf2_hash", "second_pbkdf2_part", "cnonce")
secure_pass_manipulator = SecurePassManipulator()
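# Usage sketch (illustrative only; the input string below is made up): a
# CryptManipulator splits a "$"-separated value into its named parts and
# patches the start/middle/end of any part passed as a keyword argument.
if __name__ == "__main__":
    txt = "sha1$1000$salt$hash$data"
    print(xor_crypt_manipulator(txt, salt_start="X", hash_end="Y"))
    # -> sha1$1000$Xalt$hasY$data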
| jedie/django-secure-js-login | tests/test_utils/manipulators.py | Python | gpl-3.0 | 1,102 |
# telemetry.py listens for psas packets and sends them to the front-end.
# Copyright (c) 2017 Jeff Patterson, Amanda Murphy, Paolo Villanueva,
# Patrick Overton, Connor Picken, Yun Cong Chen, Seth Amundsen, Michael
# Ohl, Matthew Tighe
# ALL RIGHTS RESERVED
# [This program is licensed under the "GNU General Public License"]
# Please see the file COPYING in the source distribution of this
# software for license terms.
from psas_packet import io
from psas_packet import messages
from Queue import Empty, Queue
import socket
from threading import Event, Thread
import logging
import traceback
import sys
class Telemetry:
"""Listens for psas packet data via listen() and emits them via sender()
"""
def __init__(self, address, port, sio, lock=None, log=None):
"""Initializes data members of an instance of the Telemetry class"""
self.address = address
self.event = Event()
self.port = port
# From the Docs: (https://docs.python.org/2/library/queue.html)
# "It is especially useful in threaded programming when information
# must be exchanged safely between multiple threads. The Queue class
# in this module implements all the required locking semantics."
self.queue = Queue()
self.queue_log = Queue()
self.sio = sio
# Instantiates the sender thread.
self.thread = Thread(target=self.sender)
self.thread.daemon = True
# Creates a data logging object. This object keeps track of the
# data that the server processes
self.loggerThread = Thread(target=self.log)
self.loggerThread.daemon = True
self.lock = lock
self.log = log
        # error_log is a logging object that logs any error messages thrown in
        # telemetry.py to telemetry_error.log
self.error_log = logging.getLogger('telemetry')
fh = logging.FileHandler('telemetry_error.log', mode='w')
self.error_log.addHandler(fh)
def listen(self):
"""Listens for incoming psas packets
network.listen() returns a tuple of timestamp and data
adds the tuple to the queue
"""
print("The telemetry server is running.")
# Starts the sender thread.
self.thread.start()
# Starts logger thread if lock and log exists
if self.lock and self.log:
self.loggerThread.start()
# Use PSAS Packet to listen for incoming telemetry data
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind((self.address, self.port))
network = io.Network(sock)
while True:
try:
                # listen for incoming data and split it up into
                # usable chunks: fourcc and values
collection = []
for timestamp, data in network.listen():
fourcc, values = data
# Skips packet with unrecognized fourcc
                    if fourcc not in messages.MESSAGES:
continue
collection.append((fourcc, values, timestamp))
# Enqueues collection of packets without blocking.
if len(collection) > 0:
self.queue.put_nowait(collection)
except KeyboardInterrupt:
sock.close()
# Sets the shared event (breaks the sender thread's loop).
self.event.set()
# Waits until the sender thread terminates.
self.thread.join()
return
def sender(self):
"""Emits a socketio event for each message that listen receives"""
        while not self.event.is_set():
try:
# if there is something in the Queue organize it into an
# event
if not self.queue.empty():
send_data = {}
collection = self.queue.get_nowait()
for fourcc, values, timestamp in collection:
values["recv"] = timestamp
send_data[fourcc] = values
# Emit the generated event
self.sio.emit("telemetry", send_data, namespace="/main")
self.queue_log.put_nowait(collection)
except KeyError:
self.error_log.error(traceback.format_exc())
traceback.print_exc(file=sys.stdout)
except ValueError:
self.error_log.error(traceback.format_exc())
traceback.print_exc(file=sys.stdout)
except KeyboardInterrupt:
return None
return
def log(self):
while True:
try:
if not self.queue_log.empty():
collection = self.queue_log.get_nowait()
# Iterates through collection of packets
for fourcc, values, timestamp in collection:
# Obtain MESSAGES information for this fourcc
fourcc_message = messages.MESSAGES[fourcc]
# Encode values
encoded_values = fourcc_message.encode(values)
# Synchronously log binary data to the file
self.lock.acquire()
self.log.write(messages.HEADER.encode(fourcc_message, int(timestamp)))
self.log.write(encoded_values)
self.lock.release()
except KeyboardInterrupt:
return None
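# Usage sketch (not part of the original module; assumes the python-socketio
# package and placeholder address/port values):
if __name__ == '__main__':
    import socketio
    sio = socketio.Server()
    telemetry = Telemetry('127.0.0.1', 35001, sio)
    telemetry.listen()  # blocks until KeyboardInterrupt shuts the threads down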
| patter5/rv3k | back-end/telemetry.py | Python | gpl-2.0 | 5,743 |
class IncorrectEmailException(Exception):
def __init__(self):
message = """Incorrect email address encountered, expected
something like [email protected]"""
super(IncorrectEmailException, self).__init__(message)
| dhruvagarwal/termailer | exceptions.py | Python | mit | 253 |
from django.apps import AppConfig
class $fileinputname$Config(AppConfig):
name = '$fileinputname$'
| DinoV/PTVS | Python/Templates/Django/ItemTemplates/Python/DjangoNewAppFiles19/apps.py | Python | apache-2.0 | 105 |
#!/usr/bin/env python
from matplotlib import pyplot, rc
import itertools
def figureItOut(x_lists, y_lists, axis_bounds):
colors = 'kbrg'
shapes = ['', '*', 'o', 's']
lines = [':', '-', '-', '-']
labels = ['Random Actions',
'Learning with 360$^\circ$ Sensor FoV',
'Learning with 270$^\circ$ Sensor FoV',
'Learning with 900$^\circ$ Sensor FoV']
for x, y, color, shape, line, label in itertools.izip(x_lists, y_lists, colors, shapes, lines, labels):
pyplot.plot(x, y,color+line+shape, label=label, markersize=8)
pyplot.axis(axis_bounds)
pyplot.xlabel('Number of Rovers')
pyplot.ylabel('System Reward Per Rover after Learning')
pyplot.title('Effect of Sensor Field-of-View on Learned Performance')
pyplot.xticks([1, 2, 3], [3, 10, 100]) # make the evenly-spaced ticks refer to unevenly-spaced values
pyplot.legend(loc='upper right')
pyplot.show()
if __name__ == "__main__":
font = {'family' : 'normal',
'weight' : 'bold',
'size' : 16}
rc('font', **font)
x_lists = [[1, 2, 3]]*4
x_lists[0] = [0, 4]
y_lists = [[26, 26], # the baseline; gets treated differently
[180, 150, 142],
[131, 132, 131],
[99, 125, 112]]
axis_bounds = [0.5, 3.5, 0, 300]
figureItOut(x_lists, y_lists, axis_bounds)
| MatthewRueben/multiple-explorers | scripts/test/figure_template.py | Python | mit | 1,391 |
# vim:fileencoding=utf-8:noet
from __future__ import division
from powerline.lib import mergedicts, add_divider_highlight_group, REMOVE_THIS_KEY
from powerline.lib.humanize_bytes import humanize_bytes
from powerline.lib.vcs import guess, get_fallback_create_watcher
from powerline.lib.threaded import ThreadedSegment, KwThreadedSegment
from powerline.lib.monotonic import monotonic
from powerline.lib.file_watcher import create_file_watcher, INotifyError
from powerline.lib.vcs.git import git_directory
from powerline import get_fallback_logger
import threading
import os
import sys
import re
import platform
from time import sleep
from subprocess import call, PIPE
from functools import partial
from tests import TestCase, SkipTest
from tests.lib import Pl
def thread_number():
return len(threading.enumerate())
class TestThreaded(TestCase):
def test_threaded_segment(self):
log = []
pl = Pl()
updates = [(None,)]
lock = threading.Lock()
event = threading.Event()
block_event = threading.Event()
class TestSegment(ThreadedSegment):
interval = 10
def set_state(self, **kwargs):
event.clear()
log.append(('set_state', kwargs))
return super(TestSegment, self).set_state(**kwargs)
def update(self, update_value):
block_event.wait()
event.set()
# Make sleep first to prevent some race conditions
log.append(('update', update_value))
with lock:
ret = updates[0]
if isinstance(ret, Exception):
raise ret
else:
return ret[0]
def render(self, update, **kwargs):
log.append(('render', update, kwargs))
if isinstance(update, Exception):
raise update
else:
return update
# Non-threaded tests
segment = TestSegment()
block_event.set()
updates[0] = (None,)
self.assertEqual(segment(pl=pl), None)
self.assertEqual(thread_number(), 1)
self.assertEqual(log, [
('set_state', {}),
('update', None),
('render', None, {'pl': pl, 'update_first': True}),
])
log[:] = ()
segment = TestSegment()
block_event.set()
updates[0] = ('abc',)
self.assertEqual(segment(pl=pl), 'abc')
self.assertEqual(thread_number(), 1)
self.assertEqual(log, [
('set_state', {}),
('update', None),
('render', 'abc', {'pl': pl, 'update_first': True}),
])
log[:] = ()
segment = TestSegment()
block_event.set()
updates[0] = ('abc',)
self.assertEqual(segment(pl=pl, update_first=False), 'abc')
self.assertEqual(thread_number(), 1)
self.assertEqual(log, [
('set_state', {}),
('update', None),
('render', 'abc', {'pl': pl, 'update_first': False}),
])
log[:] = ()
segment = TestSegment()
block_event.set()
updates[0] = ValueError('abc')
self.assertEqual(segment(pl=pl), None)
self.assertEqual(thread_number(), 1)
self.assertEqual(len(pl.exceptions), 1)
self.assertEqual(log, [
('set_state', {}),
('update', None),
])
log[:] = ()
pl.exceptions[:] = ()
segment = TestSegment()
block_event.set()
updates[0] = (TypeError('def'),)
self.assertRaises(TypeError, segment, pl=pl)
self.assertEqual(thread_number(), 1)
self.assertEqual(log, [
('set_state', {}),
('update', None),
('render', updates[0][0], {'pl': pl, 'update_first': True}),
])
log[:] = ()
# Threaded tests
segment = TestSegment()
block_event.clear()
kwargs = {'pl': pl, 'update_first': False, 'other': 1}
with lock:
updates[0] = ('abc',)
segment.startup(**kwargs)
ret = segment(**kwargs)
self.assertEqual(thread_number(), 2)
block_event.set()
event.wait()
segment.shutdown_event.set()
segment.thread.join()
self.assertEqual(ret, None)
self.assertEqual(log, [
('set_state', {'update_first': False, 'other': 1}),
('render', None, {'pl': pl, 'update_first': False, 'other': 1}),
('update', None),
])
log[:] = ()
segment = TestSegment()
block_event.set()
kwargs = {'pl': pl, 'update_first': True, 'other': 1}
with lock:
updates[0] = ('def',)
segment.startup(**kwargs)
ret = segment(**kwargs)
self.assertEqual(thread_number(), 2)
segment.shutdown_event.set()
segment.thread.join()
self.assertEqual(ret, 'def')
self.assertEqual(log, [
('set_state', {'update_first': True, 'other': 1}),
('update', None),
('render', 'def', {'pl': pl, 'update_first': True, 'other': 1}),
])
log[:] = ()
segment = TestSegment()
block_event.set()
kwargs = {'pl': pl, 'update_first': True, 'interval': 0.2}
with lock:
updates[0] = ('abc',)
segment.startup(**kwargs)
start = monotonic()
ret1 = segment(**kwargs)
with lock:
updates[0] = ('def',)
self.assertEqual(thread_number(), 2)
sleep(0.5)
ret2 = segment(**kwargs)
segment.shutdown_event.set()
segment.thread.join()
end = monotonic()
duration = end - start
self.assertEqual(ret1, 'abc')
self.assertEqual(ret2, 'def')
self.assertEqual(log[:5], [
('set_state', {'update_first': True, 'interval': 0.2}),
('update', None),
('render', 'abc', {'pl': pl, 'update_first': True, 'interval': 0.2}),
('update', 'abc'),
('update', 'def'),
])
num_runs = len([e for e in log if e[0] == 'update'])
self.assertAlmostEqual(duration / 0.2, num_runs, delta=1)
log[:] = ()
segment = TestSegment()
block_event.set()
kwargs = {'pl': pl, 'update_first': True, 'interval': 0.2}
with lock:
updates[0] = ('ghi',)
segment.startup(**kwargs)
start = monotonic()
ret1 = segment(**kwargs)
with lock:
updates[0] = TypeError('jkl')
self.assertEqual(thread_number(), 2)
sleep(0.5)
ret2 = segment(**kwargs)
segment.shutdown_event.set()
segment.thread.join()
end = monotonic()
duration = end - start
self.assertEqual(ret1, 'ghi')
self.assertEqual(ret2, None)
self.assertEqual(log[:5], [
('set_state', {'update_first': True, 'interval': 0.2}),
('update', None),
('render', 'ghi', {'pl': pl, 'update_first': True, 'interval': 0.2}),
('update', 'ghi'),
('update', 'ghi'),
])
num_runs = len([e for e in log if e[0] == 'update'])
self.assertAlmostEqual(duration / 0.2, num_runs, delta=1)
self.assertEqual(num_runs - 1, len(pl.exceptions))
log[:] = ()
def test_kw_threaded_segment(self):
log = []
pl = Pl()
event = threading.Event()
class TestSegment(KwThreadedSegment):
interval = 10
@staticmethod
def key(_key=(None,), **kwargs):
log.append(('key', _key, kwargs))
return _key
def compute_state(self, key):
event.set()
sleep(0.1)
log.append(('compute_state', key))
ret = key
if isinstance(ret, Exception):
raise ret
else:
return ret[0]
def render_one(self, state, **kwargs):
log.append(('render_one', state, kwargs))
if isinstance(state, Exception):
raise state
else:
return state
# Non-threaded tests
segment = TestSegment()
event.clear()
self.assertEqual(segment(pl=pl), None)
self.assertEqual(thread_number(), 1)
self.assertEqual(log, [
('key', (None,), {'pl': pl}),
('compute_state', (None,)),
('render_one', None, {'pl': pl}),
])
log[:] = ()
segment = TestSegment()
kwargs = {'pl': pl, '_key': ('abc',), 'update_first': False}
event.clear()
self.assertEqual(segment(**kwargs), 'abc')
kwargs.update(_key=('def',))
self.assertEqual(segment(**kwargs), 'def')
self.assertEqual(thread_number(), 1)
self.assertEqual(log, [
('key', ('abc',), {'pl': pl}),
('compute_state', ('abc',)),
('render_one', 'abc', {'pl': pl, '_key': ('abc',)}),
('key', ('def',), {'pl': pl}),
('compute_state', ('def',)),
('render_one', 'def', {'pl': pl, '_key': ('def',)}),
])
log[:] = ()
segment = TestSegment()
kwargs = {'pl': pl, '_key': ValueError('xyz'), 'update_first': False}
event.clear()
self.assertEqual(segment(**kwargs), None)
self.assertEqual(thread_number(), 1)
self.assertEqual(log, [
('key', kwargs['_key'], {'pl': pl}),
('compute_state', kwargs['_key']),
])
log[:] = ()
segment = TestSegment()
kwargs = {'pl': pl, '_key': (ValueError('abc'),), 'update_first': False}
event.clear()
self.assertRaises(ValueError, segment, **kwargs)
self.assertEqual(thread_number(), 1)
self.assertEqual(log, [
('key', kwargs['_key'], {'pl': pl}),
('compute_state', kwargs['_key']),
('render_one', kwargs['_key'][0], {'pl': pl, '_key': kwargs['_key']}),
])
log[:] = ()
# Threaded tests
segment = TestSegment()
kwargs = {'pl': pl, 'update_first': False, '_key': ('_abc',)}
event.clear()
segment.startup(**kwargs)
ret = segment(**kwargs)
self.assertEqual(thread_number(), 2)
segment.shutdown_event.set()
segment.thread.join()
self.assertEqual(ret, None)
self.assertEqual(log[:2], [
('key', kwargs['_key'], {'pl': pl}),
('render_one', None, {'pl': pl, '_key': kwargs['_key']}),
])
self.assertLessEqual(len(log), 3)
if len(log) > 2:
self.assertEqual(log[2], ('compute_state', kwargs['_key']))
log[:] = ()
segment = TestSegment()
kwargs = {'pl': pl, 'update_first': True, '_key': ('_abc',)}
event.clear()
segment.startup(**kwargs)
ret1 = segment(**kwargs)
kwargs.update(_key=('_def',))
ret2 = segment(**kwargs)
self.assertEqual(thread_number(), 2)
segment.shutdown_event.set()
segment.thread.join()
self.assertEqual(ret1, '_abc')
self.assertEqual(ret2, '_def')
self.assertEqual(log, [
('key', ('_abc',), {'pl': pl}),
('compute_state', ('_abc',)),
('render_one', '_abc', {'pl': pl, '_key': ('_abc',)}),
('key', ('_def',), {'pl': pl}),
('compute_state', ('_def',)),
('render_one', '_def', {'pl': pl, '_key': ('_def',)}),
])
log[:] = ()
class TestLib(TestCase):
def test_mergedicts(self):
d = {}
mergedicts(d, {'abc': {'def': 'ghi'}})
self.assertEqual(d, {'abc': {'def': 'ghi'}})
mergedicts(d, {'abc': {'def': {'ghi': 'jkl'}}})
self.assertEqual(d, {'abc': {'def': {'ghi': 'jkl'}}})
mergedicts(d, {})
self.assertEqual(d, {'abc': {'def': {'ghi': 'jkl'}}})
mergedicts(d, {'abc': {'mno': 'pqr'}})
self.assertEqual(d, {'abc': {'def': {'ghi': 'jkl'}, 'mno': 'pqr'}})
mergedicts(d, {'abc': {'def': REMOVE_THIS_KEY}})
self.assertEqual(d, {'abc': {'mno': 'pqr'}})
def test_add_divider_highlight_group(self):
def decorated_function_name(**kwargs):
return str(kwargs)
func = add_divider_highlight_group('hl_group')(decorated_function_name)
self.assertEqual(func.__name__, 'decorated_function_name')
self.assertEqual(func(kw={}), [{'contents': repr({'kw': {}}), 'divider_highlight_group': 'hl_group'}])
def test_humanize_bytes(self):
self.assertEqual(humanize_bytes(0), '0 B')
self.assertEqual(humanize_bytes(1), '1 B')
self.assertEqual(humanize_bytes(1, suffix='bit'), '1 bit')
self.assertEqual(humanize_bytes(1000, si_prefix=True), '1 kB')
self.assertEqual(humanize_bytes(1024, si_prefix=True), '1 kB')
self.assertEqual(humanize_bytes(1000000000, si_prefix=True), '1.00 GB')
self.assertEqual(humanize_bytes(1000000000, si_prefix=False), '953.7 MiB')
class TestFilesystemWatchers(TestCase):
def do_test_for_change(self, watcher, path):
st = monotonic()
while monotonic() - st < 1:
if watcher(path):
return
sleep(0.1)
self.fail('The change to {0} was not detected'.format(path))
def test_file_watcher(self):
try:
w = create_file_watcher(pl=get_fallback_logger(), watcher_type='inotify')
except INotifyError:
raise SkipTest('This test is not suitable for a stat based file watcher')
f1, f2, f3 = map(lambda x: os.path.join(INOTIFY_DIR, 'file%d' % x), (1, 2, 3))
with open(f1, 'wb'):
with open(f2, 'wb'):
with open(f3, 'wb'):
pass
ne = os.path.join(INOTIFY_DIR, 'notexists')
self.assertRaises(OSError, w, ne)
self.assertTrue(w(f1))
self.assertTrue(w(f2))
os.utime(f1, None), os.utime(f2, None)
self.do_test_for_change(w, f1)
self.do_test_for_change(w, f2)
# Repeat once
os.utime(f1, None), os.utime(f2, None)
self.do_test_for_change(w, f1)
self.do_test_for_change(w, f2)
# Check that no false changes are reported
self.assertFalse(w(f1), 'Spurious change detected')
self.assertFalse(w(f2), 'Spurious change detected')
# Check that open the file with 'w' triggers a change
with open(f1, 'wb'):
with open(f2, 'wb'):
pass
self.do_test_for_change(w, f1)
self.do_test_for_change(w, f2)
# Check that writing to a file with 'a' triggers a change
with open(f1, 'ab') as f:
f.write(b'1')
self.do_test_for_change(w, f1)
# Check that deleting a file registers as a change
os.unlink(f1)
self.do_test_for_change(w, f1)
# Test that changing the inode of a file does not cause it to stop
# being watched
os.rename(f3, f2)
self.do_test_for_change(w, f2)
self.assertFalse(w(f2), 'Spurious change detected')
os.utime(f2, None)
self.do_test_for_change(w, f2)
def test_tree_watcher(self):
from powerline.lib.tree_watcher import TreeWatcher
tw = TreeWatcher()
subdir = os.path.join(INOTIFY_DIR, 'subdir')
os.mkdir(subdir)
if tw.watch(INOTIFY_DIR).is_dummy:
raise SkipTest('No tree watcher available')
import shutil
self.assertTrue(tw(INOTIFY_DIR))
self.assertFalse(tw(INOTIFY_DIR))
changed = partial(self.do_test_for_change, tw, INOTIFY_DIR)
open(os.path.join(INOTIFY_DIR, 'tree1'), 'w').close()
changed()
open(os.path.join(subdir, 'tree1'), 'w').close()
changed()
os.unlink(os.path.join(subdir, 'tree1'))
changed()
os.rmdir(subdir)
changed()
os.mkdir(subdir)
changed()
os.rename(subdir, subdir + '1')
changed()
shutil.rmtree(subdir + '1')
changed()
os.mkdir(subdir)
f = os.path.join(subdir, 'f')
open(f, 'w').close()
changed()
with open(f, 'a') as s:
s.write(' ')
changed()
os.rename(f, f + '1')
changed()
use_mercurial = use_bzr = (sys.version_info < (3, 0)
and platform.python_implementation() == 'CPython')
class TestVCS(TestCase):
def do_branch_rename_test(self, repo, q):
st = monotonic()
while monotonic() - st < 1:
# Give inotify time to deliver events
ans = repo.branch()
if hasattr(q, '__call__'):
if q(ans):
break
else:
if ans == q:
break
sleep(0.01)
if hasattr(q, '__call__'):
self.assertTrue(q(ans))
else:
self.assertEqual(ans, q)
def test_git(self):
create_watcher = get_fallback_create_watcher()
repo = guess(path=GIT_REPO, create_watcher=create_watcher)
self.assertNotEqual(repo, None)
self.assertEqual(repo.branch(), 'master')
self.assertEqual(repo.status(), None)
self.assertEqual(repo.status('file'), None)
with open(os.path.join(GIT_REPO, 'file'), 'w') as f:
f.write('abc')
f.flush()
self.assertEqual(repo.status(), ' U')
self.assertEqual(repo.status('file'), '??')
call(['git', 'add', '.'], cwd=GIT_REPO)
self.assertEqual(repo.status(), ' I ')
self.assertEqual(repo.status('file'), 'A ')
f.write('def')
f.flush()
self.assertEqual(repo.status(), 'DI ')
self.assertEqual(repo.status('file'), 'AM')
os.remove(os.path.join(GIT_REPO, 'file'))
# Test changing branch
self.assertEqual(repo.branch(), 'master')
try:
call(['git', 'branch', 'branch1'], cwd=GIT_REPO)
call(['git', 'checkout', '-q', 'branch1'], cwd=GIT_REPO)
self.do_branch_rename_test(repo, 'branch1')
call(['git', 'branch', 'branch2'], cwd=GIT_REPO)
call(['git', 'checkout', '-q', 'branch2'], cwd=GIT_REPO)
self.do_branch_rename_test(repo, 'branch2')
call(['git', 'checkout', '-q', '--detach', 'branch1'], cwd=GIT_REPO)
self.do_branch_rename_test(repo, lambda b: re.match(br'^[a-f0-9]+$', b))
finally:
call(['git', 'checkout', '-q', 'master'], cwd=GIT_REPO)
def test_git_sym(self):
create_watcher = get_fallback_create_watcher()
dotgit = os.path.join(GIT_REPO, '.git')
spacegit = os.path.join(GIT_REPO, ' .git ')
os.rename(dotgit, spacegit)
try:
with open(dotgit, 'w') as F:
F.write('gitdir: .git \n')
gitdir = git_directory(GIT_REPO)
self.assertTrue(os.path.isdir(gitdir))
self.assertEqual(gitdir, os.path.abspath(spacegit))
repo = guess(path=GIT_REPO, create_watcher=create_watcher)
self.assertEqual(repo.branch(), 'master')
finally:
os.remove(dotgit)
os.rename(spacegit, dotgit)
if use_mercurial:
def test_mercurial(self):
create_watcher = get_fallback_create_watcher()
repo = guess(path=HG_REPO, create_watcher=create_watcher)
self.assertNotEqual(repo, None)
self.assertEqual(repo.branch(), 'default')
self.assertEqual(repo.status(), None)
with open(os.path.join(HG_REPO, 'file'), 'w') as f:
f.write('abc')
f.flush()
self.assertEqual(repo.status(), ' U')
self.assertEqual(repo.status('file'), 'U')
call(['hg', 'add', '.'], cwd=HG_REPO, stdout=PIPE)
self.assertEqual(repo.status(), 'D ')
self.assertEqual(repo.status('file'), 'A')
os.remove(os.path.join(HG_REPO, 'file'))
if use_bzr:
def test_bzr(self):
create_watcher = get_fallback_create_watcher()
repo = guess(path=BZR_REPO, create_watcher=create_watcher)
self.assertNotEqual(repo, None, 'No bzr repo found. Do you have bzr installed?')
self.assertEqual(repo.branch(), 'test_powerline')
self.assertEqual(repo.status(), None)
with open(os.path.join(BZR_REPO, 'file'), 'w') as f:
f.write('abc')
self.assertEqual(repo.status(), ' U')
self.assertEqual(repo.status('file'), '? ')
call(['bzr', 'add', '-q', '.'], cwd=BZR_REPO, stdout=PIPE)
self.assertEqual(repo.status(), 'D ')
self.assertEqual(repo.status('file'), '+N')
call(['bzr', 'commit', '-q', '-m', 'initial commit'], cwd=BZR_REPO)
self.assertEqual(repo.status(), None)
with open(os.path.join(BZR_REPO, 'file'), 'w') as f:
f.write('def')
self.assertEqual(repo.status(), 'D ')
self.assertEqual(repo.status('file'), ' M')
self.assertEqual(repo.status('notexist'), None)
with open(os.path.join(BZR_REPO, 'ignored'), 'w') as f:
f.write('abc')
self.assertEqual(repo.status('ignored'), '? ')
# Test changing the .bzrignore file should update status
with open(os.path.join(BZR_REPO, '.bzrignore'), 'w') as f:
f.write('ignored')
self.assertEqual(repo.status('ignored'), None)
# Test changing the dirstate file should invalidate the cache for
# all files in the repo
with open(os.path.join(BZR_REPO, 'file2'), 'w') as f:
f.write('abc')
call(['bzr', 'add', 'file2'], cwd=BZR_REPO, stdout=PIPE)
call(['bzr', 'commit', '-q', '-m', 'file2 added'], cwd=BZR_REPO)
with open(os.path.join(BZR_REPO, 'file'), 'a') as f:
f.write('hello')
with open(os.path.join(BZR_REPO, 'file2'), 'a') as f:
f.write('hello')
self.assertEqual(repo.status('file'), ' M')
self.assertEqual(repo.status('file2'), ' M')
call(['bzr', 'commit', '-q', '-m', 'multi'], cwd=BZR_REPO)
self.assertEqual(repo.status('file'), None)
self.assertEqual(repo.status('file2'), None)
# Test changing branch
call(['bzr', 'nick', 'branch1'], cwd=BZR_REPO, stdout=PIPE, stderr=PIPE)
self.do_branch_rename_test(repo, 'branch1')
# Test branch name/status changes when swapping repos
for x in ('b1', 'b2'):
d = os.path.join(BZR_REPO, x)
os.mkdir(d)
call(['bzr', 'init', '-q'], cwd=d)
call(['bzr', 'nick', '-q', x], cwd=d)
repo = guess(path=d, create_watcher=create_watcher)
self.assertEqual(repo.branch(), x)
self.assertFalse(repo.status())
if x == 'b1':
open(os.path.join(d, 'dirty'), 'w').close()
self.assertTrue(repo.status())
os.rename(os.path.join(BZR_REPO, 'b1'), os.path.join(BZR_REPO, 'b'))
os.rename(os.path.join(BZR_REPO, 'b2'), os.path.join(BZR_REPO, 'b1'))
os.rename(os.path.join(BZR_REPO, 'b'), os.path.join(BZR_REPO, 'b2'))
for x, y in (('b1', 'b2'), ('b2', 'b1')):
d = os.path.join(BZR_REPO, x)
repo = guess(path=d, create_watcher=create_watcher)
self.do_branch_rename_test(repo, y)
if x == 'b1':
self.assertFalse(repo.status())
else:
self.assertTrue(repo.status())
old_HGRCPATH = None
old_cwd = None
GIT_REPO = 'git_repo' + os.environ.get('PYTHON', '')
HG_REPO = 'hg_repo' + os.environ.get('PYTHON', '')
BZR_REPO = 'bzr_repo' + os.environ.get('PYTHON', '')
INOTIFY_DIR = 'inotify' + os.environ.get('PYTHON', '')
def setUpModule():
global old_cwd
global old_HGRCPATH
old_cwd = os.getcwd()
os.chdir(os.path.dirname(__file__))
call(['git', 'init', '--quiet', GIT_REPO])
assert os.path.isdir(GIT_REPO)
call(['git', 'config', '--local', 'user.name', 'Foo'], cwd=GIT_REPO)
call(['git', 'config', '--local', 'user.email', '[email protected]'], cwd=GIT_REPO)
call(['git', 'commit', '--allow-empty', '--message', 'Initial commit', '--quiet'], cwd=GIT_REPO)
if use_mercurial:
old_HGRCPATH = os.environ.get('HGRCPATH')
os.environ['HGRCPATH'] = ''
call(['hg', 'init', HG_REPO])
with open(os.path.join(HG_REPO, '.hg', 'hgrc'), 'w') as hgrc:
hgrc.write('[ui]\n')
hgrc.write('username = Foo <[email protected]>\n')
if use_bzr:
call(['bzr', 'init', '--quiet', BZR_REPO])
call(['bzr', 'config', 'email=Foo <[email protected]>'], cwd=BZR_REPO)
call(['bzr', 'config', 'nickname=test_powerline'], cwd=BZR_REPO)
call(['bzr', 'config', 'create_signatures=0'], cwd=BZR_REPO)
os.mkdir(INOTIFY_DIR)
def tearDownModule():
global old_cwd
global old_HGRCPATH
for repo_dir in [INOTIFY_DIR, GIT_REPO] + ([HG_REPO] if use_mercurial else []) + ([BZR_REPO] if use_bzr else []):
for root, dirs, files in list(os.walk(repo_dir, topdown=False)):
for file in files:
os.remove(os.path.join(root, file))
for dir in dirs:
os.rmdir(os.path.join(root, dir))
os.rmdir(repo_dir)
if use_mercurial:
if old_HGRCPATH is None:
os.environ.pop('HGRCPATH')
else:
os.environ['HGRCPATH'] = old_HGRCPATH
os.chdir(old_cwd)
if __name__ == '__main__':
from tests import main
main()
| wezhang/vim-setup | bundle/powerline/tests/test_lib.py | Python | apache-2.0 | 21,628 |
from __future__ import absolute_import
from traits.testing.unittest_tools import UnittestTools, unittest
from ...image_cache import ImageCache
from ...window import Window
from ..action import Action
from ..action_controller import ActionController
from ..action_item import ActionItem
from ..menu_manager import MenuManager
from ..menu_bar_manager import MenuBarManager
from ..tool_bar_manager import ToolBarManager
class FalseActionController(ActionController):
def can_add_to_menu(self, action):
""" Returns True if the action can be added to a menu/menubar. """
return False
def can_add_to_toolbar(self, action):
""" Returns True if the action can be added to a toolbar. """
return False
class TestActionItem(unittest.TestCase, UnittestTools):
def setUp(self):
# test whether function is called by updating list
# XXX should really use mock
self.memo = []
def perform():
self.memo.append('called')
self.action = Action(name='Test', on_perform=perform)
def test_default_id(self):
action_item = ActionItem(action=self.action)
self.assertEqual(action_item.id, 'Test')
def test_enabled_changed(self):
# XXX these are only one-way changes, which seems wrong.
action_item = ActionItem(action=self.action)
with self.assertTraitChanges(self.action, 'enabled', count=1):
action_item.enabled = False
self.assertFalse(self.action.enabled)
with self.assertTraitChanges(self.action, 'enabled', count=1):
action_item.enabled = True
self.assertTrue(self.action.enabled)
def test_visible_changed(self):
# XXX these are only one-way changes, which seems wrong.
action_item = ActionItem(action=self.action)
with self.assertTraitChanges(self.action, 'visible', count=1):
action_item.visible = False
self.assertFalse(self.action.visible)
with self.assertTraitChanges(self.action, 'visible', count=1):
action_item.visible = True
self.assertTrue(self.action.visible)
def test_destroy(self):
action_item = ActionItem(action=self.action)
# XXX test that it calls action.destroy
action_item.destroy()
def test_add_to_menu(self):
window = Window()
window.open()
action_item = ActionItem(action=self.action)
menu_bar_manager = MenuBarManager()
menu_manager = MenuManager(name='Test')
menu_bar = menu_bar_manager.create_menu_bar(window.control)
menu = menu_manager.create_menu(menu_bar)
action_item.add_to_menu(window.control, menu, None)
window.close()
def test_add_to_menu_controller(self):
window = Window()
window.open()
action_item = ActionItem(action=self.action)
menu_bar_manager = MenuBarManager()
menu_manager = MenuManager(name='Test')
menu_bar = menu_bar_manager.create_menu_bar(window.control)
menu = menu_manager.create_menu(menu_bar)
controller = ActionController()
action_item.add_to_menu(window.control, menu, controller)
window.close()
def test_add_to_menu_controller_false(self):
window = Window()
window.open()
action_item = ActionItem(action=self.action)
menu_bar_manager = MenuBarManager()
menu_manager = MenuManager(name='Test')
menu_bar = menu_bar_manager.create_menu_bar(window.control)
menu = menu_manager.create_menu(menu_bar)
controller = FalseActionController()
action_item.add_to_menu(window.control, menu, controller)
window.close()
def test_add_to_toolbar(self):
window = Window()
window.open()
action_item = ActionItem(action=self.action)
toolbar_manager = ToolBarManager(name='Test')
image_cache = ImageCache(height=32, width=32)
menu = toolbar_manager.create_tool_bar(window.control)
action_item.add_to_toolbar(window.control, menu, image_cache, None, True)
window.close()
def test_add_to_toolbar_no_label(self):
window = Window()
window.open()
action_item = ActionItem(action=self.action)
toolbar_manager = ToolBarManager(name='Test')
image_cache = ImageCache(height=32, width=32)
menu = toolbar_manager.create_tool_bar(window.control)
action_item.add_to_toolbar(window.control, menu, image_cache, None, False)
window.close()
def test_add_to_toolbar_controller(self):
window = Window()
window.open()
action_item = ActionItem(action=self.action)
toolbar_manager = ToolBarManager(name='Test')
image_cache = ImageCache(height=32, width=32)
menu = toolbar_manager.create_tool_bar(window.control)
controller = ActionController()
action_item.add_to_toolbar(window.control, menu, image_cache,
controller, True)
window.close()
def test_add_to_toolbar_controller_false(self):
window = Window()
window.open()
action_item = ActionItem(action=self.action)
toolbar_manager = ToolBarManager(name='Test')
image_cache = ImageCache(height=32, width=32)
menu = toolbar_manager.create_tool_bar(window.control)
controller = FalseActionController()
action_item.add_to_toolbar(window.control, menu, image_cache,
controller, True)
window.close()
| brett-patterson/pyface | pyface/action/tests/test_action_item.py | Python | bsd-3-clause | 5,556 |
if __name__ == "__main__":
n = int(input())
    set_A = set(input().split())
m = int(input())
    set_B = set(input().split())
a_minus_b = set_A.difference(set_B)
b_minus_a = set_B.difference(set_A)
diff = a_minus_b.union(b_minus_a)
arr = list(map(int, diff))
arr.sort()
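    # Note: the difference/union dance above is equivalent to the symmetric
    # difference operator, e.g. sorted(map(int, set_A ^ set_B)).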
for item in arr:
        print(item)
| MithileshCParab/HackerRank-10DaysOfStatistics | Python/Sets/symmetric_difference.py | Python | apache-2.0 | 357 |
# coding=utf-8
"""QGIS plugin implementation.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
.. note:: This source code was copied from the 'postgis viewer' application
with original authors:
Copyright (c) 2010 by Ivan Mincik, [email protected]
Copyright (c) 2011 German Carrillo, [email protected]
Copyright (c) 2014 Tim Sutton, [email protected]
"""
__author__ = '[email protected]'
__revision__ = '$Format:%H$'
__date__ = '10/01/2011'
__copyright__ = (
'Copyright (c) 2010 by Ivan Mincik, [email protected] and '
    'Copyright (c) 2011 German Carrillo, [email protected] and '
'Copyright (c) 2014 Tim Sutton, [email protected]'
)
import logging
from typing import List
from PyQt5.QtCore import QObject, pyqtSlot, pyqtSignal, QSize
from qgis.PyQt.QtWidgets import QDockWidget
from qgis.core import QgsProject, QgsMapLayer
from qgis.gui import (QgsMapCanvas,
QgsMessageBar)
LOGGER = logging.getLogger('QGIS')
# noinspection PyMethodMayBeStatic,PyPep8Naming
class QgisInterface(QObject):
"""Class to expose QGIS objects and functions to plugins.
This class is here for enabling us to run unit tests only,
so most methods are simply stubs.
"""
currentLayerChanged = pyqtSignal(QgsMapLayer)
def __init__(self, canvas: QgsMapCanvas):
"""Constructor
:param canvas:
"""
QObject.__init__(self)
self.canvas = canvas
# Set up slots so we can mimic the behaviour of QGIS when layers
# are added.
LOGGER.debug('Initialising canvas...')
# noinspection PyArgumentList
QgsProject.instance().layersAdded.connect(self.addLayers)
# noinspection PyArgumentList
QgsProject.instance().layerWasAdded.connect(self.addLayer)
# noinspection PyArgumentList
QgsProject.instance().removeAll.connect(self.removeAllLayers)
# For processing module
self.destCrs = None
self.message_bar = QgsMessageBar()
def addLayers(self, layers: List[QgsMapLayer]):
"""Handle layers being added to the registry so they show up in canvas.
:param layers: list<QgsMapLayer> list of map layers that were added
.. note:: The QgsInterface api does not include this method,
it is added here as a helper to facilitate testing.
"""
# LOGGER.debug('addLayers called on qgis_interface')
# LOGGER.debug('Number of layers being added: %s' % len(layers))
# LOGGER.debug('Layer Count Before: %s' % len(self.canvas.layers()))
current_layers = self.canvas.layers()
final_layers = []
for layer in current_layers:
final_layers.append(layer)
for layer in layers:
final_layers.append(layer)
self.canvas.setLayers(final_layers)
# LOGGER.debug('Layer Count After: %s' % len(self.canvas.layers()))
def addLayer(self, layer: QgsMapLayer):
"""Handle a layer being added to the registry so it shows up in canvas.
:param layer: list<QgsMapLayer> list of map layers that were added
.. note: The QgsInterface api does not include this method, it is added
here as a helper to facilitate testing.
.. note: The addLayer method was deprecated in QGIS 1.8 so you should
not need this method much.
"""
pass # pylint: disable=unnecessary-pass
@pyqtSlot()
def removeAllLayers(self): # pylint: disable=no-self-use
"""Remove layers from the canvas before they get deleted."""
self.canvas.setLayers([])
def newProject(self): # pylint: disable=no-self-use
"""Create new project."""
# noinspection PyArgumentList
QgsProject.instance().clear()
# ---------------- API Mock for QgsInterface follows -------------------
def zoomFull(self):
"""Zoom to the map full extent."""
pass # pylint: disable=unnecessary-pass
def zoomToPrevious(self):
"""Zoom to previous view extent."""
pass # pylint: disable=unnecessary-pass
def zoomToNext(self):
"""Zoom to next view extent."""
pass # pylint: disable=unnecessary-pass
def zoomToActiveLayer(self):
"""Zoom to extent of active layer."""
pass # pylint: disable=unnecessary-pass
def addVectorLayer(self, path: str, base_name: str, provider_key: str):
"""Add a vector layer.
:param path: Path to layer.
:type path: str
:param base_name: Base name for layer.
:type base_name: str
:param provider_key: Provider key e.g. 'ogr'
:type provider_key: str
"""
pass # pylint: disable=unnecessary-pass
def addRasterLayer(self, path: str, base_name: str):
"""Add a raster layer given a raster layer file name
:param path: Path to layer.
:type path: str
:param base_name: Base name for layer.
:type base_name: str
"""
pass # pylint: disable=unnecessary-pass
def activeLayer(self) -> QgsMapLayer: # pylint: disable=no-self-use
"""Get pointer to the active layer (layer selected in the legend)."""
# noinspection PyArgumentList
layers = QgsProject.instance().mapLayers()
for item in layers:
return layers[item]
def addToolBarIcon(self, action):
"""Add an icon to the plugins toolbar.
:param action: Action to add to the toolbar.
:type action: QAction
"""
pass # pylint: disable=unnecessary-pass
def removeToolBarIcon(self, action):
"""Remove an action (icon) from the plugin toolbar.
:param action: Action to add to the toolbar.
:type action: QAction
"""
pass # pylint: disable=unnecessary-pass
def addToolBar(self, name):
"""Add toolbar with specified name.
:param name: Name for the toolbar.
:type name: str
"""
pass # pylint: disable=unnecessary-pass
def mapCanvas(self) -> QgsMapCanvas:
"""Return a pointer to the map canvas."""
return self.canvas
def mainWindow(self):
"""Return a pointer to the main window.
In case of QGIS it returns an instance of QgisApp.
"""
pass # pylint: disable=unnecessary-pass
def addDockWidget(self, area, dock_widget: QDockWidget):
"""Add a dock widget to the main window.
:param area: Where in the ui the dock should be placed.
:type area:
:param dock_widget: A dock widget to add to the UI.
:type dock_widget: QDockWidget
"""
pass # pylint: disable=unnecessary-pass
def legendInterface(self):
"""Get the legend."""
return self.canvas
    def iconSize(self, dockedToolbar) -> QSize:
"""
Returns the toolbar icon size.
:param dockedToolbar: If True, the icon size
for toolbars contained within docks is returned.
"""
if dockedToolbar:
return QSize(16, 16)
return QSize(24, 24)
def messageBar(self) -> QgsMessageBar:
"""
Return the message bar of the main app
"""
return self.message_bar
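# Usage sketch (illustrative): a test fixture would typically wire this mock up
# along the lines of
#     canvas = QgsMapCanvas()
#     iface = QgisInterface(canvas)
#     iface.addLayers(list_of_map_layers)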
| gltn/stdm | stdm/tests/qgis_interface.py | Python | gpl-2.0 | 7,490 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import wagtail.core.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('articles', '0053_auto_20150903_1738'),
]
operations = [
migrations.AddField(
model_name='seriespage',
name='short_description',
field=wagtail.core.fields.RichTextField(default='', blank=True),
),
]
| OpenCanada/website | articles/migrations/0054_seriespage_short_description.py | Python | mit | 469 |
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.18 (https://github.com/warner/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
"""Get the keywords needed to look up the version information."""
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "$Format:%d$"
git_full = "$Format:%H$"
git_date = "$Format:%ci$"
keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
return keywords
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_config():
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "pep440"
cfg.tag_prefix = ""
cfg.parentdir_prefix = "q2-feature-table-"
cfg.versionfile_source = "q2_feature_table/_version.py"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %s" % dispcmd)
print(e)
return None, None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
return None, None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
print("stdout was %s" % stdout)
return None, p.returncode
return stdout, p.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
the project name and a version string. We will also support searching up
two directory levels for an appropriately named parent directory
"""
rootdirs = []
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %s but none started with prefix %s" %
(str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
print("likely tags: %s" % ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %s" % r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None,
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%s*" % tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
cwd=root)[0].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
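# Example (illustrative): a describe output of "1.0-4-g1234abc-dirty" parses to
# closest-tag "1.0", distance 4, short "1234abc", dirty True.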
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%d.g%s" % (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
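# Example (illustrative): pieces {"closest-tag": "1.2", "distance": 3,
# "short": "abc1234", "dirty": True} renders as "1.2+3.gabc1234.dirty".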
def render_pep440_pre(pieces):
"""TAG[.post.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%d" % pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%d" % pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%s" % pieces["short"]
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%s" % pieces["short"]
return rendered
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
    Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
Like 'git describe --tags --dirty --always -long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"],
"date": None}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%s'" % style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None,
"date": pieces.get("date")}
def get_versions():
"""Get version information or return default if unable to do so."""
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try:
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
verbose)
except NotThisMethod:
pass
try:
root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to find root of source tree",
"date": None}
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
return render(pieces, cfg.style)
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
except NotThisMethod:
pass
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to compute version", "date": None}
| qiime2-plugins/normalize | q2_feature_table/_version.py | Python | bsd-3-clause | 18,470 |
import numpy as np
import pytest
import pandas as pd
import pandas.util.testing as tm
@pytest.mark.filterwarnings("ignore:Sparse:FutureWarning")
@pytest.mark.filterwarnings("ignore:DataFrame.to_sparse:FutureWarning")
class TestSparseGroupBy:
def setup_method(self, method):
self.dense = pd.DataFrame(
{
"A": ["foo", "bar", "foo", "bar", "foo", "bar", "foo", "foo"],
"B": ["one", "one", "two", "three", "two", "two", "one", "three"],
"C": np.random.randn(8),
"D": np.random.randn(8),
"E": [np.nan, np.nan, 1, 2, np.nan, 1, np.nan, np.nan],
}
)
self.sparse = self.dense.to_sparse()
def test_first_last_nth(self):
# tests for first / last / nth
sparse_grouped = self.sparse.groupby("A")
dense_grouped = self.dense.groupby("A")
sparse_grouped_first = sparse_grouped.first()
sparse_grouped_last = sparse_grouped.last()
sparse_grouped_nth = sparse_grouped.nth(1)
dense_grouped_first = pd.DataFrame(dense_grouped.first().to_sparse())
dense_grouped_last = pd.DataFrame(dense_grouped.last().to_sparse())
dense_grouped_nth = pd.DataFrame(dense_grouped.nth(1).to_sparse())
tm.assert_frame_equal(sparse_grouped_first, dense_grouped_first)
tm.assert_frame_equal(sparse_grouped_last, dense_grouped_last)
tm.assert_frame_equal(sparse_grouped_nth, dense_grouped_nth)
def test_aggfuncs(self):
sparse_grouped = self.sparse.groupby("A")
dense_grouped = self.dense.groupby("A")
result = sparse_grouped.mean().to_sparse()
expected = dense_grouped.mean().to_sparse()
tm.assert_frame_equal(result, expected)
# ToDo: sparse sum includes str column
# tm.assert_frame_equal(sparse_grouped.sum(),
# dense_grouped.sum())
result = sparse_grouped.count().to_sparse()
expected = dense_grouped.count().to_sparse()
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize("fill_value", [0, np.nan])
@pytest.mark.filterwarnings("ignore:Sparse:FutureWarning")
@pytest.mark.filterwarnings("ignore:DataFrame.to_sparse:FutureWarning")
def test_groupby_includes_fill_value(fill_value):
# https://github.com/pandas-dev/pandas/issues/5078
df = pd.DataFrame(
{
"a": [fill_value, 1, fill_value, fill_value],
"b": [fill_value, 1, fill_value, fill_value],
}
)
sdf = df.to_sparse(fill_value=fill_value)
result = sdf.groupby("a").sum()
expected = pd.DataFrame(df.groupby("a").sum().to_sparse(fill_value=fill_value))
tm.assert_frame_equal(result, expected, check_index_type=False)
| toobaz/pandas | pandas/tests/sparse/test_groupby.py | Python | bsd-3-clause | 2,770 |
import sys, os
sys.path.insert(0, os.path.join(os.path.split(__file__)[0], '../'))
from Bio import Entrez
import nesoni
from nesoni import config, io
# ref.: http://wilke.openwetware.org/Parsing_Genbank_files_with_Biopython.html
# replace with your real email (optional):
#Entrez.email = '[email protected]'
@config.Positional('accession')
@config.Positional('email',affects_output=False)
class Fetch_genbank(config.Action_with_output_dir):
accession = None
email = None
def state_filename(self):
return os.path.join(self.output_dir, self.accession + '.state')
def run(self):
work = self.get_workspace()
acc = self.accession
Entrez.email = self.email
#handle=Entrez.efetch(db='nucleotide',id=acc,rettype='gb')
handle=Entrez.efetch(db='nuccore',id=acc,rettype='gbwithparts')
with open(work/(acc+'.gbk'),'wb') as f:
f.write(handle.read())
handle.close()
@config.Positional('run_accession')
class Fetch_sra(config.Action_with_output_dir):
run_accession = None
def state_filename(self):
return os.path.join(self.output_dir, self.run_accession + '.state')
def run(self):
work = self.get_workspace()
acc = self.run_accession
io.execute(
'wget -c URL',
#URL='http://ftp-private.ncbi.nlm.nih.gov/sra/sra-instant/reads/ByRun/sra/%s/%s/%s/%s.sra'
URL='http://ftp-trace.ncbi.nlm.nih.gov/sra/sra-instant/reads/ByRun/sra/%s/%s/%s/%s.sra'
% (acc[:3],acc[:6],acc,acc),
cwd=work.working_dir,
)
io.execute(
'fastq-dump --split-files --bzip2 FILENAME',
FILENAME='./'+acc+'.sra',
cwd=work.working_dir,
)
@config.Positional('email', 'Email address to give when accessing NCBI', affects_output=False)
class Test_analyse_samples(config.Action_with_output_dir):
email = None
genbanks = 'NC_017331.1'.split()
accessions = 'ERR048386 ERR048396 ERR048414 ERR086164 ERR086220 ERR086222'.split()
def run(self):
work = self.get_workspace()
with nesoni.Stage() as stage:
for accession in self.genbanks:
Fetch_genbank(
work/'genbank',
accession,
self.email,
).process_make(stage)
for accession in self.accessions:
Fetch_sra(
work/'sra',
accession
).process_make(stage)
nesoni.Make_reference(
output_dir=work/'TW20',
filenames=[ work/('genbank',accession+'.gbk') for accession in self.genbanks ],
genome=True, bowtie=True, ls=True, snpeff=True,
).make()
analyser = nesoni.Analyse_samples
analyser(
work/'analysis',
work/'TW20',
samples=[
nesoni.Analyse_sample(
accession,
pairs=[[ work/('sra',accession+'_1.fastq.bz2'),work/('sra',accession+'_2.fastq.bz2') ]]
)
for accession in self.accessions
]
).make()
if __name__ == '__main__':
nesoni.run_tool(Test_analyse_samples(output_dir='output'))
| Victorian-Bioinformatics-Consortium/nesoni | test/test_analyse_samples.py | Python | gpl-2.0 | 3,409 |
"""
Converter file handler.
In goes a file, out comes a string.
If no conversion is available for the filetype,
an error message is returned instead.
"""
import textract
import tempfile
import os
import urllib2
class FileConverter:
valid = False
txt = ""
def __init__(self, url):
        valid_types = ['doc', 'txt', 'docx', 'pdf', 'rtf', 'odt']
        filetype = url.split(".")[-1]
        if filetype in valid_types:
            self.valid = True  # fixed: bare `valid = True` only bound a local name
content = urllib2.urlopen(url)
            with tempfile.NamedTemporaryFile(delete=False, suffix="." + filetype) as temp:
                temp.write(content.read())
                temp.close()  # release the handle so textract can reopen the file by name
                self.txt = textract.process(temp.name)
                os.remove(temp.name)  # delete=False above, so clean up manually
else:
self.txt = "error, non-valid format"
# def __del__(self):
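# Minimal usage sketch (the URL is a placeholder, not a real document):
# converter = FileConverter("http://example.com/report.pdf")
# if converter.valid:
#     print converter.txt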
| turtlekingster/ScubaWeb | scrapewriter/converters.py | Python | mit | 737 |
"""
Forms for the bug tracker app.
"""
from django import forms
from django.utils.translation import ugettext_lazy as _
from apps.txtrender.forms import MarkupCharField
from apps.contentreport.forms import ContentReportCreationForm
from apps.tools.http_utils import get_client_ip_address
from .models import (IssueTicket,
IssueTicketSubscription,
IssueComment,
BugTrackerUserProfile)
from .notifications import (notify_of_new_comment,
notify_of_new_issue)
class IssueTicketCreationForm(forms.Form):
"""
``IssueTicket`` creation form for registered users only.
"""
title = forms.CharField(widget=forms.TextInput(),
max_length=255,
label=_('Title'))
description = MarkupCharField(label=_('Problem description'))
notify_of_reply = forms.BooleanField(widget=forms.CheckboxInput(),
label=_('Notify me of new reply'),
required=False)
def save(self, request, submitter):
"""
Save the form by creating a new ``IssueTicket``.
:param request: The current request.
:param submitter: The ticket's submitter.
        :return: The newly created ticket.
"""
new_obj = IssueTicket.objects.create(title=self.cleaned_data['title'],
description=self.cleaned_data['description'],
submitter=submitter,
submitter_ip_address=get_client_ip_address(request))
# Add subscriber if necessary
if self.cleaned_data['notify_of_reply']:
IssueTicketSubscription.objects.subscribe_to_issue(submitter, new_obj)
# Notify subscribers
notify_of_new_issue(new_obj, request, submitter)
# Return the newly created object
return new_obj
class IssueTicketEditionForm(forms.ModelForm):
"""
``IssueTicket`` edition form for registered users only.
"""
class Meta:
model = IssueTicket
fields = ('title',
'description')
class IssueCommentCreationForm(forms.Form):
"""
``IssueComment`` creation form for registered users only.
"""
comment_body = MarkupCharField(label=_('Comment text'))
notify_of_reply = forms.BooleanField(widget=forms.CheckboxInput(),
label=_('Notify me of new reply'),
required=False)
def save(self, request, issue, author):
"""
Save the form by creating a new ``IssueComment`` for the given ``IssueTicket``.
        :param request: The current request.
        :param issue: The related issue instance.
        :param author: The author of this comment.
        :return: The newly created comment.
        """
new_obj = IssueComment.objects.create(issue=issue,
author=author,
body=self.cleaned_data['comment_body'],
author_ip_address=get_client_ip_address(request))
# Add subscriber if necessary
if self.cleaned_data['notify_of_reply']:
IssueTicketSubscription.objects.subscribe_to_issue(author, new_obj.issue)
else:
IssueTicketSubscription.objects.unsubscribe_from_issue(author, new_obj.issue)
# Notify subscribers
notify_of_new_comment(issue, new_obj, request, author)
# Return the newly created object
return new_obj
class IssueCommentReportCreationForm(ContentReportCreationForm):
"""
``IssueCommentReport`` creation form for registered users only.
"""
def get_extra_notification_kwargs(self):
"""
Return extra arguments for the notification template.
"""
return {
'content_object_name': 'comment',
'title_template_name': "bugtracker/issue_comment_report_subject.txt",
'message_template_name': "bugtracker/issue_comment_report_body.txt",
'message_template_name_html': "bugtracker/issue_comment_report_body.html",
}
class BugTrackerProfileModificationForm(forms.ModelForm):
"""
Bug tracker user's account modification form.
"""
class Meta:
model = BugTrackerUserProfile
fields = ('notify_of_new_issue',
'notify_of_reply_by_default')
| TamiaLab/carnetdumaker | apps/bugtracker/forms.py | Python | agpl-3.0 | 4,597 |
from construct import *
class InterfaceCounters(object):
def __init__(self, u):
self.if_index = u.unpack_uint()
self.if_type = u.unpack_uint()
self.if_speed = u.unpack_uhyper()
self.if_mode = u.unpack_uint()
self.if_status = u.unpack_uint()
self.if_inOctets = u.unpack_uhyper()
self.if_inPackets = u.unpack_uint()
self.if_inMcast = u.unpack_uint()
self.if_inBcast = u.unpack_uint()
self.if_inDiscard = u.unpack_uint()
self.if_inError = u.unpack_uint()
self.if_unknown = u.unpack_uint()
self.if_outOctets = u.unpack_uhyper()
self.if_outPackets = u.unpack_uint()
self.if_outMcast = u.unpack_uint()
self.if_outBcast = u.unpack_uint()
self.if_outDiscard = u.unpack_uint()
self.if_outError = u.unpack_uint()
self.if_promisc = u.unpack_uint()
class EthernetCounters(object):
def __init__(self, u):
self.dot3StatsAlignmentErrors = u.unpack_uint()
self.dot3StatsFCSErrors = u.unpack_uint()
self.dot3StatsSingleCollisionFrames = u.unpack_uint()
self.dot3StatsMultipleCollisionFrames = u.unpack_uint()
self.dot3StatsSQETestErrors = u.unpack_uint()
self.dot3StatsDeferredTransmissions = u.unpack_uint()
self.dot3StatsLateCollisions = u.unpack_uint()
self.dot3StatsExcessiveCollisions = u.unpack_uint()
self.dot3StatsInternalMacTransmitErrors = u.unpack_uint()
self.dot3StatsCarrierSenseErrors = u.unpack_uint()
self.dot3StatsFrameTooLongs = u.unpack_uint()
self.dot3StatsInternalMacReceiveErrors = u.unpack_uint()
self.dot3StatsSymbolErrors = u.unpack_uint()
class VLANCounters(object):
def __init__(self, u):
self.vlan_id = u.unpack_uint()
self.octets = u.unpack_uhyper()
self.ucastPkts = u.unpack_uint()
self.multicastPkts = u.unpack_uint()
self.broadcastPkts = u.unpack_uint()
self.discards = u.unpack_uint()
class TokenringCounters(object):
def __init__(self, u):
self.dot5StatsLineErrors = u.unpack_uint()
self.dot5StatsBurstErrors = u.unpack_uint()
self.dot5StatsACErrors = u.unpack_uint()
self.dot5StatsAbortTransErrors = u.unpack_uint()
self.dot5StatsInternalErrors = u.unpack_uint()
self.dot5StatsLostFrameErrors = u.unpack_uint()
self.dot5StatsReceiveCongestions = u.unpack_uint()
self.dot5StatsFrameCopiedErrors = u.unpack_uint()
self.dot5StatsTokenErrors = u.unpack_uint()
self.dot5StatsSoftErrors = u.unpack_uint()
self.dot5StatsHardErrors = u.unpack_uint()
self.dot5StatsSignalLoss = u.unpack_uint()
self.dot5StatsTransmitBeacons = u.unpack_uint()
self.dot5StatsRecoverys = u.unpack_uint()
self.dot5StatsLobeWires = u.unpack_uint()
self.dot5StatsRemoves = u.unpack_uint()
self.dot5StatsSingles = u.unpack_uint()
self.dot5StatsFreqErrors = u.unpack_uint()
class VGCounters(object):
    # NOTE: mirrors TokenringCounters field for field; the dot5 names are kept
    # here rather than the dot12 (100BaseVG) counter names
    def __init__(self, u):
self.dot5StatsLineErrors = u.unpack_uint()
self.dot5StatsBurstErrors = u.unpack_uint()
self.dot5StatsACErrors = u.unpack_uint()
self.dot5StatsAbortTransErrors = u.unpack_uint()
self.dot5StatsInternalErrors = u.unpack_uint()
self.dot5StatsLostFrameErrors = u.unpack_uint()
self.dot5StatsReceiveCongestions = u.unpack_uint()
self.dot5StatsFrameCopiedErrors = u.unpack_uint()
self.dot5StatsTokenErrors = u.unpack_uint()
self.dot5StatsSoftErrors = u.unpack_uint()
self.dot5StatsHardErrors = u.unpack_uint()
self.dot5StatsSignalLoss = u.unpack_uint()
self.dot5StatsTransmitBeacons = u.unpack_uint()
self.dot5StatsRecoverys = u.unpack_uint()
self.dot5StatsLobeWires = u.unpack_uint()
self.dot5StatsRemoves = u.unpack_uint()
self.dot5StatsSingles = u.unpack_uint()
self.dot5StatsFreqErrors = u.unpack_uint()
class HostCounters(object):
format = 2000
def __init__(self, u):
self.hostname = u.unpack_string()
self.uuid = u.unpack_fopaque(16)
self.machine_type = u.unpack_uint()
self.os_name = u.unpack_uint()
self.os_release = u.unpack_string()
class HostAdapters(object):
format = 2001
def __init__(self, u):
self.adapters = Struct("adapters",
UBInt32("count"),
Array(lambda c: c.count,
Struct("adapter",
UBInt32("index"),
Bytes("MAC", 6)
)
)
).parse(u.get_buffer())
class HostParent(object):
format = 2002
def __init__(self, u):
self.container_type = u.unpack_uint()
self.container_index = u.unpack_uint()
class HostCPUCounters(object):
format = 2003
def __init__(self, u):
self.load_one = u.unpack_float()
self.load_five = u.unpack_float()
self.load_fifteen = u.unpack_float()
self.proc_run = u.unpack_uint()
self.proc_total = u.unpack_uint()
self.cpu_num = u.unpack_uint()
self.cpu_speed = u.unpack_uint()
self.uptime = u.unpack_uint()
self.cpu_user = u.unpack_uint()
self.cpu_nice = u.unpack_uint()
self.cpu_system = u.unpack_uint()
self.cpu_idle = u.unpack_uint()
self.cpu_wio = u.unpack_uint()
self.cpu_intr = u.unpack_uint()
self.cpu_sintr = u.unpack_uint()
self.interrupts = u.unpack_uint()
self.contexts = u.unpack_uint()
class HostMemoryCounters(object):
format = 2004
def __init__(self, u):
self.mem_total = u.unpack_uhyper()
self.mem_free = u.unpack_uhyper()
self.mem_shared = u.unpack_uhyper()
self.mem_buffers = u.unpack_uhyper()
self.mem_cached = u.unpack_uhyper()
self.swap_total = u.unpack_uhyper()
self.swap_free = u.unpack_uhyper()
self.page_in = u.unpack_uint()
self.page_out = u.unpack_uint()
self.swap_in = u.unpack_uint()
self.swap_out = u.unpack_uint()
class DiskIOCounters(object):
format = 2005
def __init__(self, u):
self.disk_total = u.unpack_uhyper()
self.disk_free = u.unpack_uhyper()
self.part_max_used = u.unpack_uint()
self.reads = u.unpack_uint()
self.bytes_read = u.unpack_uhyper()
self.read_time = u.unpack_uint()
self.writes = u.unpack_uint()
self.bytes_written = u.unpack_uhyper()
self.write_time = u.unpack_uint()
class NetIOCounters(object):
format = 2006
def __init__(self, u):
self.bytes_in = u.unpack_uhyper()
self.pkts_in = u.unpack_uint()
self.errs_in = u.unpack_uint()
self.drops_in = u.unpack_uint()
self.bytes_out = u.unpack_uhyper()
self.packets_out = u.unpack_uint()
self.errs_out = u.unpack_uint()
self.drops_out = u.unpack_uint()
class SocketIPv4Counters(object):
format = 2100
def __init__(self, u):
self.protocol = u.unpack_uint()
self.local_ip = u.unpack_fstring(4)
self.remote_ip = u.unpack_fstring(4)
self.local_port = u.unpack_uint()
self.remote_port = u.unpack_uint()
class SocketIPv6Counters(object):
format = 2101
def __init__(self, u):
self.protocol = u.unpack_uint()
self.local_ip = u.unpack_fstring(16)
self.remote_ip = u.unpack_fstring(16)
self.local_port = u.unpack_uint()
self.remote_port = u.unpack_uint()
class VirtMemoryCounters(object):
format = 2102
def __init__(self, u):
self.memory = u.unpack_uhyper()
self.maxMemory = u.unpack_uhyper()
class VirtDiskIOCounters(object):
format = 2103
def __init__(self, u):
self.capacity = u.unpack_uhyper()
self.allocation = u.unpack_uhyper()
self.available = u.unpack_uhyper()
self.rd_req = u.unpack_uint()
        self.rd_bytes = u.unpack_uhyper()  # fixed: `unpack_unsigend` does not exist; rd_bytes pairs with rd_req
self.wr_req = u.unpack_uint()
self.wr_bytes = u.unpack_uhyper()
self.errs = u.unpack_uint()
class VirtNetIOCounters(object):
format = 2104
def __init__(self, u):
self.rx_bytes = u.unpack_uhyper()
self.rx_packets = u.unpack_uint()
self.rx_errs = u.unpack_uint()
self.rx_drop = u.unpack_uint()
self.tx_bytes = u.unpack_uhyper()
self.tx_packets = u.unpack_uint()
self.tx_errs = u.unpack_uint()
self.tx_drop = u.unpack_uint()
def getDecoder(format):
decoders = {
1: InterfaceCounters,
2: EthernetCounters,
3: TokenringCounters,
4: VGCounters,
5: VLANCounters,
2000: HostCounters,
2001: HostAdapters,
2002: HostParent,
2003: HostCPUCounters,
2004: HostMemoryCounters,
2005: DiskIOCounters,
2006: NetIOCounters,
        2100: SocketIPv4Counters,
        2101: SocketIPv6Counters,
2102: VirtMemoryCounters,
2103: VirtDiskIOCounters,
2104: VirtNetIOCounters
}
return decoders.get(format, None)
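# Usage sketch, assuming `u` is an xdrlib-style Unpacker positioned at the
# body of a counter record whose format number is `fmt`:
# decoder = getDecoder(fmt)
# counters = decoder(u) if decoder is not None else None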
| calston/tensor | tensor/protocol/sflow/protocol/counters.py | Python | mit | 9,132 |
try:
from . import generalIO
except ImportError:
import generalIO
import czifile
def _eval(txt):
try:
return eval(txt)
except (NameError, TypeError, SyntaxError):
return txt
class CZIReader(generalIO.GeneralReader):
def __init__(self, fn):
"""
fn: file name
"""
generalIO.GeneralReader.__init__(self, fn)
def openFile(self):
"""
open a file for reading
"""
self.fp = czifile.CziFile(self.fn)
self.handle = self.fp._fh
self.readHeader()
def readHeader(self):
self.readMetaData()
nt = self.fp.shape[self.fp.axes.index('T')]
waves = [self.metadata['Channel%iEmissionWavelength' % w] for w in range(self.metadata['SizeC'])]
        imgSeq = 0  # fall back to the first known sequence if no axis pattern matches
        for i, imgsq in enumerate(generalIO.IMGSEQ):
            if imgsq.replace('W', 'C') in self.fp.axes:
                imgSeq = i
                break
self.setDim(self.metadata['SizeX'], self.metadata['SizeY'], self.metadata['SizeZ'], nt, self.metadata['SizeC'], self.fp.dtype, waves, imgSeq)
def readMetaData(self):
tree = self.fp.metadata.getroottree()
root = tree.getroot()
self.readTree(root)
def readTree(self, tree):
if tree.tag == 'Channels':
self.readChannels(tree)
else:
children = tree.getchildren()
if children:
for child in children:
self.readTree(child)
else:
#if tree.tag.endswith('Wavelength'):
# raise
self.metadata[tree.tag] = _eval(tree.text)
def readChannels(self, tree):
channels = tree.getchildren()
for w, channel in enumerate(channels):
for cha_info in channel:
self.metadata[('Channel%i' % w) + cha_info.tag] = _eval(cha_info.text)
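# Usage sketch (the path is a placeholder; openFile() is called explicitly
# here on the assumption that GeneralReader does not open on construction):
# reader = CZIReader('/path/to/image.czi')
# reader.openFile()
# print(reader.metadata['SizeX'], reader.metadata['SizeY'])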
| macronucleus/chromagnon | Chromagnon/imgio/cziIO.py | Python | mit | 1,890 |
import calendar
import json
import re
import unicodedata
from datetime import datetime, time, timedelta
from enum import Enum
from uuid import UUID
import iso639
import pytz
from dateutil.relativedelta import relativedelta
from temba_client.utils import format_iso8601
from django.utils import timezone
from django.utils.timesince import timeuntil
LANGUAGES_BY_CODE = {} # cache of language lookups
def parse_csv(csv, as_ints=False):
"""
Parses a comma separated list of values as strings or integers
"""
items = []
for val in csv.split(","):
val = val.strip()
if val:
items.append(int(val) if as_ints else val)
return items
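# Illustrative examples:
# parse_csv("a, b ,c") == ["a", "b", "c"]
# parse_csv("1,2, 3", as_ints=True) == [1, 2, 3]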
def str_to_bool(text):
"""
Parses a boolean value from the given text
"""
return text and text.lower() in ["true", "y", "yes", "1"]
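# e.g. str_to_bool("Yes") is True, while str_to_bool("0") and str_to_bool(None)
# are both falsy ("0" is deliberately not in the accepted list)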
class JSONEncoder(json.JSONEncoder):
"""
    JSON encoder which encodes datetimes as ISO 8601 strings, enums by name,
    and any object exposing a callable to_json() method
"""
def default(self, val):
if isinstance(val, datetime):
return format_iso8601(val)
elif isinstance(val, Enum):
return val.name
elif hasattr(val, "to_json") and callable(val.to_json):
return val.to_json()
return json.JSONEncoder.default(self, val) # pragma: no cover
def json_encode(data):
"""
Encodes the given primitives as JSON using Django's encoder which can handle dates
"""
return json.dumps(data, cls=JSONEncoder)
def json_decode(data):
"""
Decodes the given JSON as primitives
"""
if isinstance(data, bytes):
data = data.decode("utf-8")
return json.loads(data)
def safe_max(*args, **kwargs):
"""
Regular max won't compare dates with NoneType and raises exception for no args
"""
non_nones = [v for v in args if v is not None]
if len(non_nones) == 0:
return None
elif len(non_nones) == 1:
return non_nones[0]
else:
return max(*non_nones, **kwargs)
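# e.g. safe_max(None, 3, 1) == 3, and safe_max() returns None instead of raising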
def normalize(text):
"""
Normalizes text before keyword matching. Converts to lowercase, performs KD unicode normalization and replaces
multiple whitespace characters with single spaces.
"""
return unicodedata.normalize("NFKD", re.sub(r"\s+", " ", text.lower()))
def match_keywords(text, keywords):
"""
Checks the given text for a keyword match
"""
for keyword in keywords:
if re.search(r"\b" + keyword + r"\b", text, flags=re.IGNORECASE):
return True
return False
def truncate(text, length=100, suffix="..."):
"""
Truncates the given text to be no longer than the given length
"""
if len(text) > length:
return text[: length - len(suffix)] + suffix
else:
return text
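# e.g. truncate("hello world", length=8) == "hello..." (8 characters, suffix included)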
def date_to_milliseconds(d):
"""
Converts a date to a millisecond accuracy timestamp. Equivalent to Date.UTC(d.year, d.month-1, d.day) in Javascript
"""
return calendar.timegm(datetime.combine(d, time(0, 0, 0)).replace(tzinfo=pytz.UTC).utctimetuple()) * 1000
def datetime_to_microseconds(dt):
"""
Converts a datetime to a microsecond accuracy timestamp
"""
seconds = calendar.timegm(dt.utctimetuple())
return seconds * 1000000 + dt.microsecond
def microseconds_to_datetime(ms):
"""
Converts a microsecond accuracy timestamp to a datetime
"""
return datetime.utcfromtimestamp(ms / 1000000.0).replace(tzinfo=pytz.utc)
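# The two conversions round-trip exactly, e.g.:
# dt = datetime(2019, 1, 1, tzinfo=pytz.utc)
# microseconds_to_datetime(datetime_to_microseconds(dt)) == dt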
def month_range(offset, now=None):
"""
Gets the UTC start and end (exclusive) of a month
:param offset: 0 = this month, -1 last month, 1 = next month etc
:return: the start and end datetimes as a tuple
"""
if not now:
now = timezone.now()
now = now.astimezone(pytz.UTC)
start_of_this_month = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
return start_of_this_month + relativedelta(months=offset), start_of_this_month + relativedelta(months=offset + 1)
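# e.g. month_range(0, now=datetime(2019, 5, 17, tzinfo=pytz.utc)) returns
# (2019-05-01 00:00 UTC, 2019-06-01 00:00 UTC)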
def date_range(start, stop):
"""
A date-based range generator
"""
for n in range(int((stop - start).days)):
yield start + timedelta(n)
class TimelineItem(object):
"""
Wraps a message or action for easier inclusion in a merged timeline
"""
def __init__(self, item):
self.item = item
def get_time(self):
return self.item.created_on
def to_json(self):
return {"time": self.get_time(), "type": self.item.TIMELINE_TYPE, "item": self.item.as_json()}
def uuid_to_int(uuid):
"""
Converts a UUID hex string to an int within the range of a Django IntegerField, and also >=0, as the URL regexes
don't account for negative numbers.
From https://docs.djangoproject.com/en/1.9/ref/models/fields/#integerfield
"Values from -2147483648 to 2147483647 are safe in all databases supported by Django"
"""
return UUID(hex=uuid).int % (2147483647 + 1)
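# e.g. uuid_to_int("00000000-0000-0000-0000-000000000005") == 5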
def get_language_name(iso_code):
"""
Gets the language name for the given ISO639-3 code.
"""
if iso_code not in LANGUAGES_BY_CODE:
try:
lang = iso639.languages.get(part3=iso_code)
except KeyError:
lang = None
if lang:
# we only show up to the first semi or paren
lang = re.split(r";|\(", lang.name)[0].strip()
LANGUAGES_BY_CODE[iso_code] = lang
return LANGUAGES_BY_CODE[iso_code]
def is_valid_language_code(code):
"""
Gets whether the given code is a valid ISO639-3 code.
"""
try:
iso639.languages.get(part3=code)
return True
except KeyError:
return False
def humanize_seconds(seconds):
now = timezone.now()
return timeuntil(now + timedelta(seconds=seconds), now)
| rapidpro/casepro | casepro/utils/__init__.py | Python | bsd-3-clause | 5,702 |
# -*- coding: utf-8 -*-
#/******************************************************************************
# * Copyright (c) 2012 Jan Rheinländer <[email protected]> *
# * *
# * This file is part of the FreeCAD CAx development system. *
# * *
# * This library is free software; you can redistribute it and/or *
# * modify it under the terms of the GNU Library General Public *
# * License as published by the Free Software Foundation; either *
# * version 2 of the License, or (at your option) any later version. *
# * *
# * This library is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this library; see the file COPYING.LIB. If not, *
# * write to the Free Software Foundation, Inc., 59 Temple Place, *
# * Suite 330, Boston, MA 02111-1307, USA *
# * *
# ******************************************************************************/
import FreeCAD, FreeCADGui
from .SegmentFunction import SegmentFunction, IntervalFunction, StressFunction, TranslationFunction
from .ShaftFeature import ShaftFeature
from .ShaftDiagram import Diagram
import math
class ShaftSegment:
def __init__(self, l, d, di):
self.length = l
self.diameter = d
self.innerdiameter = di
self.constraintType = "None"
self.constraint = None
class Shaft:
"The axis of the shaft is always assumed to correspond to the X-axis"
# Names (note Qy corresponds with Mz, and Qz with My)
Fstr = ["Nx","Qy","Qz"] # Forces
Mstr = ["Mx","Mz","My"] # Moments
wstr = ["", "wy", "wz"] # Translations
sigmaNstr = ["sigmax","sigmay","sigmaz"] # Normal/shear stresses
sigmaBstr = ["taut","sigmabz", "sigmaby"] # Torsion/bending stresses
# For diagram labeling
Qstrings = (("Normal force [x]", "x", "mm", "N_x", "N"),
("Shear force [y]", "x", "mm", "Q_y", "N"),
("Shear force [z]", "x", "mm", "Q_z", "N"))
Mstrings = (("Torque [x]", "x", "mm", "M_t", "Nm"),
("Bending moment [z]", "x", "mm", "M_{b,z}", "Nm"),
("Bending moment [y]", "x", "mm", "M_{b,y}", "Nm"))
wstrings = (("", "", "", "", ""),
("Translation [y]", "x", "mm", "w_y", "mm"),
("Translation [z]", "x", "mm", "w_z", "mm"))
sigmaNstrings = (("Normal stress [x]", "x", "mm", "\\sigma_x", u"N/mm²"),
("Shear stress [y]", "x", "mm", "\\sigma_y", u"N/mm²"),
("Shear stress [z]", "x", "mm", "\\sigma_z", u"N/mm²"))
sigmaBstrings = (("Torque stress [x]", "x", "mm", "\\tau_t", u"N/mm²"),
("Bending stress [z]", "x", "mm", "\\sigma_{b,z}", u"N/mm²"),
("Bending stress [y]", "x", "mm", "\\sigma_{b,y}", u"N/mm²"))
def __init__(self, parent):
self.parent = parent
self.doc = parent.doc
self.feature = ShaftFeature(self.doc)
# List of shaft segments (each segment has a different diameter)
self.segments = []
# The diagrams
self.diagrams = {} # map of function name against Diagram object
# Calculation of shaft
self.F = [None, None, None] # force in direction of [x,y,z]-axis
self.M = [None, None, None] # bending moment around [x,z,y]-axis
self.w = [None, None, None] # Shaft translation due to bending
self.sigmaN = [None, None, None] # normal stress in direction of x-axis, shear stress in direction of [y,z]-axis
        self.sigmaB = [None, None, None] # torque stress around x-axis, maximum bending stress in direction of [y,z]-axis
def getLengthTo(self, index):
"Get the total length of all segments up to the given one"
result = 0.0
for i in range(index):
result += self.segments[i].length
return result
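    # e.g. with three segments of lengths 10, 20 and 30 mm, getLengthTo(2)
    # returns 30.0 (the summed lengths of segments 0 and 1)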
def addSegment(self, l, d, di):
self.segments.append(ShaftSegment(l,d,di))
self.feature.addSegment(l, d, di)
# We don't call equilibrium() here because the new segment has no constraints defined yet
# Fix face reference of fixed segment if it is the last one
for i in range(1, len(self.segments)):
if self.segments[i].constraintType != "Fixed":
continue
if i == len(self.segments) - 1:
                self.segments[i].constraint.References = [( self.feature.feature, "Face%u" % (2 * (i+1) + 1) )]
else:
# Remove reference since it is now in the middle of the shaft (which is not allowed)
                self.segments[i].constraint.References = [(None, "")]
def updateSegment(self, index, length = None, diameter = None, innerdiameter = None):
oldLength = self.segments[index].length
if length is not None:
self.segments[index].length = length
if diameter is not None:
self.segments[index].diameter = diameter
if innerdiameter is not None:
self.segments[index].innerdiameter = innerdiameter
self.feature.updateSegment(index, oldLength, self.segments[index].length, self.segments[index].diameter, self.segments[index].innerdiameter)
self.equilibrium()
self.updateDiagrams()
def updateConstraint(self, index, constraintType):
if (constraintType is not None):
# Did the constraint type change?
if (self.segments[index].constraintType != "None") and (self.segments[index].constraintType != constraintType):
self.doc.removeObject(self.segments[index].constraint.Name)
self.segments[index].constraint = None
self.segments[index].constraintType = constraintType
# Create constraint if it does not exist yet or has changed
if self.segments[index].constraint is None:
if (constraintType == "Force"):
# TODO: Create a reference point and put the force onto it
constraint = self.doc.addObject("Fem::ConstraintForce","ShaftConstraintForce")
constraint.Force = 1000.0
self.segments[index].constraint = constraint
elif (constraintType == "Fixed"):
# TODO: Use robust reference as soon as it is available for the face
constraint = self.doc.addObject("Fem::ConstraintFixed","ShaftConstraintFixed")
if index == 0:
constraint.References = [( self.feature.feature, "Face1")]
elif index == len(self.segments) - 1:
constraint.References = [( self.feature.feature, "Face%u" % (2 * (index+1) + 1) )]
self.segments[index].constraint = constraint
elif (constraintType == "Bearing"):
# TODO: Use robust reference as soon as it is available for the cylindrical face reference
constraint = self.doc.addObject("Fem::ConstraintBearing","ShaftConstraintBearing")
constraint.References = [( self.feature.feature, "Face%u" % (2 * (index+1)) )]
constraint.AxialFree = True
self.segments[index].constraint = constraint
elif (constraintType == "Pulley"):
constraint= self.doc.addObject("Fem::ConstraintPulley","ShaftConstraintPulley")
constraint.References = [( self.feature.feature, "Face%u" % (2 * (index+1)) )]
self.segments[index].constraint = constraint
elif (constraintType == "Gear"):
constraint = self.doc.addObject("Fem::ConstraintGear","ShaftConstraintGear")
constraint.References = [( self.feature.feature, "Face%u" % (2 * (index+1)) )]
self.segments[index].constraint = constraint
self.equilibrium()
self.updateDiagrams()
def editConstraint(self, index):
if (self.segments[index].constraint is not None):
FreeCADGui.activeDocument().setEdit(self.segments[index].constraint.Name)
def getConstraint(self, index):
return self.segments[index].constraint
def updateEdge(self, column, start):
        FreeCAD.Console.PrintMessage("Not implemented yet - waiting for robust references...")
return
"""
if self.sketchClosed is not True:
return
# Create a chamfer or fillet at the start or end edge of the segment
if start is True:
row = rowStartEdgeType
idx = 0
else:
row = rowEndEdgeType
idx = 1
edgeType = self.tableWidget.item(row, column).text()[0].upper()
if not ((edgeType == "C") or (edgeType == "F")):
return # neither chamfer nor fillet defined
if edgeType == "C":
objName = self.doc.addObject("PartDesign::Chamfer","ChamferShaft%u" % (column * 2 + idx))
else:
objName = self.doc.addObject("PartDesign::Fillet","FilletShaft%u" % (column * 2 + idx))
if objName == "":
return
edgeName = "Edge%u" % self.getEdgeIndex(column, idx, edgeType)
self.doc.getObject(objName).Base = (self.doc.getObject("RevolutionShaft"),"[%s]" % edgeName)
# etc. etc.
"""
def getEdgeIndex(self, column, startIdx):
# FIXME: This is impossible without robust references anchored in the sketch!!!
return
def updateDiagrams(self):
for ax in range(3):
if self.F[ax] is not None:
if self.F[ax].name in self.diagrams:
self.diagrams[self.F[ax].name].update(self.F[ax], self.getLengthTo(len(self.segments)) / 1000.0)
if self.M[ax] is not None:
if self.M[ax].name in self.diagrams:
self.diagrams[self.M[ax].name].update(self.M[ax], self.getLengthTo(len(self.segments)) / 1000.0)
if self.w[ax] is not None:
if self.w[ax].name in self.diagrams:
self.diagrams[self.w[ax].name].update(self.w[ax], self.getLengthTo(len(self.segments)) / 1000.0)
if self.sigmaN[ax] is not None:
if self.sigmaN[ax].name in self.diagrams:
self.diagrams[self.sigmaN[ax].name].update(self.sigmaN[ax], self.getLengthTo(len(self.segments)) / 1000.0)
if self.sigmaB[ax] is not None:
if self.sigmaB[ax].name in self.diagrams:
self.diagrams[self.sigmaB[ax].name].update(self.sigmaB[ax], self.getLengthTo(len(self.segments)) / 1000.0)
def showDiagram(self, which):
if which in self.Fstr:
ax = self.Fstr.index(which)
text = self.Qstrings[ax]
if self.F[ax] is None:
# No data
return
if self.F[ax].name in self.diagrams:
# Diagram is already open, close it again
self.diagrams[self.F[ax].name].close()
del (self.diagrams[self.F[ax].name])
return
self.diagrams[self.F[ax].name] = Diagram()
self.diagrams[self.F[ax].name].create(text[0], self.F[ax], self.getLengthTo(len(self.segments)) / 1000.0, text[1], text[2], 1000.0, text[3], text[4], 1.0, 10)
elif which in self.Mstr:
ax = self.Mstr.index(which)
text = self.Mstrings[ax]
if self.M[ax] is None:
# No data
return
if self.M[ax].name in self.diagrams:
# Diagram is already open, close it again
self.diagrams[self.M[ax].name].close()
del (self.diagrams[self.M[ax].name])
return
self.diagrams[self.M[ax].name] = Diagram()
self.diagrams[self.M[ax].name].create(text[0], self.M[ax], self.getLengthTo(len(self.segments)) / 1000.0, text[1], text[2], 1000.0, text[3], text[4], 1.0, 20)
elif which in self.wstr:
ax = self.wstr.index(which)
text = self.wstrings[ax]
if self.w[ax] is None:
# No data
return
if self.w[ax].name in self.diagrams:
# Diagram is already open, close it again
self.diagrams[self.w[ax].name].close()
del (self.diagrams[self.w[ax].name])
return
self.diagrams[self.w[ax].name] = Diagram()
self.diagrams[self.w[ax].name].create(text[0], self.w[ax], self.getLengthTo(len(self.segments)) / 1000.0, text[1], text[2], 1000.0, text[3], text[4], 1000.0, 30)
elif which in self.sigmaNstr:
ax = self.sigmaNstr.index(which)
text = self.sigmaNstrings[ax]
if self.sigmaN[ax] is None:
# No data
return
if self.sigmaN[ax].name in self.diagrams:
# Diagram is already open, close it again
self.diagrams[self.sigmaN[ax].name].close()
del (self.diagrams[self.sigmaN[ax].name])
return
self.diagrams[self.sigmaN[ax].name] = Diagram()
self.diagrams[self.sigmaN[ax].name].create(text[0], self.sigmaN[ax], self.getLengthTo(len(self.segments)) / 1000.0, text[1], text[2], 1000.0, text[3], text[4], 1.0E-6, 10)
elif which in self.sigmaBstr:
ax = self.sigmaBstr.index(which)
text = self.sigmaBstrings[ax]
if self.sigmaB[ax] is None:
# No data
return
if self.sigmaB[ax].name in self.diagrams:
# Diagram is already open, close it again
self.diagrams[self.sigmaB[ax].name].close()
del (self.diagrams[self.sigmaB[ax].name])
return
self.diagrams[self.sigmaB[ax].name] = Diagram()
self.diagrams[self.sigmaB[ax].name].create(text[0], self.sigmaB[ax], self.getLengthTo(len(self.segments)) / 1000.0, text[1], text[2], 1000.0, text[3], text[4], 1.0E-6, 20)
def addTo(self, dict, location, value):
if location not in dict:
dict[location] = value
else:
dict[location] += value
def equilibrium(self):
# Build equilibrium equations
try:
import numpy as np
except ImportError:
FreeCAD.Console.PrintMessage("numpy is not installed on your system\n")
raise ImportError("numpy not installed")
# Initialization of structures. All three axes are handled separately so everything is 3-fold
# dictionaries of (location : outer force/moment) with reverse sign, which means that the segment functions for the section force and section moment
# created from them will have signs as by the convention in
# http://www.umwelt-campus.de/ucb/fileadmin/users/90_t.preussler/dokumente/Skripte/TEMECH/TMI/Ebene_Balkenstatik.pdf (page 10)
# (see also example on page 19)
forces = [{0.0:0.0}, {0.0:0.0}, {0.0:0.0}]
moments = [{0.0:0.0}, {0.0:0.0}, {0.0:0.0}]
# Boundary conditions for shaft bending line
tangents = [[], [], []] # Tangents to shaft bending line
translations = [[], [], []] # Shaft displacement
# Variable names, e.g. Fx, Mz. Because the system must be exactly determined, not more than two independent variables for each
# force/moment per axis are possible (if there are more no solution is calculated)
variableNames = [[""], [""], [""]]
# # dictionary of (variableName : location) giving the x-coordinate at which the force/moment represented by the variable acts on the shaft
locations = {}
# Coefficients of the equilibrium equations in the form a = b * F1 + c * F2 and d = e * M1 + f * M2
# LHS (variables a1, a2, a3, d3) initialized to zero
coefficientsF = [[0], [0], [0]]
coefficientsM = [[0], [0], [0]]
for i in range(len(self.segments)):
cType = self.segments[i].constraintType
constraint = self.segments[i].constraint
if cType == "Fixed":
# Fixed segment
if i == 0:
# At beginning of shaft
location = 0
elif i == len(self.segments) - 1:
# At end of shaft
location = self.getLengthTo(len(self.segments)) / 1000.0 # convert to meters
else:
# TODO: Better error message
FreeCAD.Console.PrintMessage("Fixed constraint must be at beginning or end of shaft\n")
return
for ax in range(3):
# Create a new reaction force
variableNames[ax].append("%s%u" % (self.Fstr[ax], i))
coefficientsF[ax].append(1)
# Register location of reaction force
locations["%s%u" % (self.Fstr[ax], i)] = location
# Boundary conditions for the translations
tangents[ax].append((location, 0.0))
translations[ax].append((location, 0.0))
coefficientsM[0].append(0) # Reaction force contributes no moment around x axis
coefficientsM[1].append(location) # Reaction force contributes a positive moment around z axis
coefficientsM[2].append(-location) # Reaction force contributes a negative moment around y axis
for ax in range(3):
# Create a new reaction moment
variableNames[ax].append("%s%u" % (self.Mstr[ax], i))
coefficientsF[ax].append(0)
coefficientsM[ax].append(1)
locations["%s%u" % (self.Mstr[ax], i)] = location
elif cType == "Force":
# Static force (currently force on midpoint of segment only)
force = constraint.DirectionVector.multiply(constraint.Force)
# TODO: Extract value of the location from geometry
location = (self.getLengthTo(i) + self.segments[i].length/2.0) / 1000.0
# The force itself
for ax in range(3):
if abs(force[ax]) > 0.0:
coefficientsF[ax][0] = coefficientsF[ax][0] - force[ax] # neg. because this coefficient is on the LHS of the equilibrium equation
self.addTo(forces[ax], location, -force[ax]) # neg. to fulfill the convention mentioned above
# Moments created by the force (by definition no moment is created by the force in x-direction)
if abs(force[1]) > 0.0:
coefficientsM[1][0] = coefficientsM[1][0] - force[1] * location # moment around z-axis
self.addTo(moments[1], location, 0)
if abs(force[2]) > 0.0:
coefficientsM[2][0] = coefficientsM[2][0] + force[2] * location # moment around y-axis
self.addTo(moments[2], location, 0) # No outer moment acts here!
elif cType == "Bearing":
location = constraint.BasePoint.x / 1000.0 # TODO: This assumes that the shaft feature starts with the first segment at (0,0,0) and its axis corresponds to the x-axis
# Bearing reaction forces. TODO: the bearing is assumed to not induce any reaction moments
start = (0 if constraint.AxialFree == False else 1)
for ax in range(start, 3):
variableNames[ax].append("%s%u" % (self.Fstr[ax], i))
coefficientsF[ax].append(1)
locations["%s%u" % (self.Fstr[ax], i)] = location
# Boundary condition
translations[ax].append((location, 0.0))
if constraint.AxialFree == False:
coefficientsM[0].append(0) # Reaction force contributes no moment around x axis
coefficientsM[1].append(location) # Reaction force contributes a positive moment around z axis
coefficientsM[2].append(-location) # Reaction force contributes a negative moment around y axis
elif cType == "Gear":
force = constraint.DirectionVector.multiply(constraint.Force)
location = constraint.BasePoint.x / 1000.0
lever = [0, constraint.Diameter/2.0/1000.0 * math.sin(constraint.ForceAngle / 180.0 * math.pi),
constraint.Diameter/2.0 /1000.0* math.cos(constraint.ForceAngle / 180.0 * math.pi)]
# Effect of the gear force
for ax in range(3):
if abs(force[ax]) > 0.0:
# Effect of the force
coefficientsF[ax][0] = coefficientsF[ax][0] - force[ax]
self.addTo(forces[ax], location, -force[ax])
# Moments created by the force (by definition no moment is created by the force in x-direction)
if abs(force[1]) > 0.0:
coefficientsM[1][0] = coefficientsM[1][0] - force[1] * location # moment around z-axis
self.addTo(moments[1], location, 0)
if abs(force[2]) > 0.0:
coefficientsM[2][0] = coefficientsM[2][0] + force[2] * location # moment around y-axis
self.addTo(moments[2], location, 0) # No outer moment acts here!
# Moments created by the force and lever
if abs(force[0]) > 0.0:
momenty = force[0] * lever[2]
momentz = force[0] * lever[1]
coefficientsM[1][0] = coefficientsM[1][0] + momentz # moment around z-axis
self.addTo(moments[1], location, momentz)
coefficientsM[2][0] = coefficientsM[2][0] - momenty # moment around y-axis
self.addTo(moments[2], location, -momenty)
if abs(force[1]) > 0.0:
moment = force[1] * lever[2]
coefficientsM[0][0] = coefficientsM[0][0] + moment
self.addTo(moments[0], location, moment)
if abs(force[2]) > 0.0:
moment = force[2] * lever[1]
coefficientsM[0][0] = coefficientsM[0][0] - moment
self.addTo(moments[0], location, -moment)
elif cType == "Pulley":
forceAngle1 = (constraint.ForceAngle + constraint.BeltAngle + 90.0) / 180.0 * math.pi
forceAngle2 = (constraint.ForceAngle - constraint.BeltAngle + 90.0) / 180.0 * math.pi
#FreeCAD.Console.PrintMessage("BeltForce1: %f, BeltForce2: %f\n" % (constraint.BeltForce1, constraint.BeltForce2))
#FreeCAD.Console.PrintMessage("Angle1: %f, Angle2: %f\n" % (forceAngle1, forceAngle2))
force = [0, -constraint.BeltForce1 * math.sin(forceAngle1) - constraint.BeltForce2 * math.sin(forceAngle2),
constraint.BeltForce1 * math.cos(forceAngle1) + constraint.BeltForce2 * math.cos(forceAngle2)]
location = constraint.BasePoint.x / 1000.0
# Effect of the pulley forces
for ax in range(3):
if abs(force[ax]) > 0.0:
# Effect of the force
coefficientsF[ax][0] = coefficientsF[ax][0] - force[ax]
self.addTo(forces[ax], location, -force[ax])
# Moments created by the force (by definition no moment is created by the force in x-direction)
if abs(force[1] ) > 0.0:
coefficientsM[1][0] = coefficientsM[1][0] - force[1] * location # moment around z-axis
self.addTo(moments[1], location, 0)
if abs(force[2]) > 0.0:
coefficientsM[2][0] = coefficientsM[2][0] + force[2] * location # moment around y-axis
self.addTo(moments[2], location, 0) # No outer moment acts here!
# Torque
moment = constraint.Force * (1 if constraint.IsDriven is True else -1)
coefficientsM[0][0] = coefficientsM[0][0] + moment
self.addTo(moments[0], location, moment)
areas = [None, None, None]
areamoments = [None, None, None]
bendingmoments = [None, None, None]
torquemoments = [None, None, None]
for ax in range(3):
FreeCAD.Console.PrintMessage("Axis: %u\n" % ax)
self.printEquilibrium(variableNames[ax], coefficientsF[ax])
self.printEquilibrium(variableNames[ax], coefficientsM[ax])
if len(coefficientsF[ax]) <= 1:
# Note: coefficientsF and coefficientsM always have the same length
FreeCAD.Console.PrintMessage("Matrix is singular, no solution possible\n")
self.parent.updateButtons(ax, False)
continue
# Handle special cases. Note that the code above should ensure that coefficientsF and coefficientsM always have same length
solution = [None, None]
if len(coefficientsF[ax]) == 2:
if coefficientsF[ax][1] != 0.0 and coefficientsF[ax][0] != 0.0:
solution[0] = coefficientsF[ax][0] / coefficientsF[ax][1]
if coefficientsM[ax][1] != 0.0 and coefficientsM[ax][0] != 0.0:
solution[1] = coefficientsM[ax][0] / coefficientsM[ax][1]
                # the two equations must agree on the single unknown; if both were
                # computed and differ, the system is inconsistent
                if (solution[0] is not None) and (solution[1] is not None) \
                        and abs(solution[0] - solution[1]) > 1E-9:
FreeCAD.Console.PrintMessage("System is statically undetermined. No solution possible.\n")
self.parent.updateButtons(ax, False)
continue
else:
# Build matrix and vector for linear algebra solving algorithm
# TODO: This could easily be done manually... there are only 2 variables and 6 coefficients
A = np.array([coefficientsF[ax][1:], coefficientsM[ax][1:]])
b = np.array([coefficientsF[ax][0], coefficientsM[ax][0]])
try:
solution = np.linalg.solve(A, b) # A * solution = b
except np.linalg.linalg.LinAlgError as e:
FreeCAD.Console.PrintMessage(e.message)
FreeCAD.Console.PrintMessage(". No solution possible.\n")
self.parent.updateButtons(ax, False)
continue
# Complete dictionary of forces and moments with the two reaction forces that were calculated
for i in range(2):
if solution[i] is None:
continue
FreeCAD.Console.PrintMessage("Reaction force/moment: %s = %f\n" % (variableNames[ax][i+1], solution[i]))
if variableNames[ax][i+1][0] == "M":
moments[ax][locations[variableNames[ax][i+1]]] = -solution[i]
else:
forces[ax][locations[variableNames[ax][i+1]]] = -solution[i]
FreeCAD.Console.PrintMessage(forces[ax])
FreeCAD.Console.PrintMessage("\n")
FreeCAD.Console.PrintMessage(moments[ax])
FreeCAD.Console.PrintMessage("\n")
# Forces
self.F[ax] = SegmentFunction(self.Fstr[ax])
self.F[ax].buildFromDict("x", forces[ax])
self.parent.updateButton(1, ax, not self.F[ax].isZero())
self.F[ax].output()
# Moments
if ax == 0:
self.M[0] = SegmentFunction(self.Mstr[0])
self.M[0].buildFromDict("x", moments[0])
elif ax == 1:
self.M[1] = self.F[1].integrated().negate()
self.M[1].name = self.Mstr[1]
self.M[1].addSegments(moments[1]) # takes care of boundary conditions
elif ax == 2:
self.M[2] = self.F[2].integrated()
self.M[2].name = self.Mstr[2]
self.M[2].addSegments(moments[2]) # takes care of boundary conditions
self.parent.updateButton(2, ax, not self.M[ax].isZero())
self.M[ax].output()
# Areas and area moments
location = 0.0
areas[ax] = IntervalFunction() # A [m²]
areamoments[ax] = IntervalFunction() # I [m⁴]
bendingmoments[ax] = IntervalFunction() # W_b [m³]
torquemoments[ax] = IntervalFunction() # W_t [m³]
for i in range(len(self.segments)):
od = self.segments[i].diameter/1000.0
id = self.segments[i].innerdiameter/1000.0
length = self.segments[i].length/1000.0
areas[ax].addInterval(location, length, math.pi/4.0 * (math.pow(od, 2.0) - math.pow(id, 2.0)))
areamoment = math.pi/64.0 * (math.pow(od, 4.0) - math.pow(id, 4.0))
areamoments[ax].addInterval(location, length, areamoment)
bendingmoments[ax].addInterval(location, length, areamoment / (od / 2.0))
torquemoments[ax].addInterval(location, length, 2 * (areamoment / (od / 2.0)))
location += length
# Bending line
if ax > 0:
if len(tangents[ax])+ len(translations[ax]) == 2:
# TODO: Get Young's module from material type instead of using 210000 N/mm² = 2.1E12 N/m²
self.w[ax] = TranslationFunction(self.M[ax].negated(), 2.1E12, areamoments[ax], tangents[ax], translations[ax])
self.w[ax].name= self.wstr[ax]
self.parent.updateButton(3, ax, not self.w[ax].isZero())
else:
self.parent.updateButton(3, ax, False)
# Normal/shear stresses and torque/bending stresses
self.sigmaN[ax] = StressFunction(self.F[ax], areas[ax])
self.sigmaN[ax].name = self.sigmaNstr[ax]
self.parent.updateButton(4, ax, not self.sigmaN[ax].isZero())
if ax == 0:
self.sigmaB[ax] = StressFunction(self.M[ax] , torquemoments[ax])
else:
self.sigmaB[ax] = StressFunction(self.M[ax], bendingmoments[ax])
self.sigmaB[ax].name = self.sigmaBstr[ax]
self.parent.updateButton(5, ax, not self.sigmaB[ax].isZero())
def printEquilibrium(self, var, coeff):
# Auxiliary method for debugging purposes
for i in range(len(var)):
if i == 0:
FreeCAD.Console.PrintMessage("%f = " % coeff[i])
else:
FreeCAD.Console.PrintMessage("%f * %s" % (coeff[i], var[i]))
if (i < len(var) - 1) and (i != 0):
FreeCAD.Console.PrintMessage(" + ")
FreeCAD.Console.PrintMessage("\n")
| sanguinariojoe/FreeCAD | src/Mod/PartDesign/WizardShaft/Shaft.py | Python | lgpl-2.1 | 32,813 |
from markdown import Extension
from markdown.util import etree
from markdown.inlinepatterns import Pattern
RE = r'\[code\](.*?)\[\/code\]'
class MultilineCodeExtension(Extension):
def extendMarkdown(self, md, md_globals):
element = NestedElements(RE)
md.inlinePatterns.add('pre', element, '<not_strong')
class NestedElements(Pattern):
def handleMatch(self, m):
el1 = etree.Element('pre')
el2 = etree.SubElement(el1, 'cite')
el2.text = m.group(2).strip()
return el1
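# Usage sketch (assumes a Python-Markdown release compatible with the imports
# above; the input string is illustrative):
# import markdown
# html = markdown.markdown("[code]x = 1[/code]",
#                          extensions=[MultilineCodeExtension()])
# # html is roughly '<p><pre><cite>x = 1</cite></pre></p>'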
def makeExtension(configs=None):
return MultilineCodeExtension(configs=configs) | nuke2015/python_blog | models/mdx_code_multiline.py | Python | mit | 611 |
import _plotly_utils.basevalidators
class LenmodeValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(
self, plotly_name="lenmode", parent_name="histogram.marker.colorbar", **kwargs
):
super(LenmodeValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
role=kwargs.pop("role", "info"),
values=kwargs.pop("values", ["fraction", "pixels"]),
**kwargs
)
| plotly/python-api | packages/python/plotly/plotly/validators/histogram/marker/colorbar/_lenmode.py | Python | mit | 547 |
'''
The Common subpackage is of primary interest to all
ALMA developers wishing to use Python. The modules here
are of common use to everyone. Brief descriptions
of the modules follow:
- Callbacks contains complete BACI callback implementations. This
functionality has not been provided in Java or C++.
- CDBAccess contains an easy to use class which provides read-only access
to the ACS configuration database.
- DurationHelper is a class which wraps the acstime::Duration structure
- EpochHelper is a class which wraps the acstime::Epoch structure
- Err contains the class which all (Python) ACS Error System generated
exceptions/completions are derived from. Look at this module to see
exactly what functionality is provided.
- Log contains a class capable of accessing the ACS logging system. See the
Logger class for details.
- QoS contains functions for changing the quality of service attributes of
CORBA object references.
- TimeHelper provides some general purpose classes and functions for dealing
with the unique ACS time format.
'''
__revision__ = "$Id: __init__.py,v 1.1.1.1 2012/03/07 17:40:45 acaproni Exp $"
| ACS-Community/ACS | LGPL/CommonSoftware/acspycommon/src/Acspy/Common/__init__.py | Python | lgpl-2.1 | 1,144 |
# To be deprecated!!!
from legacy import *
| aksalj/whiskerboard | board/api/__init__.py | Python | mit | 44 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name, unused-argument
"""Backend compiler related feature registration"""
from __future__ import absolute_import
from ..op import register_pattern, OpPattern
from ..op import register_injective_schedule
# reorg
register_pattern("vision.yolo_reorg", OpPattern.INJECTIVE)
register_injective_schedule("vision.yolo_reorg")
| dmlc/tvm | python/tvm/relay/op/vision/_yolo.py | Python | apache-2.0 | 1,131 |
import _plotly_utils.basevalidators
class ColoraxisValidator(_plotly_utils.basevalidators.SubplotidValidator):
def __init__(
self, plotly_name="coloraxis", parent_name="scatter3d.marker.line", **kwargs
):
super(ColoraxisValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
dflt=kwargs.pop("dflt", None),
edit_type=kwargs.pop("edit_type", "calc"),
regex=kwargs.pop("regex", "/^coloraxis([2-9]|[1-9][0-9]+)?$/"),
role=kwargs.pop("role", "info"),
**kwargs
)
| plotly/python-api | packages/python/plotly/plotly/validators/scatter3d/marker/line/_coloraxis.py | Python | mit | 597 |
print "In distro.py"
if not sysconf.getReadOnly():
if not sysconf.has("rpm-root"):
sysconf.set("rpm-root", "/opt/redhat/rpm/solaris/")
if not sysconf.has("channels"):
sysconf.set(("channels", "myrpm-db"),
{"alias": "myrpm-db",
"type": "rpm-sys",
"name": "RPM Database"})
sysconf.set(("channels", "myrpm-dir"),
{"alias": "myrpm-dir",
"type": "rpm-dir",
"name": "Solaris Sparc RPM Directory",
"path": "/export/home/jmartin/rpms/"})
sysconf.set(("channels", "myrhn"),
{"alias": "myrhn",
"type": "rpm-rhn",
"name": "RHN Channel",
"baseurl": "http://rlx-2-06.rhndev.redhat.com/XMLRPC"})
| dmacvicar/spacewalk | client/solaris/smartpm/contrib/solaris/distro-rpm.py | Python | gpl-2.0 | 854 |
# getdents syscall
def generate(in_fd):
"""getdents - lists specific directory in thumb mode
Args:
in_fd - (int/str/reg): in file descriptor
"""
sc = ''
try:
xin_fd = int(in_fd)
sc += 'mov r0, #%s' % (xin_fd)
except:
sc += 'mov r0, %s' % (in_fd)
sc += """
mov r1, sp
mov r2, #255
mov r7, #141
svc 1
"""
return sc
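# e.g. generate(5) returns assembly that performs getdents(5, sp, 255);
# 141 is the getdents syscall number on 32-bit ARM Linux, loaded into r7 above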
| sigma-random/ARMSCGen | shellcodes/thumb/getdents.py | Python | gpl-2.0 | 403 |
#
# Copyright 2013 Y12Studio
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
# py path
PWD = os.path.abspath('.')
# root path for web site
WWW = os.path.join(PWD,'www')
# web site port
PORT = 8888
# True=enable/False=disable
ENABLE_LOG_FILE = False
| y12studio/pi | testonly/halfsync/m_settings.py | Python | apache-2.0 | 759 |
#!/bin/python3
import random
def getRandom():
# return a random int between 0 and 10
return random.randint(0, 10)
def askQuestion(a, b):
# get user input, answer to the math question
userResponse = int(input("What is {} - {}: ".format(a, b)))
result = a - b
# compare if user input is the same as the result
if (userResponse != result):
# if not right .. try again
print("Try again!")
askQuestion(a, b)
else:
# if correct, print encouraging message
print("Great job!")
def main():
# introduce success counter
success = 0
# run the program until 10 successful attempts
while(success < 10):
# Get two random numbers
i = getRandom()
j = getRandom()
        # Pass the smaller number second, to avoid negative results; the else
        # branch also covers i == j, which previously skipped the question
        # while still counting it as a success
        if i < j:
            askQuestion(j, i)
        else:
            askQuestion(i, j)
# increment success counter
success += 1
print("Congratulation. All done!")
if __name__ == "__main__":
main() | elakamarcus/python | simple_mathgame_01.py | Python | gpl-3.0 | 1,093 |
import unittest
import doctest
__author__ = 'David Hain'
__copyright__ = '2007-2008 ' + __author__
__license__ = 'MIT'
def mod_import(name):
mod = __import__(name)
components = name.split('.')
for comp in components[1:]:
mod = getattr(mod, comp)
return mod
modules = (
'webskewer.serve',
'webskewer.serve.headers',
'webskewer.serve.log',
'webskewer.serve.main',
'webskewer.serve.message',
'webskewer.serve.multipart',
'webskewer.serve.recv',
'webskewer.serve.serve',
)
test_modules = (
)
if __name__ == '__main__':
suite = unittest.TestSuite()
doc_suite = unittest.TestSuite()
for m in modules:
mod = mod_import(m)
doc_suite.addTest(doctest.DocTestSuite(mod))
suite.addTests(doc_suite)
for m in test_modules:
mod = mod_import(m)
suite.addTests(unittest.defaultTestLoader.loadTestsFromModule(mod))
runner = unittest.TextTestRunner(verbosity=2)
runner.run(suite)
| dhain/webskewer | webskewer.serve/tests/testall.py | Python | mit | 986 |
from nes.bus import BusDevice
from nes.processors.registers import Register
from .apu import Sq1Hi, Sq1Lo, Sq1Sweep, Sq1Vol, Sq2Hi, Sq2Lo, Sq2Sweep, Sq2Vol, TriLinear, TriHi, TriLo, NoiseHi, NoiseLo, NoiseVol, DmcRaw, DmcFreq, DmcStart, DmcLen, SndChn
from .joy import Joy1, Joy2
class ApuIoRegisterSet(BusDevice):
def __init__(self, ppu):
self.registers = [
Sq1Vol(),
Sq1Sweep(),
Sq1Lo(),
Sq1Hi(),
Sq2Vol(),
Sq2Sweep(),
Sq2Lo(),
Sq2Hi(),
TriLinear(),
Register(),
TriLo(),
TriHi(),
NoiseVol(),
Register(),
NoiseLo(),
NoiseHi(),
DmcFreq(),
DmcRaw(),
DmcStart(),
DmcLen(),
ppu.oamdma,
SndChn(),
Joy1(),
Joy2(),
Register(),
Register(),
Register(),
Register(),
Register(),
Register(),
Register(),
Register(),
]
def read(self, addr):
logical_addr = addr % len(self.registers)
return self.registers[logical_addr].read()
def write(self, addr, value):
logical_addr = addr % len(self.registers)
self.registers[logical_addr].write(value)
| Hexadorsimal/pynes | nes/bus/devices/apu_io_register_set.py | Python | mit | 1,376 |
from nose.core import collector, main, run, run_exit, runmodule
# backwards compatibility
from nose.exc import SkipTest, DeprecatedTest
from nose.tools import with_setup
__author__ = 'Jason Pellerin'
__versioninfo__ = (1, 0, 0)
__version__ = '.'.join(map(str, __versioninfo__))
__all__ = [
'main', 'run', 'run_exit', 'runmodule', 'with_setup',
'SkipTest', 'DeprecatedTest', 'collector'
]
| RalphBariz/RalphsDotNet | Old/RalphsDotNet.Apps.OptimizationStudio/Resources/PyLib/nose/__init__.py | Python | gpl-3.0 | 404 |
from flask_restful import Resource, reqparse
import json
import next.utils
import next.broker.broker
import next.api.api_util as api_util
from next.api.api_util import *
from next.api.api_util import APIArgument
from next.api.resource_manager import ResourceManager
resource_manager = ResourceManager()
broker = next.broker.broker.JobBroker()
# Request parser. Checks that necessary dictionary keys are available in a given resource.
# We rely on learningLib functions to ensure that all necessary arguments are available and parsed.
post_parser = reqparse.RequestParser(argument_class=APIArgument)
# Custom errors for GET and POST verbs on experiment resource
meta_error = {
'Error': {
'message': "There was an error calling this API endpoint ",
'code': 400,
'status':'FAIL'
}
}
meta_success = {
'code': 200,
'status': 'OK'
}
class AppHandler(Resource):
def post(self, exp_uid, function_name):
try:
post_parser.add_argument('exp_uid', type=str, required=True, help="Experiment ID Required.")
post_parser.add_argument('args', type=dict, required=False, help="Experiment args Required.")
# Validate args with post_parser
args_data = post_parser.parse_args()
# Pull app_id and exp_uid from parsed args
exp_uid = args_data["exp_uid"]
# Fetch app_id data from resource manager
app_id = resource_manager.get_app_id(exp_uid)
args_json = json.dumps(args_data["args"])
# This allows different apps to define custom functions,
# and hit the API with those functions.
# TODO: test this feature
# implemented by Scott Sievert, 2016-1-26
response_json, didSucceed, message = broker.applyAsync(app_id, exp_uid, function_name, args_json)
if not didSucceed:
raise Exception(message)
response_dict = json.loads(response_json)
return attach_meta(response_dict, meta_success), 200
        except Exception, error:
            return attach_meta({}, meta_error['Error'], backend_error=str(error))
| nextml/NEXT | next/api/app_handler.py | Python | apache-2.0 | 2,165 |
import os
class pathNavigator(object):
def __pytermconfig__(self):
return {"command":"cd","callback":self.cd}
def cd(self,*args, **kwargs):
pyTerm = kwargs["pyTerm"]
try:
sequence = kwargs["sequence"][0]
except IndexError:
sequence = ""
if sequence == "..": # upward
currentPath = pyTerm.getPath().split("/")[::-1][1::]
if currentPath[0] == '':
pyTerm.setPath('/')
else:
pyTerm.setPath("/".join(currentPath[::-1]))
elif sequence == "" or sequence == "~":
pyTerm.setPath("/home/"+pyTerm.getUser())
else: # downward
currentPath = os.path.join(pyTerm.getPath(), sequence)
if os.path.isdir(currentPath):
pyTerm.setPath(currentPath)
else:
print 'Invalid Directory!'
os.chdir(pyTerm.currentPath) | jeffersonmourak/pyTerm | plugins/pathNavigator.py | Python | mit | 758 |
import re
from unittest import mock
from dila.application import structures
from dila.frontend.flask import user_tools
def test_login_form(flask_client):
response = flask_client.get('/login/')
assert re.search('<input class="[^"]*" id="username" name="username" type="text" value="">',
response.data.decode())
assert re.search('<input class="[^"]*" id="password" name="password" type="password" value="">',
response.data.decode())
assert re.search('<input class="[^"]*" id="login" value="Log in" type="submit">', response.data.decode())
@mock.patch('dila.application.authenticate')
def test_post_login(authenticate, flask_client):
authenticate.return_value = structures.User(
authenticated=True,
username='username',
first_name='Sheldon',
last_name='Cooper',
is_superuser=False,
)
response = flask_client.post('/login/', data={'username': 'songo', 'password': 'ssj4'})
authenticate.assert_called_once_with('songo', 'ssj4')
assert response.status_code == 302
assert response.location == 'http://localhost/'
@mock.patch('dila.application.authenticate')
def test_post_invalid_login(authenticate, flask_client):
authenticate.return_value = structures.User(
authenticated=False,
username='',
first_name='',
last_name='',
is_superuser=False,
)
response = flask_client.post('/login/', data={'username': 'songo', 'password': 'ssj5'})
authenticate.assert_called_once_with('songo', 'ssj5')
assert "Invalid login or password" in response.data.decode()
def test_post_logout(flask_client):
with flask_client.session_transaction() as session:
user_tools.set_current_user(structures.User(
authenticated=True,
username='username',
first_name='Sheldon',
last_name='Cooper',
is_superuser=False,
), session=session)
response = flask_client.post('/logout/')
assert response.status_code == 302
assert response.location == 'http://localhost/login/'
assert not user_tools.current_user().authenticated
| socialwifi/dila | tests/test_authentication_views.py | Python | bsd-3-clause | 2,165 |
from django.contrib import admin
from announcements.models import Announcement, Dismissal
# import our user model and determine the field we will use to search by user
# support custom user models & username fields in django 1.5+
try:
from django.contrib.auth import get_user_model
except ImportError:
from django.contrib.auth.models import User
username_search = "user__username"
else:
User = get_user_model()
if hasattr(User, "USERNAME_FIELD"):
username_search = "user__%s" % User.USERNAME_FIELD
else:
username_search = "user__username"
class AnnouncementAdmin(admin.ModelAdmin):
list_display = ("title", "creator", "creation_date", "members_only")
list_filter = ("members_only",)
fieldsets = [
(None, {
"fields": ["title", "content", "site_wide", "members_only", "publish_start", "publish_end", "dismissal_type"],
}),
]
def save_model(self, request, obj, form, change):
if not change:
# When creating a new announcement, set the creator field.
obj.creator = request.user
obj.save()
class DismissalAdmin(admin.ModelAdmin):
list_display = ("user", "announcement", "dismissed_at")
search_fields = (username_search, "announcement__title")
admin.site.register(Announcement, AnnouncementAdmin)
admin.site.register(Dismissal, DismissalAdmin)
| state-hiu/geonode-announcements | announcements/admin.py | Python | mit | 1,380 |
# Copyright 2016 Pinterest, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import traceback
import json
from django.shortcuts import render
from django.http import HttpResponse, HttpResponseRedirect
from helpers.exceptions import NotAuthorizedException, FailedAuthenticationException
logger = logging.getLogger(__name__)
# TODO: we are using this as the catch-all error reporter of last resort.
# That is fine, except the exception stack trace is not particularly
# user-friendly. We should not depend on this too much; instead, handle as
# many exceptions as we can in the code and generate user-friendly messages there.
class ExceptionHandlerMiddleware(object):
def process_exception(self, request, exception):
logger.exception('Exception thrown when handling request ' + str(request))
# Error is displayed as a fragment over related feature area
if request.is_ajax():
ajax_vars = {'success': False, 'error': exception.message}
return HttpResponse(json.dumps(ajax_vars), content_type='application/javascript')
else:
# Not authorized
if isinstance(exception, NotAuthorizedException):
return render(request, 'users/not_authorized.html', {
"message": exception.message,
})
elif isinstance(exception, FailedAuthenticationException):
request.session.modified = True
request.session.flush()
return HttpResponseRedirect("/")
return render(request, 'error.html', {
'message': exception.message,
'stacktrace': traceback.format_exc(),
})
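# Editor's note (hedged): returning an HttpResponse from process_exception
# short-circuits Django's default exception handling, so AJAX callers receive
# a JSON error payload while regular browser requests get a rendered page.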
| lilida/teletraan | deploy-board/deploy_board/webapp/error_views.py | Python | apache-2.0 | 2,210 |
#!/usr/bin/env python3
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import os
import sys
import platform
from UM.Platform import Platform
#WORKAROUND: GITHUB-88 GITHUB-385 GITHUB-612
if Platform.isLinux(): # Needed for platform.linux_distribution, which is not available on Windows and OSX
# For Ubuntu: https://bugs.launchpad.net/ubuntu/+source/python-qt4/+bug/941826
if platform.linux_distribution()[0] in ("debian", "Ubuntu", "LinuxMint"): # TODO: Needs a "if X11_GFX == 'nvidia'" here. The workaround is only needed on Ubuntu+NVidia drivers. Other drivers are not affected, but fine with this fix.
import ctypes
from ctypes.util import find_library
libGL = find_library("GL")
ctypes.CDLL(libGL, ctypes.RTLD_GLOBAL)
# When frozen, i.e. installer version, don't let PYTHONPATH mess up the search path for DLLs.
if Platform.isWindows() and hasattr(sys, "frozen"):
try:
del os.environ["PYTHONPATH"]
except KeyError: pass
#WORKAROUND: GITHUB-704 GITHUB-708
# It looks like setuptools creates a .pth file in
# the default /usr/lib which causes the default site-packages
# to be inserted into sys.path before PYTHONPATH.
# This can cause issues such as having libsip loaded from
# the system instead of the one provided with Cura, which causes
# incompatibility issues with libArcus
if "PYTHONPATH" in os.environ.keys(): # If PYTHONPATH is used
PYTHONPATH = os.environ["PYTHONPATH"].split(os.pathsep) # Get the value, split it..
PYTHONPATH.reverse() # and reverse it, because we always insert at 1
for PATH in PYTHONPATH: # Now beginning with the last PATH
        PATH_real = os.path.realpath(PATH) # Resolve the path to its "real" form
if PATH_real in sys.path: # This should always work, but keep it to be sure..
sys.path.remove(PATH_real)
sys.path.insert(1, PATH_real) # Insert it at 1 after os.curdir, which is 0.
def exceptHook(hook_type, value, traceback):
import cura.CrashHandler
cura.CrashHandler.show(hook_type, value, traceback)
sys.excepthook = exceptHook
# Workaround for a race condition on certain systems where there
# is a race condition between Arcus and PyQt. Importing Arcus
# first seems to prevent Sip from going into a state where it
# tries to create PyQt objects on a non-main thread.
import Arcus #@UnusedImport
import cura.CuraApplication
import cura.Settings.CuraContainerRegistry
if Platform.isWindows() and hasattr(sys, "frozen"):
dirpath = os.path.expanduser("~/AppData/Local/cura/")
os.makedirs(dirpath, exist_ok = True)
sys.stdout = open(os.path.join(dirpath, "stdout.log"), "w")
sys.stderr = open(os.path.join(dirpath, "stderr.log"), "w")
# Force an instance of CuraContainerRegistry to be created and reused later.
cura.Settings.CuraContainerRegistry.CuraContainerRegistry.getInstance()
# This prestart up check is needed to determine if we should start the application at all.
if not cura.CuraApplication.CuraApplication.preStartUp():
sys.exit(0)
app = cura.CuraApplication.CuraApplication.getInstance()
app.run()
| hmflash/Cura | cura_app.py | Python | agpl-3.0 | 3,280 |
"""Some common helper functions."""
import sys
import inspect
def _flush():
sys.stderr.flush()
sys.stdout.flush()
def log(msg, *args):
"""Log to stderr with optional formatting."""
if args:
msg = msg % args
pre = inspect.getfile(sys._getframe(1)) + ": "
sys.stderr.write(pre + msg + "\n")
_flush()
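# --- Editor's usage sketch (hedged addition, not part of the original file) ---
# log() takes %-style formatting args and prefixes the caller's file name,
# flushing both streams so output interleaves correctly when piped.
if __name__ == "__main__":
    log("fetched %d of %d items", 3, 10)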
| CraigKelly/ted-youtube-data | common.py | Python | mit | 339 |
# coding: utf-8
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.contrib import admin
from archer.views import *
# This wildcard import is flagged as unused because, below, we take the more object-oriented route of importing each app's backend views module directly
from books import views as books_views
from contact import views as contact_views
# Add feeds to the index page here
from archer.feeds import LatestEntries
# Add sitemaps
from django.contrib.sitemaps.views import sitemap
from archer.sitemaps import BookSitemap
sitemaps = {
'index': BookSitemap,
}
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'archer.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
    # Combined with the feeds dict above, this jointly marks urls >>> /feed/latest/ and /feed/categories/
url(r'^feed/$', LatestEntries()),
# sitemaps
url(r'^sitemap\.xml$', sitemap, {'sitemaps': sitemaps}, name='django.contrib.sitemaps.views.sitemap'),
    # Django's built-in admin application, quite powerful
url(r'^admin/', include(admin.site.urls)),
    # Some of our small exercises
url(r'^index/$', index),
url(r'^cur_time/(\d{1,2})$', cur_time),
    # A simple template view that shows the contents of the request object's META dict
url(r'^display_meta/$', display_meta),
    # Form exercises
    # In the final improvement we put the form's submission and display on the same page
# url(r'^search-form/$', views.search_form),
url(r'^search/$', books_views.search),
    # Here is the test case for using include
url(r'^test/', include('books.urls')),
    # >>> The approaches below were all replaced by the prefixed patterns further down; the before/after history is kept for comparison
    # Contact handling: the backend logic was made into its own package because it is loosely coupled to the other modules
# url(r'^contact/$', contact_views.contact),
    # Dispatch a different way: here the view module is imported directly via its dotted string path
# url(r'^happend/$', 'contact.views.happend'),
)
# A more advanced approach is to mix view prefixes; one look and you will understand
# To make this more interesting we supply keyword arguments, i.e. the parameter values are decided right here in the URLconf, mainly so the same backend logic can be reused
urlpatterns += patterns('contact.views',
url(r'^contact/$', 'contact'),
url(r'^foo/(?P<count>\d+)/$', 'foobar', {'name': 'archer'}),
url(r'^bar/(?P<count>\d+)/$', 'foobar', {'name': 'saber'}),
)
# Here is something more fun: if you want some debugging logic that shows pages which should only exist during development, you can do it like this
if settings.DEBUG:
urlpatterns += patterns('',
url(r'^debug/$', 'debug'),
)
| L-Jovi/pra_django | archer/urls.py | Python | gpl-2.0 | 2,644 |
import numpy as np
import cv2
cap = cv2.VideoCapture(0)
count = 0
while(True):
# Capture frame-by-frame
ret, frame = cap.read()
# Our operations on the frame come here
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
cv2.imwrite("frame%d.jpg" % count, frame)
count += 1
# Display the resulting frame
cv2.imshow('frame',gray)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# When everything done, release the capture
cap.release()
cv2.destroyAllWindows()
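# Editor's note (hedged): every captured frame is written to frame<N>.jpg in
# the working directory before the grayscale preview is shown, so long runs
# accumulate files quickly; press 'q' in the preview window to stop.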
| MiracleAlga/satpam | source/fragmented.py | Python | mit | 500 |
import process_data
data_folder = '../data/state_data_imports_4d_NAICS/'
output_folder = '../output/state_data_imports_4d_NAICS/'
date = '0902'
process_data.process_ISTNAICS(data_folder, output_folder, date)
print 'task completed'
| YangLiu928/NDP_Projects | Python_Projects/Python_MySQL/Foreign_Trade_Data_Pipe_Delimination/scripts/state_data_imports_4d_NAICS.py | Python | mit | 236 |
# coding: utf-8
"""
Generated by: https://openapi-generator.tech
"""
from dataclasses import dataclass
import re # noqa: F401
import sys # noqa: F401
import typing
import urllib3
from urllib3._collections import HTTPHeaderDict
from openapi_client import api_client, exceptions
import decimal # noqa: F401
from datetime import date, datetime # noqa: F401
from frozendict import frozendict # noqa: F401
from openapi_client.schemas import ( # noqa: F401
AnyTypeSchema,
ComposedSchema,
DictSchema,
ListSchema,
StrSchema,
IntSchema,
Int32Schema,
Int64Schema,
Float32Schema,
Float64Schema,
NumberSchema,
DateSchema,
DateTimeSchema,
DecimalSchema,
BoolSchema,
BinarySchema,
NoneSchema,
none_type,
InstantiationMetadata,
Unset,
unset,
ComposedBase,
ListBase,
DictBase,
NoneBase,
StrBase,
IntBase,
NumberBase,
DateBase,
DateTimeBase,
BoolBase,
BinaryBase,
Schema,
_SchemaValidator,
_SchemaTypeChecker,
_SchemaEnumMaker
)
from openapi_client.model.queue_item_impl import QueueItemImpl
# path params
OrganizationSchema = StrSchema
PipelineSchema = StrSchema
RequestRequiredPathParams = typing.TypedDict(
'RequestRequiredPathParams',
{
'organization': OrganizationSchema,
'pipeline': PipelineSchema,
}
)
RequestOptionalPathParams = typing.TypedDict(
'RequestOptionalPathParams',
{
},
total=False
)
class RequestPathParams(RequestRequiredPathParams, RequestOptionalPathParams):
pass
request_path_organization = api_client.PathParameter(
name="organization",
style=api_client.ParameterStyle.SIMPLE,
schema=OrganizationSchema,
required=True,
)
request_path_pipeline = api_client.PathParameter(
name="pipeline",
style=api_client.ParameterStyle.SIMPLE,
schema=PipelineSchema,
required=True,
)
_path = '/blue/rest/organizations/{organization}/pipelines/{pipeline}/runs'
_method = 'POST'
_auth = [
'jenkins_auth',
]
SchemaFor200ResponseBodyApplicationJson = QueueItemImpl
@dataclass
class ApiResponseFor200(api_client.ApiResponse):
response: urllib3.HTTPResponse
body: typing.Union[
SchemaFor200ResponseBodyApplicationJson,
]
headers: Unset = unset
_response_for_200 = api_client.OpenApiResponse(
response_cls=ApiResponseFor200,
content={
'application/json': api_client.MediaType(
schema=SchemaFor200ResponseBodyApplicationJson),
},
)
@dataclass
class ApiResponseFor401(api_client.ApiResponse):
response: urllib3.HTTPResponse
body: Unset = unset
headers: Unset = unset
_response_for_401 = api_client.OpenApiResponse(
response_cls=ApiResponseFor401,
)
@dataclass
class ApiResponseFor403(api_client.ApiResponse):
response: urllib3.HTTPResponse
body: Unset = unset
headers: Unset = unset
_response_for_403 = api_client.OpenApiResponse(
response_cls=ApiResponseFor403,
)
_status_code_to_response = {
'200': _response_for_200,
'401': _response_for_401,
'403': _response_for_403,
}
_all_accept_content_types = (
'application/json',
)
class PostPipelineRuns(api_client.Api):
def post_pipeline_runs(
self: api_client.Api,
path_params: RequestPathParams = frozendict(),
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: bool = False,
) -> typing.Union[
ApiResponseFor200,
api_client.ApiResponseWithoutDeserialization
]:
"""
:param skip_deserialization: If true then api_response.response will be set but
api_response.body and api_response.headers will not be deserialized into schema
class instances
"""
self._verify_typed_dict_inputs(RequestPathParams, path_params)
_path_params = {}
for parameter in (
request_path_organization,
request_path_pipeline,
):
parameter_data = path_params.get(parameter.name, unset)
if parameter_data is unset:
continue
serialized_data = parameter.serialize(parameter_data)
_path_params.update(serialized_data)
_headers = HTTPHeaderDict()
# TODO add cookie handling
if accept_content_types:
for accept_content_type in accept_content_types:
_headers.add('Accept', accept_content_type)
response = self.api_client.call_api(
resource_path=_path,
method=_method,
path_params=_path_params,
headers=_headers,
auth_settings=_auth,
stream=stream,
timeout=timeout,
)
if skip_deserialization:
api_response = api_client.ApiResponseWithoutDeserialization(response=response)
else:
response_for_status = _status_code_to_response.get(str(response.status))
if response_for_status:
api_response = response_for_status.deserialize(response, self.api_client.configuration)
else:
api_response = api_client.ApiResponseWithoutDeserialization(response=response)
if not 200 <= response.status <= 299:
raise exceptions.ApiException(api_response=api_response)
return api_response
| cliffano/swaggy-jenkins | clients/python-experimental/generated/openapi_client/api/blue_ocean_api_endpoints/post_pipeline_runs.py | Python | mit | 5,478 |
import random
from entities import Entity
_player = None
def initPlayer(name):
global _player
if not _player:
_player = Player(name)
def getPlayPos():
global _player
if _player:
return _player.position
def d6(n):
suma = 0
for i in range(n):
suma += random.randint(1, 6)
return suma
def d6DropLowest(n):
arr = []
for i in range(n + 1):
arr.append(random.randint(1, 6))
arr = sorted(arr)
return sum(arr[1:])
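# Editor's note (hedged): d6DropLowest(n) actually rolls n + 1 dice and
# discards the single lowest, so the conventional "4d6 drop lowest" ability
# roll used below is d6DropLowest(3).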
class Player(Entity):
def __init__(self, name):
character = '@'
color = 2
passable = True
self.name = name
self.abilities = {
'STR': 0,
'DEX': 0,
'CON': 0,
'INT': 0,
'WIS': 0,
'CHA': 0
}
self.position = [random.randint(0, 50), random.randint(0, 50)]
def rollStats(self):
self.abilities['STR'] = d6(3)
self.abilities['DEX'] = d6(3)
self.abilities['CON'] = d6(3)
self.abilities['INT'] = d6(3)
self.abilities['WIS'] = d6(3)
self.abilities['CHA'] = d6(3)
def rollStatsEasy(self):
self.abilities['STR'] = d6DropLowest(3)
self.abilities['DEX'] = d6DropLowest(3)
self.abilities['CON'] = d6DropLowest(3)
self.abilities['INT'] = d6DropLowest(3)
self.abilities['WIS'] = d6DropLowest(3)
self.abilities['CHA'] = d6DropLowest(3)
def __str__(self):
return 'Name: ' + self.name + '\n' + \
'STR: ' + str(self.abilities['STR']) + '\n' + \
'DEX: ' + str(self.abilities['DEX']) + '\n' + \
'CON: ' + str(self.abilities['CON']) + '\n' + \
'INT: ' + str(self.abilities['INT']) + '\n' + \
'WIS: ' + str(self.abilities['WIS']) + '\n' + \
'CHA: ' + str(self.abilities['CHA']) + '\n' + \
'Mean: ' + str(sum(self.abilities.values()) / 6)
if __name__ == '__main__':
pl = Player('GengiBro')
pl.rollStats()
print(pl)
pl.rollStatsEasy()
print(pl)
| TheAwesomeTool/Delve | engine/Player.py | Python | gpl-2.0 | 2,089 |
from conans.model import Generator
class MakeGenerator(Generator):
def __init__(self, conanfile):
Generator.__init__(self, conanfile)
self.makefile_newline = "\n"
self.makefile_line_continuation = " \\\n"
self.assignment_if_absent = " ?= "
self.assignment_append = " += "
@property
def filename(self):
return 'conanbuildinfo.mak'
@property
def content(self):
content = [
"#-------------------------------------------------------------------#",
"# Makefile variables from Conan Dependencies #",
"#-------------------------------------------------------------------#",
"",
]
deps_content = []
for pkg_name, cpp_info in self.deps_build_info.dependencies:
deps_content.extend(self._create_content_from_dep(pkg_name, cpp_info))
deps_content.extend(self._create_combined_content())
for line_as_list in deps_content:
content.append("".join(line_as_list))
content.append("#-------------------------------------------------------------------#")
content.append(self.makefile_newline)
return self.makefile_newline.join(content)
def _create_content_from_dep(self, pkg_name, cpp_info):
vars_info = [("ROOT", self.assignment_if_absent, [cpp_info.rootpath]),
("SYSROOT", self.assignment_if_absent, [cpp_info.sysroot]),
("INCLUDE_DIRS", self.assignment_append, cpp_info.include_paths),
("LIB_DIRS", self.assignment_append, cpp_info.lib_paths),
("BIN_DIRS", self.assignment_append, cpp_info.bin_paths),
("BUILD_DIRS", self.assignment_append, cpp_info.build_paths),
("RES_DIRS", self.assignment_append, cpp_info.res_paths),
("LIBS", self.assignment_append, cpp_info.libs),
("SYSTEM_LIBS", self.assignment_append, cpp_info.system_libs),
("DEFINES", self.assignment_append, cpp_info.defines),
("CFLAGS", self.assignment_append, cpp_info.cflags),
("CXXFLAGS", self.assignment_append, cpp_info.cxxflags),
("SHAREDLINKFLAGS", self.assignment_append, cpp_info.sharedlinkflags),
("EXELINKFLAGS", self.assignment_append, cpp_info.exelinkflags),
("FRAMEWORKS", self.assignment_append, cpp_info.frameworks),
("FRAMEWORK_PATHS", self.assignment_append, cpp_info.framework_paths)]
return [self._create_makefile_var(var_name, operator, values, pkg=pkg_name)
for var_name, operator, values in vars_info]
def _create_combined_content(self):
content = []
for var_name in ["root", "sysroot", "include_dirs", "lib_dirs", "bin_dirs", "build_dirs",
"res_dirs", "libs", "defines", "cflags", "cxxflags", "sharedlinkflags",
"exelinkflags", "frameworks", "framework_paths", "system_libs"]:
values = ["$(CONAN_{var}_{pkg})".format(var=var_name.upper(), pkg=pkg.upper())
for pkg, _ in self.deps_build_info.dependencies]
content.append(self._create_makefile_var(var_name, self.assignment_append, values))
return content
def _create_makefile_var(self, var_name, operator, values, pkg=None):
pkg = "_{}".format(pkg.upper()) if pkg else ""
make_var = ["CONAN_{var}{pkg}{op}".format(var=var_name.upper(), pkg=pkg, op=operator)]
make_var.extend(value.replace("\\", "/") for value in values)
return self.makefile_line_continuation.join(make_var) + self.makefile_newline
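# --- Editor's illustration (hedged addition, not part of the original file) ---
# _create_makefile_var joins each value onto its own continued line, so a
# hypothetical package "zlib" with a single include directory renders roughly:
#   CONAN_INCLUDE_DIRS_ZLIB += \
#   /path/to/zlib/include
# (Windows-style backslashes in values are normalized to forward slashes first.)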
| conan-io/conan | conans/client/generators/make.py | Python | mit | 3,777 |
__author__ = '1'
from model.group import Group
import random
import string
import os.path
import jsonpickle
import getopt
import sys
try:
    opts, args = getopt.getopt(sys.argv[1:], "n:f:", ["number of groups", "file"])
except getopt.GetoptError as err:
    print(err)
sys.exit(2)
n = 5
f = "data/groups.json"
for o, a in opts:
if o == "-n":
n = int(a)
elif o == "-f":
f = a
def random_string(prefix, maxlen):
symbols = string.ascii_letters + string.digits + string.punctuation + " "*10
return prefix + "".join([random.choice(symbols) for i in range(random.randrange(maxlen))])
testdata = [Group(name="", header="", footer="")] + [
    Group(name=random_string("name", 10), header=random_string("header", 20), footer=random_string("footer", 20))
    for i in range(n)
]
file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", f)
with open(file, "w") as out:
jsonpickle.set_encoder_options("json", indent=2)
out.write(jsonpickle.encode(testdata)) | liliasapurina/python_training | generator/group.py | Python | apache-2.0 | 1,010 |
import os
import time
ret = os.fork()
if ret==0:
while True:
print("----1---")
time.sleep(1)
else:
while True:
print("----2---")
time.sleep(1)
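# Editor's note (hedged): os.fork() returns 0 in the child and the child's PID
# in the parent, so both loops above run concurrently; "----1---" is printed by
# the child and "----2---" by the parent.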
| jameswatt2008/jameswatt2008.github.io | python/Python核心编程/系统编程/截图和代码/系统编程1/02-fork.py | Python | gpl-2.0 | 189 |
#!/usr/bin/env python
import datetime
import serial
import time
from multiprocessing import Process
from rebooter import reboot
class P1:
FAIL_THRESHOLD = 100
ELECTRICITY_1_WITHDRAWAL_CUMULATIVE = '1.8.1'
ELECTRICITY_2_WITHDRAWAL_CUMULATIVE = '1.8.2'
ELECTRICITY_1_SUPPLY_CUMULATIVE = '2.8.1'
ELECTRICITY_2_SUPPLY_CUMULATIVE = '2.8.2'
ELECTRICITY_CURRENT_WITHDRAWAL = '1.7.0'
ELECTRICITY_CURRENT_SUPPLY = '2.7.0'
GAS_CUMULATIVE = '24.2.1'
TELEGRAM_MAP = {
ELECTRICITY_1_WITHDRAWAL_CUMULATIVE: {
'unit': 'kWh 1 (+P)',
'map': [0, 10]
},
ELECTRICITY_2_WITHDRAWAL_CUMULATIVE: {
'unit': 'kWh 2 (+P)',
'map': [0, 10]
},
ELECTRICITY_CURRENT_WITHDRAWAL: {
'unit': 'kW (+P)',
'map': [0, 5]
},
ELECTRICITY_1_SUPPLY_CUMULATIVE: {
'unit': 'kWh 1 (-P)',
'map': [0, 10]
},
ELECTRICITY_2_SUPPLY_CUMULATIVE: {
'unit': 'kWh 2 (-P)',
'map': [0, 10]
},
ELECTRICITY_CURRENT_SUPPLY: {
            'unit': 'kW (-P)',
'map': [0, 5]
},
GAS_CUMULATIVE: {
'unit': 'm3',
'map': [15, 24]
}
}
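    # Editor's note (hedged): each 'map' entry is a [start, stop) character
    # slice applied to the payload after the first '(' in a telegram line; the
    # gas reading uses [15, 24] because its payload carries a timestamp before
    # the meter value.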
callback = None
timer = None
def __init__(self, debug=False):
ser = serial.Serial()
ser.baudrate = 115200
ser.bytesize = serial.EIGHTBITS
ser.parity = serial.PARITY_NONE
ser.stopbits = serial.STOPBITS_ONE
ser.xonxoff = 0
ser.rtscts = 0
ser.timeout = 20
ser.port = "/dev/ttyUSB0"
self.debug = debug
self.ser = ser
def ser_open(self):
# Open COM port
try:
self.ser.open()
except:
reboot(1)
raise RuntimeError("[P1] Error opening %s" % self.ser.name)
def ser_close(self):
# Close port and show status
try:
self.ser.close()
except:
reboot(1)
raise RuntimeError("[P1] Can't close serial port %s" % self.ser.name)
def wakeup(self):
pass
def hibernate(self):
pass
def event_setup(self, callback):
self.callback = callback
def event_start(self):
self.ser_open()
self.timer = Process(target=self.event_loop)
self.timer.start()
def event_stop(self):
if self.timer is not None and self.timer.is_alive():
try:
self.timer.terminate()
self.ser_close()
except AttributeError:
print("[CONTINUOUS_P1] Could not terminate timer")
def event_loop(self):
data = {}
fail_counter = 0
# Read telegram
while True:
try:
p1_raw = self.ser.readline()
except:
self.debug_log("Can't read serial port %s" % self.ser.name)
fail_counter = fail_counter + 1
if fail_counter > self.FAIL_THRESHOLD:
print("[CONTINUOUS_P1] Giving up reading serial port %s" % self.ser.name)
reboot()
time.sleep(1)
continue
fail_counter = 0
p1_str = str(p1_raw)
p1_line = p1_str.strip()
self.debug_log(p1_line)
if p1_line[:1] == '!':
self.event_callback(data)
self.debug_log("^^^ end")
data = {}
p1_line = p1_line[p1_line.find(":") + 1:]
p1_key = p1_line[:p1_line.find("(")]
p1_line = p1_line[p1_line.find("(") + 1:]
if p1_key not in self.TELEGRAM_MAP:
self.debug_log("^^^ skip")
continue
try:
p1_value = p1_line[
self.TELEGRAM_MAP[p1_key]['map'][0]: self.TELEGRAM_MAP[p1_key]['map'][
1]]
p1_value = float(p1_value)
data[p1_key] = p1_value
self.debug_log("^^^ save")
except ValueError:
self.debug_log("^^^ incomplete")
def event_callback(self, data):
# only fire callback if we get all the data we need
if len(data) >= len(self.TELEGRAM_MAP):
self.callback(data)
def debug_log(self, message):
# Log debug messages
if self.debug:
with open("/home/pi/code/var/p1.log", "a") as log:
log.write(datetime.datetime.utcnow().isoformat() + ": " + message + "\n")
| hongaar/meterkast | components/continuousP1.py | Python | mit | 4,600 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import throw, _
import frappe.defaults
from frappe.utils import cint, flt, get_fullname, cstr
from frappe.contacts.doctype.address.address import get_address_display
from erpnext.shopping_cart.doctype.shopping_cart_settings.shopping_cart_settings import get_shopping_cart_settings
from frappe.utils.nestedset import get_root_of
from erpnext.accounts.utils import get_account_name
from erpnext.utilities.product import get_qty_in_stock
from frappe.contacts.doctype.contact.contact import get_contact_name
class WebsitePriceListMissingError(frappe.ValidationError):
pass
def set_cart_count(quotation=None):
if cint(frappe.db.get_singles_value("Shopping Cart Settings", "enabled")):
if not quotation:
quotation = _get_cart_quotation()
cart_count = cstr(len(quotation.get("items")))
if hasattr(frappe.local, "cookie_manager"):
frappe.local.cookie_manager.set_cookie("cart_count", cart_count)
@frappe.whitelist()
def get_cart_quotation(doc=None):
party = get_party()
if not doc:
quotation = _get_cart_quotation(party)
doc = quotation
set_cart_count(quotation)
addresses = get_address_docs(party=party)
if not doc.customer_address and addresses:
update_cart_address("billing", addresses[0].name)
return {
"doc": decorate_quotation_doc(doc),
"shipping_addresses": [{"name": address.name, "title": address.address_title, "display": address.display}
for address in addresses if address.address_type == "Shipping"],
"billing_addresses": [{"name": address.name, "title": address.address_title, "display": address.display}
for address in addresses if address.address_type == "Billing"],
"shipping_rules": get_applicable_shipping_rules(party),
"cart_settings": frappe.get_cached_doc("Shopping Cart Settings")
}
@frappe.whitelist()
def place_order():
quotation = _get_cart_quotation()
cart_settings = frappe.db.get_value("Shopping Cart Settings", None,
["company", "allow_items_not_in_stock"], as_dict=1)
quotation.company = cart_settings.company
quotation.flags.ignore_permissions = True
quotation.submit()
if quotation.quotation_to == 'Lead' and quotation.party_name:
# company used to create customer accounts
frappe.defaults.set_user_default("company", quotation.company)
if not (quotation.shipping_address_name or quotation.customer_address):
frappe.throw(_("Set Shipping Address or Billing Address"))
from erpnext.selling.doctype.quotation.quotation import _make_sales_order
sales_order = frappe.get_doc(_make_sales_order(quotation.name, ignore_permissions=True))
sales_order.payment_schedule = []
if not cint(cart_settings.allow_items_not_in_stock):
for item in sales_order.get("items"):
item.reserved_warehouse, is_stock_item = frappe.db.get_value("Item",
item.item_code, ["website_warehouse", "is_stock_item"])
if is_stock_item:
item_stock = get_qty_in_stock(item.item_code, "website_warehouse")
if not cint(item_stock.in_stock):
throw(_("{1} Not in Stock").format(item.item_code))
if item.qty > item_stock.stock_qty[0][0]:
throw(_("Only {0} in Stock for item {1}").format(item_stock.stock_qty[0][0], item.item_code))
sales_order.flags.ignore_permissions = True
sales_order.insert()
sales_order.submit()
if hasattr(frappe.local, "cookie_manager"):
frappe.local.cookie_manager.delete_cookie("cart_count")
return sales_order.name
@frappe.whitelist()
def request_for_quotation():
quotation = _get_cart_quotation()
quotation.flags.ignore_permissions = True
quotation.save()
if not get_shopping_cart_settings().save_quotations_as_draft:
quotation.submit()
return quotation.name
@frappe.whitelist()
def update_cart(item_code, qty, additional_notes=None, with_items=False):
quotation = _get_cart_quotation()
empty_card = False
qty = flt(qty)
if qty == 0:
quotation_items = quotation.get("items", {"item_code": ["!=", item_code]})
if quotation_items:
quotation.set("items", quotation_items)
else:
empty_card = True
else:
quotation_items = quotation.get("items", {"item_code": item_code})
if not quotation_items:
quotation.append("items", {
"doctype": "Quotation Item",
"item_code": item_code,
"qty": qty,
"additional_notes": additional_notes
})
else:
quotation_items[0].qty = qty
quotation_items[0].additional_notes = additional_notes
apply_cart_settings(quotation=quotation)
quotation.flags.ignore_permissions = True
quotation.payment_schedule = []
if not empty_card:
quotation.save()
else:
quotation.delete()
quotation = None
set_cart_count(quotation)
context = get_cart_quotation(quotation)
if cint(with_items):
return {
"items": frappe.render_template("templates/includes/cart/cart_items.html",
context),
"taxes": frappe.render_template("templates/includes/order/order_taxes.html",
context),
}
else:
return {
'name': quotation.name,
'shopping_cart_menu': get_shopping_cart_menu(context)
}
@frappe.whitelist()
def get_shopping_cart_menu(context=None):
if not context:
context = get_cart_quotation()
return frappe.render_template('templates/includes/cart/cart_dropdown.html', context)
@frappe.whitelist()
def add_new_address(doc):
doc = frappe.parse_json(doc)
doc.update({
'doctype': 'Address'
})
address = frappe.get_doc(doc)
address.save(ignore_permissions=True)
return address
@frappe.whitelist(allow_guest=True)
def create_lead_for_item_inquiry(lead, subject, message):
lead = frappe.parse_json(lead)
lead_doc = frappe.new_doc('Lead')
lead_doc.update(lead)
lead_doc.set('lead_owner', '')
if not frappe.db.exists('Lead Source', 'Product Inquiry'):
frappe.get_doc({
'doctype': 'Lead Source',
'source_name' : 'Product Inquiry'
}).insert(ignore_permissions=True)
lead_doc.set('source', 'Product Inquiry')
try:
lead_doc.save(ignore_permissions=True)
except frappe.exceptions.DuplicateEntryError:
frappe.clear_messages()
lead_doc = frappe.get_doc('Lead', {'email_id': lead['email_id']})
lead_doc.add_comment('Comment', text='''
<div>
<h5>{subject}</h5>
<p>{message}</p>
</div>
'''.format(subject=subject, message=message))
return lead_doc
@frappe.whitelist()
def get_terms_and_conditions(terms_name):
return frappe.db.get_value('Terms and Conditions', terms_name, 'terms')
@frappe.whitelist()
def update_cart_address(address_type, address_name):
quotation = _get_cart_quotation()
address_display = get_address_display(frappe.get_doc("Address", address_name).as_dict())
if address_type.lower() == "billing":
quotation.customer_address = address_name
quotation.address_display = address_display
		quotation.shipping_address_name = quotation.shipping_address_name or address_name
elif address_type.lower() == "shipping":
quotation.shipping_address_name = address_name
quotation.shipping_address = address_display
		quotation.customer_address = quotation.customer_address or address_name
apply_cart_settings(quotation=quotation)
quotation.flags.ignore_permissions = True
quotation.save()
context = get_cart_quotation(quotation)
return {
"taxes": frappe.render_template("templates/includes/order/order_taxes.html",
context),
}
def guess_territory():
territory = None
geoip_country = frappe.session.get("session_country")
if geoip_country:
territory = frappe.db.get_value("Territory", geoip_country)
return territory or \
frappe.db.get_value("Shopping Cart Settings", None, "territory") or \
get_root_of("Territory")
def decorate_quotation_doc(doc):
for d in doc.get("items", []):
d.update(frappe.db.get_value("Item", d.item_code,
["thumbnail", "website_image", "description", "route"], as_dict=True))
return doc
def _get_cart_quotation(party=None):
'''Return the open Quotation of type "Shopping Cart" or make a new one'''
if not party:
party = get_party()
quotation = frappe.get_all("Quotation", fields=["name"], filters=
{"party_name": party.name, "order_type": "Shopping Cart", "docstatus": 0},
order_by="modified desc", limit_page_length=1)
if quotation:
qdoc = frappe.get_doc("Quotation", quotation[0].name)
else:
company = frappe.db.get_value("Shopping Cart Settings", None, ["company"])
qdoc = frappe.get_doc({
"doctype": "Quotation",
"naming_series": get_shopping_cart_settings().quotation_series or "QTN-CART-",
"quotation_to": party.doctype,
"company": company,
"order_type": "Shopping Cart",
"status": "Draft",
"docstatus": 0,
"__islocal": 1,
"party_name": party.name
})
qdoc.contact_person = frappe.db.get_value("Contact", {"email_id": frappe.session.user})
qdoc.contact_email = frappe.session.user
qdoc.flags.ignore_permissions = True
qdoc.run_method("set_missing_values")
apply_cart_settings(party, qdoc)
return qdoc
def update_party(fullname, company_name=None, mobile_no=None, phone=None):
party = get_party()
party.customer_name = company_name or fullname
party.customer_type == "Company" if company_name else "Individual"
contact_name = frappe.db.get_value("Contact", {"email_id": frappe.session.user})
contact = frappe.get_doc("Contact", contact_name)
contact.first_name = fullname
contact.last_name = None
contact.customer_name = party.customer_name
contact.mobile_no = mobile_no
contact.phone = phone
contact.flags.ignore_permissions = True
contact.save()
party_doc = frappe.get_doc(party.as_dict())
party_doc.flags.ignore_permissions = True
party_doc.save()
qdoc = _get_cart_quotation(party)
if not qdoc.get("__islocal"):
qdoc.customer_name = company_name or fullname
qdoc.run_method("set_missing_lead_customer_details")
qdoc.flags.ignore_permissions = True
qdoc.save()
def apply_cart_settings(party=None, quotation=None):
if not party:
party = get_party()
if not quotation:
quotation = _get_cart_quotation(party)
cart_settings = frappe.get_doc("Shopping Cart Settings")
set_price_list_and_rate(quotation, cart_settings)
quotation.run_method("calculate_taxes_and_totals")
set_taxes(quotation, cart_settings)
_apply_shipping_rule(party, quotation, cart_settings)
def set_price_list_and_rate(quotation, cart_settings):
"""set price list based on billing territory"""
_set_price_list(cart_settings, quotation)
# reset values
quotation.price_list_currency = quotation.currency = \
quotation.plc_conversion_rate = quotation.conversion_rate = None
for item in quotation.get("items"):
item.price_list_rate = item.discount_percentage = item.rate = item.amount = None
# refetch values
quotation.run_method("set_price_list_and_item_details")
if hasattr(frappe.local, "cookie_manager"):
# set it in cookies for using in product page
frappe.local.cookie_manager.set_cookie("selling_price_list", quotation.selling_price_list)
def _set_price_list(cart_settings, quotation=None):
"""Set price list based on customer or shopping cart default"""
from erpnext.accounts.party import get_default_price_list
party_name = quotation.get("party_name") if quotation else get_party().get("name")
selling_price_list = None
# check if default customer price list exists
if party_name and frappe.db.exists("Customer", party_name):
selling_price_list = get_default_price_list(frappe.get_doc("Customer", party_name))
# check default price list in shopping cart
if not selling_price_list:
selling_price_list = cart_settings.price_list
if quotation:
quotation.selling_price_list = selling_price_list
return selling_price_list
def set_taxes(quotation, cart_settings):
"""set taxes based on billing territory"""
from erpnext.accounts.party import set_taxes
customer_group = frappe.db.get_value("Customer", quotation.party_name, "customer_group")
quotation.taxes_and_charges = set_taxes(quotation.party_name, "Customer",
quotation.transaction_date, quotation.company, customer_group=customer_group, supplier_group=None,
tax_category=quotation.tax_category, billing_address=quotation.customer_address,
shipping_address=quotation.shipping_address_name, use_for_shopping_cart=1)
	# clear table
	quotation.set("taxes", [])
	# append taxes
	quotation.append_taxes_from_master()
def get_party(user=None):
if not user:
user = frappe.session.user
contact_name = get_contact_name(user)
party = None
if contact_name:
contact = frappe.get_doc('Contact', contact_name)
if contact.links:
party_doctype = contact.links[0].link_doctype
party = contact.links[0].link_name
cart_settings = frappe.get_doc("Shopping Cart Settings")
debtors_account = ''
if cart_settings.enable_checkout:
debtors_account = get_debtors_account(cart_settings)
if party:
return frappe.get_doc(party_doctype, party)
else:
if not cart_settings.enabled:
frappe.local.flags.redirect_location = "/contact"
raise frappe.Redirect
customer = frappe.new_doc("Customer")
fullname = get_fullname(user)
customer.update({
"customer_name": fullname,
"customer_type": "Individual",
"customer_group": get_shopping_cart_settings().default_customer_group,
"territory": get_root_of("Territory")
})
if debtors_account:
customer.update({
"accounts": [{
"company": cart_settings.company,
"account": debtors_account
}]
})
customer.flags.ignore_mandatory = True
customer.insert(ignore_permissions=True)
contact = frappe.new_doc("Contact")
contact.update({
"first_name": fullname,
"email_ids": [{"email_id": user, "is_primary": 1}]
})
contact.append('links', dict(link_doctype='Customer', link_name=customer.name))
contact.flags.ignore_mandatory = True
contact.insert(ignore_permissions=True)
return customer
def get_debtors_account(cart_settings):
payment_gateway_account_currency = \
frappe.get_doc("Payment Gateway Account", cart_settings.payment_gateway_account).currency
account_name = _("Debtors ({0})").format(payment_gateway_account_currency)
debtors_account_name = get_account_name("Receivable", "Asset", is_group=0,\
account_currency=payment_gateway_account_currency, company=cart_settings.company)
if not debtors_account_name:
debtors_account = frappe.get_doc({
"doctype": "Account",
"account_type": "Receivable",
"root_type": "Asset",
"is_group": 0,
"parent_account": get_account_name(root_type="Asset", is_group=1, company=cart_settings.company),
"account_name": account_name,
"currency": payment_gateway_account_currency
}).insert(ignore_permissions=True)
return debtors_account.name
else:
return debtors_account_name
def get_address_docs(doctype=None, txt=None, filters=None, limit_start=0, limit_page_length=20,
party=None):
if not party:
party = get_party()
if not party:
return []
address_names = frappe.db.get_all('Dynamic Link', fields=('parent'),
filters=dict(parenttype='Address', link_doctype=party.doctype, link_name=party.name))
out = []
for a in address_names:
address = frappe.get_doc('Address', a.parent)
address.display = get_address_display(address.as_dict())
out.append(address)
return out
@frappe.whitelist()
def apply_shipping_rule(shipping_rule):
quotation = _get_cart_quotation()
quotation.shipping_rule = shipping_rule
apply_cart_settings(quotation=quotation)
quotation.flags.ignore_permissions = True
quotation.save()
return get_cart_quotation(quotation)
def _apply_shipping_rule(party=None, quotation=None, cart_settings=None):
if not quotation.shipping_rule:
shipping_rules = get_shipping_rules(quotation, cart_settings)
if not shipping_rules:
return
elif quotation.shipping_rule not in shipping_rules:
quotation.shipping_rule = shipping_rules[0]
if quotation.shipping_rule:
quotation.run_method("apply_shipping_rule")
quotation.run_method("calculate_taxes_and_totals")
def get_applicable_shipping_rules(party=None, quotation=None):
shipping_rules = get_shipping_rules(quotation)
if shipping_rules:
rule_label_map = frappe.db.get_values("Shipping Rule", shipping_rules, "label")
# we need this in sorted order as per the position of the rule in the settings page
return [[rule, rule] for rule in shipping_rules]
def get_shipping_rules(quotation=None, cart_settings=None):
if not quotation:
quotation = _get_cart_quotation()
shipping_rules = []
if quotation.shipping_address_name:
country = frappe.db.get_value("Address", quotation.shipping_address_name, "country")
if country:
shipping_rules = frappe.db.sql_list("""select distinct sr.name
from `tabShipping Rule Country` src, `tabShipping Rule` sr
where src.country = %s and
sr.disabled != 1 and sr.name = src.parent""", country)
return shipping_rules
def get_address_territory(address_name):
"""Tries to match city, state and country of address to existing territory"""
territory = None
if address_name:
address_fields = frappe.db.get_value("Address", address_name,
["city", "state", "country"])
for value in address_fields:
territory = frappe.db.get_value("Territory", value)
if territory:
break
return territory
def show_terms(doc):
return doc.tc_name
@frappe.whitelist(allow_guest=True)
def apply_coupon_code(applied_code, applied_referral_sales_partner):
quotation = True
if not applied_code:
frappe.throw(_("Please enter a coupon code"))
coupon_list = frappe.get_all('Coupon Code', filters={'coupon_code': applied_code})
if not coupon_list:
frappe.throw(_("Please enter a valid coupon code"))
coupon_name = coupon_list[0].name
from erpnext.accounts.doctype.pricing_rule.utils import validate_coupon_code
validate_coupon_code(coupon_name)
quotation = _get_cart_quotation()
quotation.coupon_code = coupon_name
quotation.flags.ignore_permissions = True
quotation.save()
if applied_referral_sales_partner:
sales_partner_list = frappe.get_all('Sales Partner', filters={'referral_code': applied_referral_sales_partner})
if sales_partner_list:
sales_partner_name = sales_partner_list[0].name
quotation.referral_sales_partner = sales_partner_name
quotation.flags.ignore_permissions = True
quotation.save()
return quotation
| saurabh6790/erpnext | erpnext/shopping_cart/cart.py | Python | gpl-3.0 | 18,152 |
#!/usr/bin/env python
"""
Tests for the singleton scope.
"""
from snakeguice import inject, scopes, Injector, annotate
import cls_heirarchy as ch
class TestSingletonScope(object):
class DomainObject(object):
@inject(logger_a=ch.Logger, logger_b=ch.Logger, logger_c=ch.Logger)
def set_loggers(self, logger_a, logger_b, logger_c):
self.logger_a = logger_a
self.logger_b = logger_b
self.logger_c = logger_c
@inject(place_a=ch.Place)
@annotate(place_a='hot')
def set_place_a(self, place_a):
self.place_a = place_a
@inject(place_b=ch.Place)
@annotate(place_b='hot')
def set_place_b(self, place_b):
self.place_b = place_b
@inject(place_c=ch.Place)
@annotate(place_c='cold')
def set_place_c(self, place_c):
self.place_c = place_c
@inject(place_d=ch.Place)
@annotate(place_d='cold')
def set_place_d(self, place_d):
self.place_d = place_d
class SimpleClass(object):
@inject(place = ch.Place)
def __init__(self, place):
self.place = place
def assert_obj(self, obj):
assert obj.logger_a is obj.logger_b
assert obj.logger_b is obj.logger_c
assert obj.place_a is obj.place_b
assert obj.place_c is obj.place_d
assert obj.place_a is not obj.place_d
def test_to_instance(self):
class MyModule:
def configure(self, binder):
binder.bind(ch.Logger, to_instance=ch.ConcreteLogger())
binder.bind(ch.Place, annotated_with='hot',
to_instance=ch.Beach())
binder.bind(ch.Place, annotated_with='cold',
to_instance=ch.Glacier())
obj = Injector(MyModule()).get_instance(self.DomainObject)
self.assert_obj(obj)
def _test_inject_into_singleton(self):
class MyLogger(object):
hot_place = inject(ch.Place, annotation='hot')
cold_place = inject(ch.Place, annotation='cold')
class MyModule:
def configure(self, binder):
binder.bind(ch.Logger, to=MyLogger, in_scope=scopes.SINGLETON)
binder.bind(ch.Place, annotated_with='hot',
to=ch.Beach, to_scope=scopes.SINGLETON)
binder.bind(ch.Place, annotated_with='cold',
to=ch.Glacier, to_scope=scopes.SINGLETON)
obj = Injector(MyModule()).get_instance(self.DomainObject)
self.assert_obj(obj)
assert obj.logger_a.hot_place is obj.place_a
assert obj.logger_a.cold_place is obj.place_c
def test_simple_singleton(self):
class MyModule:
def configure(self, binder):
binder.bind(ch.Place, to=ch.Beach,
in_scope=scopes.SINGLETON)
obj = Injector(MyModule()).get_instance(self.SimpleClass)
| dstanek/snake-guice | tests/test_singletons.py | Python | mit | 2,988 |
# -*- coding: utf-8 -*-
"""
@created: Thu Jul 02 10:56:57 2015
Usage:
main.py
Options:
-h --help # Show this screen.
--version # Show version.
"""
### Imports
# Standard Library
from __future__ import print_function, division
from __future__ import absolute_import
import logging
import os.path
import functools
import abc
import inspect
import datetime
import time
# Third Party
import wx
import wx.gizmos as wxdv
from docopt import docopt
import bs4
from bs4 import BeautifulSoup
# Package / Application
try:
# Imports used for unittests
from . import (__project_name__,
__version__,
__released__,
)
logging.debug("Imports for UnitTests")
except (SystemError, ValueError):
try:
# Imports used by Spyder
# import blah
from __init__ import (__project_name__,
__version__,
__released__,
)
logging.debug("Imports for Spyder IDE")
except ImportError:
# Imports used by cx_freeze
# from tpedit import blah
from tpedit import (__project_name__,
__version__,
__released__,
)
logging.debug("imports for Executable")
### Module Constants
HIGHLIGHT = wx.Colour(255, 255, 0)
HIGHLIGHT2 = wx.Colour(255, 128, 30)
DEFAULT_LOG_LEVEL = logging.INFO
ROOT_PATH = os.path.join(os.getcwd(), "tests", "data")
TITLE_TEXT = "{} v{} Released {}".format(__project_name__,
__version__,
__released__,
)
def logged(func):
"""
Decorator that logs entry and exit points of a function.
"""
# Customize these messages
entry_msg = '+Entering {}'
exit_msg = '-Exiting {}. Exec took {:.6}ms'
logger = logging.getLogger()
@functools.wraps(func)
def wrapper(*args, **kwds):
logger.debug(entry_msg.format(func.__name__))
start = time.time() # TODO PY3: change to time.monotonic()
# or time.perf_counter()
# or time.process_time()
f_result = func(*args, **kwds)
end = time.time()
elapsed = (end - start) * 1000
logger.debug(exit_msg.format(func.__name__, elapsed))
return f_result
return wrapper
class LocalLogHandler(logging.StreamHandler):
"""
A logging handler that directs logs to a ``target`` wx.TextCtrl.
"""
def __init__(self, target):
logging.StreamHandler.__init__(self)
self.target = target
def emit(self, record):
msg = self.format(record)
self.target.WriteText(msg + "\n")
self.target.ShowPosition(self.target.GetLastPosition())
self.flush()
def _init_logging(target, level=DEFAULT_LOG_LEVEL):
"""
Initialize logging to the on-screen log
"""
logfmt = ("%(asctime)s.%(msecs)03d"
" [%(levelname)-8.8s]" # Note implicit string concatenation.
" %(message)s"
)
datefmt = "%Y-%m-%d %H:%M:%S"
# datefmt = "%H:%M:%S"
logger = logging.getLogger()
handler = LocalLogHandler(target)
handler.setLevel(level)
formatter = logging.Formatter(logfmt, datefmt)
handler.setFormatter(formatter)
handler.set_name("GUI Handler")
logger.addHandler(handler)
logging.info("GUI Logging Initialized, level = {}".format(level))
class MainApp(object):
"""
"""
def __init__(self):
self.app = wx.App()
self.frame = MainFrame(TITLE_TEXT, (1200, 650))
self.frame.Show()
logging.info("App init complete")
self.app.MainLoop()
class MainFrame(wx.Frame):
"""
"""
def __init__(self, title, size):
wx.Frame.__init__(self,
None,
wx.ID_ANY,
title=title,
size=size,
)
self._init_ui()
log_str = "{} init complete"
logging.info(log_str.format(type(self).__name__))
@logged
def _init_ui(self):
""" Initi UI Components """
# normally I'd make the panel later, but I want to be able to log
# things to it.
self.panel = MainPanel(self)
# Start logging.
_init_logging(self.panel.log_panel.log)
# Create the menu bar and bind events
self.menu_bar = wx.MenuBar()
self._create_menus()
self._bind_events()
# Initialize default states
self._set_defaults()
# Set the MenuBar and create a status bar
self.SetMenuBar(self.menu_bar)
self.CreateStatusBar()
_fns = ("1.xml", "2.xml", "3.xml")
# Uncomment this to auto-load some temp files
# self.open_files((os.path.join(ROOT_PATH, _fn) for _fn in _fns))
@logged
def _create_menus(self):
""" Create each menu for the menu bar """
self._create_file_menu()
self._create_edit_menu()
self._create_view_menu()
# self._create_tools_menu()
# self._create_options_menu()
# self._create_help_menu()
@logged
def _set_defaults(self, default_log_level=DEFAULT_LOG_LEVEL):
"""
"""
# TODO: refactor this hack
try:
if default_log_level == logging.DEBUG:
logging.info("Setting log level to DEBUG.")
self.sm_ll_debug.Check()
elif default_log_level == logging.INFO:
logging.info("Setting log level to INFO.")
self.sm_ll_info_.Check()
elif default_log_level == logging.WARNING:
logging.info("Setting log level to WARNING.")
self.sm_ll_warn_.Check()
elif default_log_level == logging.ERROR:
logging.info("Setting log level to ERROR.")
self.sm_ll_error.Check()
elif default_log_level == logging.CRITICAL:
logging.info("Setting log level to CRITICAL.")
self.sm_ll_crit_.Check()
else:
err_txt = "Invalid default log level `{}`."
                raise ValueError(err_txt.format(default_log_level))
except NameError:
logging.warning("Default log level not found, setting to INFO.")
default_log_level = logging.INFO
self.sm_ll_info_.Check()
except ValueError:
logging.warning("Invalid default log level, setting to INFO.")
default_log_level = logging.INFO
self.sm_ll_info_.Check()
except Exception:
raise
@logged
def _create_file_menu(self):
"""
Creates the File menu.
"""
# Create the menu and items
self.mfile = wx.Menu()
self.mf_new = wx.MenuItem(self.mfile, wx.ID_NEW, "&New\tCtrl+N",
"Create a new FTI Test Program file")
self.mf_open = wx.MenuItem(self.mfile, wx.ID_OPEN, "&Open\tCtrl+O",
"Open a Test Program file")
self.mf_close = wx.MenuItem(self.mfile, wx.ID_CLOSE, "&Close",
"Closes all open files")
self.mf_exit = wx.MenuItem(self.mfile, wx.ID_EXIT, "&Exit\tCtrl+Q",
"Exit the application")
# Add menu items to the menu
self.mfile.AppendItem(self.mf_new)
self.mfile.AppendItem(self.mf_open)
self.mfile.AppendItem(self.mf_close)
self.mfile.AppendSeparator()
self.mfile.AppendItem(self.mf_exit)
self.menu_bar.Append(self.mfile, "&File")
@logged
def _create_edit_menu(self):
"""
Creates the Edit menu
"""
# Create the menu and items
self.medit = wx.Menu()
self.me_temp = wx.MenuItem(self.medit,
wx.ID_EDIT,
"&Temp",
"TempItem")
self.sm_loglevel = wx.Menu()
self.sm_ll_debug = wx.MenuItem(self.sm_loglevel,
wx.ID_ANY,
"&Debug",
"Sets the log level to DEBUG",
wx.ITEM_RADIO)
self.sm_ll_info_ = wx.MenuItem(self.sm_loglevel,
wx.ID_ANY,
"&Info",
"Sets the log level to INFO",
wx.ITEM_RADIO)
self.sm_ll_warn_ = wx.MenuItem(self.sm_loglevel,
wx.ID_ANY,
"&Warning",
"Sets the log level to WARNING",
wx.ITEM_RADIO)
self.sm_ll_error = wx.MenuItem(self.sm_loglevel,
wx.ID_ANY,
"&Error",
"Sets the log level to ERROR",
wx.ITEM_RADIO)
self.sm_ll_crit_ = wx.MenuItem(self.sm_loglevel,
wx.ID_ANY,
"&Critical",
"Sets the log level to CRITICAL",
wx.ITEM_RADIO)
self.sm_loglevel.AppendItem(self.sm_ll_debug)
self.sm_loglevel.AppendItem(self.sm_ll_info_)
self.sm_loglevel.AppendItem(self.sm_ll_warn_)
self.sm_loglevel.AppendItem(self.sm_ll_error)
self.sm_loglevel.AppendItem(self.sm_ll_crit_)
# Add menu items to the menu
self.medit.AppendItem(self.me_temp)
self.medit.AppendMenu(wx.ID_ANY,
"Logging Level",
self.sm_loglevel,
"Change the logging level.")
self.menu_bar.Append(self.medit, "&Edit")
@logged
def _create_view_menu(self):
"""
Creates the View menu.
"""
# Create the menu and items
self.mview = wx.Menu()
self.mv_expand_all = wx.MenuItem(self.mview,
wx.ID_ANY,
"&Expand All",
"Expand All")
self.mv_collapse_all = wx.MenuItem(self.mview,
wx.ID_ANY,
"&Collapse All",
"Collapse All")
self.mv_expand_diffs = wx.MenuItem(self.mview,
wx.ID_ANY,
"Expand &Diffs",
"Expand diffs")
# Add menu items to the menu
self.mview.AppendItem(self.mv_expand_all)
self.mview.AppendItem(self.mv_collapse_all)
self.mview.AppendItem(self.mv_expand_diffs)
self.menu_bar.Append(self.mview, "&View")
@logged
def _bind_events(self):
""" Bind all initial events """
# File Menu
self.Bind(wx.EVT_MENU, self._on_new, id=wx.ID_NEW)
self.Bind(wx.EVT_MENU, self._on_open, id=wx.ID_OPEN)
self.Bind(wx.EVT_MENU, self._on_close, id=wx.ID_CLOSE)
self.Bind(wx.EVT_MENU, self._on_exit, id=wx.ID_EXIT)
# Edit Menu
self.Bind(wx.EVT_MENU, self._on_loglevel_change, self.sm_ll_debug)
self.Bind(wx.EVT_MENU, self._on_loglevel_change, self.sm_ll_info_)
self.Bind(wx.EVT_MENU, self._on_loglevel_change, self.sm_ll_warn_)
self.Bind(wx.EVT_MENU, self._on_loglevel_change, self.sm_ll_error)
self.Bind(wx.EVT_MENU, self._on_loglevel_change, self.sm_ll_crit_)
# View Menu
# self.Bind(wx.EVT_MENU, self._nothing)
self.Bind(wx.EVT_MENU, self._on_expand_all, self.mv_expand_all)
self.Bind(wx.EVT_MENU, self._on_collapse_all, self.mv_collapse_all)
self.Bind(wx.EVT_MENU, self._on_expand_diffs, self.mv_expand_diffs)
# Tools Menu
# Options Menu
# Help Menu
@logged
def _on_loglevel_change(self, event):
""" Process the log level change event """
new_level = event.GetEventObject().GetLabelText(event.GetId()).upper()
logging.info("Log Level Changed to {}".format(new_level))
_set_log_level(new_level)
@logged
def _on_new(self, event):
logging.warn("Command 'New' not yet implemented.")
@logged
def _on_open(self, event):
self.close_files()
self._open_file_dialog()
@logged
def _on_open_multiple(self, event):
logging.warn("'Open Multiple' command not yet implemented.")
@logged
def _on_close(self, event):
""" Delete all items in the tree and remove all file columns. """
self.close_files()
@logged
def _on_expand_all(self, event):
logging.info("Expanding all tree items.")
self.panel.edit_panel.tree.ExpandAll(self.panel.edit_panel.root)
@logged
def _on_collapse_all(self, event):
logging.info("Collapsing all tree items.")
collapse_all(self.panel.edit_panel.tree)
@logged
def _on_expand_diffs(self, event):
logging.info("Expanding differences.")
expand_diffs(self.panel.edit_panel.tree)
def _on_exit(self, event):
""" Execute Exit actions """
logging.info("Exiting app")
self.Close(True)
@logged
def _open_file_dialog(self):
""" Displayes the open file dialog """
file_dialog_style = (wx.FD_OPEN
| wx.FD_FILE_MUST_EXIST
| wx.FD_MULTIPLE
)
open_file_dialog = wx.FileDialog(self,
"prompt",
defaultDir=ROOT_PATH,
defaultFile="",
wildcard="XML Files (*.xml)|*.xml",
style=file_dialog_style
)
if open_file_dialog.ShowModal() == wx.ID_CANCEL:
# don't load
logging.info("User canceled open dialog")
return
paths = open_file_dialog.GetPaths()
for fp in paths:
logging.info(" Chosen file: `{}`".format(fp))
self.open_files(paths)
@logged
def open_files(self, paths):
""" """
# set some shorter names...
edit_panel = self.panel.edit_panel
# Reset the diff counter - don't want to double-count
edit_panel.diff_count = 0
# make sure a root exists:
try:
edit_panel.root = edit_panel.tree.AddRoot("root")
except AssertionError:
# root already exists
pass
# process each file into soup.
soups = []
for _n, fp in enumerate(paths):
with open(fp) as openf:
_, fn = os.path.split(fp)
logging.info("Processing `{}`".format(fn))
soups.append(BeautifulSoup(openf, 'xml'))
edit_panel.tree.AddColumn(fn)
edit_panel.tree.SetColumnWidth(_n + 2, 160)
edit_panel.tree.SetColumnEditable(_n + 2)
edit_panel._build_element_tree_recursively(edit_panel.root, soups)
edit_panel.tree.ExpandAll(edit_panel.root)
log_str = "Total {} differences found."
logging.info(log_str.format(edit_panel.diff_count))
self.panel.status_panel.update_diff_count(edit_panel.diff_count)
@logged
def close_files(self):
""" """
logging.info("Closing all files.")
tree = self.panel.edit_panel.tree
tree.DeleteAllItems()
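        # Remove file columns right-to-left so deletions don't shift the
        # indices of columns not yet visited.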
for col in reversed(range(2, tree.GetColumnCount())):
tree.RemoveColumn(col)
class MainPanel(wx.Panel):
"""
Root Panel of the UI.
Contains the EditPanel, where files are compared and edited, and the
LogPanel.
"""
def __init__(self, parent):
wx.Panel.__init__(self, parent)
self.parent = parent
self._init_ui()
log_str = "{} init complete"
logging.info(log_str.format(type(self).__name__))
def _init_ui(self):
self.edit_panel = EditPanel(self)
self.log_panel = LogPanel(self)
self.status_panel = StatusPanel(self)
self.hbox = wx.BoxSizer(wx.HORIZONTAL)
self.hbox.Add(self.status_panel, 0, wx.EXPAND)
self.hbox.Add(self.log_panel, 1, wx.EXPAND)
self.vbox = wx.BoxSizer(wx.VERTICAL)
self.vbox.Add(self.edit_panel, 4, wx.EXPAND)
self.vbox.Add(self.hbox, 1, wx.EXPAND)
self.SetSizer(self.vbox)
class StatusPanel(wx.Panel):
"""
"""
@logged
def __init__(self, parent):
wx.Panel.__init__(self, parent)
self.parent = parent
self.diff_count = 0
self.count_str = "{} differences found."
self._init_ui()
log_str = "{} init complete"
logging.info(log_str.format(type(self).__name__))
@logged
def _init_ui(self):
self.status_box = wx.StaticBox(self, wx.ID_ANY, "Status",
size=(200, -1),
)
initial_text = "No files open."
self.diff_count_display = wx.StaticText(self, wx.ID_ANY,
initial_text,
)
vbox = wx.StaticBoxSizer(self.status_box, wx.VERTICAL)
vbox.Add(self.diff_count_display, 1, wx.EXPAND)
self.SetSizer(vbox)
@logged
def update_diff_count(self, value):
""" """
self.diff_count = value
self.diff_count_display.SetLabel(self.count_str.format(value))
class LogPanel(wx.Panel):
"""
Logging window.
Contains a read-only TextCtrl that displays logging messages.
"""
def __init__(self, parent):
""" Init the parent class and instance variables """
wx.Panel.__init__(self, parent)
self.parent = parent
self._init_ui()
def _init_ui(self):
""" Init the UI elements """
log_style = (wx.TE_MULTILINE
| wx.TE_READONLY
| wx.HSCROLL
)
self.log = wx.TextCtrl(self, wx.ID_ANY, style=log_style)
monospace_font = wx.Font(10,
family=wx.MODERN,
style=wx.NORMAL,
weight=wx.NORMAL,
underline=False,
face='Consolas',
)
self.log.SetFont(monospace_font)
self.hbox = wx.BoxSizer(wx.HORIZONTAL)
self.hbox.Add(self.log, 1, wx.EXPAND)
self.SetSizer(self.hbox)
class EditPanel(wx.Panel):
"""
Primary Edit panel.
Contains all of the logic for displaying and editing the XML files.
"""
def __init__(self, parent):
""" Init the parent class and instance variables """
wx.Panel.__init__(self, parent)
self.parent = parent
self.diff_count = 0
self.edit_col = -1
self._init_ui()
# must bind events *after* init because they rely on those ui elements
self._bind_events()
log_str = "{} init complete"
logging.info(log_str.format(type(self).__name__))
def _init_ui(self):
"""
Init the UI elements
"""
# A TreeListCtrl contains all of the XML
tree_style = (wx.TR_DEFAULT_STYLE
| wx.TR_ROW_LINES
| wx.TR_COLUMN_LINES
| wx.TR_FULL_ROW_HIGHLIGHT
)
self.tree = wxdv.TreeListCtrl(self,
wx.ID_ANY,
style=tree_style,
)
# Add the columns that always exist.
self.tree.AddColumn("Item")
self.tree.AddColumn("DataType")
self.tree.SetMainColumn(0) # contains the tree
self.tree.SetColumnWidth(0, 325)
self.tree.SetColumnWidth(1, 140)
self.root = self.tree.AddRoot("root")
# Expand some items by default
self.tree.ExpandAll(self.root)
self.vbox = wx.BoxSizer(wx.VERTICAL)
self.vbox.Add(self.tree, 1, wx.EXPAND)
self.SetSizer(self.vbox)
@logged
def _bind_events(self):
"""
Bind various events for the Edit Panel
"""
main_win = self.tree.GetMainWindow()
main_win.Bind(wx.EVT_RIGHT_DCLICK, self._on_right_dclick)
self.tree.Bind(wx.EVT_TREE_ITEM_ACTIVATED, self._on_activate)
self.tree.Bind(wx.EVT_TREE_BEGIN_LABEL_EDIT, self._on_item_edit_start)
self.tree.Bind(wx.EVT_TREE_END_LABEL_EDIT, self._on_item_edit_end)
@logged
def _on_right_dclick(self, event):
"""
Placeholder for value propagation.
"""
logging.info("Double-right click detected")
pos = event.GetPosition()
logging.info(" Pos: {}".format(pos))
item, flags, col = self.tree.HitTest(pos)
logging.info(" {} .. {} .. {}".format(item, flags, col))
if item:
item_text = self.tree.GetItemText(item)
col_text = self.tree.GetItemText(item, col)
log_str = "EXAMPLE: Item `{}: {}` propagated to all open files."
log_str = log_str.format(item_text, col_text)
logging.info(log_str)
@logged
def _on_activate(self, event):
"""
Placeholder - logging only.
"""
item_text = self.tree.GetItemText(event.GetItem())
logging.info("item activated: {}".format(item_text))
@logged
def _on_item_edit_start(self, event):
"""
Primary purpose: record which column is being edited (self.edit_col)
"""
self.edit_col = event.GetInt()
item_text = self.tree.GetItemText(event.GetItem())
item_value = self.tree.GetItemText(event.GetItem(), self.edit_col)
log_str = "Editing column {} for item `{}`"
logging.info(log_str.format(self.edit_col, item_text))
logging.info(" old value: `{}`".format(item_value))
@logged
def _on_item_edit_end(self, event):
"""
http://docs.wxwidgets.org/trunk/classwx_tree_event.html
"""
string = event.GetLabel()
log_str = "Column {} changed to: `{}`"
logging.info(log_str.format(self.edit_col, string))
if event.IsEditCancelled():
# I'm not sure when this would actually happen...
# It's not happening upon pressing ESC, so perhaps it only
# happens if EVT_TREE_BEGIN_LABEL_EDIT is vetoed?
logging.info("Column edit canceled.")
# TODO: move outside of the class?
@logged
def _build_element_tree_recursively(self, parent, soups):
"""
"""
skipped_items = ("FTI.Subsystems.Variables.Variables",
"FTI.TesterInstruments6.TesterInstruments",
"FTI.Subsystems.Coordinators.Coordinators",
)
all_children = ((x for x in soup.children if x != '\n')
for soup in soups)
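        # Note: zip(*all_children) walks the N soups in lockstep, yielding
        # one tuple of corresponding child nodes per row of the tree.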
for childs in zip(*all_children):
# assume that the 1st file is the "master file" that everything
# compares to.
child = childs[0]
# Ignore some stuff that I don't care about.
if child.name in skipped_items:
continue
# if the child is "Properties" then the next two items are
# going to be Name and Value
if child.name == "Properties":
# find the grandchildren
grandchildren = ([x for x in _child.children if x != '\n']
for _child in childs)
# collect the names and values of the grandchildren
names = []
values = []
for grandchild in grandchildren:
names.append(grandchild[0].string)
values.append(grandchild[1].string)
# set the item name as the 1st item
key = self.tree.AppendItem(parent, names[0])
# add the units to the units column
dtype = None
try:
value = unicode(values[0])
dtype, _ = parse_dtype(value)
except IndexError:
pass
if dtype is None:
dtype = ""
self.tree.SetItemText(key, dtype, 1)
# add values to each column
for _n, value in enumerate(values):
try:
value = unicode(value)
_, value = parse_dtype(value)
except IndexError:
pass
if value is None:
value = ""
self.tree.SetItemText(key, value, _n + 2)
# If any values are different, highlight the row and parents
if any(values[0] != x for x in values):
self._highlight_item_and_parents(key)
continue
# if we're at a NavigableString, then we need to add it
if isinstance(child, bs4.element.NavigableString):
# check for duplicates, highlight if true
if any(childs[0].string != x.string for x in childs):
self._highlight_item_and_parents(parent)
for _n, item in enumerate(childs):
self.tree.SetItemText(parent, item.string, _n + 2)
# if the child is a tag, then we set it as the new parent
# and recurse
if isinstance(child, bs4.element.Tag):
new_parent = self.tree.AppendItem(parent, child.name)
self._build_element_tree_recursively(new_parent, childs)
@logged
def _highlight_item_and_parents(self, item):
""" highlights an item row and parents """
self.diff_count += 1
self.tree.SetItemBackgroundColour(item, HIGHLIGHT)
for parent in get_parents(self.tree, item):
self.tree.SetItemBackgroundColour(parent, HIGHLIGHT2)
@logged
def _set_log_level(level_str):
"""
Sets the global logging level
    Parameters:
----------
level_str : string
        String representation of logging.level. Accepted values are::
DEBUG, INFO, WARN, WARNING, ERROR, CRITICAL
Returns:
--------
None
"""
# TODO: figure out a stdlib way to do this:
levels = {50: "CRITICAL",
40: "ERROR",
30: "WARNING",
20: "INFO",
10: "DEBUG",
}
if level_str not in levels.values():
raise ValueError("Invalid log level `{}`".format(level_str))
# Get the Logger and the previous logging level
logger = logging.getLogger()
prev_level = logger.level
new_level = getattr(logging, level_str) # Get numeric value
# Always record log level changes
log_str = "Changing logging level from {} to {}."
logging.log(99, log_str.format(levels[prev_level], levels[new_level]))
# Set the logger and handler levels
logger.setLevel(new_level)
log_str = "Logging Handler `{}` set to {}."
for handler in logger.handlers:
handler.setLevel(new_level)
logging.debug(log_str.format(handler.get_name(), level_str))
# logging.info("Global Log level set to {}".format(level_str))
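# Illustrative usage sketch (not part of the original module): only names
# present in `levels` are accepted, so
#   _set_log_level("DEBUG")   # chatty: logger and all handlers to DEBUG
#   _set_log_level("ERROR")   # quiet: errors and above only
# while _set_log_level("TRACE") raises ValueError.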
@logged
def get_parents(tree, item, retval=None):
"""
Gets all the parents of a tree item, recursively.
Parameters:
-----------
tree : wx.gizmos.TreeListCtrl object
The tree to act on.
item : wx._controls.TreeItemId
The item to get the parent of.
retval : list of wx._controls.TreeItemId
Only used during recursion. A list containing all of the parents.
Returns:
--------
retval : list of wx._controls.TreeItemId
A list of all ancestors of `item`.
"""
if retval is None:
retval = []
try:
logging.debug("Getting parent of `{}`".format(tree.GetItemText(item)))
parent = tree.GetItemParent(item)
retval.append(parent)
logging.debug(" Parent is: `{}`".format(tree.GetItemText(parent)))
get_parents(tree, parent, retval)
except AssertionError:
# we're at the top, ignore the error and return.
pass
return retval[:-1]
@logged
def collapse_all(tree):
"""
Collapse all items in a tree, recursively.
Parameters:
-----------
tree : wx.gizmos.TreeListCtrl object
The tree to act on.
Returns:
--------
None
"""
item = tree.GetRootItem()
    # get the first expanded item, returning if nothing is expanded.
try:
child = tree.GetFirstExpandedItem()
except AssertionError:
# raise AssertionError("Root item has no children")
return
expanded_items = [item, child]
while True:
try:
child = tree.GetNextExpanded(child)
        except Exception:
break
expanded_items.append(child)
for item in reversed(expanded_items):
try:
logging.debug("Collapsing `{}`".format(tree.GetItemText(item)))
tree.Collapse(item)
        except Exception:
pass
@logged
def expand_diffs(tree, item=None):
"""
Expand only the items that are different and their parents
"""
if item is None:
collapse_all(tree)
item = tree.GetRootItem()
bg = tree.GetItemBackgroundColour(item)
if bg == HIGHLIGHT2:
text = tree.GetItemText(item)
logging.debug("Expanding `{}`".format(text))
tree.Expand(item)
else:
return
# get the first child, returning if no children exist.
try:
child = tree.GetFirstChild(item)[0]
text = tree.GetItemText(child)
except AssertionError:
# raise AssertionError("Root item has no children")
return
children = [child, ]
while True:
try:
child = tree.GetNextSibling(child)
text = tree.GetItemText(child)
children.append(child)
        except Exception:
break
for i in children:
# logging.info("checking `{}`".format(tree.GetItemText(i)))
try:
bg = tree.GetItemBackgroundColour(i)
except TypeError:
continue
if bg == HIGHLIGHT2:
text = tree.GetItemText(i)
logging.debug("Expanding `{}`".format(text))
tree.Expand(i)
expand_diffs(tree, i)
@logged
def parse_dtype(string):
"""
Parses a data type from an FTI value string.
FTI value strings sometimes are of the form::
        &lt;Double&gt;6&lt;/Double&gt;
which, after translating the HTML codes, becomes valid XML::
<Double>6</Double>
The tag name ``Double`` is the data type and the tag's value ``6`` is
the value to return.
    Parameters:
----------
string : string
The string to parse
Returns:
--------
dtype : string
The parsed data type
value : string
The parsed value
"""
soup = BeautifulSoup(string, 'xml')
dtypes = [x.name for x in soup.find_all(True, recursive=True)]
dtype = ".".join(dtypes)
value = soup.find(dtypes[-1]).string
return dtype, value
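# Illustrative expected output (assuming BeautifulSoup's 'xml' parser is
# available): nested tag names are joined with dots, so
#   parse_dtype("<Double>-30</Double>")        ->  ("Double", "-30")
#   parse_dtype("<A><B><C>value</C></B></A>")  ->  ("A.B.C", "value")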
def main():
""" Main Code """
docopt(__doc__, version=__version__)
MainApp()
if __name__ == "__main__":
main()
# string = "<Double>-30</Double>"
# string = "<A><B><C>value</C></B></A>"
# parse_dtype(string)
| dougthor42/TPEdit | tpedit/main.py | Python | gpl-3.0 | 33,403 |
# Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from nova.api.validation import parameter_types
host = copy.deepcopy(parameter_types.hostname)
host['type'] = ['string', 'null']
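# JSON-Schema union type: 'host' may be a hostname string or null; passing
# null typically lets the scheduler choose the destination.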
migrate_live = {
'type': 'object',
'properties': {
'os-migrateLive': {
'type': 'object',
'properties': {
'block_migration': parameter_types.boolean,
'disk_over_commit': parameter_types.boolean,
'host': host
},
'required': ['block_migration', 'disk_over_commit', 'host'],
'additionalProperties': False,
},
},
'required': ['os-migrateLive'],
'additionalProperties': False,
}
block_migration = copy.deepcopy(parameter_types.boolean)
block_migration['enum'].append('auto')
migrate_live_v2_25 = copy.deepcopy(migrate_live)
del migrate_live_v2_25['properties']['os-migrateLive']['properties'][
'disk_over_commit']
migrate_live_v2_25['properties']['os-migrateLive']['properties'][
'block_migration'] = block_migration
migrate_live_v2_25['properties']['os-migrateLive']['required'] = (
['block_migration', 'host'])
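# Illustrative request bodies (assumptions for clarity, not part of the
# original module): the base schema requires all three properties, e.g.
#   {"os-migrateLive": {"host": "compute-2", "block_migration": false,
#                       "disk_over_commit": false}}
# while the 2.25 variant drops disk_over_commit and accepts 'auto', e.g.
#   {"os-migrateLive": {"host": null, "block_migration": "auto"}}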
| zhimin711/nova | nova/api/openstack/compute/schemas/migrate_server.py | Python | apache-2.0 | 1,742 |
# ElasticQuery
# File: elasticquery.py
# Desc: ElasticQuery itself
import json
from .dsl_util import unroll_struct
def _json_date(obj):
if hasattr(obj, 'isoformat'):
return obj.isoformat()
else:
raise TypeError('{0} is not JSON serializable'.format(obj))
class ElasticQuery(object):
'''
A class for building ES queries.
'''
_es = None
_index = None
_doc_type = None
_query = None
def __init__(self, es=None, index=None, doc_type=None):
'''
Creates a new query object.
'''
self._es = es
self._index = index
self._doc_type = doc_type
self._aggs = []
self._suggesters = []
# An empty query
self._struct = {}
def query(self, query):
'''
Set the query for this query.
'''
self._query = query
def aggregate(self, *aggregates):
'''
Add one or more aggregates to this query.
'''
self._aggs.extend(aggregates)
def suggest(self, *suggesters):
'''
Add one or more suggesters to this query.
'''
self._suggesters.extend(suggesters)
def set(self, key, value):
'''
Set an arbitrary attribute on this query.
'''
self._struct[key] = value
return self
def from_(self, from_):
'''
Set the from/offset for this query.
'''
self._struct['from'] = from_
return self
def size(self, size):
'''
Set the size of this query.
'''
self._struct['size'] = size
return self
def timeout(self, timeout):
'''
Set the timeout for this query.
'''
self._struct['timeout'] = timeout
return self
def fields(self, fields):
'''
Set the fields/_source for this query.
'''
self._struct['_source'] = fields
return self
def sort(self, field, order=None):
'''
Sort this query.
'''
if 'sort' not in self._struct:
self._struct['sort'] = []
if not order:
self._struct['sort'].append(field)
else:
self._struct['sort'].append({
field: {
'order': order,
},
})
return self
def dict(self):
'''
Returns the current query in dict format.
'''
# Just query? Use as-is
if self._query:
self._struct['query'] = self._query
if self._aggs:
aggs = {}
for agg in self._aggs:
aggs.update(agg.dict())
self._struct['aggregations'] = aggs
if self._suggesters:
suggs = {}
for sugg in self._suggesters:
suggs.update(sugg.dict())
self._struct['suggest'] = suggs
return unroll_struct(self._struct)
def get(self):
'''
Execute the current query (requires _es, _index & _doc_type).
'''
if self._es is None:
raise ValueError('No Elasticsearch instance attached to this query')
if self._index is None:
raise ValueError('No index specified for this query')
if self._doc_type is None:
raise ValueError('No doc type specified for this query')
return self._es.search(
index=self._index,
doc_type=self._doc_type,
body=self.dict(),
)
def count(self):
'''
        Count documents matching the current query (requires _es, _index & _doc_type).
'''
if self._es is None:
raise ValueError('No Elasticsearch instance attached to this query')
if self._index is None:
raise ValueError('No index specified for this query')
if self._doc_type is None:
raise ValueError('No doc type specified for this query')
return self._es.count(
index=self._index,
doc_type=self._doc_type,
body=self.dict(),
)
def json(self, **kwargs):
'''
Returns a JSON representation of the current query. Kwargs are passed to
``json.dumps``.
'''
return json.dumps(self.dict(), default=_json_date, **kwargs)
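# Minimal usage sketch (illustrative; assumes an elasticsearch-py client and
# that query/aggregate objects come from this package's DSL helpers):
#
# from elasticsearch import Elasticsearch
#
# q = ElasticQuery(es=Elasticsearch(), index='tweets', doc_type='tweet')
# q.size(10).sort('created_at', order='desc')
# print(q.json(indent=4))  # inspect the request body without executing
# results = q.get()        # or execute against the attached client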
| Fizzadar/ElasticQuery | elasticquery/elasticquery.py | Python | mit | 4,341 |
from yuuhpizzakebab import app, admin_required, login_required
from .models import Topping
from flask import render_template, session, redirect, url_for, request
@app.route('/toppings', methods=['GET'])
@admin_required
def list_toppings():
"""Shows a list of toppings"""
return render_template('topping/toppings.html', toppings=Topping.get_all())
@app.route('/topping/create', methods=['GET', 'POST'])
@admin_required
def create_topping():
"""Creates a new drink.
Administrator rights required.
Creates a new topping with POST.
Shows a form to fill with GET.
"""
if request.method == 'POST':
name = request.form['topping_name']
price = request.form['topping_price']
t = Topping(None, name, price)
t.save()
return redirect(url_for('list_toppings'))
return render_template('topping/edit_topping.html')
@app.route('/topping/edit/<int:topping_id>', methods=['GET', 'POST'])
@admin_required
def edit_topping(topping_id):
"""Edits a topping.
arguments:
topping_id -- id of the topping
Saves the information with POST.
Shows a form to edit the contents with GET.
"""
if request.method == 'POST':
name = request.form['topping_name']
price = request.form['topping_price']
t = Topping(topping_id, name, price)
t.save()
return redirect(url_for('list_toppings'))
topping = Topping.get_by_id(topping_id)
if not topping:
return redirect(url_for('list_toppings'))
return render_template('topping/edit_topping.html', topping=topping)
@app.route('/topping/delete/<int:topping_id>', methods=['GET'])
@admin_required
def delete_topping(topping_id):
"""Deletes a topping.
arguments:
topping_id -- id of the topping
"""
Topping.delete_by_id(topping_id)
return redirect(url_for('list_toppings'))
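# Illustrative request flow for the routes above (paths as registered):
#   GET  /toppings          -> admin-only list of all toppings
#   POST /topping/create    -> form fields: topping_name, topping_price
#   POST /topping/edit/3    -> update topping 3 with the same form fields
#   GET  /topping/delete/3  -> delete topping 3, then redirect to the list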
| lex/yuuh-pizza-kebab | yuuhpizzakebab/topping/views.py | Python | bsd-2-clause | 1,884 |
from django.contrib import admin
from il.models import Agency
class AgencyAdmin(admin.ModelAdmin):
list_display = ('name', 'census_profile_id')
admin.site.register(Agency, AgencyAdmin)
| OpenDataPolicingNC/Traffic-Stops | il/admin.py | Python | mit | 193 |
# This file is part of BurnMan - a thermoelastic and thermodynamic toolkit for the Earth and Planetary Sciences
# Copyright (C) 2012 - 2017 by the BurnMan team, released under the GNU
# GPL v2 or later.
"""
HP_2011_fluids
^^^^^^^^^^^^^^
Fluids from Holland and Powell 2011 and references therein.
CORK parameters:
CHO gases from Holland and Powell, 1991. ["CO2",304.2,0.0738],["CH4",190.6,0.0460],["H2",41.2,0.0211],["CO",132.9,0.0350]
H2O and S2 from Wikipedia, 2012/10/23. ["H2O",647.096,0.22060],["S2",1314.00,0.21000]
H2S from ancyclopedia.airliquide.com, 2012/10/23. ["H2S",373.15,0.08937]
NB: Units for cork[i] in Holland and Powell datasets are
a = kJ^2/kbar*K^(1/2)/mol^2 -> multiply by 1e-2
b = kJ/kbar/mol -> multiply by 1e-5
c = kJ/kbar^1.5/mol -> multiply by 1e-9
d = kJ/kbar^2/mol -> multiply by 1e-13
Individual terms are divided through by P, P, P^1.5, P^2, so
[0][j] -> multiply by 1e6
[1][j] -> multiply by 1e3
[2][j] -> multiply by 1e3
[3][j] -> multiply by 1e3
cork_P: kbar -> multiply by 1e8
"""
from __future__ import absolute_import
from ..mineral import Mineral
from ..processchemistry import dictionarize_formula, formula_mass
class CO2 (Mineral):
def __init__(self):
formula = 'CO2'
formula = dictionarize_formula(formula)
self.params = {
'name': 'carbon dioxide',
'formula': formula,
'equation_of_state': 'cork',
'cork_params': [[5.45963e1, -8.63920e0], [9.18301e-1], [-3.30558e-2, 2.30524e-3], [6.93054e-4, -8.38293e-5]],
'cork_T': 304.2,
'cork_P': 0.0738e8,
'H_0': -393.51e3,
'S_0': 213.7,
'Cp': [87.8, -2.644e-3, 706.4e3, -998.9],
'n': sum(formula.values()),
'molar_mass': formula_mass(formula)}
Mineral.__init__(self)
class CH4 (Mineral):
def __init__(self):
formula = 'CH4'
formula = dictionarize_formula(formula)
self.params = {
'name': 'methane',
'formula': formula,
'equation_of_state': 'cork',
'cork_params': [[5.45963e1, -8.63920e0], [9.18301e-1], [-3.30558e-2, 2.30524e-3], [6.93054e-4, -8.38293e-5]],
'cork_T': 190.6,
'cork_P': 0.0460e8,
'H_0': -74.81e3,
'S_0': 186.26,
'Cp': [150.1, 0.002063, 3427700., -2650.4],
'n': sum(formula.values()),
'molar_mass': formula_mass(formula)}
Mineral.__init__(self)
class O2 (Mineral):
def __init__(self):
formula = 'O2'
formula = dictionarize_formula(formula)
self.params = {
'name': 'oxygen',
'formula': formula,
'equation_of_state': 'cork',
'cork_params': [[5.45963e1, -8.63920e0], [9.18301e-1], [-3.30558e-2, 2.30524e-3], [6.93054e-4, -8.38293e-5]],
'cork_T': 0.,
'cork_P': 1.0e5,
'H_0': 0.,
'S_0': 205.2,
'Cp': [48.3, -0.000691, 499200., -420.7],
'n': sum(formula.values()),
'molar_mass': formula_mass(formula)}
Mineral.__init__(self)
class H2 (Mineral):
def __init__(self):
formula = 'H2'
formula = dictionarize_formula(formula)
self.params = {
'name': 'hydrogen',
'formula': formula,
'equation_of_state': 'cork',
'cork_params': [[5.45963e1, -8.63920e0], [9.18301e-1], [-3.30558e-2, 2.30524e-3], [6.93054e-4, -8.38293e-5]],
'cork_T': 41.2,
'cork_P': 0.0211e8,
'H_0': 0.,
'S_0': 130.7,
'Cp': [23.3, 0.004627, 0.0, 76.3],
'n': sum(formula.values()),
'molar_mass': formula_mass(formula)}
Mineral.__init__(self)
class S2 (Mineral):
def __init__(self):
formula = 'S2'
formula = dictionarize_formula(formula)
self.params = {
'name': 'sulfur',
'formula': formula,
'equation_of_state': 'cork',
'cork_params': [[5.45963e1, -8.63920e0], [9.18301e-1], [-3.30558e-2, 2.30524e-3], [6.93054e-4, -8.38293e-5]],
'cork_T': 1314.00,
'cork_P': 0.21000e8,
'H_0': 128.54e3,
'S_0': 231.0,
'Cp': [37.1, 0.002398, -161000.0, -65.0],
'n': sum(formula.values()),
'molar_mass': formula_mass(formula)}
Mineral.__init__(self)
class H2S (Mineral):
def __init__(self):
formula = 'H2S'
formula = dictionarize_formula(formula)
self.params = {
'name': 'hydrogen sulfide',
'formula': formula,
'equation_of_state': 'cork',
'cork_params': [[5.45963e1, -8.63920e0], [9.18301e-1], [-3.30558e-2, 2.30524e-3], [6.93054e-4, -8.38293e-5]],
'cork_T': 373.15,
'cork_P': 0.08937e8,
'H_0': 128.54e3,
'S_0': 231.0,
'Cp': [47.4, 0.010240, 615900., -397.8],
'n': sum(formula.values()),
'molar_mass': formula_mass(formula)}
Mineral.__init__(self)
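# Minimal usage sketch (illustrative; assumes the full burnman package, whose
# Mineral base class provides set_state(pressure, temperature) in Pa and K):
#
# co2 = CO2()
# co2.set_state(1.e9, 1000.)
# print(co2.params['molar_mass'])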
| CaymanUnterborn/burnman | burnman/minerals/HP_2011_fluids.py | Python | gpl-2.0 | 5,104 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2016, Eric D Helms <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: katello
short_description: Manage Katello Resources
deprecated:
removed_in: "2.12"
why: "Replaced by re-designed individual modules living at https://github.com/theforeman/foreman-ansible-modules"
alternative: https://github.com/theforeman/foreman-ansible-modules
description:
- Allows the management of Katello resources inside your Foreman server.
version_added: "2.3"
author:
- Eric D Helms (@ehelms)
requirements:
- nailgun >= 0.28.0
- python >= 2.6
- datetime
options:
server_url:
description:
- URL of Foreman server.
required: true
username:
description:
- Username on Foreman server.
required: true
password:
description:
- Password for user accessing Foreman server.
required: true
entity:
description:
      - The Foreman resource that the action will be performed on (e.g. product, repository).
choices:
- repository
- manifest
- repository_set
- sync_plan
- content_view
- lifecycle_environment
- activation_key
- product
required: true
action:
description:
- action associated to the entity resource to set or edit in dictionary format.
      - Possible actions in relation to entities.
- "sync (available when entity=product or entity=repository)"
- "publish (available when entity=content_view)"
- "promote (available when entity=content_view)"
choices:
- sync
- publish
- promote
required: false
params:
description:
- Parameters associated to the entity resource and action, to set or edit in dictionary format.
      - Each choice may only be available with specific entities and actions.
- "Possible Choices are in the format of param_name ([entry,action,action,...],[entity,..],...)."
- The action "None" means no action specified.
- Possible Params in relation to entity and action.
- "name ([product,sync,None], [repository,sync], [repository_set,None], [sync_plan,None],"
- "[content_view,promote,publish,None], [lifecycle_environment,None], [activation_key,None])"
- "organization ([product,sync,None] ,[repository,sync,None], [repository_set,None], [sync_plan,None], "
- "[content_view,promote,publish,None], [lifecycle_environment,None], [activation_key,None])"
- "content ([manifest,None])"
- "product ([repository,sync,None], [repository_set,None], [sync_plan,None])"
- "basearch ([repository_set,None])"
- "releaserver ([repository_set,None])"
- "sync_date ([sync_plan,None])"
- "interval ([sync_plan,None])"
- "repositories ([content_view,None])"
- "from_environment ([content_view,promote])"
- "to_environment([content_view,promote])"
- "prior ([lifecycle_environment,None])"
- "content_view ([activation_key,None])"
- "lifecycle_environment ([activation_key,None])"
required: true
task_timeout:
description:
- The timeout in seconds to wait for the started Foreman action to finish.
      - If the timeout is reached and the Foreman action did not complete, the Ansible task fails. However, the Foreman action does not get canceled.
default: 1000
version_added: "2.7"
required: false
verify_ssl:
description:
      - verify the ssl/https connection (e.g. for a valid certificate)
default: false
type: bool
required: false
'''
EXAMPLES = '''
---
# Simple Example:
- name: Create Product
katello:
username: admin
password: admin
server_url: https://fakeserver.com
entity: product
params:
name: Centos 7
delegate_to: localhost
# Abstraction Example:
# katello.yml
---
- name: "{{ name }}"
katello:
username: admin
password: admin
server_url: https://fakeserver.com
entity: "{{ entity }}"
params: "{{ params }}"
delegate_to: localhost
# tasks.yml
---
- include: katello.yml
vars:
name: Create Dev Environment
entity: lifecycle_environment
params:
name: Dev
prior: Library
organization: Default Organization
- include: katello.yml
vars:
name: Create Centos Product
entity: product
params:
name: Centos 7
organization: Default Organization
- include: katello.yml
vars:
name: Create 7.2 Repository
entity: repository
params:
name: Centos 7.2
product: Centos 7
organization: Default Organization
content_type: yum
url: http://mirror.centos.org/centos/7/os/x86_64/
- include: katello.yml
vars:
name: Create Centos 7 View
entity: content_view
params:
name: Centos 7 View
organization: Default Organization
repositories:
- name: Centos 7.2
product: Centos 7
- include: katello.yml
vars:
name: Enable RHEL Product
entity: repository_set
params:
name: Red Hat Enterprise Linux 7 Server (RPMs)
product: Red Hat Enterprise Linux Server
organization: Default Organization
basearch: x86_64
releasever: 7
- include: katello.yml
vars:
    name: Promote Contentview Environment with longer timeout
task_timeout: 10800
entity: content_view
action: promote
params:
name: MyContentView
organization: MyOrganisation
from_environment: Testing
to_environment: Production
# Best Practices
# In Foreman, things can be done in parallel.
# When a conflicting action is already running,
# the task will fail instantly instead of waiting for the already running action to complete.
# So you should use an "until success" loop to catch this.
- name: Promote Contentview Environment with increased Timeout
katello:
username: ansibleuser
password: supersecret
task_timeout: 10800
entity: content_view
action: promote
params:
name: MyContentView
organization: MyOrganisation
from_environment: Testing
to_environment: Production
register: task_result
until: task_result is success
retries: 9
delay: 120
'''
RETURN = '''# '''
import datetime
import os
import traceback
try:
from nailgun import entities, entity_fields, entity_mixins
from nailgun.config import ServerConfig
HAS_NAILGUN_PACKAGE = True
except Exception:
HAS_NAILGUN_PACKAGE = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
class NailGun(object):
def __init__(self, server, entities, module, task_timeout):
self._server = server
self._entities = entities
self._module = module
entity_mixins.TASK_TIMEOUT = task_timeout
def find_organization(self, name, **params):
org = self._entities.Organization(self._server, name=name, **params)
response = org.search(set(), {'search': 'name={0}'.format(name)})
if len(response) == 1:
return response[0]
else:
self._module.fail_json(msg="No organization found for %s" % name)
def find_lifecycle_environment(self, name, organization):
org = self.find_organization(organization)
lifecycle_env = self._entities.LifecycleEnvironment(self._server, name=name, organization=org)
response = lifecycle_env.search()
if len(response) == 1:
return response[0]
else:
self._module.fail_json(msg="No Lifecycle Found found for %s" % name)
def find_product(self, name, organization):
org = self.find_organization(organization)
product = self._entities.Product(self._server, name=name, organization=org)
response = product.search()
if len(response) == 1:
return response[0]
else:
self._module.fail_json(msg="No Product found for %s" % name)
def find_repository(self, name, product, organization):
product = self.find_product(product, organization)
repository = self._entities.Repository(self._server, name=name, product=product)
repository._fields['organization'] = entity_fields.OneToOneField(entities.Organization)
repository.organization = product.organization
response = repository.search()
if len(response) == 1:
return response[0]
else:
self._module.fail_json(msg="No Repository found for %s" % name)
def find_content_view(self, name, organization):
org = self.find_organization(organization)
content_view = self._entities.ContentView(self._server, name=name, organization=org)
response = content_view.search()
if len(response) == 1:
return response[0]
else:
self._module.fail_json(msg="No Content View found for %s" % name)
def organization(self, params):
name = params['name']
del params['name']
org = self.find_organization(name, **params)
if org:
org = self._entities.Organization(self._server, name=name, id=org.id, **params)
org.update()
else:
org = self._entities.Organization(self._server, name=name, **params)
org.create()
return True
def manifest(self, params):
org = self.find_organization(params['organization'])
params['organization'] = org.id
        with open(os.getcwd() + params['content'], 'r') as content_file:
            content = content_file.read()
manifest = self._entities.Subscription(self._server)
try:
manifest.upload(
data={'organization_id': org.id},
files={'content': content}
)
return True
except Exception as e:
if "Import is the same as existing data" in e.message:
return False
else:
self._module.fail_json(msg="Manifest import failed with %s" % to_native(e),
exception=traceback.format_exc())
def product(self, params):
org = self.find_organization(params['organization'])
params['organization'] = org.id
product = self._entities.Product(self._server, **params)
response = product.search()
if len(response) == 1:
product.id = response[0].id
product.update()
else:
product.create()
return True
def sync_product(self, params):
org = self.find_organization(params['organization'])
product = self.find_product(params['name'], org.name)
return product.sync()
def repository(self, params):
product = self.find_product(params['product'], params['organization'])
params['product'] = product.id
del params['organization']
repository = self._entities.Repository(self._server, **params)
repository._fields['organization'] = entity_fields.OneToOneField(entities.Organization)
repository.organization = product.organization
response = repository.search()
if len(response) == 1:
repository.id = response[0].id
repository.update()
else:
repository.create()
return True
def sync_repository(self, params):
org = self.find_organization(params['organization'])
repository = self.find_repository(params['name'], params['product'], org.name)
return repository.sync()
def repository_set(self, params):
product = self.find_product(params['product'], params['organization'])
del params['product']
del params['organization']
if not product:
return False
else:
reposet = self._entities.RepositorySet(self._server, product=product, name=params['name'])
reposet = reposet.search()[0]
formatted_name = [params['name'].replace('(', '').replace(')', '')]
formatted_name.append(params['basearch'])
if 'releasever' in params:
formatted_name.append(params['releasever'])
formatted_name = ' '.join(formatted_name)
repository = self._entities.Repository(self._server, product=product, name=formatted_name)
repository._fields['organization'] = entity_fields.OneToOneField(entities.Organization)
repository.organization = product.organization
repository = repository.search()
if len(repository) == 0:
if 'releasever' in params:
reposet.enable(data={'basearch': params['basearch'], 'releasever': params['releasever']})
else:
reposet.enable(data={'basearch': params['basearch']})
return True
def sync_plan(self, params):
org = self.find_organization(params['organization'])
params['organization'] = org.id
params['sync_date'] = datetime.datetime.strptime(params['sync_date'], "%H:%M")
products = params['products']
del params['products']
sync_plan = self._entities.SyncPlan(
self._server,
name=params['name'],
organization=org
)
response = sync_plan.search()
sync_plan.sync_date = params['sync_date']
sync_plan.interval = params['interval']
if len(response) == 1:
sync_plan.id = response[0].id
sync_plan.update()
else:
response = sync_plan.create()
sync_plan.id = response[0].id
if products:
ids = []
for name in products:
product = self.find_product(name, org.name)
ids.append(product.id)
sync_plan.add_products(data={'product_ids': ids})
return True
def content_view(self, params):
org = self.find_organization(params['organization'])
content_view = self._entities.ContentView(self._server, name=params['name'], organization=org)
response = content_view.search()
if len(response) == 1:
content_view.id = response[0].id
content_view.update()
else:
content_view = content_view.create()
if params['repositories']:
repos = []
for repository in params['repositories']:
repository = self.find_repository(repository['name'], repository['product'], org.name)
repos.append(repository)
content_view.repository = repos
content_view.update(['repository'])
def find_content_view_version(self, name, organization, environment):
env = self.find_lifecycle_environment(environment, organization)
content_view = self.find_content_view(name, organization)
content_view_version = self._entities.ContentViewVersion(self._server, content_view=content_view)
response = content_view_version.search(['content_view'], {'environment_id': env.id})
if len(response) == 1:
return response[0]
else:
self._module.fail_json(msg="No Content View version found for %s" % response)
def publish(self, params):
content_view = self.find_content_view(params['name'], params['organization'])
return content_view.publish()
def promote(self, params):
to_environment = self.find_lifecycle_environment(params['to_environment'], params['organization'])
version = self.find_content_view_version(params['name'], params['organization'], params['from_environment'])
data = {'environment_id': to_environment.id}
return version.promote(data=data)
def lifecycle_environment(self, params):
org = self.find_organization(params['organization'])
prior_env = self.find_lifecycle_environment(params['prior'], params['organization'])
lifecycle_env = self._entities.LifecycleEnvironment(self._server, name=params['name'], organization=org, prior=prior_env)
response = lifecycle_env.search()
if len(response) == 1:
lifecycle_env.id = response[0].id
lifecycle_env.update()
else:
lifecycle_env.create()
return True
def activation_key(self, params):
org = self.find_organization(params['organization'])
activation_key = self._entities.ActivationKey(self._server, name=params['name'], organization=org)
response = activation_key.search()
if len(response) == 1:
activation_key.id = response[0].id
activation_key.update()
else:
activation_key.create()
if params['content_view']:
content_view = self.find_content_view(params['content_view'], params['organization'])
lifecycle_environment = self.find_lifecycle_environment(params['lifecycle_environment'], params['organization'])
activation_key.content_view = content_view
activation_key.environment = lifecycle_environment
activation_key.update()
return True
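# Illustrative dispatch note (derived from main() below): for
# entity='content_view' with action='promote', params must carry name,
# organization, from_environment and to_environment; promote() resolves the
# version via find_content_view_version() before calling version.promote().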
def main():
module = AnsibleModule(
argument_spec=dict(
server_url=dict(type='str', required=True),
username=dict(type='str', required=True, no_log=True),
password=dict(type='str', required=True, no_log=True),
entity=dict(type='str', required=True,
choices=['repository', 'manifest', 'repository_set', 'sync_plan',
'content_view', 'lifecycle_environment', 'activation_key', 'product']),
action=dict(type='str', choices=['sync', 'publish', 'promote']),
verify_ssl=dict(type='bool', default=False),
task_timeout=dict(type='int', default=1000),
params=dict(type='dict', required=True, no_log=True),
),
supports_check_mode=True,
)
if not HAS_NAILGUN_PACKAGE:
module.fail_json(msg="Missing required nailgun module (check docs or install with: pip install nailgun")
server_url = module.params['server_url']
username = module.params['username']
password = module.params['password']
entity = module.params['entity']
action = module.params['action']
params = module.params['params']
verify_ssl = module.params['verify_ssl']
task_timeout = module.params['task_timeout']
server = ServerConfig(
url=server_url,
auth=(username, password),
verify=verify_ssl
)
ng = NailGun(server, entities, module, task_timeout)
    # Let's make a connection to the server with username and password
try:
org = entities.Organization(server)
org.search()
except Exception as e:
module.fail_json(msg="Failed to connect to Foreman server: %s " % e)
result = False
if entity == 'product':
if action == 'sync':
result = ng.sync_product(params)
else:
result = ng.product(params)
elif entity == 'repository':
if action == 'sync':
result = ng.sync_repository(params)
else:
result = ng.repository(params)
elif entity == 'manifest':
result = ng.manifest(params)
elif entity == 'repository_set':
result = ng.repository_set(params)
elif entity == 'sync_plan':
result = ng.sync_plan(params)
elif entity == 'content_view':
if action == 'publish':
result = ng.publish(params)
elif action == 'promote':
result = ng.promote(params)
else:
result = ng.content_view(params)
elif entity == 'lifecycle_environment':
result = ng.lifecycle_environment(params)
elif entity == 'activation_key':
result = ng.activation_key(params)
else:
module.fail_json(changed=False, result="Unsupported entity supplied")
module.exit_json(changed=result, result="%s updated" % entity)
if __name__ == '__main__':
main()
| tersmitten/ansible | lib/ansible/modules/remote_management/foreman/_katello.py | Python | gpl-3.0 | 20,771 |
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.contrib.go.targets.go_local_source import GoLocalSource
class GoLibrary(GoLocalSource):
"""Represents a local Go package."""
@classmethod
def alias(cls):
return 'go_library'
| megaserg/pants | contrib/go/src/python/pants/contrib/go/targets/go_library.py | Python | apache-2.0 | 498 |