| code (stringlengths 3-1.05M) | repo_name (stringlengths 5-104) | path (stringlengths 4-251) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 3-1.05M) |
---|---|---|---|---|---|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from auto_nag.bzcleaner import BzCleaner
class DupeMe(BzCleaner):
def description(self):
return "Closed bugs with dupeme keyword"
def get_bz_params(self, date):
days_lookup = self.get_config("days_lookup", default=180)
params = {
"bug_status": ["RESOLVED", "VERIFIED", "CLOSED"],
"f1": "keywords",
"o1": "casesubstring",
"v1": "dupeme",
"f2": "days_elapsed",
"o2": "lessthan",
"v2": days_lookup,
}
return params
def get_autofix_change(self):
return {"keywords": {"remove": ["dupeme"]}}
if __name__ == "__main__":
DupeMe().run()
| mozilla/relman-auto-nag | auto_nag/scripts/closed_dupeme.py | Python | bsd-3-clause | 883 |
import matplotlib
from matplotlib.testing.conftest import * # noqa
def pytest_configure(config):
for key, value in [
("markers", "flaky: (Provided by pytest-rerunfailures.)"),
("markers", "timeout: (Provided by pytest-timeout.)"),
("markers", "backend: Set alternate Matplotlib backend temporarily."),
("markers", "style: Set alternate Matplotlib style temporarily."),
("markers", "baseline_images: Compare output against references."),
("markers", "pytz: Tests that require pytz to be installed."),
]:
config.addinivalue_line(key, value)
matplotlib.use("agg", force=True)
matplotlib._called_from_pytest = True
matplotlib._init_tests()
| dfm/daft | test/conftest.py | Python | mit | 716 |
"""Support for Niko Home Control."""
from __future__ import annotations
from datetime import timedelta
import logging
import nikohomecontrol
import voluptuous as vol
# Import the device class from the component that you want to support
from homeassistant.components.light import ATTR_BRIGHTNESS, PLATFORM_SCHEMA, LightEntity
from homeassistant.const import CONF_HOST
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import PlatformNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=1)
SCAN_INTERVAL = timedelta(seconds=30)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Required(CONF_HOST): cv.string})
async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Niko Home Control light platform."""
host = config[CONF_HOST]
try:
nhc = nikohomecontrol.NikoHomeControl(
{"ip": host, "port": 8000, "timeout": 20000}
)
niko_data = NikoHomeControlData(hass, nhc)
await niko_data.async_update()
except OSError as err:
_LOGGER.error("Unable to access %s (%s)", host, err)
raise PlatformNotReady from err
async_add_entities(
[NikoHomeControlLight(light, niko_data) for light in nhc.list_actions()], True
)
class NikoHomeControlLight(LightEntity):
"""Representation of an Niko Light."""
def __init__(self, light, data):
"""Set up the Niko Home Control light platform."""
self._data = data
self._light = light
self._unique_id = f"light-{light.id}"
self._name = light.name
self._state = light.is_on
self._brightness = None
@property
def unique_id(self):
"""Return unique ID for light."""
return self._unique_id
@property
def name(self):
"""Return the display name of this light."""
return self._name
@property
def brightness(self):
"""Return the brightness of the light."""
return self._brightness
@property
def is_on(self):
"""Return true if light is on."""
return self._state
def turn_on(self, **kwargs):
"""Instruct the light to turn on."""
self._light.brightness = kwargs.get(ATTR_BRIGHTNESS, 255)
_LOGGER.debug("Turn on: %s", self.name)
self._light.turn_on()
def turn_off(self, **kwargs):
"""Instruct the light to turn off."""
_LOGGER.debug("Turn off: %s", self.name)
self._light.turn_off()
async def async_update(self):
"""Get the latest data from NikoHomeControl API."""
await self._data.async_update()
self._state = self._data.get_state(self._light.id)
class NikoHomeControlData:
"""The class for handling data retrieval."""
def __init__(self, hass, nhc):
"""Set up Niko Home Control Data object."""
self._nhc = nhc
self.hass = hass
self.available = True
self.data = {}
self._system_info = None
@Throttle(MIN_TIME_BETWEEN_UPDATES)
async def async_update(self):
"""Get the latest data from the NikoHomeControl API."""
_LOGGER.debug("Fetching async state in bulk")
try:
self.data = await self.hass.async_add_executor_job(
self._nhc.list_actions_raw
)
self.available = True
except OSError as ex:
_LOGGER.error("Unable to retrieve data from Niko, %s", str(ex))
self.available = False
def get_state(self, aid):
"""Find and filter state based on action id."""
for state in self.data:
if state["id"] == aid:
return state["value1"] != 0
_LOGGER.error("Failed to retrieve state off unknown light")
| rohitranjan1991/home-assistant | homeassistant/components/niko_home_control/light.py | Python | mit | 4,127 |
# Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from tempest_lib import decorators
from tempest import clients # noqa
from tempest import config # noqa
from tempest import exceptions # noqa
from tempest.scenario import manager_congress # noqa
from tempest import test # noqa
CONF = config.CONF
LOG = logging.getLogger(__name__)
class TestGlanceV2Driver(manager_congress.ScenarioPolicyBase):
@classmethod
def check_preconditions(cls):
super(TestGlanceV2Driver, cls).check_preconditions()
if not (CONF.network.tenant_networks_reachable
or CONF.network.public_network_id):
msg = ('Either tenant_networks_reachable must be "true", or '
'public_network_id must be defined.')
cls.enabled = False
raise cls.skipException(msg)
def setUp(cls):
super(TestGlanceV2Driver, cls).setUp()
if not CONF.service_available.glance:
skip_msg = ("%s skipped as glance is not available" % cls.__name__)
raise cls.skipException(skip_msg)
cls.os = clients.Manager()
cls.glancev2 = cls.os.image_client_v2
cls.datasource_id = manager_congress.get_datasource_id(
cls.admin_manager.congress_client, 'glancev2')
@decorators.skip_because(bug='1486246')
@test.attr(type='smoke')
@test.services('image')
def test_glancev2_images_table(self):
image_schema = (
self.admin_manager.congress_client.show_datasource_table_schema(
self.datasource_id, 'images')['columns'])
image_id_col = next(i for i, c in enumerate(image_schema)
if c['name'] == 'id')
def _check_data_table_glancev2_images():
# Fetch data from glance each time, because this test may start
# before glance has all the users.
images = self.glancev2.list_images()
image_map = {}
for image in images:
image_map[image['id']] = image
results = (
self.admin_manager.congress_client.list_datasource_rows(
self.datasource_id, 'images'))
for row in results['results']:
try:
image_row = image_map[row['data'][image_id_col]]
except KeyError:
return False
for index in range(len(image_schema)):
# glancev2 doesn't return kernel_id/ramdisk_id if
# it isn't present...
if ((image_schema[index]['name'] == 'kernel_id' and
'kernel_id' not in row['data']) or
(image_schema[index]['name'] == 'ramdisk_id' and
'ramdisk_id' not in row['data'])):
continue
# FIXME(arosen): congress-server should retain the type
# but doesn't today.
if (str(row['data'][index]) !=
str(image_row[image_schema[index]['name']])):
return False
return True
if not test.call_until_true(func=_check_data_table_glancev2_images,
duration=100, sleep_for=4):
raise exceptions.TimeoutException("Data did not converge in time "
"or failure in server")
@decorators.skip_because(bug='1486246')
@test.attr(type='smoke')
@test.services('image')
def test_glancev2_tags_table(self):
def _check_data_table_glance_images():
# Fetch data from glance each time, because this test may start
# before glance has all the users.
images = self.glancev2.list_images()
image_tag_map = {}
for image in images:
image_tag_map[image['id']] = image['tags']
results = (
self.admin_manager.congress_client.list_datasource_rows(
self.datasource_id, 'tags'))
for row in results['results']:
image_id, tag = row['data'][0], row['data'][1]
glance_image_tags = image_tag_map.get(image_id)
if not glance_image_tags:
# congress had image that glance doesn't know about.
return False
if tag not in glance_image_tags:
# congress had a tag that wasn't on the image.
return False
return True
if not test.call_until_true(func=_check_data_table_glance_images,
duration=100, sleep_for=5):
raise exceptions.TimeoutException("Data did not converge in time "
"or failure in server")
| ekcs/congress | contrib/tempest/tempest/scenario/congress_datasources/test_glancev2.py | Python | apache-2.0 | 5,461 |
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Registry definition for fixture datasets."""
from flask.ext.registry import RegistryProxy
from invenio.ext.registry import ModuleAutoDiscoveryRegistry
from invenio.utils.datastructures import LazyDict
fixtures_proxy = RegistryProxy(
'fixtures', ModuleAutoDiscoveryRegistry, 'fixtures')
def fixtures_loader():
"""Load fixtures datasets."""
out = {}
for fixture in fixtures_proxy:
for data in getattr(fixture, '__all__', dir(fixture)):
if data[-4:] != 'Data' or data in out:
continue
out[data] = getattr(fixture, data)
return out
fixtures = LazyDict(fixtures_loader)
| lnielsen/invenio | invenio/ext/fixtures/registry.py | Python | gpl-2.0 | 1,433 |
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
)
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.PBKDF2PasswordHasher',
)
LOGIN_REDIRECT_URL = '/'
LOGIN_URL = '/signin/'
LOGIN_ERROR_URL = '/signin/failed/'
AUTH_USER_MODEL = 'users.User'
| pinkevich/django-project-template | settings/auth.py | Python | mit | 276 |
# This file is part of ranger, the console file manager.
# License: GNU GPL version 3, see the file "AUTHORS" for details.
"""The TaskView allows you to modify what the loader is doing."""
from __future__ import (absolute_import, division, print_function)
from ranger.ext.accumulator import Accumulator
from . import Widget
class TaskView(Widget, Accumulator):
old_lst = None
def __init__(self, win):
Widget.__init__(self, win)
Accumulator.__init__(self)
self.scroll_begin = 0
def draw(self):
base_clr = []
base_clr.append('in_taskview')
lst = self.get_list()
if self.old_lst != lst:
self.old_lst = lst
self.need_redraw = True
if self.need_redraw:
self.win.erase()
if not self.pointer_is_synced():
self.sync_index()
if self.hei <= 0:
return
self.addstr(0, 0, "Task View")
self.color_at(0, 0, self.wid, tuple(base_clr), 'title')
if lst:
for i in range(self.hei - 1):
i += self.scroll_begin
try:
obj = lst[i]
except IndexError:
break
y = i + 1
clr = list(base_clr)
if self.pointer == i:
clr.append('selected')
descr = obj.get_description()
if obj.progressbar_supported and obj.percent >= 0 and obj.percent <= 100:
self.addstr(y, 0, "%3.2f%% - %s" % (obj.percent, descr), self.wid)
wid = int((self.wid / 100) * obj.percent)
self.color_at(y, 0, self.wid, tuple(clr))
self.color_at(y, 0, wid, tuple(clr), 'loaded')
else:
self.addstr(y, 0, descr, self.wid)
self.color_at(y, 0, self.wid, tuple(clr))
else:
if self.hei > 1:
self.addstr(1, 0, "No task in the queue.")
self.color_at(1, 0, self.wid, tuple(base_clr), 'error')
self.color_reset()
def finalize(self):
y = self.y + 1 + self.pointer - self.scroll_begin
self.fm.ui.win.move(y, self.x)
def task_remove(self, i=None):
if i is None:
i = self.pointer
if self.fm.loader.queue:
self.fm.loader.remove(index=i)
def task_move(self, to, i=None): # pylint: disable=invalid-name
if i is None:
i = self.pointer
self.fm.loader.move(pos_src=i, pos_dest=to)
def press(self, key):
self.fm.ui.keymaps.use_keymap('taskview')
self.fm.ui.press(key)
def get_list(self):
return self.fm.loader.queue
| ranger/ranger | ranger/gui/widgets/taskview.py | Python | gpl-3.0 | 2,876 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from cpt.packager import ConanMultiPackager
if __name__ == "__main__":
builder = ConanMultiPackager()
builder.add_common_builds(shared_option_name="libsolace:shared", pure_c=False)
builder.run()
| abbyssoul/libsolace | build-conan-package.py | Python | apache-2.0 | 256 |
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
from __future__ import print_function, division, unicode_literals, absolute_import
import os
import sys
import pytest
from nipype import config
from mock import MagicMock
from builtins import object
try:
import xvfbwrapper
has_Xvfb = True
except ImportError:
has_Xvfb = False
# Define mocks for xvfbwrapper. Do not forget the spec to ensure that
# hasattr() checks return False with missing attributes.
xvfbpatch = MagicMock(spec=['Xvfb'])
xvfbpatch.Xvfb.return_value = MagicMock(spec=['new_display', 'start', 'stop'],
new_display=2010)
# Mock the legacy xvfbwrapper.Xvfb class (changed display attribute name)
xvfbpatch_old = MagicMock(spec=['Xvfb'])
xvfbpatch_old.Xvfb.return_value = MagicMock(spec=['vdisplay_num', 'start', 'stop'],
vdisplay_num=2010)
@pytest.mark.parametrize('dispnum', range(5))
def test_display_config(monkeypatch, dispnum):
"""Check that the display_variable option is used ($DISPLAY not set)"""
config._display = None
dispstr = ':%d' % dispnum
config.set('execution', 'display_variable', dispstr)
monkeypatch.delitem(os.environ, 'DISPLAY', raising=False)
assert config.get_display() == config.get('execution', 'display_variable')
# Test that it was correctly cached
assert config.get_display() == config.get('execution', 'display_variable')
@pytest.mark.parametrize('dispnum', range(5))
def test_display_system(monkeypatch, dispnum):
"""Check that when only a $DISPLAY is defined, it is used"""
config._display = None
config._config.remove_option('execution', 'display_variable')
dispstr = ':%d' % dispnum
monkeypatch.setitem(os.environ, 'DISPLAY', dispstr)
assert config.get_display() == dispstr
# Test that it was correctly cached
assert config.get_display() == dispstr
def test_display_config_and_system(monkeypatch):
"""Check that when only both config and $DISPLAY are defined, the config takes precedence"""
config._display = None
dispstr = ':10'
config.set('execution', 'display_variable', dispstr)
monkeypatch.setitem(os.environ, 'DISPLAY', ':0')
assert config.get_display() == dispstr
# Test that it was correctly cached
assert config.get_display() == dispstr
def test_display_noconfig_nosystem_patched(monkeypatch):
"""Check that when no $DISPLAY nor option are specified, a virtual Xvfb is used"""
config._display = None
if config.has_option('execution', 'display_variable'):
config._config.remove_option('execution', 'display_variable')
monkeypatch.delitem(os.environ, 'DISPLAY', raising=False)
monkeypatch.setitem(sys.modules, 'xvfbwrapper', xvfbpatch)
assert config.get_display() == ":2010"
# Test that it was correctly cached
assert config.get_display() == ':2010'
def test_display_empty_patched(monkeypatch):
"""
Check that when $DISPLAY is empty string and no option is specified,
a virtual Xvfb is used
"""
config._display = None
if config.has_option('execution', 'display_variable'):
config._config.remove_option('execution', 'display_variable')
monkeypatch.setitem(os.environ, 'DISPLAY', '')
monkeypatch.setitem(sys.modules, 'xvfbwrapper', xvfbpatch)
assert config.get_display() == ':2010'
# Test that it was correctly cached
assert config.get_display() == ':2010'
def test_display_noconfig_nosystem_patched_oldxvfbwrapper(monkeypatch):
"""
Check that when no $DISPLAY nor option are specified,
a virtual Xvfb is used (with a legacy version of xvfbwrapper).
"""
config._display = None
if config.has_option('execution', 'display_variable'):
config._config.remove_option('execution', 'display_variable')
monkeypatch.delitem(os.environ, 'DISPLAY', raising=False)
monkeypatch.setitem(sys.modules, 'xvfbwrapper', xvfbpatch_old)
assert config.get_display() == ":2010"
# Test that it was correctly cached
assert config.get_display() == ':2010'
def test_display_empty_patched_oldxvfbwrapper(monkeypatch):
"""
Check that when $DISPLAY is empty string and no option is specified,
a virtual Xvfb is used (with a legacy version of xvfbwrapper).
"""
config._display = None
if config.has_option('execution', 'display_variable'):
config._config.remove_option('execution', 'display_variable')
monkeypatch.setitem(os.environ, 'DISPLAY', '')
monkeypatch.setitem(sys.modules, 'xvfbwrapper', xvfbpatch_old)
assert config.get_display() == ':2010'
# Test that it was correctly cached
assert config.get_display() == ':2010'
def test_display_noconfig_nosystem_notinstalled(monkeypatch):
"""
Check that an exception is raised if xvfbwrapper is not installed
but necessary (no config and $DISPLAY unset)
"""
config._display = None
if config.has_option('execution', 'display_variable'):
config._config.remove_option('execution', 'display_variable')
monkeypatch.delitem(os.environ, 'DISPLAY', raising=False)
monkeypatch.setitem(sys.modules, 'xvfbwrapper', None)
with pytest.raises(RuntimeError):
config.get_display()
def test_display_empty_notinstalled(monkeypatch):
"""
Check that an exception is raised if xvfbwrapper is not installed
but necessary (no config and $DISPLAY empty)
"""
config._display = None
if config.has_option('execution', 'display_variable'):
config._config.remove_option('execution', 'display_variable')
monkeypatch.setitem(os.environ, 'DISPLAY', '')
monkeypatch.setitem(sys.modules, 'xvfbwrapper', None)
with pytest.raises(RuntimeError):
config.get_display()
@pytest.mark.skipif(not has_Xvfb, reason='xvfbwrapper not installed')
def test_display_noconfig_nosystem_installed(monkeypatch):
"""
Check that actually uses xvfbwrapper when installed (not mocked)
and necessary (no config and $DISPLAY unset)
"""
config._display = None
if config.has_option('execution', 'display_variable'):
config._config.remove_option('execution', 'display_variable')
monkeypatch.delitem(os.environ, 'DISPLAY', raising=False)
newdisp = config.get_display()
assert int(newdisp.split(':')[-1]) > 1000
# Test that it was correctly cached
assert config.get_display() == newdisp
@pytest.mark.skipif(not has_Xvfb, reason='xvfbwrapper not installed')
def test_display_empty_installed(monkeypatch):
"""
Check that actually uses xvfbwrapper when installed (not mocked)
and necessary (no config and $DISPLAY empty)
"""
config._display = None
if config.has_option('execution', 'display_variable'):
config._config.remove_option('execution', 'display_variable')
monkeypatch.setitem(os.environ, 'DISPLAY', '')
newdisp = config.get_display()
assert int(newdisp.split(':')[-1]) > 1000
# Test that it was correctly cached
assert config.get_display() == newdisp
def test_display_empty_macosx(monkeypatch):
"""
Check that an exception is raised if xvfbwrapper is necessary
(no config and $DISPLAY unset) but platform is OSX. See
https://github.com/nipy/nipype/issues/1400
"""
config._display = None
if config.has_option('execution', 'display_variable'):
config._config.remove_option('execution', 'display_variable')
monkeypatch.delitem(os.environ, 'DISPLAY', '')
monkeypatch.setattr(sys, 'platform', 'darwin')
with pytest.raises(RuntimeError):
config.get_display()
| mick-d/nipype | nipype/utils/tests/test_config.py | Python | bsd-3-clause | 7,682 |
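# Solution to AtCoder ABC 124 problem D (inferred from the path at/abc124/d.py).
# The code below computes the longest run of 1s obtainable from a binary string
# of length n by turning at most k maximal runs of 0s into 1s: it pads the string
# with '1' on both ends, run-length encodes it into (start, end, count) segments,
# and slides a window of 2k + 1 alternating segments (i.e. at most k zero-runs)
# over the encoding using prefix sums, subtracting a padding character whenever
# the window reaches an end of the string. If k covers every zero-run, the whole
# string can be made 1s and the answer is the total length minus the two pads.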
read = input
n,k = map(int, read().split())
a = read()
a = '1' + a + '1'
# -1 if use 0, -1 if use len(a) -1
pstart = 0
pcnt = 0
ptype = '1'
b = []
# (start, end, cnt)
for tp in enumerate(a):
i, c = tp
if ptype == c:
pcnt = pcnt + 1
else :
b.append((pstart, i-1, pcnt))
pstart = i
ptype = c
pcnt = 1
b.append((pstart, i, pcnt))
# print(b)
if k >= len(b)//2:
print(sum(map(lambda tp:tp[2], b)) - 2)
else :
start = 0
end = k * 2
ans = 0
S = [0 for i in range(len(b)+10)]
S[0] = b[0][2]
for i in range(1, len(b)):
S[i] = S[i-1] + b[i][2]
while(end <= len(b) - 1):
tmp = S[end] - (S[start - 1] if start - 1>=0 else 0)
if b[start][0] == 0:
tmp = tmp - 1
if b[end][1] == len(a)-1:
tmp = tmp -1
# print(start, end ,tmp)
ans = max(ans, tmp)
start = start + 2
end = end + 2
print(ans)
| xsthunder/acm | at/abc124/d.py | Python | mit | 951 |
from crum import get_current_user
from django.db.models import Exists, OuterRef
from django.conf import settings
from dojo.models import Dojo_Group, Dojo_Group_Member, Product_Group, Product_Type_Group
from dojo.authorization.authorization import get_roles_for_permission, role_has_permission, get_groups
from dojo.authorization.roles_permissions import Permissions
def get_authorized_groups(permission):
user = get_current_user()
if user is None:
return Dojo_Group.objects.none()
if user.is_superuser:
return Dojo_Group.objects.all().order_by('name')
if user.is_staff and settings.AUTHORIZATION_STAFF_OVERRIDE:
return Dojo_Group.objects.all().order_by('name')
if hasattr(user, 'global_role') and user.global_role.role is not None and role_has_permission(user.global_role.role.id, permission):
return Dojo_Group.objects.all().order_by('name')
for group in get_groups(user):
if hasattr(group, 'global_role') and group.global_role.role is not None and role_has_permission(group.global_role.role.id, permission):
return Dojo_Group.objects.all().order_by('name')
roles = get_roles_for_permission(permission)
authorized_roles = Dojo_Group_Member.objects.filter(group=OuterRef('pk'),
user=user,
role__in=roles)
groups = Dojo_Group.objects.annotate(user=Exists(authorized_roles)).order_by('name')
return groups.filter(user=True)
def get_authorized_group_members(permission):
user = get_current_user()
if user is None:
return Dojo_Group_Member.objects.none()
if user.is_superuser:
return Dojo_Group_Member.objects.all().select_related('role')
if user.is_staff and settings.AUTHORIZATION_STAFF_OVERRIDE:
return Dojo_Group_Member.objects.all().select_related('role')
if hasattr(user, 'global_role') and user.global_role.role is not None and role_has_permission(user.global_role.role.id, permission):
return Dojo_Group_Member.objects.all().select_related('role')
groups = get_authorized_groups(permission)
return Dojo_Group_Member.objects.filter(group__in=groups).select_related('role')
def get_authorized_group_members_for_user(user):
groups = get_authorized_groups(Permissions.Group_View)
group_members = Dojo_Group_Member.objects.filter(user=user, group__in=groups).order_by('group__name').select_related('role', 'group')
return group_members
def get_group_members_for_group(group):
return Dojo_Group_Member.objects.filter(group=group).select_related('role')
def get_product_groups_for_group(group):
return Product_Group.objects.filter(group=group).select_related('role')
def get_product_type_groups_for_group(group):
return Product_Type_Group.objects.filter(group=group).select_related('role')
| rackerlabs/django-DefectDojo | dojo/group/queries.py | Python | bsd-3-clause | 2,802 |
from django.views.generic import RedirectView
class IndexView(RedirectView):
url = 'derpeddit.github.com'
| superduper/derpeddit | core/views.py | Python | mit | 112 |
from __future__ import print_function
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
def h2o_H2OFrame_all():
"""
Python API test: h2o.frame.H2OFrame.all(), h2o.frame.H2OFrame.any()
"""
python_lists=[[True, False], [False, True], [True, True], [True, 'NA']]
h2oframe = h2o.H2OFrame(python_obj=python_lists, na_strings=['NA']) # contains true and false
    assert not(h2oframe.all()), "h2o.H2OFrame.all() command is not working." # not all elements are true
    assert h2oframe.any(), "h2o.H2OFrame.any() command is not working." # at least one element is true
h2o.remove(h2oframe)
python_lists=[[True, True], [True, True], [True, True], [True, 'NA']] # check with one boolean level only
    h2oframe = h2o.H2OFrame(python_obj=python_lists, na_strings=['NA']) # all elements are true or NA
assert h2oframe.all(), "h2o.H2OFrame.all() command is not working." # all elements are true or NA
assert h2oframe.any(), "h2o.H2OFrame.any() command is not working." # all elements are true or NA
h2o.remove(h2oframe)
python_lists=[[False, False], [False, False], [False, False], [False, 'NA']] # check with one boolean level only
    h2oframe = h2o.H2OFrame(python_obj=python_lists, na_strings=['NA']) # all elements are false or NA
    assert not(h2oframe.all()), "h2o.H2OFrame.all() command is not working." # all elements are false or NA
    assert h2oframe.any(), "h2o.H2OFrame.any() command is not working." # any() still passes for a frame of false/NA values
pyunit_utils.standalone_test(h2o_H2OFrame_all)
| michalkurka/h2o-3 | h2o-py/tests/testdir_jira/pyunit_pubdev_4987_any_all.py | Python | apache-2.0 | 1,556 |
import pilas
def test_todos_los_objetos_de_interfaz_se_pueden_crear():
pilas.iniciar()
deslizador = pilas.interfaz.Deslizador()
assert deslizador
assert deslizador.progreso == 0
boton = pilas.interfaz.Boton()
assert boton
ingreso = pilas.interfaz.IngresoDeTexto()
assert ingreso
try:
pilas.interfaz.ListaSeleccion()
except TypeError:
        assert True # This exception is expected because one argument is mandatory
lista = pilas.interfaz.ListaSeleccion([('uno')])
assert lista
try:
pilas.interfaz.Selector()
except TypeError:
        assert True # the "texto" argument is mandatory.
selector = pilas.interfaz.Selector("hola")
assert selector
| irvingprog/pilas | pilas/test/test_interface.py | Python | lgpl-3.0 | 736 |
"""
Django settings for eoncloud_web project.
Generated by 'django-admin startproject' using Django 1.8.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import logging
import logging.config
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '@ybbpz49p^1x#0&un2!8i4*!9k#dav&83l7sl-ib%)-$t3jyfj'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ["*",]
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'captcha',
'biz',
'biz.account',
'biz.idc',
'biz.instance',
'biz.image',
'biz.floating',
'biz.network',
'biz.lbaas',
'biz.volume',
'biz.workflow',
'cloud',
'render',
'frontend',
'eoncloud_web',
'biz.firewall',
'biz.forum',
'biz.backup',
]
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
#'django.middleware.security.SecurityMiddleware',
)
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.SessionAuthentication',
'rest_framework.authentication.BasicAuthentication'
),
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
),
}
ROOT_URLCONF = 'eoncloud_web.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.core.context_processors.media',
'django.core.context_processors.static',
],
},
},
]
WSGI_APPLICATION = 'eoncloud_web.wsgi.application'
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
LANGUAGE_CODE = 'zh_CN'
USE_I18N = True
USE_L10N = True
TIME_ZONE = 'Asia/Shanghai'
USE_TZ = False
FORMAT_MODULE_PATH = 'eoncloud_web.formats'
DATETIME_FORMAT="Y-m-d H:i"
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATIC_URL = '/static/'
LOGIN_URL = '/login'
LOG_PATH = BASE_DIR
LOG_CONFIG = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
"verbose": {
'format': '%(asctime)s %(levelname)s [Line: %(lineno)s] -- %(message)s',
"datefmt": "%Y-%m-%d %H:%M:%S"
}
},
'handlers': {
'default': {
'level': 'DEBUG',
'filters': None,
'class': 'logging.handlers.RotatingFileHandler',
'filename': '/var/log/eoncloud/eoncloud.log',
'formatter': 'verbose'
},
'cloud.tasks.handler': {
'level': 'DEBUG',
'filters': None,
'class': 'logging.handlers.RotatingFileHandler',
'filename': '/var/log/eoncloud/celery_task.log',
'formatter': 'verbose'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'root': {
'handlers': ['default'],
'level': 'DEBUG' if DEBUG else 'INFO',
'propagate': True,
},
'biz': {
'handlers': ['default'],
'level': 'DEBUG' if DEBUG else 'INFO',
'propagate': True,
},
'cloud.tasks': {
'handlers': ['cloud.tasks.handler'],
'level': 'DEBUG' if DEBUG else 'INFO',
'propagate': True,
},
'django.db.backends': {
'level': 'DEBUG',
            'handlers': ['console'],
},
}
}
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.tz',
'django.contrib.messages.context_processors.messages',
'eoncloud_web.context_processors.eoncloud'
)
logging.config.dictConfig(LOG_CONFIG)
from eoncloud_web.local.local_settings import *
CAPTCHA_IMAGE_BEFORE_FIELD = False
if LDAP_AUTH_ENABLED:
AUTHENTICATION_BACKENDS = (
'django_auth_ldap.backend.LDAPBackend',
'django.contrib.auth.backends.ModelBackend'
)
SITE_CONFIG = {
"QUOTA_CHECK": QUOTA_CHECK,
"MULTI_ROUTER_ENABLED": MULTI_ROUTER_ENABLED,
"BRAND": BRAND,
"ICP_NUMBER": ICP_NUMBER,
"WORKFLOW_ENABLED": WORKFLOW_ENABLED
}
| zhanghui9700/eonboard | eoncloud_web/eoncloud_web/settings.py | Python | apache-2.0 | 5,632 |
def hello():
print("hogehoge") | kenny-nelson/GomiBako | Libraries/Python/kiso/__init__.py | Python | mit | 31 |
#Convenience and complex routines that touch multiple MIB modules
from initialization import get_controller
from reflash import reflash_module, reflash_controller
| amcgee/pymomo | pymomo/commander/meta/__init__.py | Python | lgpl-3.0 | 163 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2010-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
from oslo.config import cfg
from glance.common import crypt
from glance.common import exception
import glance.domain
import glance.domain.proxy
from glance.openstack.common import importutils
db_opt = cfg.BoolOpt('use_tpool',
default=False,
help='Enable the use of thread pooling for '
'all DB API calls')
CONF = cfg.CONF
CONF.import_opt('metadata_encryption_key', 'glance.common.config')
CONF.register_opt(db_opt)
def get_api():
if not CONF.use_tpool:
return importutils.import_module(CONF.data_api)
return ThreadPoolWrapper(CONF.data_api)
def unwrap(db_api):
if not CONF.use_tpool:
return db_api
return db_api.unwrap()
# attributes common to all models
BASE_MODEL_ATTRS = set(['id', 'created_at', 'updated_at', 'deleted_at',
'deleted'])
IMAGE_ATTRS = BASE_MODEL_ATTRS | set(['name', 'status', 'size',
'disk_format', 'container_format',
'min_disk', 'min_ram', 'is_public',
'locations', 'checksum', 'owner',
'protected'])
class ImageRepo(object):
def __init__(self, context, db_api):
self.context = context
self.db_api = db_api
def get(self, image_id):
try:
db_api_image = dict(self.db_api.image_get(self.context, image_id))
assert not db_api_image['deleted']
except (exception.NotFound, exception.Forbidden, AssertionError):
msg = _("No image found with ID %s") % image_id
raise exception.NotFound(msg)
tags = self.db_api.image_tag_get_all(self.context, image_id)
image = self._format_image_from_db(db_api_image, tags)
return ImageProxy(image, self.context, self.db_api)
def list(self, marker=None, limit=None, sort_key='created_at',
sort_dir='desc', filters=None, member_status='accepted'):
db_api_images = self.db_api.image_get_all(
self.context, filters=filters, marker=marker, limit=limit,
sort_key=sort_key, sort_dir=sort_dir,
member_status=member_status)
images = []
for db_api_image in db_api_images:
tags = self.db_api.image_tag_get_all(self.context,
db_api_image['id'])
image = self._format_image_from_db(dict(db_api_image), tags)
images.append(image)
return images
def _format_image_from_db(self, db_image, db_tags):
visibility = 'public' if db_image['is_public'] else 'private'
properties = {}
for prop in db_image.pop('properties'):
# NOTE(markwash) db api requires us to filter deleted
if not prop['deleted']:
properties[prop['name']] = prop['value']
locations = db_image['locations']
if CONF.metadata_encryption_key:
key = CONF.metadata_encryption_key
ld = []
for l in locations:
url = crypt.urlsafe_decrypt(key, l['url'])
ld.append({'url': url, 'metadata': l['metadata']})
locations = ld
return glance.domain.Image(
image_id=db_image['id'],
name=db_image['name'],
status=db_image['status'],
created_at=db_image['created_at'],
updated_at=db_image['updated_at'],
visibility=visibility,
min_disk=db_image['min_disk'],
min_ram=db_image['min_ram'],
protected=db_image['protected'],
locations=locations,
checksum=db_image['checksum'],
owner=db_image['owner'],
disk_format=db_image['disk_format'],
container_format=db_image['container_format'],
size=db_image['size'],
extra_properties=properties,
tags=db_tags
)
def _format_image_to_db(self, image):
locations = image.locations
if CONF.metadata_encryption_key:
key = CONF.metadata_encryption_key
ld = []
for l in locations:
url = crypt.urlsafe_encrypt(key, l['url'])
ld.append({'url': url, 'metadata': l['metadata']})
locations = ld
return {
'id': image.image_id,
'name': image.name,
'status': image.status,
'created_at': image.created_at,
'min_disk': image.min_disk,
'min_ram': image.min_ram,
'protected': image.protected,
'locations': locations,
'checksum': image.checksum,
'owner': image.owner,
'disk_format': image.disk_format,
'container_format': image.container_format,
'size': image.size,
'is_public': image.visibility == 'public',
'properties': dict(image.extra_properties),
}
def add(self, image):
image_values = self._format_image_to_db(image)
# the updated_at value is not set in the _format_image_to_db
# function since it is specific to image create
image_values['updated_at'] = image.updated_at
new_values = self.db_api.image_create(self.context, image_values)
self.db_api.image_tag_set_all(self.context,
image.image_id, image.tags)
image.created_at = new_values['created_at']
image.updated_at = new_values['updated_at']
def save(self, image):
image_values = self._format_image_to_db(image)
try:
new_values = self.db_api.image_update(self.context,
image.image_id,
image_values,
purge_props=True)
except (exception.NotFound, exception.Forbidden):
msg = _("No image found with ID %s") % image.image_id
raise exception.NotFound(msg)
self.db_api.image_tag_set_all(self.context, image.image_id,
image.tags)
image.updated_at = new_values['updated_at']
def remove(self, image):
image_values = self._format_image_to_db(image)
try:
self.db_api.image_update(self.context, image.image_id,
image_values, purge_props=True)
except (exception.NotFound, exception.Forbidden):
msg = _("No image found with ID %s") % image.image_id
raise exception.NotFound(msg)
# NOTE(markwash): don't update tags?
new_values = self.db_api.image_destroy(self.context, image.image_id)
image.updated_at = new_values['updated_at']
class ImageProxy(glance.domain.proxy.Image):
def __init__(self, image, context, db_api):
self.context = context
self.db_api = db_api
self.image = image
super(ImageProxy, self).__init__(image)
def get_member_repo(self):
member_repo = ImageMemberRepo(self.context, self.db_api,
self.image)
return member_repo
class ImageMemberRepo(object):
def __init__(self, context, db_api, image):
self.context = context
self.db_api = db_api
self.image = image
def _format_image_member_from_db(self, db_image_member):
return glance.domain.ImageMembership(
id=db_image_member['id'],
image_id=db_image_member['image_id'],
member_id=db_image_member['member'],
status=db_image_member['status'],
created_at=db_image_member['created_at'],
updated_at=db_image_member['updated_at']
)
def _format_image_member_to_db(self, image_member):
image_member = {'image_id': self.image.image_id,
'member': image_member.member_id,
'status': image_member.status,
'created_at': image_member.created_at}
return image_member
def list(self):
db_members = self.db_api.image_member_find(
self.context, image_id=self.image.image_id)
image_members = []
for db_member in db_members:
image_members.append(self._format_image_member_from_db(db_member))
return image_members
def add(self, image_member):
try:
self.get(image_member.member_id)
except exception.NotFound:
pass
else:
msg = _('The target member %(member_id)s is already '
'associated with image %(image_id)s.' %
dict(member_id=image_member.member_id,
image_id=self.image.image_id))
raise exception.Duplicate(msg)
image_member_values = self._format_image_member_to_db(image_member)
new_values = self.db_api.image_member_create(self.context,
image_member_values)
image_member.created_at = new_values['created_at']
image_member.updated_at = new_values['updated_at']
image_member.id = new_values['id']
return self._format_image_member_from_db(new_values)
def remove(self, image_member):
try:
self.db_api.image_member_delete(self.context, image_member.id)
except (exception.NotFound, exception.Forbidden):
msg = _("The specified member %s could not be found")
raise exception.NotFound(msg % image_member.id)
def save(self, image_member):
image_member_values = self._format_image_member_to_db(image_member)
try:
new_values = self.db_api.image_member_update(self.context,
image_member.id,
image_member_values)
except (exception.NotFound, exception.Forbidden):
raise exception.NotFound()
image_member.updated_at = new_values['updated_at']
return self._format_image_member_from_db(new_values)
def get(self, member_id):
try:
db_api_image_member = self.db_api.image_member_find(
self.context,
self.image.image_id,
member_id)
if not db_api_image_member:
raise exception.NotFound()
except (exception.NotFound, exception.Forbidden):
raise exception.NotFound()
image_member = self._format_image_member_from_db(
db_api_image_member[0])
return image_member
class ThreadPoolWrapper(object):
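    """Lazy proxy around the real DB API module: every callable attribute is
    wrapped so the call executes in eventlet's thread pool (tpool) instead of
    blocking the hub; non-callable attributes are passed through unchanged."""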
def __init__(self, wrapped):
self.wrapped = importutils.import_module(wrapped)
def __getattr__(self, key):
original = getattr(self.wrapped, key)
if not callable(original):
return original
@functools.wraps(original)
def wrapper(*args, **kwargs):
from eventlet import tpool
output = tpool.execute(original, *args, **kwargs)
return output
return wrapper
def unwrap(self):
return self.wrapped
| cloudbau/glance | glance/db/__init__.py | Python | apache-2.0 | 12,246 |
#!/usr/bin/env python
#
# Copyright (C) 2015--2016, the ximpol team.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import sys
import os
import unittest
import numpy
from ximpol.srcmodel.spectrum import xCountSpectrum, power_law
from ximpol.srcmodel.spectrum import int_eflux2pl_norm
from ximpol.utils.units_ import keV2erg
from ximpol.core.spline import xInterpolatedUnivariateSplineLinear
from ximpol.irf import load_arf, load_mrf
from ximpol.srcmodel.gabs import xpeInterstellarAbsorptionModel
"""The calculator ouputs have been obtained running by hand the code on the web
http://www.isdc.unige.ch/xipe/index.php/sensitivity-calculator
and are stored in the form of a list of tuples containing in order:
1 - column_density [1e22 cm^{-2}]
2 - power-law spectral index
3 - exposure time [ks]
4 - integral energy flux between 2 and 8 keV [1e-8 erg/cm^{2}/s]
5 - MDP in the 2--4, 4--6 and 6--8 keV energy bands.
Note that the numbers in the tuple are exactly what you would write in the
web form.
"""
SENSITIVITY_CALCULATOR_OUTPUT = [
(0.1, 1., 10., 0.1, [0.04022, 0.06668, 0.14058]),
(0.1, 2., 10., 0.1, [0.03293, 0.06927, 0.17443]),
(1. , 1., 10., 0.1, [0.04191, 0.06579, 0.13706]),
(1. , 2., 10., 0.1, [0.03400, 0.06729, 0.16716]),
(10., 1., 10., 0.1, [0.06228, 0.06348, 0.11810]),
(10., 2., 10., 0.1, [0.04880, 0.06013, 0.13230])
]
class TestSensitivityCalculator(unittest.TestCase):
"""Unit test for Fabio's sensitivity calculator at
http://www.isdc.unige.ch/xipe/index.php/sensitivity-calculator
"""
@classmethod
def setUpClass(cls):
"""Setup.
"""
cls.irf_name = 'xipe_baseline'
cls.aeff = load_arf(cls.irf_name)
cls.modf = load_mrf(cls.irf_name)
cls.emin = 2.
cls.emax = 8.
cls.ebinning = numpy.linspace(cls.emin, cls.emax, 4)
cls.ism_model = xpeInterstellarAbsorptionModel()
def mdp_table(self, column_density, index, exposure_time, eflux):
"""Return the MDP table for a point source with a power-law
spectral shape with a given set of parameters and for a given
observation time.
There's a slight complication, here, due to the fact that the
sensitivity calculator is rescaling the absorbed fluxes so that the
input energy flux (in the web form) is that at the observer instead of
that at the source. Therefore we need to do the same here.
"""
tsamples = numpy.linspace(0., exposure_time, 2)
norm = int_eflux2pl_norm(eflux, self.emin, self.emax, index, erg=True)
energy_spectrum = power_law(norm, index)
ism_trans = self.ism_model.transmission_factor(column_density)
_x = numpy.linspace(self.emin, self.emax, 1000)
_y = _x*energy_spectrum(_x, 0.)*ism_trans(_x)
absorbed_energy_spectrum = xInterpolatedUnivariateSplineLinear(_x, _y)
absorbed_eflux = keV2erg(absorbed_energy_spectrum.norm())
scale = eflux/absorbed_eflux
count_spectrum = xCountSpectrum(energy_spectrum, self.aeff, tsamples,
column_density, scale_factor=scale)
mdp_table = count_spectrum.build_mdp_table(self.ebinning, self.modf)
return mdp_table
def test_mdp(self):
"""Compare the MDP calculated by ximpol with that returned by the
sensitivity calculator.
"""
for column_density, index, exposure_time, eflux, target_mdps in\
SENSITIVITY_CALCULATOR_OUTPUT:
# Convert the source parameters to physical units.
column_density *= 1.e22
exposure_time *= 1000.
eflux *= 1.e-8
# Calculate the MDP table using the ximpol facilities.
table = self.mdp_table(column_density, index, exposure_time, eflux)
ximpol_mdps = table.mdp_values()[:-1]
target_mdps = numpy.array(target_mdps)
ximpol_mdps = numpy.array(ximpol_mdps)
delta = abs(target_mdps - ximpol_mdps)/target_mdps
max_delta = delta.max()
err_msg = 'max. diff. %.4f (nH = %.3e, index = %.2f)' %\
(max_delta, column_density, index)
err_msg += '\nximpol: %s\nsensitivity calculator: %s' %\
(ximpol_mdps, target_mdps)
self.assertTrue(max_delta < 0.03, err_msg)
if __name__ == '__main__':
unittest.main()
| lucabaldini/ximpol | ximpol/test/test_sensitivity_calculator.py | Python | gpl-3.0 | 5,090 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import unittest
from qpid_dispatch_internal.policy.policy_util import HostAddr, is_ipv6_enabled
from qpid_dispatch_internal.policy.policy_util import HostStruct
from qpid_dispatch_internal.policy.policy_util import PolicyError
from qpid_dispatch_internal.policy.policy_util import PolicyAppConnectionMgr
from qpid_dispatch_internal.policy.policy_local import PolicyLocal
from system_test import TestCase, main_module
class PolicyHostAddrTest(TestCase):
def expect_deny(self, badhostname, msg):
denied = False
try:
xxx = HostStruct(badhostname)
except PolicyError:
denied = True
self.assertTrue(denied, ("%s" % msg))
def check_hostaddr_match(self, tHostAddr, tString, expectOk=True):
# check that the string is a match for the addr
# check that the internal struct version matches, too
ha = HostStruct(tString)
if expectOk:
self.assertTrue( tHostAddr.match_str(tString) )
self.assertTrue( tHostAddr.match_bin(ha) )
else:
self.assertFalse( tHostAddr.match_str(tString) )
self.assertFalse( tHostAddr.match_bin(ha) )
def test_policy_hostaddr_ipv4(self):
# Create simple host and range
aaa = HostAddr("192.168.1.1")
bbb = HostAddr("1.1.1.1,1.1.1.255")
# Verify host and range
self.check_hostaddr_match(aaa, "192.168.1.1")
self.check_hostaddr_match(aaa, "1.1.1.1", False)
self.check_hostaddr_match(aaa, "192.168.1.2", False)
self.check_hostaddr_match(bbb, "1.1.1.1")
self.check_hostaddr_match(bbb, "1.1.1.254")
self.check_hostaddr_match(bbb, "1.1.1.0", False)
self.check_hostaddr_match(bbb, "1.1.2.0", False)
def test_policy_hostaddr_ipv6(self):
if not is_ipv6_enabled():
self.skipTest("System IPv6 support is not available")
# Create simple host and range
aaa = HostAddr("::1")
bbb = HostAddr("::1,::ffff")
ccc = HostAddr("ffff::0,ffff:ffff::0")
# Verify host and range
self.check_hostaddr_match(aaa, "::1")
self.check_hostaddr_match(aaa, "::2", False)
self.check_hostaddr_match(aaa, "ffff:ffff::0", False)
self.check_hostaddr_match(bbb, "::1")
self.check_hostaddr_match(bbb, "::fffe")
self.check_hostaddr_match(bbb, "::1:0", False)
self.check_hostaddr_match(bbb, "ffff::0", False)
self.check_hostaddr_match(ccc, "ffff::1")
self.check_hostaddr_match(ccc, "ffff:fffe:ffff:ffff::ffff")
self.check_hostaddr_match(ccc, "ffff:ffff::1", False)
self.check_hostaddr_match(ccc, "ffff:ffff:ffff:ffff::ffff", False)
def test_policy_hostaddr_ipv4_wildcard(self):
aaa = HostAddr("*")
self.check_hostaddr_match(aaa,"0.0.0.0")
self.check_hostaddr_match(aaa,"127.0.0.1")
self.check_hostaddr_match(aaa,"255.254.253.252")
def test_policy_hostaddr_ipv6_wildcard(self):
if not is_ipv6_enabled():
self.skipTest("System IPv6 support is not available")
aaa = HostAddr("*")
self.check_hostaddr_match(aaa,"::0")
self.check_hostaddr_match(aaa,"::1")
self.check_hostaddr_match(aaa,"ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff")
def test_policy_malformed_hostaddr_ipv4(self):
self.expect_deny( "0.0.0.0.0", "Name or service not known")
self.expect_deny( "1.1.1.1,2.2.2.2,3.3.3.3", "arg count")
self.expect_deny( "9.9.9.9,8.8.8.8", "a > b")
def test_policy_malformed_hostaddr_ipv6(self):
if not is_ipv6_enabled():
self.skipTest("System IPv6 support is not available")
self.expect_deny( "1::2::3", "Name or service not known")
self.expect_deny( "::1,::2,::3", "arg count")
self.expect_deny( "0:ff:0,0:fe:ffff:ffff::0", "a > b")
class QpidDispatch(object):
def qd_dispatch_policy_c_counts_alloc(self):
return 100
def qd_dispatch_policy_c_counts_refresh(self, cstats, entitymap):
pass
class MockAgent(object):
def __init__(self):
self.qd = QpidDispatch()
def add_implementation(self, entity, cfg_obj_name):
pass
class MockPolicyManager(object):
def __init__(self):
self.agent = MockAgent()
def log_debug(self, text):
print("DEBUG: %s" % text)
def log_info(self, text):
print("INFO: %s" % text)
def log_trace(self, text):
print("TRACE: %s" % text)
def log_error(self, text):
print("ERROR: %s" % text)
def get_agent(self):
return self.agent
class PolicyFile(TestCase):
manager = MockPolicyManager()
policy = PolicyLocal(manager)
policy.test_load_config()
def test_policy1_test_zeke_ok(self):
p1 = PolicyFile.policy.lookup_user('zeke', '192.168.100.5', 'photoserver', '192.168.100.5:33333', 1)
self.assertTrue(p1 == 'test')
upolicy = {}
self.assertTrue(
PolicyFile.policy.lookup_settings('photoserver', p1, upolicy)
)
self.assertTrue(upolicy['maxFrameSize'] == 444444)
self.assertTrue(upolicy['maxMessageSize'] == 444444)
self.assertTrue(upolicy['maxSessionWindow'] == 444444)
self.assertTrue(upolicy['maxSessions'] == 4)
self.assertTrue(upolicy['maxSenders'] == 44)
self.assertTrue(upolicy['maxReceivers'] == 44)
self.assertTrue(upolicy['allowAnonymousSender'])
self.assertTrue(upolicy['allowDynamicSource'])
self.assertTrue(upolicy['targets'] == 'private')
self.assertTrue(upolicy['sources'] == 'private')
def test_policy1_test_zeke_bad_IP(self):
self.assertTrue(
PolicyFile.policy.lookup_user('zeke', '10.18.0.1', 'photoserver', "connid", 2) == '')
self.assertTrue(
PolicyFile.policy.lookup_user('zeke', '72.135.2.9', 'photoserver', "connid", 3) == '')
self.assertTrue(
PolicyFile.policy.lookup_user('zeke', '127.0.0.1', 'photoserver', "connid", 4) == '')
def test_policy1_test_zeke_bad_app(self):
self.assertTrue(
PolicyFile.policy.lookup_user('zeke', '192.168.100.5','galleria', "connid", 5) == '')
def test_policy1_test_users_same_permissions(self):
zname = PolicyFile.policy.lookup_user('zeke', '192.168.100.5', 'photoserver', '192.168.100.5:33333', 6)
yname = PolicyFile.policy.lookup_user('ynot', '10.48.255.254', 'photoserver', '192.168.100.5:33334', 7)
self.assertTrue( zname == yname )
def test_policy1_lookup_unknown_application(self):
upolicy = {}
self.assertFalse(
PolicyFile.policy.lookup_settings('unknown', 'doesntmatter', upolicy)
)
def test_policy1_lookup_unknown_usergroup(self):
upolicy = {}
self.assertFalse(
PolicyFile.policy.lookup_settings('photoserver', 'unknown', upolicy)
)
class PolicyFileApplicationFallback(TestCase):
manager = MockPolicyManager()
policy = PolicyLocal(manager)
policy.test_load_config()
def test_bad_app_fallback(self):
# Show that with no fallback the user cannot connect
self.assertTrue(
self.policy.lookup_user('zeke', '192.168.100.5', 'galleria', "connid", 5) == '')
# Enable the fallback defaultVhost and show the same user can now connect
self.policy.set_default_vhost('photoserver')
settingsname = self.policy.lookup_user('zeke', '192.168.100.5', 'galleria', "connid", 5)
self.assertTrue(settingsname == 'test')
# Show that the fallback settings are returned
upolicy = {}
self.assertTrue(
self.policy.lookup_settings('phony*app*name', settingsname, upolicy)
)
self.assertTrue(upolicy['maxFrameSize'] == 444444)
self.assertTrue(upolicy['maxMessageSize'] == 444444)
self.assertTrue(upolicy['maxSessionWindow'] == 444444)
self.assertTrue(upolicy['maxSessions'] == 4)
self.assertTrue(upolicy['maxSenders'] == 44)
self.assertTrue(upolicy['maxReceivers'] == 44)
self.assertTrue(upolicy['allowAnonymousSender'])
self.assertTrue(upolicy['allowDynamicSource'])
self.assertTrue(upolicy['targets'] == 'private')
self.assertTrue(upolicy['sources'] == 'private')
# Disable fallback and show failure again
self.policy.set_default_vhost('')
self.assertTrue(
self.policy.lookup_user('zeke', '192.168.100.5', 'galleria', "connid", 5) == '')
class PolicyAppConnectionMgrTests(TestCase):
def test_policy_app_conn_mgr_fail_by_total(self):
stats = PolicyAppConnectionMgr(1, 2, 2)
diags = []
self.assertTrue(stats.can_connect('10.10.10.10:10000', 'chuck', '10.10.10.10', diags))
self.assertFalse(stats.can_connect('10.10.10.10:10001', 'chuck', '10.10.10.10', diags))
self.assertTrue(len(diags) == 1)
self.assertTrue('application connection limit' in diags[0])
def test_policy_app_conn_mgr_fail_by_user(self):
stats = PolicyAppConnectionMgr(3, 1, 2)
diags = []
self.assertTrue(stats.can_connect('10.10.10.10:10000', 'chuck', '10.10.10.10', diags))
self.assertFalse(stats.can_connect('10.10.10.10:10001', 'chuck', '10.10.10.10', diags))
self.assertTrue(len(diags) == 1)
self.assertTrue('per user' in diags[0])
def test_policy_app_conn_mgr_fail_by_hosts(self):
stats = PolicyAppConnectionMgr(3, 2, 1)
diags = []
self.assertTrue(stats.can_connect('10.10.10.10:10000', 'chuck', '10.10.10.10', diags))
self.assertFalse(stats.can_connect('10.10.10.10:10001', 'chuck', '10.10.10.10', diags))
self.assertTrue(len(diags) == 1)
self.assertTrue('per host' in diags[0])
def test_policy_app_conn_mgr_fail_by_user_hosts(self):
stats = PolicyAppConnectionMgr(3, 1, 1)
diags = []
self.assertTrue(stats.can_connect('10.10.10.10:10000', 'chuck', '10.10.10.10', diags))
self.assertFalse(stats.can_connect('10.10.10.10:10001', 'chuck', '10.10.10.10', diags))
self.assertTrue(len(diags) == 2)
self.assertTrue('per user' in diags[0] or 'per user' in diags[1])
self.assertTrue('per host' in diags[0] or 'per host' in diags[1])
def test_policy_app_conn_mgr_update(self):
stats = PolicyAppConnectionMgr(3, 1, 2)
diags = []
self.assertTrue(stats.can_connect('10.10.10.10:10000', 'chuck', '10.10.10.10', diags))
self.assertFalse(stats.can_connect('10.10.10.10:10001', 'chuck', '10.10.10.10', diags))
self.assertTrue(len(diags) == 1)
self.assertTrue('per user' in diags[0])
diags = []
stats.update(3, 2, 2)
self.assertTrue(stats.can_connect('10.10.10.10:10001', 'chuck', '10.10.10.10', diags))
def test_policy_app_conn_mgr_disconnect(self):
stats = PolicyAppConnectionMgr(3, 1, 2)
diags = []
self.assertTrue(stats.can_connect('10.10.10.10:10000', 'chuck', '10.10.10.10', diags))
self.assertFalse(stats.can_connect('10.10.10.10:10001', 'chuck', '10.10.10.10', diags))
self.assertTrue(len(diags) == 1)
self.assertTrue('per user' in diags[0])
diags = []
stats.disconnect("10.10.10.10:10000", 'chuck', '10.10.10.10')
self.assertTrue(stats.can_connect('10.10.10.10:10001', 'chuck', '10.10.10.10', diags))
def test_policy_app_conn_mgr_create_bad_settings(self):
denied = False
try:
stats = PolicyAppConnectionMgr(-3, 1, 2)
except PolicyError:
denied = True
self.assertTrue(denied, "Failed to detect negative setting value.")
def test_policy_app_conn_mgr_update_bad_settings(self):
denied = False
try:
stats = PolicyAppConnectionMgr(0, 0, 0)
except PolicyError:
denied = True
self.assertFalse(denied, "Should allow all zeros.")
try:
stats.update(0, -1, 0)
except PolicyError:
denied = True
self.assertTrue(denied, "Failed to detect negative setting value.")
def test_policy_app_conn_mgr_larger_counts(self):
stats = PolicyAppConnectionMgr(10000, 10000, 10000)
diags = []
for i in range(0, 10000):
self.assertTrue(stats.can_connect('1.1.1.1:' + str(i), 'chuck', '1.1.1.1', diags))
self.assertTrue(len(diags) == 0)
self.assertFalse(stats.can_connect('1.1.1.1:10000', 'chuck', '1.1.1.1', diags))
self.assertTrue(len(diags) == 3)
self.assertTrue(stats.connections_active == 10000)
self.assertTrue(stats.connections_approved == 10000)
self.assertTrue(stats.connections_denied == 1)
if __name__ == '__main__':
unittest.main(main_module())
| lulf/qpid-dispatch | tests/router_policy_test.py | Python | apache-2.0 | 13,755 |
import bh_plugin
import sublime
class SwapQuotes(bh_plugin.BracketPluginCommand):
def escaped(self, idx):
"""
Check if character is an escape char
"""
view = self.view
escaped = False
        while idx >= 0 and view.substr(idx) == '\\':
            # each preceding backslash toggles the escaped state
            escaped = not escaped
            idx -= 1
return escaped
def run(self, edit, name):
"""
Swap double or single quotes with each other.
Handle escaping or unescaping like quotes or
unlike quotes respectively.
"""
view = self.view
quote = view.substr(self.left.begin)
if quote != "'" and quote != '"':
return
new = "'" if (quote == '"') else '"'
old = quote
begin = self.left.end
end = self.right.begin
content_end = self.right.begin
view.replace(edit, self.left.toregion(), view.substr(self.left.toregion()).replace(old, new))
view.replace(edit, self.right.toregion(), view.substr(self.right.toregion()).replace(old, new))
offset = 0
        while begin < end + offset:
            char = view.substr(begin)
            if char == old and self.escaped(begin - 1):
                # the old quote style no longer needs escaping; drop its backslash
                view.replace(edit, sublime.Region(begin - 1, begin), '')
                offset -= 1
                content_end -= 1
            elif char == new and not self.escaped(begin - 1):
                # the new quote style now occurs inside the string; escape it
                view.insert(edit, begin, "\\")
                offset += 1
                content_end += 1
            begin += 1
self.right = self.right.move(content_end, end + offset)
self.selection = [sublime.Region(content_end)]
def plugin():
return SwapQuotes
| herove/dotfiles | sublime/Packages/BracketHighlighter/bh_modules/swapquotes.py | Python | mit | 1,753 |
__author__ = 'jpi'
import hashlib
from rest_framework.fields import Field, BooleanField
from django.conf import settings
class UniqueIDField(Field):
"""
Field that provides a unique ID that can be used to identify this object.
"""
read_only = True
def field_to_native(self, obj, field_name):
return create_secret_signature(obj)
class IgnoreValueBooleanField(BooleanField):
"""
Field that ignores the actual value and sets it to a default value
"""
def from_native(self, value):
return self.default
def create_secret_signature(obj):
"""
Creates a secret signature for a given object.
    :param obj: object to sign; its repr() is combined with the app's SECRET_KEY
    :return: SHA-1 hex digest of the combined string
"""
app_secret_key = settings.SECRET_KEY
representation = repr(obj) + app_secret_key
    return hashlib.sha1(representation).hexdigest()
| codeforeurope/stadtgedaechtnis_backend | services/serializer/fields.py | Python | mit | 830 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-11-23 10:39
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app', '0001_squashed_0007_auto_20160701_0926'),
]
operations = [
migrations.AddField(
model_name='app',
name='logentries_token',
field=models.CharField(default='', max_length=255),
),
migrations.AddField(
model_name='device',
name='remote_logging_id',
field=models.CharField(blank=True, max_length=255, null=True),
),
]
| VoIPGRID/vialer-middleware | app/migrations/0002_auto_20171123_1039.py | Python | mit | 665 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# $Id: __init__.py 1525 2012-08-16 16:32:03Z g.rodola $
#
# Copyright (c) 2009, Jay Loden, Giampaolo Rodola'. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""psutil is a module providing convenience functions for managing
processes and gather system information in a portable way by using
Python.
"""
from __future__ import division
__version__ = "0.6.1"
version_info = tuple([int(num) for num in __version__.split('.')])
__all__ = [
# exceptions
"Error", "NoSuchProcess", "AccessDenied", "TimeoutExpired",
# constants
"NUM_CPUS", "TOTAL_PHYMEM", "BOOT_TIME",
"version_info", "__version__",
"STATUS_RUNNING", "STATUS_IDLE", "STATUS_SLEEPING", "STATUS_DISK_SLEEP",
"STATUS_STOPPED", "STATUS_TRACING_STOP", "STATUS_ZOMBIE", "STATUS_DEAD",
"STATUS_WAKING", "STATUS_LOCKED",
# classes
"Process", "Popen",
# functions
"test", "pid_exists", "get_pid_list", "process_iter", "get_process_list",
"virtual_memory", "swap_memory",
"cpu_times", "cpu_percent", "per_cpu_percent",
"network_io_counters", "disk_io_counters",
]
import sys
import os
import time
import signal
import warnings
import errno
import subprocess
try:
import pwd
except ImportError:
pwd = None
from psutil.error import Error, NoSuchProcess, AccessDenied, TimeoutExpired
from psutil._compat import property, callable, defaultdict
from psutil._common import cached_property
from psutil._common import (deprecated as _deprecated,
nt_disk_iostat as _nt_disk_iostat,
nt_net_iostat as _nt_net_iostat,
nt_sysmeminfo as _nt_sysmeminfo,
isfile_strict as _isfile_strict)
from psutil._common import (STATUS_RUNNING, STATUS_IDLE, STATUS_SLEEPING,
STATUS_DISK_SLEEP, STATUS_STOPPED,
STATUS_TRACING_STOP, STATUS_ZOMBIE, STATUS_DEAD,
STATUS_WAKING, STATUS_LOCKED)
# import the appropriate module for our platform only
if sys.platform.startswith("linux"):
import psutil._pslinux as _psplatform
from psutil._pslinux import (phymem_buffers,
cached_phymem,
IOPRIO_CLASS_NONE,
IOPRIO_CLASS_RT,
IOPRIO_CLASS_BE,
IOPRIO_CLASS_IDLE)
phymem_buffers = _psplatform.phymem_buffers
cached_phymem = _psplatform.cached_phymem
elif sys.platform.startswith("win32"):
import psutil._psmswindows as _psplatform
from psutil._psmswindows import (ABOVE_NORMAL_PRIORITY_CLASS,
BELOW_NORMAL_PRIORITY_CLASS,
HIGH_PRIORITY_CLASS,
IDLE_PRIORITY_CLASS,
NORMAL_PRIORITY_CLASS,
REALTIME_PRIORITY_CLASS)
elif sys.platform.startswith("darwin"):
import psutil._psosx as _psplatform
elif sys.platform.startswith("freebsd"):
import psutil._psbsd as _psplatform
else:
raise NotImplementedError('platform %s is not supported' % sys.platform)
__all__.extend(_psplatform.__extra__all__)
NUM_CPUS = _psplatform.NUM_CPUS
BOOT_TIME = _psplatform.BOOT_TIME
TOTAL_PHYMEM = _psplatform.TOTAL_PHYMEM
class Process(object):
"""Represents an OS process."""
def __init__(self, pid):
"""Create a new Process object for the given pid.
Raises NoSuchProcess if pid does not exist.
"""
self._pid = pid
self._gone = False
# platform-specific modules define an _psplatform.Process
# implementation class
self._platform_impl = _psplatform.Process(pid)
self._last_sys_cpu_times = None
self._last_proc_cpu_times = None
# cache creation time for later use in is_running() method
try:
self.create_time
except AccessDenied:
pass
except NoSuchProcess:
raise NoSuchProcess(pid, None, 'no process found with pid %s' % pid)
def __str__(self):
try:
pid = self.pid
name = repr(self.name)
except NoSuchProcess:
details = "(pid=%s (terminated))" % self.pid
except AccessDenied:
details = "(pid=%s)" % (self.pid)
else:
details = "(pid=%s, name=%s)" % (pid, name)
return "%s.%s%s" % (self.__class__.__module__,
self.__class__.__name__, details)
def __repr__(self):
return "<%s at %s>" % (self.__str__(), id(self))
def as_dict(self, attrs=[], ad_value=None):
"""Utility method returning process information as a hashable
dictionary.
If 'attrs' is specified it must be a list of strings reflecting
available Process class's attribute names (e.g. ['get_cpu_times',
'name']) else all public (read only) attributes are assumed.
'ad_value' is the value which gets assigned to a dict key in case
AccessDenied exception is raised when retrieving that particular
process information.
"""
excluded_names = set(['send_signal', 'suspend', 'resume', 'terminate',
'kill', 'wait', 'is_running', 'as_dict', 'parent',
'get_children', 'nice'])
retdict = dict()
for name in set(attrs or dir(self)):
if name.startswith('_'):
continue
if name.startswith('set_'):
continue
if name in excluded_names:
continue
try:
attr = getattr(self, name)
if callable(attr):
if name == 'get_cpu_percent':
ret = attr(interval=0)
else:
ret = attr()
else:
ret = attr
except AccessDenied:
ret = ad_value
except NotImplementedError:
# in case of not implemented functionality (may happen
# on old or exotic systems) we want to crash only if
# the user explicitly asked for that particular attr
if attrs:
raise
continue
if name.startswith('get'):
if name[3] == '_':
name = name[4:]
elif name == 'getcwd':
name = 'cwd'
retdict[name] = ret
return retdict
@property
def pid(self):
"""The process pid."""
return self._pid
@cached_property
def ppid(self):
"""The process parent pid."""
return self._platform_impl.get_process_ppid()
@property
def parent(self):
"""Return the parent process as a Process object. If no parent
pid is known return None.
"""
ppid = self.ppid
if ppid is not None:
try:
return Process(ppid)
except NoSuchProcess:
pass
@cached_property
def name(self):
"""The process name."""
name = self._platform_impl.get_process_name()
if os.name == 'posix':
# On UNIX the name gets truncated to the first 15 characters.
# If it matches the first part of the cmdline we return that
# one instead because it's usually more explicative.
# Examples are "gnome-keyring-d" vs. "gnome-keyring-daemon".
try:
cmdline = self.cmdline
except AccessDenied:
pass
else:
if cmdline:
extended_name = os.path.basename(cmdline[0])
if extended_name.startswith(name):
name = extended_name
# XXX - perhaps needs refactoring
self._platform_impl._process_name = name
return name
@cached_property
def exe(self):
"""The process executable path. May also be an empty string."""
def guess_it(fallback):
# try to guess exe from cmdline[0] in absence of a native
# exe representation
cmdline = self.cmdline
if cmdline and hasattr(os, 'access') and hasattr(os, 'X_OK'):
exe = cmdline[0] # the possible exe
rexe = os.path.realpath(exe) # ...in case it's a symlink
if os.path.isabs(rexe) and os.path.isfile(rexe) \
and os.access(rexe, os.X_OK):
return exe
if isinstance(fallback, AccessDenied):
raise fallback
return fallback
try:
exe = self._platform_impl.get_process_exe()
except AccessDenied:
err = sys.exc_info()[1]
return guess_it(fallback=err)
else:
if not exe:
# underlying implementation can legitimately return an
# empty string; if that's the case we don't want to
# raise AD while guessing from the cmdline
try:
exe = guess_it(fallback=exe)
except AccessDenied:
pass
return exe
@cached_property
def cmdline(self):
"""The command line process has been called with."""
return self._platform_impl.get_process_cmdline()
@property
def status(self):
"""The process current status as a STATUS_* constant."""
return self._platform_impl.get_process_status()
if os.name == 'posix':
@property
def uids(self):
"""Return a named tuple denoting the process real,
effective, and saved user ids.
"""
return self._platform_impl.get_process_uids()
@property
def gids(self):
"""Return a named tuple denoting the process real,
effective, and saved group ids.
"""
return self._platform_impl.get_process_gids()
@property
def terminal(self):
"""The terminal associated with this process, if any,
else None.
"""
return self._platform_impl.get_process_terminal()
@property
def username(self):
"""The name of the user that owns the process.
On UNIX this is calculated by using *real* process uid.
"""
if os.name == 'posix':
if pwd is None:
# might happen if python was installed from sources
raise ImportError("requires pwd module shipped with standard python")
return pwd.getpwuid(self.uids.real).pw_name
else:
return self._platform_impl.get_process_username()
@cached_property
def create_time(self):
"""The process creation time as a floating point number
expressed in seconds since the epoch, in UTC.
"""
return self._platform_impl.get_process_create_time()
def getcwd(self):
"""Return a string representing the process current working
directory.
"""
return self._platform_impl.get_process_cwd()
# Linux, BSD and Windows only
if hasattr(_psplatform.Process, "get_process_io_counters"):
def get_io_counters(self):
"""Return process I/O statistics as a namedtuple including
the number of read/write calls performed and the amount of
bytes read and written by the process.
"""
return self._platform_impl.get_process_io_counters()
def get_nice(self):
"""Get process niceness (priority)."""
return self._platform_impl.get_process_nice()
def set_nice(self, value):
"""Set process niceness (priority)."""
return self._platform_impl.set_process_nice(value)
# available only on Linux
if hasattr(_psplatform.Process, "get_process_ionice"):
def get_ionice(self):
"""Return process I/O niceness (priority) as a namedtuple."""
return self._platform_impl.get_process_ionice()
def set_ionice(self, ioclass, value=None):
"""Set process I/O niceness (priority).
ioclass is one of the IOPRIO_CLASS_* constants.
iodata is a number which goes from 0 to 7. The higher the
value, the lower the I/O priority of the process.
"""
return self._platform_impl.set_process_ionice(ioclass, value)
# available on Windows and Linux only
if hasattr(_psplatform.Process, "get_process_cpu_affinity"):
def get_cpu_affinity(self):
"""Get process current CPU affinity."""
return self._platform_impl.get_process_cpu_affinity()
def set_cpu_affinity(self, cpus):
"""Set process current CPU affinity.
'cpus' is a list of CPUs for which you want to set the
affinity (e.g. [0, 1]).
"""
return self._platform_impl.set_process_cpu_affinity(cpus)
if os.name == 'nt':
def get_num_handles(self):
"""Return the number of handles opened by this process
(Windows only).
"""
return self._platform_impl.get_num_handles()
if os.name == 'posix':
def get_num_fds(self):
"""Return the number of file descriptors opened by this
process (POSIX only).
"""
return self._platform_impl.get_num_fds()
def get_num_ctx_switches(self):
"""Return the number voluntary and involuntary context switches
performed by this process.
"""
return self._platform_impl.get_num_ctx_switches()
def get_num_threads(self):
"""Return the number of threads used by this process."""
return self._platform_impl.get_process_num_threads()
def get_threads(self):
"""Return threads opened by process as a list of namedtuples
including thread id and thread CPU times (user/system).
"""
return self._platform_impl.get_process_threads()
def get_children(self, recursive=False):
"""Return the children of this process as a list of Process
objects.
        If recursive is True return all of this process's descendants.
Example (A == this process):
A ─┐
│
├─ B (child) ─┐
│ └─ X (grandchild) ─┐
│ └─ Y (great grandchild)
├─ C (child)
└─ D (child)
>>> p.get_children()
B, C, D
>>> p.get_children(recursive=True)
B, X, Y, C, D
Note that in the example above if process X disappears
process Y won't be returned either as the reference to
process A is lost.
"""
if not self.is_running():
name = self._platform_impl._process_name
raise NoSuchProcess(self.pid, name)
ret = []
if not recursive:
for p in process_iter():
try:
if p.ppid == self.pid:
# if child happens to be older than its parent
# (self) it means child's PID has been reused
if self.create_time <= p.create_time:
ret.append(p)
except NoSuchProcess:
pass
else:
# construct a dict where 'values' are all the processes
# having 'key' as their parent
table = defaultdict(list)
for p in process_iter():
try:
table[p.ppid].append(p)
except NoSuchProcess:
pass
# At this point we have a mapping table where table[self.pid]
# are the current process's children.
# Below, we look for all descendants recursively, similarly
# to a recursive function call.
checkpids = [self.pid]
for pid in checkpids:
for child in table[pid]:
try:
# if child happens to be older than its parent
# (self) it means child's PID has been reused
intime = self.create_time <= child.create_time
except NoSuchProcess:
pass
else:
if intime:
ret.append(child)
if child.pid not in checkpids:
checkpids.append(child.pid)
return ret
def get_cpu_percent(self, interval=0.1):
"""Return a float representing the current process CPU
utilization as a percentage.
When interval is > 0.0 compares process times to system CPU
times elapsed before and after the interval (blocking).
When interval is 0.0 or None compares process times to system CPU
times elapsed since last call, returning immediately.
        In this case it is recommended for accuracy that this function be
called with at least 0.1 seconds between calls.
"""
blocking = interval is not None and interval > 0.0
if blocking:
st1 = sum(cpu_times())
pt1 = self._platform_impl.get_cpu_times()
time.sleep(interval)
st2 = sum(cpu_times())
pt2 = self._platform_impl.get_cpu_times()
else:
st1 = self._last_sys_cpu_times
pt1 = self._last_proc_cpu_times
st2 = sum(cpu_times())
pt2 = self._platform_impl.get_cpu_times()
if st1 is None or pt1 is None:
self._last_sys_cpu_times = st2
self._last_proc_cpu_times = pt2
return 0.0
delta_proc = (pt2.user - pt1.user) + (pt2.system - pt1.system)
delta_time = st2 - st1
# reset values for next call in case of interval == None
self._last_sys_cpu_times = st2
self._last_proc_cpu_times = pt2
try:
# the utilization split between all CPUs
overall_percent = (delta_proc / delta_time) * 100
except ZeroDivisionError:
# interval was too low
return 0.0
# the utilization of a single CPU
single_cpu_percent = overall_percent * NUM_CPUS
# on posix a percentage > 100 is legitimate
# http://stackoverflow.com/questions/1032357/comprehending-top-cpu-usage
# on windows we use this ugly hack to avoid troubles with float
# precision issues
if os.name != 'posix':
if single_cpu_percent > 100.0:
return 100.0
return round(single_cpu_percent, 1)
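    # Usage sketch (assuming `p` is an existing Process instance): a blocking
    # sample sleeps for the interval, a non-blocking one compares against the
    # previous call and returns 0.0 the first time:
    #
    #   p.get_cpu_percent(interval=1.0)   # blocks ~1 second, then returns a %
    #   p.get_cpu_percent(interval=0)     # returns immediately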
def get_cpu_times(self):
"""Return a tuple whose values are process CPU user and system
times. The same as os.times() but per-process.
"""
return self._platform_impl.get_cpu_times()
def get_memory_info(self):
"""Return a tuple representing RSS (Resident Set Size) and VMS
(Virtual Memory Size) in bytes.
On UNIX RSS and VMS are the same values shown by ps.
On Windows RSS and VMS refer to "Mem Usage" and "VM Size" columns
of taskmgr.exe.
"""
return self._platform_impl.get_memory_info()
def get_ext_memory_info(self):
"""Return a namedtuple with variable fields depending on the
platform representing extended memory information about
the process. All numbers are expressed in bytes.
"""
return self._platform_impl.get_ext_memory_info()
def get_memory_percent(self):
"""Compare physical system memory to process resident memory and
calculate process memory utilization as a percentage.
"""
rss = self._platform_impl.get_memory_info()[0]
try:
return (rss / float(TOTAL_PHYMEM)) * 100
except ZeroDivisionError:
return 0.0
def get_memory_maps(self, grouped=True):
"""Return process's mapped memory regions as a list of nameduples
whose fields are variable depending on the platform.
If 'grouped' is True the mapped regions with the same 'path'
are grouped together and the different memory fields are summed.
If 'grouped' is False every mapped region is shown as a single
entity and the namedtuple will also include the mapped region's
address space ('addr') and permission set ('perms').
"""
it = self._platform_impl.get_memory_maps()
if grouped:
d = {}
for tupl in it:
path = tupl[2]
nums = tupl[3:]
try:
d[path] = map(lambda x, y: x+y, d[path], nums)
except KeyError:
d[path] = nums
nt = self._platform_impl.nt_mmap_grouped
return [nt(path, *d[path]) for path in d]
else:
nt = self._platform_impl.nt_mmap_ext
return [nt(*x) for x in it]
def get_open_files(self):
"""Return files opened by process as a list of namedtuples
including absolute file name and file descriptor number.
"""
return self._platform_impl.get_open_files()
def get_connections(self, kind='inet'):
"""Return connections opened by process as a list of namedtuples.
The kind parameter filters for connections that fit the following
criteria:
Kind Value Connections using
inet IPv4 and IPv6
inet4 IPv4
inet6 IPv6
tcp TCP
tcp4 TCP over IPv4
tcp6 TCP over IPv6
udp UDP
udp4 UDP over IPv4
udp6 UDP over IPv6
unix UNIX socket (both UDP and TCP protocols)
all the sum of all the possible families and protocols
"""
return self._platform_impl.get_connections(kind)
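    # Usage sketch (kind values as listed above; assumes the platform supports
    # connection enumeration):
    #
    #   import os, psutil
    #   p = psutil.Process(os.getpid())
    #   for conn in p.get_connections(kind='tcp'):
    #       print conn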
def is_running(self):
"""Return whether this process is running."""
if self._gone:
return False
try:
# Checking if pid is alive is not enough as the pid might
# have been reused by another process.
# pid + creation time, on the other hand, is supposed to
# identify a process univocally.
return self.create_time == \
self._platform_impl.get_process_create_time()
except NoSuchProcess:
self._gone = True
return False
def send_signal(self, sig):
"""Send a signal to process (see signal module constants).
On Windows only SIGTERM is valid and is treated as an alias
for kill().
"""
# safety measure in case the current process has been killed in
# meantime and the kernel reused its PID
if not self.is_running():
name = self._platform_impl._process_name
raise NoSuchProcess(self.pid, name)
if os.name == 'posix':
try:
os.kill(self.pid, sig)
except OSError:
err = sys.exc_info()[1]
name = self._platform_impl._process_name
if err.errno == errno.ESRCH:
raise NoSuchProcess(self.pid, name)
if err.errno == errno.EPERM:
raise AccessDenied(self.pid, name)
raise
else:
if sig == signal.SIGTERM:
self._platform_impl.kill_process()
else:
raise ValueError("only SIGTERM is supported on Windows")
def suspend(self):
"""Suspend process execution."""
# safety measure in case the current process has been killed in
# meantime and the kernel reused its PID
if not self.is_running():
name = self._platform_impl._process_name
raise NoSuchProcess(self.pid, name)
# windows
if hasattr(self._platform_impl, "suspend_process"):
self._platform_impl.suspend_process()
else:
# posix
self.send_signal(signal.SIGSTOP)
def resume(self):
"""Resume process execution."""
# safety measure in case the current process has been killed in
# meantime and the kernel reused its PID
if not self.is_running():
name = self._platform_impl._process_name
raise NoSuchProcess(self.pid, name)
# windows
if hasattr(self._platform_impl, "resume_process"):
self._platform_impl.resume_process()
else:
# posix
self.send_signal(signal.SIGCONT)
def terminate(self):
"""Terminate the process with SIGTERM.
On Windows this is an alias for kill().
"""
self.send_signal(signal.SIGTERM)
def kill(self):
"""Kill the current process."""
# safety measure in case the current process has been killed in
# meantime and the kernel reused its PID
if not self.is_running():
name = self._platform_impl._process_name
raise NoSuchProcess(self.pid, name)
if os.name == 'posix':
self.send_signal(signal.SIGKILL)
else:
self._platform_impl.kill_process()
def wait(self, timeout=None):
"""Wait for process to terminate and, if process is a children
of the current one also return its exit code, else None.
"""
if timeout is not None and not timeout >= 0:
raise ValueError("timeout must be a positive integer")
return self._platform_impl.process_wait(timeout)
# --- deprecated API
@property
def nice(self):
"""Get or set process niceness (priority).
Deprecated, use get_nice() instead.
"""
msg = "this property is deprecated; use Process.get_nice() method instead"
warnings.warn(msg, category=DeprecationWarning, stacklevel=2)
return self.get_nice()
@nice.setter
def nice(self, value):
# invoked on "p.nice = num"; change process niceness
# deprecated in favor of set_nice()
msg = "this property is deprecated; use Process.set_nice() method instead"
warnings.warn(msg, category=DeprecationWarning, stacklevel=2)
return self.set_nice(value)
class Popen(Process):
"""A more convenient interface to stdlib subprocess module.
It starts a sub process and deals with it exactly as when using
subprocess.Popen class but in addition also provides all the
    properties and methods of the psutil.Process class in a single interface:
>>> import psutil
>>> from subprocess import PIPE
>>> p = psutil.Popen(["/usr/bin/python", "-c", "print 'hi'"], stdout=PIPE)
>>> p.name
'python'
>>> p.uids
user(real=1000, effective=1000, saved=1000)
>>> p.username
'giampaolo'
>>> p.communicate()
('hi\n', None)
>>> p.terminate()
>>> p.wait(timeout=2)
0
>>>
For method names common to both classes such as kill(), terminate()
and wait(), psutil.Process implementation takes precedence.
    For complete documentation refer to:
http://docs.python.org/library/subprocess.html
"""
def __init__(self, *args, **kwargs):
self.__subproc = subprocess.Popen(*args, **kwargs)
self._pid = self.__subproc.pid
self._gone = False
self._platform_impl = _psplatform.Process(self._pid)
self._last_sys_cpu_times = None
self._last_proc_cpu_times = None
try:
self.create_time
except AccessDenied:
pass
except NoSuchProcess:
raise NoSuchProcess(self._pid, None,
"no process found with pid %s" % pid)
def __dir__(self):
return list(set(dir(Popen) + dir(subprocess.Popen)))
def __getattribute__(self, name):
try:
return object.__getattribute__(self, name)
except AttributeError:
try:
return object.__getattribute__(self.__subproc, name)
except AttributeError:
raise AttributeError("%s instance has no attribute '%s'"
%(self.__class__.__name__, name))
# =====================================================================
# --- system processes related functions
# =====================================================================
get_pid_list = _psplatform.get_pid_list
pid_exists = _psplatform.pid_exists
_pmap = {}
def process_iter():
"""Return a generator yielding a Process class instance for all
running processes on the local machine.
Every new Process instance is only created once and then cached
into an internal table which is updated every time this is used.
The sorting order in which processes are yielded is based on
their PIDs.
"""
def add(pid):
proc = Process(pid)
_pmap[proc.pid] = proc
return proc
def remove(pid):
_pmap.pop(pid, None)
a = set(get_pid_list())
b = set(_pmap.keys())
new_pids = a - b
gone_pids = b - a
for pid in gone_pids:
remove(pid)
for pid, proc in sorted(list(_pmap.items()) + \
list(dict.fromkeys(new_pids).items())):
try:
if proc is None: # new process
yield add(pid)
else:
# use is_running() to check whether PID has been reused by
# another process in which case yield a new Process instance
if proc.is_running():
yield proc
else:
yield add(pid)
except NoSuchProcess:
remove(pid)
except AccessDenied:
# Process creation time can't be determined hence there's
# no way to tell whether the pid of the cached process
# has been reused. Just return the cached version.
yield proc
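# Usage sketch: iterate over all processes, skipping the ones that disappear
# while they are being inspected:
#
#   import psutil
#   for proc in psutil.process_iter():
#       try:
#           print proc.pid, proc.name
#       except psutil.NoSuchProcess:
#           pass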
# =====================================================================
# --- CPU related functions
# =====================================================================
def cpu_times(percpu=False):
"""Return system-wide CPU times as a namedtuple object.
Every CPU time represents the time CPU has spent in the given mode.
The attributes availability varies depending on the platform.
Here follows a list of all available attributes:
- user
- system
- idle
- nice (UNIX)
- iowait (Linux)
- irq (Linux, FreeBSD)
- softirq (Linux)
    When percpu is True return a list of namedtuples for each CPU.
First element of the list refers to first CPU, second element
to second CPU and so on.
The order of the list is consistent across calls.
"""
if not percpu:
return _psplatform.get_system_cpu_times()
else:
return _psplatform.get_system_per_cpu_times()
_last_cpu_times = cpu_times()
_last_per_cpu_times = cpu_times(percpu=True)
def cpu_percent(interval=0.1, percpu=False):
"""Return a float representing the current system-wide CPU
utilization as a percentage.
When interval is > 0.0 compares system CPU times elapsed before
and after the interval (blocking).
When interval is 0.0 or None compares system CPU times elapsed
since last call or module import, returning immediately.
    In this case it is recommended for accuracy that this function be
called with at least 0.1 seconds between calls.
When percpu is True returns a list of floats representing the
utilization as a percentage for each CPU.
First element of the list refers to first CPU, second element
to second CPU and so on.
The order of the list is consistent across calls.
"""
global _last_cpu_times
global _last_per_cpu_times
blocking = interval is not None and interval > 0.0
def calculate(t1, t2):
t1_all = sum(t1)
t1_busy = t1_all - t1.idle
t2_all = sum(t2)
t2_busy = t2_all - t2.idle
# this usually indicates a float precision issue
if t2_busy <= t1_busy:
return 0.0
busy_delta = t2_busy - t1_busy
all_delta = t2_all - t1_all
busy_perc = (busy_delta / all_delta) * 100
return round(busy_perc, 1)
# system-wide usage
if not percpu:
if blocking:
t1 = cpu_times()
time.sleep(interval)
else:
t1 = _last_cpu_times
_last_cpu_times = cpu_times()
return calculate(t1, _last_cpu_times)
# per-cpu usage
else:
ret = []
if blocking:
tot1 = cpu_times(percpu=True)
time.sleep(interval)
else:
tot1 = _last_per_cpu_times
_last_per_cpu_times = cpu_times(percpu=True)
for t1, t2 in zip(tot1, _last_per_cpu_times):
ret.append(calculate(t1, t2))
return ret
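# Usage sketch (the numbers shown are illustrative only):
#
#   psutil.cpu_percent(interval=1)               # e.g. 12.3
#   psutil.cpu_percent(interval=1, percpu=True)  # e.g. [10.0, 14.5]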
# =====================================================================
# --- system memory related functions
# =====================================================================
def virtual_memory():
"""Return statistics about system memory usage as a namedtuple
including the following fields, expressed in bytes:
- total:
total physical memory available.
- available:
the actual amount of available memory that can be given
instantly to processes that request more memory in bytes; this
is calculated by summing different memory values depending on
the platform (e.g. free + buffers + cached on Linux) and it is
supposed to be used to monitor actual memory usage in a cross
platform fashion.
- percent:
the percentage usage calculated as (total - available) / total * 100
- used:
memory used, calculated differently depending on the platform and
designed for informational purposes only:
OSX: active + inactive + wired
BSD: active + wired + cached
LINUX: total - free
- free:
memory not being used at all (zeroed) that is readily available;
note that this doesn't reflect the actual memory available
(use 'available' instead)
Platform-specific fields:
- active (UNIX):
memory currently in use or very recently used, and so it is in RAM.
- inactive (UNIX):
memory that is marked as not used.
- buffers (BSD, Linux):
cache for things like file system metadata.
- cached (BSD, OSX):
cache for various things.
- wired (OSX, BSD):
memory that is marked to always stay in RAM. It is never moved to disk.
- shared (BSD):
memory that may be simultaneously accessed by multiple processes.
The sum of 'used' and 'available' does not necessarily equal total.
On Windows 'available' and 'free' are the same.
"""
return _psplatform.virtual_memory()
def swap_memory():
"""Return system swap memory statistics as a namedtuple including
the following attributes:
- total: total swap memory in bytes
- used: used swap memory in bytes
- free: free swap memory in bytes
- percent: the percentage usage
- sin: no. of bytes the system has swapped in from disk (cumulative)
- sout: no. of bytes the system has swapped out from disk (cumulative)
'sin' and 'sout' on Windows are meaningless and always set to 0.
"""
return _psplatform.swap_memory()
# =====================================================================
# --- disks/partitions related functions
# =====================================================================
def disk_usage(path):
"""Return disk usage statistics about the given path as a namedtuple
including total, used and free space expressed in bytes plus the
percentage usage.
"""
return _psplatform.get_disk_usage(path)
def disk_partitions(all=False):
"""Return mounted partitions as a list of namedtuples including
device, mount point, filesystem type and mount options (a raw
string separated by commas which may vary depending on the platform).
If "all" parameter is False return physical devices only and ignore
all others.
"""
return _psplatform.disk_partitions(all)
def disk_io_counters(perdisk=False):
"""Return system disk I/O statistics as a namedtuple including
the following attributes:
- read_count: number of reads
- write_count: number of writes
- read_bytes: number of bytes read
- write_bytes: number of bytes written
- read_time: time spent reading from disk (in milliseconds)
- write_time: time spent writing to disk (in milliseconds)
If perdisk is True return the same information for every
physical disk installed on the system as a dictionary
    with partition names as the keys and the namedtuple
described above as the values.
"""
rawdict = _psplatform.disk_io_counters()
if not rawdict:
raise RuntimeError("couldn't find any physical disk")
if perdisk:
for disk, fields in rawdict.items():
rawdict[disk] = _nt_disk_iostat(*fields)
return rawdict
else:
return _nt_disk_iostat(*[sum(x) for x in zip(*rawdict.values())])
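# Usage sketch (the 'sda1' disk name is hypothetical):
#
#   psutil.disk_io_counters()              # one namedtuple summed over all disks
#   psutil.disk_io_counters(perdisk=True)  # e.g. {'sda1': <namedtuple>, ...}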
# =====================================================================
# --- network related functions
# =====================================================================
def network_io_counters(pernic=False):
"""Return network I/O statistics as a namedtuple including
the following attributes:
- bytes_sent: number of bytes sent
- bytes_recv: number of bytes received
- packets_sent: number of packets sent
- packets_recv: number of packets received
- errin: total number of errors while receiving
- errout: total number of errors while sending
- dropin: total number of incoming packets which were dropped
- dropout: total number of outgoing packets which were dropped
(always 0 on OSX and BSD)
If pernic is True return the same information for every
network interface installed on the system as a dictionary
with network interface names as the keys and the namedtuple
described above as the values.
"""
rawdict = _psplatform.network_io_counters()
if not rawdict:
raise RuntimeError("couldn't find any network interface")
if pernic:
for nic, fields in rawdict.items():
rawdict[nic] = _nt_net_iostat(*fields)
return rawdict
else:
return _nt_net_iostat(*[sum(x) for x in zip(*rawdict.values())])
# =====================================================================
# --- other system related functions
# =====================================================================
def get_users():
"""Return users currently connected on the system as a list of
namedtuples including the following attributes.
- user: the name of the user
- terminal: the tty or pseudo-tty associated with the user, if any.
- host: the host name associated with the entry, if any.
- started: the creation time as a floating point number expressed in
seconds since the epoch.
"""
return _psplatform.get_system_users()
# =====================================================================
# --- deprecated functions
# =====================================================================
@_deprecated()
def get_process_list():
"""Return a list of Process class instances for all running
processes on the local machine (deprecated).
"""
return list(process_iter())
@_deprecated()
def phymem_usage():
"""Return the amount of total, used and free physical memory
on the system in bytes plus the percentage usage.
Deprecated by psutil.virtual_memory().
"""
mem = virtual_memory()
return _nt_sysmeminfo(mem.total, mem.used, mem.free, mem.percent)
@_deprecated("psutil.swap_memory()")
def virtmem_usage():
return swap_memory()
@_deprecated("psutil.phymem_usage().free")
def avail_phymem():
return phymem_usage().free
@_deprecated("psutil.phymem_usage().used")
def used_phymem():
return phymem_usage().used
@_deprecated("psutil.virtmem_usage().total")
def total_virtmem():
return virtmem_usage().total
@_deprecated("psutil.virtmem_usage().used")
def used_virtmem():
return virtmem_usage().used
@_deprecated("psutil.virtmem_usage().free")
def avail_virtmem():
return virtmem_usage().free
def test():
"""List info of all currently running processes emulating ps aux
output.
"""
import datetime
from psutil._compat import print_
today_day = datetime.date.today()
templ = "%-10s %5s %4s %4s %7s %7s %-13s %5s %7s %s"
attrs = ['pid', 'username', 'get_cpu_percent', 'get_memory_percent', 'name',
'get_cpu_times', 'create_time', 'get_memory_info']
if os.name == 'posix':
attrs.append('terminal')
print_(templ % ("USER", "PID", "%CPU", "%MEM", "VSZ", "RSS", "TTY", "START",
"TIME", "COMMAND"))
for p in sorted(process_iter(), key=lambda p: p.pid):
try:
pinfo = p.as_dict(attrs, ad_value='')
except NoSuchProcess:
pass
else:
if pinfo['create_time']:
ctime = datetime.datetime.fromtimestamp(pinfo['create_time'])
if ctime.date() == today_day:
ctime = ctime.strftime("%H:%M")
else:
ctime = ctime.strftime("%b%d")
cputime = time.strftime("%M:%S", time.localtime(sum(pinfo['cpu_times'])))
user = pinfo['username']
if os.name == 'nt' and '\\' in user:
user = user.split('\\')[1]
vms = pinfo['memory_info'] and \
int(pinfo['memory_info'].vms / 1024) or '?'
rss = pinfo['memory_info'] and \
int(pinfo['memory_info'].rss / 1024) or '?'
memp = pinfo['memory_percent'] and \
round(pinfo['memory_percent'], 1) or '?'
print_(templ % (user[:10],
pinfo['pid'],
pinfo['cpu_percent'],
memp,
vms,
rss,
pinfo.get('terminal', '') or '?',
ctime,
cputime,
pinfo['name'].strip() or '?'))
if __name__ == "__main__":
test()
del property, cached_property, division
if sys.version_info < (3, 0):
del num
| ktan2020/legacy-automation | win/Lib/site-packages/psutil/__init__.py | Python | mit | 43,170 |
#alexnet with lasagne
import lasagne
import os
import numpy as np
import yaml
import theano.tensor as T
import theano
class alexNet():
def __init__(self, config, inputVar):
#This class is designed only for the conv layers of alexnet (ie to extract alexnt features)
#the 4d blob is: batch, channels, rows, cols (bc01?)
self.config = config
batch_size = config['batch_size']
self.numLayers = config['numLayers']
initWeights = config['initWeights']
if initWeights:
self.weightsDir = config['weightsDir']
self.weightFileTag = config['weightFileTag']
imgRow = config['imgRow']
imgCol = config['imgCol']
self.layers = []
# parameters describing structure of alexnet
self.numGroups = [1,2,1,2,2]
self.numFeatureMaps = [96,256,384,384,256]
self.convKernelSize = [11,5,3,3,3]
self.convStride = [4,1,1,1,1]
self.poolKernelSize = [3,3,-1,-1,3]
self.poolStride = [2,2,-1,-1,2]
self.useLRN = [True,True,False,False,False]
meanVal = np.load(config['mean_file']).astype('float32')
inp = lasagne.layers.InputLayer(shape=(None,3,imgRow,imgCol), input_var=inputVar)
#using code from standardize layer of lasagne
inp = lasagne.layers.BiasLayer(inp, theano.shared(-meanVal), shared_axes=0)
inp.params[inp.b].remove('trainable') # Do not optimize the offset parameter
layer = inp
for layerNum in range(self.numLayers):
layer = self.createConvLayer(layer, layerNum)
#print lasagne.layers.get_output_shape(layer), 'ddd'
self.layers.append(layer)
self.outLayer = self.layers[-1] #the last layer is the output layer
def createConvLayer(self, inputLayer, layerNum):
def createConvLayerForSingleGroup(inp, numFeatureMaps, convKernelSize, convStride, weights, useLRN, poolKernelSize, poolStride):
layerOut = lasagne.layers.Conv2DLayer(incoming=inp, num_filters=numFeatureMaps, filter_size=(convKernelSize,)*2, stride=convStride, W=weights[0], b=weights[1], nonlinearity=lasagne.nonlinearities.rectify, pad='same')
if useLRN:
layerOut = lasagne.layers.LocalResponseNormalization2DLayer(layerOut, alpha=0.0001, k=2, beta=0.75, n=5)
if poolKernelSize > 0:
layerOut = lasagne.layers.MaxPool2DLayer(layerOut, pool_size=(poolKernelSize,)*2, stride=poolStride)
return layerOut
weights = self.getParams(layerNum)
groups = self.numGroups[layerNum]
numFeatureMaps = self.numFeatureMaps[layerNum]; convKernelSize = self.convKernelSize[layerNum]; convStride = self.convStride[layerNum]; useLRN = self.useLRN[layerNum]; poolKernelSize = self.poolKernelSize[layerNum]; poolStride = self.poolStride[layerNum]
if groups == 1:
layerOut = createConvLayerForSingleGroup(inputLayer, numFeatureMaps, convKernelSize, convStride, weights, useLRN, poolKernelSize, poolStride)
else:
splitPoint = self.numFeatureMaps[layerNum-1]/groups
slice0 = lasagne.layers.SliceLayer(inputLayer, indices = slice(0, splitPoint), axis=1)
slice1 = lasagne.layers.SliceLayer(inputLayer, indices = slice(splitPoint, None), axis=1)
layerOut0 = createConvLayerForSingleGroup(slice0, numFeatureMaps/2, convKernelSize, convStride, weights[0:2], useLRN, poolKernelSize, poolStride)
layerOut1 = createConvLayerForSingleGroup(slice1, numFeatureMaps/2, convKernelSize, convStride, weights[2:], useLRN, poolKernelSize, poolStride)
layerOut = lasagne.layers.ConcatLayer([layerOut0, layerOut1], axis=1)
return layerOut
def getParams(self, layerNum):
retVals = []
groups = self.numGroups[layerNum]
for group in range(groups):
fileName = self.weightsDir + 'W' + ('',str(group))[groups > 1] + '_' + str(layerNum) + self.weightFileTag + '.npy'
if os.path.exists(fileName):
W = np.cast['float32'](np.load(fileName))
#print W.shape, 'ddd'
#W is in shape: i01o (inp, row,col, output maps)
W = np.rollaxis(W, 3) #converts it to oi01
#print W.shape, 'ccc'
retVals += [lasagne.utils.create_param(W, W.shape, name='W_'+str(layerNum) + '_' + str(group))]
else:
                # randomly initialized params do not have names (unlike the
                # read-from-file weights in the if case above); can they be given names?
                print 'init weight ( '+fileName+ ' )not found. init-ing randomly'
                retVals += [lasagne.init.GlorotUniform()]
fileName = self.weightsDir + 'b' + ('',str(group))[groups > 1] + '_' + str(layerNum) + self.weightFileTag + '.npy'
if os.path.exists(fileName):
b = np.cast['float32'](np.load(fileName))
retVals += [lasagne.utils.create_param(b, b.shape, name='b_'+str(layerNum) + '_' + str(group))]
else:
                print 'init weight ( '+fileName+ ' )not found. init-ing randomly'
                retVals += [lasagne.init.Constant(0.)]
return retVals
'''
#usage
x = T.tensor4('x')
an = alexNet(yaml.load(open('tempConfig.yaml', 'r')), x)
print lasagne.layers.get_all_params(an.outLayer)
'''
| myt00seven/svrg | cifar/my_alexNet.py | Python | mit | 5,335 |
import requests
import json
import csv
def getcode(fname):
f_oe = open(fname,'r')
mylist = []
num_line = 0
for line in f_oe:
code=line.split('\t')[0]
if num_line>0:
mylist.append(code)
num_line = num_line +1
return mylist
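# Note (assumption inferred from the parsing above): input files such as
# 'sm.state.txt' are expected to be tab-delimited, with the series/state code
# in the first column and a header row on the first line, which getcode() skips.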
def main():
prefix = 'SM'
seasonal = 'U'
area='00000'
industry = '60540000'
datatype = '01'
#f_area = 'bd.state.txt'
#area_codes = getcode(f_area)
f_state = 'sm.state.txt'
states = getcode(f_state)
fout = open('sm_series_list.txt', 'w')
fno = open('sm_series_list_noresutls.txt','w')
series_list = []
for state in states:
seriesid = prefix + seasonal + state + area + industry + datatype
series_list.append(seriesid)
fout.write(seriesid)
fout.write('\n')
series_groups = [series_list[x:x+50] for x in xrange(0, len(series_list), 50)] #only take 50 per multi-series request
headers = {'Content-type': 'application/json'}
for group in series_groups:
data = json.dumps({
"seriesid": [seriesid],
"startyear": "2011",
"endyear": "2014",
"registrationKey": "8c3ab1a673a340d39b89f1823419ee79"
})
        # the API key is already carried in the JSON payload's registrationKey
        p = requests.post('http://api.bls.gov/publicAPI/v2/timeseries/data/', data=data, headers=headers)
json_data = json.loads(p.text)
if json_data['status'].find('REQUEST_NOT_PROCESSED')<0:
for series in json_data['Results']['series']:
for item in series['data']:
row = {
"series_id": series["seriesID"],
"year": item['year'],
"period": item['period'],
"value": item['value'],
"has_footnotes": False
}
if len(item["footnotes"]) > 2:
row["has_footnotes"] = True
with open('output/sm_data.csv', 'a') as outfile:
writer = csv.DictWriter(outfile, ["series_id", "year", "period", "value", "has_footnotes"])
writer.writerow(row)
outfile.close()
        else:
            # the whole batch failed; record every series id in the group
            for failed_id in group:
                print failed_id
                fno.write(failed_id)
                fno.write('\n')
            print json_data['status']
            print json_data['message']
if __name__ == "__main__":
main()
| DistrictDataLabs/02-labormatch | labormatch/BLS_ingest2.py | Python | apache-2.0 | 2,774 |
#!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import urllib
import urllib2
import json
import sys
import logging
import datetime
from datetime import datetime
import time
import re
logger = logging.getLogger("sensei_components")
# Regular expression that matches a range facet value
RANGE_REGEX = re.compile(r'''\[(\d+(\.\d+)*|\*) TO (\d+(\.\d+)*|\*)\]''')
SELECTION_TYPE_RANGE = 1
SELECTION_TYPE_SIMPLE = 2
SELECTION_TYPE_TIME = 3
#
# REST API parameter constants
#
PARAM_OFFSET = "start"
PARAM_COUNT = "rows"
PARAM_QUERY = "q"
PARAM_QUERY_PARAM = "qparam"
PARAM_SORT = "sort"
PARAM_SORT_ASC = "asc"
PARAM_SORT_DESC = "desc"
PARAM_SORT_SCORE = "relevance"
PARAM_SORT_SCORE_REVERSE = "relrev"
PARAM_SORT_DOC = "doc"
PARAM_SORT_DOC_REVERSE = "docrev"
PARAM_FETCH_STORED = "fetchstored"
PARAM_SHOW_EXPLAIN = "showexplain"
PARAM_ROUTE_PARAM = "routeparam"
PARAM_GROUP_BY = "groupby"
PARAM_MAX_PER_GROUP = "maxpergroup"
PARAM_SELECT = "select"
PARAM_SELECT_VAL = "val"
PARAM_SELECT_NOT = "not"
PARAM_SELECT_OP = "op"
PARAM_SELECT_OP_AND = "and"
PARAM_SELECT_OP_OR = "or"
PARAM_SELECT_PROP = "prop"
PARAM_FACET = "facet"
PARAM_DYNAMIC_INIT = "dyn"
PARAM_PARTITIONS = "partitions"
PARAM_FACET_EXPAND = "expand"
PARAM_FACET_MAX = "max"
PARAM_FACET_MINHIT = "minhit"
PARAM_FACET_ORDER = "order"
PARAM_FACET_ORDER_HITS = "hits"
PARAM_FACET_ORDER_VAL = "val"
PARAM_DYNAMIC_TYPE = "type"
PARAM_DYNAMIC_TYPE_STRING = "string"
PARAM_DYNAMIC_TYPE_BYTEARRAY = "bytearray"
PARAM_DYNAMIC_TYPE_BOOL = "boolean"
PARAM_DYNAMIC_TYPE_INT = "int"
PARAM_DYNAMIC_TYPE_LONG = "long"
PARAM_DYNAMIC_TYPE_DOUBLE = "double"
PARAM_DYNAMIC_VAL = "vals"
PARAM_RESULT_PARSEDQUERY = "parsedquery"
PARAM_RESULT_HIT_STORED_FIELDS = "stored"
PARAM_RESULT_HIT_STORED_FIELDS_NAME = "name"
PARAM_RESULT_HIT_STORED_FIELDS_VALUE = "val"
PARAM_RESULT_HIT_EXPLANATION = "explanation"
PARAM_RESULT_FACETS = "facets"
PARAM_RESULT_TID = "tid"
PARAM_RESULT_TOTALDOCS = "totaldocs"
PARAM_RESULT_NUMHITS = "numhits"
PARAM_RESULT_HITS = "hits"
PARAM_RESULT_HIT_UID = "uid"
PARAM_RESULT_HIT_DOCID = "docid"
PARAM_RESULT_HIT_SCORE = "score"
PARAM_RESULT_HIT_SRC_DATA = "srcdata"
PARAM_RESULT_TIME = "time"
PARAM_RESULT_SELECT_LIST = "select_list"
PARAM_RESULT_ERRORS = "errors"
PARAM_RESULT_ERROR_MESSAGE = "message"
PARAM_RESULT_ERROR_TYPE = "errorType"
PARAM_RESULT_ERROR_CODE = "errorCode"
PARAM_SYSINFO_NUMDOCS = "numdocs"
PARAM_SYSINFO_LASTMODIFIED = "lastmodified"
PARAM_SYSINFO_VERSION = "version"
PARAM_SYSINFO_FACETS = "facets"
PARAM_SYSINFO_FACETS_NAME = "name"
PARAM_SYSINFO_FACETS_RUNTIME = "runtime"
PARAM_SYSINFO_FACETS_PROPS = "props"
PARAM_SYSINFO_CLUSTERINFO = "clusterinfo"
PARAM_SYSINFO_CLUSTERINFO_ID = "id"
PARAM_SYSINFO_CLUSTERINFO_PARTITIONS = "partitions"
PARAM_SYSINFO_CLUSTERINFO_NODELINK = "nodelink"
PARAM_SYSINFO_CLUSTERINFO_ADMINLINK = "adminlink"
PARAM_RESULT_HITS_EXPL_VALUE = "value"
PARAM_RESULT_HITS_EXPL_DESC = "description"
PARAM_RESULT_HITS_EXPL_DETAILS = "details"
PARAM_RESULT_FACET_INFO_VALUE = "value"
PARAM_RESULT_FACET_INFO_COUNT = "count"
PARAM_RESULT_FACET_INFO_SELECTED = "selected"
#
# JSON API parameter constants
#
JSON_PARAM_COLUMNS = "columns"
JSON_PARAM_EXPLAIN = "explain"
JSON_PARAM_FACETS = "facets"
JSON_PARAM_FACET_INIT = "facetInit"
JSON_PARAM_FETCH_STORED = "fetchStored"
JSON_PARAM_FETCH_TERM_VECTORS = "fetchTermVectors"
JSON_PARAM_FILTER = "filter"
JSON_PARAM_FROM = "from"
JSON_PARAM_GROUPBY = "groupBy"
JSON_PARAM_PARTITIONS = "partitions"
JSON_PARAM_QUERY = "query"
JSON_PARAM_QUERY_STRING = "query_string"
JSON_PARAM_ROUTEPARAM = "routeParam"
JSON_PARAM_SELECTIONS = "selections"
JSON_PARAM_SIZE = "size"
JSON_PARAM_SORT = "sort"
JSON_PARAM_TOP = "top"
JSON_PARAM_VALUES = "values"
JSON_PARAM_EXCLUDES = "excludes"
JSON_PARAM_OPERATOR = "operator"
JSON_PARAM_NO_OPTIMIZE = "_noOptimize"
# Group by related column names
GROUP_VALUE = "groupvalue"
GROUP_HITS = "grouphits"
# Default constants
DEFAULT_REQUEST_OFFSET = 0
DEFAULT_REQUEST_COUNT = 10
DEFAULT_REQUEST_MAX_PER_GROUP = 10
DEFAULT_FACET_MINHIT = 1
DEFAULT_FACET_MAXHIT = 10
DEFAULT_FACET_ORDER = PARAM_FACET_ORDER_HITS
#
# Utilities for result display
#
def print_line(keys, max_lens, char='-', sep_char='+'):
sys.stdout.write(sep_char)
for key in keys:
sys.stdout.write(char * (max_lens[key] + 2) + sep_char)
sys.stdout.write('\n')
def print_header(keys, max_lens, char='-', sep_char='+'):
print_line(keys, max_lens, char=char, sep_char=sep_char)
sys.stdout.write('|')
for key in keys:
sys.stdout.write(' %s%s |' % (key, ' ' * (max_lens[key] - len(key))))
sys.stdout.write('\n')
print_line(keys, max_lens, char=char, sep_char=sep_char)
def print_footer(keys, max_lens, char='-', sep_char='+'):
print_line(keys, max_lens, char=char, sep_char=sep_char)
def safe_str(obj):
"""Return the byte string representation of obj."""
try:
return str(obj)
except UnicodeEncodeError:
# obj is unicode
return unicode(obj).encode("unicode_escape")
class SenseiClientError(Exception):
"""Exception raised for all errors related to Sensei client."""
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class SenseiFacet:
def __init__(self,expand=False,minHits=1,maxCounts=10,orderBy=PARAM_FACET_ORDER_HITS):
self.expand = expand
self.minHits = minHits
self.maxCounts = maxCounts
self.orderBy = orderBy
class SenseiSelections:
def __init__(self, type):
self.type = type;
self.selection = {}
def get_type(self):
return self.type
def get_selection(self):
return self.selection
class SenseiQuery:
def __init__(self, type):
self.type = type
self.query = {}
def get_type(self):
return self.type
def get_query(self):
return self.query
class SenseiQueryMatchAll(SenseiQuery):
def __init__(self):
SenseiQuery.__init__(self, "match_all")
self.query={"match_all":{"boost":1.0}}
def set_boost(self, boost):
target = (self.query)["match_all"]
target["boost"]=boost
class SenseiQueryIDs(SenseiQuery):
def __init__(self, values, excludes):
SenseiQuery.__init__(self, "ids")
self.query={"ids" : {"values" : [], "excludes":[], "boost":1.0}}
if isinstance(values, list) and isinstance(excludes, list):
self.query = {"ids" : {"values" : values, "excludes":excludes, "boost":1.0}}
def add_values(self, values):
if self.query.has_key("ids"):
values_excludes = self.query["ids"]
if values_excludes.has_key("values"):
orig_values = values_excludes["values"]
orig_set = set(orig_values)
for new_value in values:
if new_value not in orig_set:
orig_values.append(new_value)
def add_excludes(self, excludes):
if self.query.has_key("ids"):
values_excludes = self.query["ids"]
if values_excludes.has_key("excludes"):
orig_excludes = values_excludes["excludes"]
orig_set = set(orig_excludes)
for new_value in excludes:
if new_value not in orig_set:
orig_excludes.append(new_value)
def set_boost(self, boost):
target = (self.query)["ids"]
target["boost"]=boost
class SenseiQueryString(SenseiQuery):
def __init__(self, query):
SenseiQuery.__init__(self, "query_string")
self.query={"query_string":{"query":query,
"default_field":"contents",
"default_operator":"OR",
"allow_leading_wildcard":True,
"lowercase_expanded_terms":True,
"enable_position_increments":True,
"fuzzy_prefix_length":0,
"fuzzy_min_sim":0.5,
"phrase_slop":0,
"boost":1.0,
"auto_generate_phrase_queries":False,
"fields":[],
"use_dis_max":True,
"tie_breaker":0
}}
def set_field(self, field):
self.query["query_string"]["default_field"]=field
def set_operator(self, operator):
self.query["query_string"]["default_operator"]=operator
def set_allow_leading_wildcard(self, allow_leading_wildcard):
self.query["query_string"]["allow_leading_wildcard"]=allow_leading_wildcard
def set_lowercase_expanded_terms(self, lowercase_expanded_terms):
self.query["query_string"]["lowercase_expanded_terms"]=lowercase_expanded_terms
def set_enable_position_increments(self, enable_position_increments):
self.query["query_string"]["enable_position_increments"]=enable_position_increments
def set_fuzzy_prefix_length(self, fuzzy_prefix_length):
self.query["query_string"]["fuzzy_prefix_length"]=fuzzy_prefix_length
def set_fuzzy_min_sim(self, fuzzy_min_sim):
self.query["query_string"]["fuzzy_min_sim"]=fuzzy_min_sim
def set_phrase_slop(self, phrase_slop):
self.query["query_string"]["phrase_slop"]=phrase_slop
def set_boost(self, boost):
self.query["query_string"]["boost"]=boost
def set_auto_generate_phrase_queries(self, auto_generate_phrase_queries):
self.query["query_string"]["auto_generate_phrase_queries"]=auto_generate_phrase_queries
def set_fields(self, fields):
if isinstance(fields, list):
self.query["query_string"]["fields"]=fields
def set_use_dis_max(self, use_dis_max):
self.query["query_string"]["use_dis_max"]=use_dis_max
def set_tie_breaker(self, tie_breaker):
self.query["query_string"]["tie_breaker"]=tie_breaker
class SenseiQueryText(SenseiQuery):
def __init__(self, message, operator, type):
SenseiQuery.__init__(self, "text")
self.query={"text":{"message":message, "operator":operator, "type":type}}
class SenseiQueryTerm(SenseiQuery):
def __init__(self, column, value):
SenseiQuery.__init__(self, "term")
self.query={"term":{column:{"value":value, "boost":1.0}}}
def set_boost(self, boost):
target = (self.query)["term"]
        for column, desc in target.items():
desc["boost"]=boost
class SenseiFilter:
def __init__(self, type):
self.type = type
self.filter = {}
def get_type(self):
return self.type
def get_filter(self):
return self.filter
class SenseiFilterIDs(SenseiFilter):
def __init__(self, values, excludes):
SenseiFilter.__init__(self, "ids")
self.filter={"ids" : {"values" : [], "excludes":[]}}
if isinstance(values, list) and isinstance(excludes, list):
self.filter = {"ids" : {"values" : values, "excludes":excludes}}
def add_values(self, values):
if self.filter.has_key("ids"):
values_excludes = self.filter["ids"]
if values_excludes.has_key("values"):
orig_values = values_excludes["values"]
orig_set = set(orig_values)
for new_value in values:
if new_value not in orig_set:
orig_values.append(new_value)
def add_excludes(self, excludes):
if self.filter.has_key("ids"):
values_excludes = self.filter["ids"]
if values_excludes.has_key("excludes"):
orig_excludes = values_excludes["excludes"]
orig_set = set(orig_excludes)
for new_value in excludes:
if new_value not in orig_set:
orig_excludes.append(new_value)
class SenseiFilterBool(SenseiFilter):
def __init__(self, must_filter=None, must_not_filter=None, should_filter=None):
SenseiFilter.__init__(self, "bool");
self.filter = {"bool":{"must":{}, "must_not":{}, "should":{}}}
if must_filter is not None and isinstance(must_filter, SenseiFilter):
target = (self.filter)["bool"]
target["must"]=must_filter
if must_not_filter is not None and isinstance(must_not_filter, SenseiFilter):
target = (self.filter)["bool"]
target["must_not"]=must_not_filter
if should_filter is not None and isinstance(should_filter, SenseiFilter):
target = (self.filter)["bool"]
target["should"]=should_filter
class SenseiFilterAND(SenseiFilter):
def __init__(self, filter_list):
SenseiFilter.__init__(self, "and")
self.filter={"and":[]}
old_filter_list = (self.filter)["and"]
if isinstance(filter_list, list):
for new_filter in filter_list:
if isinstance(new_filter, SenseiFilter):
old_filter_list.append(new_filter.get_filter())
class SenseiFilterOR(SenseiFilter):
def __init__(self, filter_list):
SenseiFilter.__init__(self, "or")
self.filter={"or":[]}
old_filter_list = (self.filter)["or"]
if isinstance(filter_list, list):
for new_filter in filter_list:
if isinstance(new_filter, SenseiFilter):
old_filter_list.append(new_filter.get_filter())
class SenseiFilterTerm(SenseiFilter):
def __init__(self, column, value, noOptimize=False):
SenseiFilter.__init__(self, "term")
self.filter={"term":{column:{"value": value, "_noOptimize":noOptimize}}}
class SenseiFilterTerms(SenseiFilter):
def __init__(self, column, values=None, excludes=None, operator="or", noOptimize=False):
SenseiFilter.__init__(self, "terms")
self.filter={"terms":{}}
if values is not None and isinstance(values, list):
if excludes is not None and isinstance(excludes, list):
# complicated mode
self.filter={"terms":{column:{"values":values, "excludes":excludes, "operator":operator, "_noOptimize":noOptimize}}}
else:
self.filter={"terms":{column:values}}
class SenseiFilterRange(SenseiFilter):
def __init__(self, column, from_val, to_val):
SenseiFilter.__init__(self, "range")
self.filter={"range":{column:{"from":from_val, "to":to_val, "_noOptimize":False}}}
def set_No_optimization(self, type, date_format=None):
range = (self.filter)["range"]
for key, value in range.items():
if value is not None:
value["_type"] = type
value["_noOptimize"] = True
if type == "date" and date_format is not None:
value["_date_format"]=date_format
class SenseiFilterQuery(SenseiFilter):
def __init__(self, query):
SenseiFilter.__init__(self, "query")
self.filter={"query":{}}
if isinstance(query, SenseiQuery):
self.filter={"query": query.get_query()}
class SenseiFilterSelection(SenseiFilter):
def __init__(self, selection):
SenseiFilter.__init__(self, "selection")
self.filter = {"selection":{}}
if isinstance(selection, SenseiSelections):
self.filter={"selection":selection.get_selection()}
class SenseiSelection:
def __init__(self, field, operation=PARAM_SELECT_OP_OR):
self.field = field
self.operation = operation
self.type = None
self.values = []
self.excludes = []
self.properties = {}
def __str__(self):
return ("Selection:%s:%s:%s:%s" %
(self.field, self.operation,
','.join(self.values), ','.join(self.excludes)))
def _get_type(self, value):
if isinstance(value, basestring) and RANGE_REGEX.match(value):
return SELECTION_TYPE_RANGE
else:
return SELECTION_TYPE_SIMPLE
def addSelection(self, value, isNot=False):
val_type = self._get_type(value)
if not self.type:
self.type = val_type
elif self.type != val_type:
raise SenseiClientError("Value (%s) type mismatch for facet %s: "
% (value, self.field))
if isNot:
self.excludes.append(safe_str(value))
else:
self.values.append(safe_str(value))
def removeSelection(self, value, isNot=False):
if isNot:
self.excludes.remove(safe_str(value))
else:
self.values.remove(safe_str(value))
def addProperty(self, name, value):
self.properties[name] = value
def removeProperty(self, name):
del self.properties[name]
def getValues(self):
return self.values
def setValues(self, values):
self.values = []
if len(values) > 0:
for value in values:
self.addSelection(value)
def getExcludes(self):
return self.excludes
def setExcludes(self, excludes):
self.excludes = []
if len(excludes) > 0:
for value in excludes:
self.addSelection(value, True)
def getType(self):
return self.type
def setType(self, val_type):
self.type = val_type
def getSelectNotParam(self):
return "%s.%s.%s" % (PARAM_SELECT, self.field, PARAM_SELECT_NOT)
def getSelectNotParamValues(self):
return ",".join(self.excludes)
def getSelectOpParam(self):
return "%s.%s.%s" % (PARAM_SELECT, self.field, PARAM_SELECT_OP)
def getSelectValParam(self):
return "%s.%s.%s" % (PARAM_SELECT, self.field, PARAM_SELECT_VAL)
def getSelectValParamValues(self):
return ",".join(self.values)
def getSelectPropParam(self):
return "%s.%s.%s" % (PARAM_SELECT, self.field, PARAM_SELECT_PROP)
def getSelectPropParamValues(self):
return ",".join(key + ":" + self.properties.get(key)
for key in self.properties.keys())
class SenseiSort:
def __init__(self, field, reverse=False):
self.field = field
self.dir = None
if not (field == PARAM_SORT_SCORE or
field == PARAM_SORT_SCORE_REVERSE or
field == PARAM_SORT_DOC or
field == PARAM_SORT_DOC_REVERSE):
if reverse:
self.dir = PARAM_SORT_DESC
else:
self.dir = PARAM_SORT_ASC
def __str__(self):
return self.build_sort_field()
def build_sort_field(self):
if self.dir:
return self.field + ":" + self.dir
else:
return self.field
def build_sort_spec(self):
if self.dir:
return {self.field: self.dir}
elif self.field == PARAM_SORT_SCORE:
return "_score"
else:
return self.field
class SenseiFacetInitParams:
"""FacetHandler initialization parameters."""
def __init__(self):
self.bool_map = {}
self.int_map = {}
self.long_map = {}
self.string_map = {}
self.byte_map = {}
self.double_map = {}
# Getters for param names for different types
def get_bool_param_names(self):
return self.bool_map.keys()
def get_int_param_names(self):
return self.int_map.keys()
def get_long_param_names(self):
return self.long_map.keys()
def get_string_param_names(self):
return self.string_map.keys()
def get_byte_param_names(self):
return self.byte_map.keys()
def get_double_param_names(self):
return self.double_map.keys()
# Add param name, values
def put_bool_param(self, key, value):
if isinstance(value, list):
self.bool_map[key] = value
else:
self.bool_map[key] = [value]
def put_int_param(self, key, value):
if isinstance(value, list):
self.int_map[key] = value
else:
self.int_map[key] = [value]
def put_long_param(self, key, value):
if isinstance(value, list):
self.long_map[key] = value
else:
self.long_map[key] = [value]
def put_string_param(self, key, value):
if isinstance(value, list):
self.string_map[key] = value
else:
self.string_map[key] = [value]
def put_byte_param(self, key, value):
if isinstance(value, list):
self.byte_map[key] = value
else:
self.byte_map[key] = [value]
def put_double_param(self, key, value):
if isinstance(value, list):
self.double_map[key] = value
else:
self.double_map[key] = [value]
# Getters of param value(s) based on param names
def get_bool_param(self, key):
return self.bool_map.get(key)
def get_int_param(self, key):
return self.int_map.get(key)
def get_long_param(self, key):
return self.long_map.get(key)
def get_string_param(self, key):
return self.string_map.get(key)
def get_byte_param(self, key):
return self.byte_map.get(key)
def get_double_param(self, key):
return self.double_map.get(key)
class SenseiFacetInfo:
def __init__(self, name, runtime=False, props={}):
self.name = name
self.runtime = runtime
self.props = props
def get_name(self):
return self.name
def set_name(self, name):
self.name = name
def get_runtime(self):
return self.runtime
def set_runtime(self, runtime):
self.runtime = runtime
def get_props(self):
return self.props
def set_props(self, props):
self.props = props
class SenseiNodeInfo:
def __init__(self, id, partitions, node_link, admin_link):
self.id = id
self.partitions = partitions
self.node_link = node_link
self.admin_link = admin_link
def get_id(self):
return self.id
def get_partitions(self):
return self.partitions
def get_node_link(self):
return self.node_link
def get_admin_link(self):
return self.admin_link
class SenseiSystemInfo:
def __init__(self, json_data):
logger.debug("json_data = %s" % json_data)
self.num_docs = int(json_data.get(PARAM_SYSINFO_NUMDOCS))
self.last_modified = long(json_data.get(PARAM_SYSINFO_LASTMODIFIED))
self.version = json_data.get(PARAM_SYSINFO_VERSION)
self.facet_infos = []
for facet in json_data.get(PARAM_SYSINFO_FACETS):
facet_info = SenseiFacetInfo(facet.get(PARAM_SYSINFO_FACETS_NAME),
facet.get(PARAM_SYSINFO_FACETS_RUNTIME),
facet.get(PARAM_SYSINFO_FACETS_PROPS))
self.facet_infos.append(facet_info)
# TODO: get cluster_info
self.cluster_info = None
def display(self):
"""Display sysinfo."""
keys = ["facet_name", "facet_type", "runtime", "column", "column_type", "depends"]
max_lens = None
# XXX add existing flags
def get_max_lens(columns):
max_lens = {}
for column in columns:
max_lens[column] = len(column)
for facet_info in self.facet_infos:
props = facet_info.get_props()
tmp_len = len(facet_info.get_name())
if tmp_len > max_lens["facet_name"]:
max_lens["facet_name"] = tmp_len
tmp_len = len(props.get("type"))
if tmp_len > max_lens["facet_type"]:
max_lens["facet_type"] = tmp_len
# runtime can only contain "true" or "false", so len("runtime")
# is big enough
tmp_len = len(props.get("column"))
if tmp_len > max_lens["column"]:
max_lens["column"] = tmp_len
tmp_len = len(props.get("column_type"))
if tmp_len > max_lens["column_type"]:
max_lens["column_type"] = tmp_len
tmp_len = len(props.get("depends"))
if tmp_len > max_lens["depends"]:
max_lens["depends"] = tmp_len
return max_lens
max_lens = get_max_lens(keys)
print_header(keys, max_lens)
for facet_info in self.facet_infos:
props = facet_info.get_props()
sys.stdout.write('|')
val = facet_info.get_name()
sys.stdout.write(' %s%s |' % (val, ' ' * (max_lens["facet_name"] - len(val))))
val = props.get("type")
sys.stdout.write(' %s%s |' % (val, ' ' * (max_lens["facet_type"] - len(val))))
val = facet_info.get_runtime() and "true" or "false"
sys.stdout.write(' %s%s |' % (val, ' ' * (max_lens["runtime"] - len(val))))
val = props.get("column")
sys.stdout.write(' %s%s |' % (val, ' ' * (max_lens["column"] - len(val))))
val = props.get("column_type")
sys.stdout.write(' %s%s |' % (val, ' ' * (max_lens["column_type"] - len(val))))
val = props.get("depends")
sys.stdout.write(' %s%s |' % (val, ' ' * (max_lens["depends"] - len(val))))
sys.stdout.write('\n')
print_footer(keys, max_lens)
def get_num_docs(self):
return self.num_docs
def set_num_docs(self, num_docs):
self.num_docs = num_docs
def get_last_modified(self):
return self.last_modified
def set_last_modified(self, last_modified):
self.last_modified = last_modified
def get_facet_infos(self):
return self.facet_infos
def set_facet_infos(self, facet_infos):
self.facet_infos = facet_infos
def get_version(self):
return self.version
def set_version(self, version):
self.version = version
def get_cluster_info(self):
return self.cluster_info
def set_cluster_info(self, cluster_info):
self.cluster_info = cluster_info
class SenseiRequest:
def __init__(self,
bql_req=None,
offset=DEFAULT_REQUEST_OFFSET,
count=DEFAULT_REQUEST_COUNT,
max_per_group=DEFAULT_REQUEST_MAX_PER_GROUP,
facet_map=None):
self.qParam = {}
self.explain = False
self.route_param = None
self.prepare_time = 0 # Statement prepare time in milliseconds
self.stmt_type = "unknown"
if bql_req != None:
assert(facet_map)
time1 = datetime.now() # XXX need to move to SenseiClient
# ok, msg = bql_req.merge_selections()
# if not ok:
# raise SenseiClientError(msg)
self.stmt_type = bql_req.get_stmt_type()
if self.stmt_type == "desc":
self.index = bql_req.get_index()
else:
self.query = bql_req.get_query()
self.offset = bql_req.get_offset() or offset
self.count = bql_req.get_count() or count
self.columns = bql_req.get_columns()
self.sorts = bql_req.get_sorts()
self.selections = bql_req.get_selections()
self.filter = bql_req.get_filter()
self.query_pred = bql_req.get_query_pred()
self.facets = bql_req.get_facets()
# PARAM_RESULT_HIT_STORED_FIELDS is a reserved column name. If this
# column is selected, turn on fetch_stored flag automatically.
if (PARAM_RESULT_HIT_STORED_FIELDS in self.columns or
bql_req.get_fetching_stored()):
self.fetch_stored = True
else:
self.fetch_stored = False
self.groupby = bql_req.get_groupby()
self.max_per_group = bql_req.get_max_per_group() or max_per_group
self.facet_init_param_map = bql_req.get_facet_init_param_map()
delta = datetime.now() - time1
self.prepare_time = delta.seconds * 1000 + delta.microseconds / 1000
logger.debug("Prepare time: %sms" % self.prepare_time)
else:
self.query = None
self.offset = offset
self.count = count
self.columns = []
self.sorts = None
self.selections = []
self.filter = {}
self.query_pred = {}
self.facets = {}
self.fetch_stored = False
self.groupby = None
self.max_per_group = max_per_group
self.facet_init_param_map = {}
def set_offset(self, offset):
self.offset = offset
def set_count(self, count):
self.count = count
def set_query(self, query):
self.query = query
def set_explain(self, explain):
self.explain = explain
def set_fetch_stored(self, fetch_stored):
self.fetch_stored = fetch_stored
def set_route_param(self, route_param):
self.route_param = route_param
def set_sorts(self, sorts):
self.sorts = sorts
def append_sort(self, sort):
if isinstance(sort, SenseiSort):
if self.sorts is None:
self.sorts = []
self.sorts.append(sort)
else:
self.sorts.append(sort)
def set_filter(self, filter):
self.filter = filter
def set_selections(self, selections):
self.selections = selections
def append_term_selection(self, column, value):
if self.selections is None:
self.selections = []
term_selection = {"term": {column : {"value" : value}}}
self.selections.append(term_selection)
def append_terms_selection(self, column, values, excludes, operator):
if self.selections is None:
self.selections = []
        terms_selection = {"terms": {column: {"values": values, "excludes": excludes, "operator": operator}}}
        self.selections.append(terms_selection)
def append_range_selection(self, column, from_str="*", to_str="*", include_lower=True, include_upper=True):
if self.selections is None:
self.selections = []
range_selection = {"range":{column:{"to":to_str, "from":from_str, "include_lower":include_lower, "include_upper":include_upper}}}
self.selections.append(range_selection)
def append_path_selection(self, column, value, strict=False, depth=1):
if self.selections is None:
self.selections = []
path_selection = {"path": {column : {"value":value, "strict":strict, "depth":depth}}}
self.selections.append(path_selection)
def set_facets(self, facets):
self.facets = facets
def set_groupby(self, groupby):
self.groupby = groupby
def set_max_per_group(self, max_per_group):
self.max_per_group = max_per_group
def set_facet_init_param_map(self, facet_init_param_map):
self.facet_init_param_map = facet_init_param_map
def get_columns(self):
return self.columns
class SenseiHit:
def __init__(self):
self.docid = None
self.uid = None
self.srcData = {}
self.score = None
self.explanation = None
self.stored = None
def load(self, jsonHit):
self.docid = jsonHit.get(PARAM_RESULT_HIT_DOCID)
self.uid = jsonHit.get(PARAM_RESULT_HIT_UID)
self.score = jsonHit.get(PARAM_RESULT_HIT_SCORE)
srcStr = jsonHit.get(PARAM_RESULT_HIT_SRC_DATA)
self.explanation = jsonHit.get(PARAM_RESULT_HIT_EXPLANATION)
self.stored = jsonHit.get(PARAM_RESULT_HIT_STORED_FIELDS)
if srcStr:
self.srcData = json.loads(srcStr)
else:
self.srcData = None
class SenseiResultFacet:
value = None
count = None
selected = None
def load(self,json):
self.value=json.get(PARAM_RESULT_FACET_INFO_VALUE)
self.count=json.get(PARAM_RESULT_FACET_INFO_COUNT)
self.selected=json.get(PARAM_RESULT_FACET_INFO_SELECTED,False)
class SenseiResult:
"""Sensei search results for a query."""
def __init__(self, json_data):
logger.debug("json_data = %s" % json_data)
self.jsonMap = json_data
self.parsedQuery = json_data.get(PARAM_RESULT_PARSEDQUERY)
self.totalDocs = json_data.get(PARAM_RESULT_TOTALDOCS, 0)
self.time = json_data.get(PARAM_RESULT_TIME, 0)
self.total_time = 0
self.numHits = json_data.get(PARAM_RESULT_NUMHITS, 0)
self.hits = json_data.get(PARAM_RESULT_HITS)
self.errors = json_data.get(PARAM_RESULT_ERRORS)
map = json_data.get(PARAM_RESULT_FACETS)
self.facetMap = {}
if map:
for k, v in map.items():
facetList = []
for facet in v:
facetObj = SenseiResultFacet()
facetObj.load(facet)
facetList.append(facetObj)
self.facetMap[k]=facetList
def display(self, columns=['*'], max_col_width=40):
"""Print the results in SQL SELECT result format."""
keys = []
max_lens = None
has_group_hits = False
def get_max_lens(columns):
max_lens = {}
has_group_hits = False
srcdata_subcols = []
srcdata_subcols_selected = False
for col in columns:
max_lens[col] = len(col)
if re.match('_srcdata\.', col):
srcdata_subcols.append(col.split('.')[1])
if len(srcdata_subcols) > 0:
srcdata_subcols_selected = True
for hit in self.hits:
if srcdata_subcols_selected and hit.has_key('_srcdata'):
srcdata_json = json.loads(hit.get('_srcdata'))
for subcol in srcdata_subcols:
new_col = '_srcdata.' + subcol
if srcdata_json.has_key(subcol):
hit[new_col] = srcdata_json[subcol]
else:
hit[new_col] = '<Not Found>'
group_hits = [hit]
if hit.has_key(GROUP_HITS):
group_hits = hit.get(GROUP_HITS)
has_group_hits = True
for group_hit in group_hits:
for col in columns:
if group_hit.has_key(col):
v = group_hit.get(col)
else:
v = '<Not Found>'
if isinstance(v, list):
v = ','.join([safe_str(item) for item in v])
elif isinstance(v, (int, long, float)):
v = str(v)
value_len = len(v)
if value_len > max_lens[col]:
max_lens[col] = min(value_len, max_col_width)
return max_lens, has_group_hits
if not self.hits:
print "No hit is found."
return
elif not columns:
print "No column is selected."
return
if len(columns) == 1 and columns[0] == '*':
keys = self.hits[0].keys()
if GROUP_HITS in keys:
keys.remove(GROUP_HITS)
if GROUP_VALUE in keys:
keys.remove(GROUP_VALUE)
if PARAM_RESULT_HIT_SRC_DATA in keys:
keys.remove(PARAM_RESULT_HIT_SRC_DATA)
else:
keys = columns
max_lens, has_group_hits = get_max_lens(keys)
print_header(keys, max_lens,
has_group_hits and '=' or '-',
has_group_hits and '=' or '+')
# Print the results
for hit in self.hits:
group_hits = [hit]
if hit.has_key(GROUP_HITS):
group_hits = hit.get(GROUP_HITS)
for group_hit in group_hits:
sys.stdout.write('|')
for key in keys:
if group_hit.has_key(key):
v = group_hit.get(key)
else:
v = '<Not Found>'
if isinstance(v, list):
v = ','.join([safe_str(item) for item in v])
elif isinstance(v, (int, float, long)):
v = str(v)
else:
# The value may contain unicode characters
v = safe_str(v)
if len(v) > max_col_width:
v = v[:max_col_width]
sys.stdout.write(' %s%s |' % (v, ' ' * (max_lens[key] - len(v))))
sys.stdout.write('\n')
if has_group_hits:
print_line(keys, max_lens)
print_footer(keys, max_lens,
has_group_hits and '=' or '-',
has_group_hits and '=' or '+')
sys.stdout.write('%s %s%s in set, %s hit%s, %s total doc%s (server: %sms, total: %sms)\n' %
(len(self.hits),
has_group_hits and 'group' or 'row',
len(self.hits) > 1 and 's' or '',
self.numHits,
self.numHits > 1 and 's' or '',
self.totalDocs,
self.totalDocs > 1 and 's' or '',
self.time,
self.total_time
))
# Print facet information
for facet, values in self.jsonMap.get(PARAM_RESULT_FACETS).iteritems():
max_val_len = len(facet)
max_count_len = 1
for val in values:
max_val_len = max(max_val_len, min(max_col_width, len(val.get('value'))))
max_count_len = max(max_count_len, len(str(val.get('count'))))
total_len = max_val_len + 2 + max_count_len + 3
sys.stdout.write('+' + '-' * total_len + '+\n')
sys.stdout.write('| ' + facet + ' ' * (total_len - len(facet) - 1) + '|\n')
sys.stdout.write('+' + '-' * total_len + '+\n')
for val in values:
sys.stdout.write('| %s%s (%s)%s |\n' %
(val.get('value'),
' ' * (max_val_len - len(val.get('value'))),
val.get('count'),
' ' * (max_count_len - len(str(val.get('count'))))))
sys.stdout.write('+' + '-' * total_len + '+\n')
| javasoze/sensei | clients/python/sensei/sensei_components.py | Python | apache-2.0 | 36,120 |
#
# Copyright 2008 Spencer Davis <[email protected]>
#
# This file is part of Tuxedo.
#
# Tuxedo is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# Tuxedo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tuxedo; if not, see <http://gnu.org/licenses/>.
#
import os
import sys
# Set constants for status
TASK_COMPLETED = 2
TASK_INPROGRESS = 1
TASK_NOTSTARTED = 0
# Set constants for priority
TASK_PRIORITY_HIGHEST = 0
TASK_PRIORITY_HIGH = 1
TASK_PRIORITY_NORMAL = 2
TASK_PRIORITY_LOW = 3
TASK_PRIORITY_LOWEST = 4
class Task:
"""Manages tasks."""
def __init__(self, name="", priority=TASK_PRIORITY_NORMAL,
status=TASK_NOTSTARTED, duedate=()):
"""Initializes values."""
# Initialize values
self.name = name
self.priority = priority
self.status = status
self.id = None
self.duedate = duedate
| Alwnikrotikz/tuxedo | src/task.py | Python | gpl-2.0 | 1,327 |
from conans.client.build.compiler_flags import format_frameworks
from conans.model import Generator
from conans.paths import BUILD_INFO_XCODE
class XCodeGenerator(Generator):
template = '''
HEADER_SEARCH_PATHS = $(inherited) {include_dirs}
LIBRARY_SEARCH_PATHS = $(inherited) {lib_dirs}
OTHER_LDFLAGS = $(inherited) {linker_flags} {libs} {system_libs} {frameworks}
GCC_PREPROCESSOR_DEFINITIONS = $(inherited) {definitions}
OTHER_CFLAGS = $(inherited) {c_compiler_flags}
OTHER_CPLUSPLUSFLAGS = $(inherited) {cxx_compiler_flags}
FRAMEWORK_SEARCH_PATHS = $(inherited) {rootpaths} {framework_paths}
'''
def __init__(self, conanfile):
super(XCodeGenerator, self).__init__(conanfile)
deps_cpp_info = conanfile.deps_cpp_info
self.include_dirs = " ".join('"%s"' % p.replace("\\", "/")
for p in deps_cpp_info.include_paths)
self.lib_dirs = " ".join('"%s"' % p.replace("\\", "/")
for p in deps_cpp_info.lib_paths)
self.libs = " ".join(['-l%s' % lib for lib in deps_cpp_info.libs])
self.definitions = " ".join('"%s"' % d for d in deps_cpp_info.defines)
self.c_compiler_flags = " ".join(deps_cpp_info.cflags)
self.cxx_compiler_flags = " ".join(deps_cpp_info.cxxflags)
self.linker_flags = " ".join(deps_cpp_info.sharedlinkflags)
self.rootpaths = " ".join('"%s"' % d.replace("\\", "/") for d in deps_cpp_info.rootpaths)
self.frameworks = " ".join(["-framework %s" % framework
for framework in deps_cpp_info.frameworks])
self.framework_paths = " ".join(deps_cpp_info.framework_paths)
self.system_libs = " ".join(['-l%s' % lib for lib in deps_cpp_info.system_libs])
@property
def filename(self):
return BUILD_INFO_XCODE
@property
def content(self):
return self.template.format(**self.__dict__)
| conan-io/conan | conans/client/generators/xcode.py | Python | mit | 1,935 |
# coding: utf-8
"""General configuration for musite.
Here you can change the site title, the listen address and port, and the
locations of the folders.
You can also set whether or not you are in development mode.
"""
import os
from pathlib import Path
TITRE = 'Musite'
HOTE = '0.0.0.0'
PORT = 8080
PWD = Path(os.path.realpath(__file__)).parents[1]
ETC = PWD / 'etc'
EXT = PWD / 'ext'
DATA = PWD / 'data'
I18N = PWD / 'i18n'
PAGES = PWD / 'pages'
PANDOC = PWD / 'modeles' / 'pandoc'
STATIC = PWD / 'static'
DOCS = STATIC / 'docs'
TMP = DOCS / 'tmp'
LANGUES = [
('fr', 'french'),
('en', 'english'),
]
LANGUE = 'fr'
MODELES = [
PWD / 'modeles' / 'css',
PWD / 'modeles' / 'html',
PWD / 'modeles' / 'ly',
PWD / 'modeles' / 'tex',
]
# Set the following parameter to False in production.
DEVEL = False
| jperon/musite | etc.sample/config.py | Python | mit | 848 |
#Class to Generate Requests for Peers
import math
import Queue
class Requester(object):
'Handles, Generates all the Requests for the Client'
def __init__(self, torrent, metainfo):
'Initializes all variables required'
self.request_queue = Queue.Queue()
self.current_piece_index = torrent.file_handler.piece_index
self.current_block_index = torrent.file_handler.block_index
self.block_size = torrent.file_handler.block_size
self.piece_size = torrent.file_handler.piece_size
self.total_pieces = torrent.file_handler.total_pieces
        self.no_of_blocks = math.ceil(self.piece_size / float(self.block_size))
        self.total_requests = 0
self.generateRequestQueue()
def generateRequestQueue(self):
'Generates a new Request Queue'
self.total_requests = 0
for i in range(int(self.no_of_blocks)):
self.request_queue.put([self.current_piece_index,i])
self.total_requests += 1
def generateNewRequest(self):
'Generates a new request to pass to peer'
ret_index = self.request_queue.get() # ret_index = [piece_index, block_index]
return ret_index[0], ret_index[1]
def incrementPiece(self):
'Modifies the Queue to generate a new one'
self.current_piece_index += 1
while self.request_queue.empty() is False:
self.request_queue.get()
self.generateRequestQueue()
def reEnqueueRequest(self, piece_index, block_index):
'Reenqueues the request if it is not satisfied'
self.request_queue.put([piece_index, block_index])
| ssksameer56/Torrent-O | RequestManager.py | Python | gpl-3.0 | 1,682 |
"""
===========================================
FeatureHasher and DictVectorizer Comparison
===========================================
Compares FeatureHasher and DictVectorizer by using both to vectorize
text documents.
The example demonstrates syntax and speed only; it doesn't actually do
anything useful with the extracted vectors. See the example scripts
{document_classification_20newsgroups,clustering}.py for actual learning
on text documents.
A discrepancy between the number of terms reported for DictVectorizer and
for FeatureHasher is to be expected due to hash collisions.
"""
# Author: Lars Buitinck <[email protected]>
# License: BSD 3 clause
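# A quick, hedged illustration of the collision effect mentioned in the
# docstring above (hypothetical values, not part of the benchmark below):
# with a deliberately tiny n_features, two distinct tokens can hash to the
# same column, e.g.
#
#     FeatureHasher(n_features=8, input_type="string").transform(
#         [["cat"], ["dog"]])
#
# may place both tokens in the same column, which is why the hashed term
# count reported below can be lower than DictVectorizer's exact count.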
from __future__ import print_function
from collections import defaultdict
import re
import sys
from time import time
import numpy as np
from sklearn.datasets import fetch_20newsgroups
from sklearn.feature_extraction import DictVectorizer, FeatureHasher
def n_nonzero_columns(X):
"""Returns the number of non-zero columns in a CSR matrix X."""
return len(np.unique(X.nonzero()[1]))
def tokens(doc):
"""Extract tokens from doc.
This uses a simple regex to break strings into tokens. For a more
principled approach, see CountVectorizer or TfidfVectorizer.
"""
return (tok.lower() for tok in re.findall(r"\w+", doc))
def token_freqs(doc):
"""Extract a dict mapping tokens from doc to their frequencies."""
freq = defaultdict(int)
for tok in tokens(doc):
freq[tok] += 1
return freq
categories = [
'alt.atheism',
'comp.graphics',
'comp.sys.ibm.pc.hardware',
'misc.forsale',
'rec.autos',
'sci.space',
'talk.religion.misc',
]
# Uncomment the following line to use a larger set (11k+ documents)
#categories = None
print(__doc__)
print("Usage: %s [n_features_for_hashing]" % sys.argv[0])
print(" The default number of features is 2**18.")
print()
try:
n_features = int(sys.argv[1])
except IndexError:
n_features = 2 ** 18
except ValueError:
print("not a valid number of features: %r" % sys.argv[1])
sys.exit(1)
print("Loading 20 newsgroups training data")
raw_data = fetch_20newsgroups(subset='train', categories=categories).data
data_size_mb = sum(len(s.encode('utf-8')) for s in raw_data) / 1e6
print("%d documents - %0.3fMB" % (len(raw_data), data_size_mb))
print()
print("DictVectorizer")
t0 = time()
vectorizer = DictVectorizer()
vectorizer.fit_transform(token_freqs(d) for d in raw_data)
duration = time() - t0
print("done in %fs at %0.3fMB/s" % (duration, data_size_mb / duration))
print("Found %d unique terms" % len(vectorizer.get_feature_names()))
print()
print("FeatureHasher on frequency dicts")
t0 = time()
hasher = FeatureHasher(n_features=n_features)
X = hasher.transform(token_freqs(d) for d in raw_data)
duration = time() - t0
print("done in %fs at %0.3fMB/s" % (duration, data_size_mb / duration))
print("Found %d unique terms" % n_nonzero_columns(X))
print()
print("FeatureHasher on raw tokens")
t0 = time()
hasher = FeatureHasher(n_features=n_features, input_type="string")
X = hasher.transform(tokens(d) for d in raw_data)
duration = time() - t0
print("done in %fs at %0.3fMB/s" % (duration, data_size_mb / duration))
print("Found %d unique terms" % n_nonzero_columns(X))
| RPGOne/Skynet | scikit-learn-c604ac39ad0e5b066d964df3e8f31ba7ebda1e0e/examples/text/hashing_vs_dict_vectorizer.py | Python | bsd-3-clause | 3,265 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import base
from keystoneclient import exceptions
from keystoneclient.i18n import _
class RoleAssignment(base.Resource):
"""Represents an Identity role assignment.
Attributes:
* role: an object which contains a role uuid
* user or group: an object which contains either a user or
group uuid
* scope: an object which has either a project or domain object
containing an uuid
"""
pass
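    # Illustrative shape only (the IDs below are hypothetical placeholders):
    # a role assignment returned by the API typically deserializes into
    # attributes such as
    #
    #     assignment.role  -> {'id': 'ROLE_UUID'}
    #     assignment.user  -> {'id': 'USER_UUID'}   (or assignment.group)
    #     assignment.scope -> {'project': {'id': 'PROJECT_UUID'}}
    #
    # mirroring the attribute list in the docstring above.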
class RoleAssignmentManager(base.CrudManager):
"""Manager class for manipulating Identity roles assignments."""
resource_class = RoleAssignment
collection_key = 'role_assignments'
key = 'role_assignment'
def _check_not_user_and_group(self, user, group):
if user and group:
msg = _('Specify either a user or group, not both')
raise exceptions.ValidationError(msg)
def _check_not_domain_and_project(self, domain, project):
if domain and project:
msg = _('Specify either a domain or project, not both')
raise exceptions.ValidationError(msg)
def list(self, user=None, group=None, project=None, domain=None, role=None,
effective=False):
"""Lists role assignments.
If no arguments are provided, all role assignments in the
system will be listed.
If both user and group are provided, a ValidationError will be
raised. If both domain and project are provided, it will also
raise a ValidationError.
:param user: User to be used as query filter. (optional)
:param group: Group to be used as query filter. (optional)
:param project: Project to be used as query filter.
(optional)
:param domain: Domain to be used as query
filter. (optional)
:param role: Role to be used as query filter. (optional)
:param boolean effective: return effective role
assignments. (optional)
"""
self._check_not_user_and_group(user, group)
self._check_not_domain_and_project(domain, project)
query_params = {}
if user:
query_params['user.id'] = base.getid(user)
if group:
query_params['group.id'] = base.getid(group)
if project:
query_params['scope.project.id'] = base.getid(project)
if domain:
query_params['scope.domain.id'] = base.getid(domain)
if role:
query_params['role.id'] = base.getid(role)
if effective:
query_params['effective'] = effective
return super(RoleAssignmentManager, self).list(**query_params)
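    # Hedged usage sketch for list() above -- "client", "project" and "user"
    # are hypothetical objects, not defined in this module:
    #
    #     client.role_assignments.list(project=project, effective=True)
    #     client.role_assignments.list(user=user, domain=domain)
    #
    # Each keyword simply contributes one of the query-string filters
    # assembled in the method body above.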
def create(self, **kwargs):
raise exceptions.MethodNotImplemented(
_('Create not supported for role assignments'))
def update(self, **kwargs):
raise exceptions.MethodNotImplemented(
_('Update not supported for role assignments'))
def get(self, **kwargs):
raise exceptions.MethodNotImplemented(
_('Get not supported for role assignments'))
def find(self, **kwargs):
raise exceptions.MethodNotImplemented(
_('Find not supported for role assignments'))
def put(self, **kwargs):
raise exceptions.MethodNotImplemented(
_('Put not supported for role assignments'))
def delete(self, **kwargs):
raise exceptions.MethodNotImplemented(
_('Delete not supported for role assignments'))
| alexpilotti/python-keystoneclient | keystoneclient/v3/role_assignments.py | Python | apache-2.0 | 4,121 |
# -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test Zenodo deposit REST API."""
from __future__ import absolute_import, print_function
import json
from invenio_search import current_search
from six import BytesIO
def test_bucket_create_delete(api_client, deposit, json_auth_headers,
deposit_url, get_json, license_record,
auth_headers, minimal_deposit):
"""Test bucket creation/deletion of bucket with each deposit."""
client = api_client
headers = json_auth_headers
auth = auth_headers
# Create deposit
res = client.post(
deposit_url, data=json.dumps(minimal_deposit), headers=headers)
links = get_json(res, code=201)['links']
current_search.flush_and_refresh(index='deposits')
# Assert bucket was created and accessible
assert 'bucket' in links
res = client.get(links['bucket'], headers=auth)
assert res.status_code == 200
res = client.get(links['bucket'])
assert res.status_code == 404
# Upload object via files-rest.
object_url = links['bucket'] + '/viafilesrest'
res = client.put(
object_url,
input_stream=BytesIO(b'viafilesrest'),
headers=auth,
)
assert res.status_code == 200
# Get object via files-rest
res = client.get(object_url, headers=auth)
assert res.status_code == 200
# List files in deposit.
res = client.get(links['self'], headers=headers)
data = get_json(res, code=200)
assert len(data['files']) == 1
# Get file via deposit.
res = client.get(data['files'][0]['links']['self'], headers=headers)
data = get_json(res, code=200)
# Delete deposit
res = client.delete(links['self'], headers=auth)
assert res.status_code == 204
# Assert bucket no longer exists
res = client.get(links['bucket'], headers=auth)
assert res.status_code == 404
res = client.get(object_url, headers=auth)
assert res.status_code == 404
def test_bucket_create_publish(api_client, deposit, json_auth_headers,
deposit_url, get_json, license_record,
auth_headers, minimal_deposit):
"""Test bucket features on deposit publish."""
client = api_client
headers = json_auth_headers
auth = auth_headers
# Create deposit
res = client.post(
deposit_url, data=json.dumps(minimal_deposit), headers=headers)
links = get_json(res, code=201)['links']
current_search.flush_and_refresh(index='deposits')
# Upload file
res = client.put(
links['bucket'] + '/test.txt',
input_stream=BytesIO(b'testfile'),
headers=auth,
)
assert res.status_code == 200
# Publish deposit
res = client.post(links['publish'], headers=auth)
data = get_json(res, code=202)
# Bucket should be locked.
res = client.put(
links['bucket'] + '/newfile.txt',
input_stream=BytesIO(b'testfile'),
headers=auth,
)
assert res.status_code == 403
# Get deposit.
res = client.get(links['self'], headers=auth)
assert res.status_code == 200
# Get record.
res = client.get(data['links']['record'])
data = get_json(res, code=200)
# Assert record and deposit bucket is not identical.
assert data['links']['bucket'] != links['bucket']
# Get record bucket.
res = client.get(data['links']['bucket'])
assert res.status_code == 200
# Get file in bucket.
res = client.get(data['links']['bucket'] + '/test.txt')
assert res.status_code == 200
# Record bucket is also locked.
res = client.put(
data['links']['bucket'] + '/newfile.txt',
input_stream=BytesIO(b'testfile'),
headers=auth,
)
assert res.status_code == 404
# Delete deposit not allowed
res = client.delete(links['self'], headers=auth)
assert res.status_code == 403
def test_bucket_new_version(api_client, deposit, json_auth_headers,
deposit_url, get_json, license_record,
auth_headers, minimal_deposit):
"""Test bucket features on record new version."""
client = api_client
headers = json_auth_headers
auth = auth_headers
# Create deposit
res = client.post(
deposit_url, data=json.dumps(minimal_deposit), headers=headers)
links = get_json(res, code=201)['links']
current_search.flush_and_refresh(index='deposits')
# Upload file
res = client.put(
links['bucket'] + '/test.txt',
input_stream=BytesIO(b'testfile'),
headers=auth,
)
assert res.status_code == 200
# Publish deposit
res = client.post(links['publish'], headers=auth)
data = get_json(res, code=202)
# Get record
res = client.get(data['links']['record'])
data = get_json(res, code=200)
rec_v1_bucket = data['links']['bucket']
# Get deposit
res = client.get(links['self'], headers=auth)
links = get_json(res, code=200)['links']
dep_v1_bucket = links['bucket']
# Create new version
res = client.post(links['newversion'], headers=auth)
data = get_json(res, code=201)
# Get new version deposit
res = client.get(data['links']['latest_draft'], headers=auth)
data = get_json(res, code=200)
dep_v2_publish = data['links']['publish']
dep_v2_bucket = data['links']['bucket']
# Assert that all the buckets are different
assert len(set([rec_v1_bucket, dep_v1_bucket, dep_v2_bucket])) == 3
# Get file from old version deposit bucket
res = client.get(dep_v1_bucket + '/test.txt', headers=auth)
dep_v1_file_data = res.get_data(as_text=True)
# Get file from old version record bucket
res = client.get(rec_v1_bucket + '/test.txt')
rec_v1_file_data = res.get_data(as_text=True)
# Get file from new version deposit bucket
res = client.get(dep_v2_bucket + '/test.txt', headers=auth)
dep_v2_file_data = res.get_data(as_text=True)
# Assert that the file is the same in the new version
assert rec_v1_file_data == dep_v1_file_data == dep_v2_file_data
# Record bucket is unlocked.
res = client.put(
dep_v2_bucket + '/newfile.txt',
input_stream=BytesIO(b'testfile2'),
headers=auth,
)
assert res.status_code == 200
# Deleting files in new version deposit bucket is allowed
res = client.delete(dep_v2_bucket + '/newfile.txt', headers=auth)
assert res.status_code == 204
# Try to publish the new version
# Should fail (400), since the bucket contents is the same
res = client.post(dep_v2_publish, headers=auth)
data = get_json(res, code=400)
# Add another file, so that the bucket has a different content
res = client.put(
dep_v2_bucket + '/newfile2.txt',
input_stream=BytesIO(b'testfile3'),
headers=auth,
)
assert res.status_code == 200
# Publish new version deposit
res = client.post(dep_v2_publish, headers=auth)
data = get_json(res, code=202)
# Get record
res = client.get(data['links']['record'])
data = get_json(res, code=200)
rec_v2_bucket = data['links']['bucket']
# Assert that all the buckets are different
assert len(set(
[rec_v1_bucket, rec_v2_bucket, dep_v1_bucket, dep_v2_bucket])) == 4
# Create another new version
res = client.post(links['newversion'], headers=auth)
data = get_json(res, code=201)
# Get new version deposit
res = client.get(data['links']['latest_draft'], headers=auth)
data = get_json(res, code=200)
dep_v3_bucket = data['links']['bucket']
dep_v3_publish = data['links']['publish']
# Try to publish the new version without changes (should fail as before)
res = client.post(dep_v3_publish, headers=auth)
data = get_json(res, code=400)
# Deleting the file from v2 should be possible, but publishing should
# also fail since the contents will be the same as the very first version.
res = client.delete(dep_v3_bucket + '/newfile2.txt', headers=auth)
assert res.status_code == 204
res = client.post(dep_v3_publish, headers=auth)
data = get_json(res, code=400)
| jainaman224/zenodo | tests/unit/deposit/test_api_buckets.py | Python | gpl-2.0 | 9,093 |
"""
MAX31855_test.py
Alexander Hiam - 12/2012
Updated Oct. 2015
Example program for PyBBIO's MAX31855 library.
Reads the temperature from a MAX31855 thermocouple amplifier connected
over SPI.
This example program is in the public domain.
"""
from bbio import *
from bbio.libraries.MAX31855 import MAX31855
# Open SPI bus 1:
SPI1.open()
# Using SPI1 CS0 (P9_28):
cs_pin = 0
# Create an instance of the MAX31855 class:
thermocouple = MAX31855(SPI1, cs_pin)
def setup():
# Nothing to do here...
pass
def loop():
temp = thermocouple.readTempC()
if (temp == None):
# The MAX31855 reported an error, print it:
if thermocouple.error == thermocouple.OPEN_CIRCUIT:
print "Thermocouple not connected"
elif thermocouple.error == thermocouple.SHORT_TO_GND:
print "Thermocouple shorted to GND"
elif thermocouple.error == thermocouple.SHORT_TO_VCC:
print "Thermocouple shorted to VCC"
else:
print "Temp: {:0.2f} C".format(temp)
delay(1000)
run(setup, loop)
| graycatlabs/PyBBIO | examples/MAX31855_test.py | Python | mit | 1,015 |
from share.provider import ProviderAppConfig
from .harvester import SocarxivHarvester
# from providers.io.osf.preprints.normalizer import PreprintNormalizer
class AppConfig(ProviderAppConfig):
name = 'providers.org.socarxiv'
version = '0.0.1'
title = 'osf_preprints_socarxiv'
long_title = 'SocArXiv'
emitted_type = 'preprint'
home_page = 'https://socopen.org/'
harvester = SocarxivHarvester
# temporary change - switch back to PreprintNormalizer when preprint branding is complete
# normalizer = PreprintNormalizer
| zamattiac/SHARE | providers/org/socarxiv/apps.py | Python | apache-2.0 | 553 |
# coding: utf-8
"""
Tests for mogli.Molecule.calculate_bonds()
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import mogli
import pytest
def test_calculate_bonds_with_default_method():
"""
Calculate bonds with no parameters.
"""
molecule = mogli.read('examples/dna.xyz')[0]
assert molecule.bonds is None
molecule.calculate_bonds()
assert molecule.bonds is not None
assert molecule.bonds.method == 'radii'
def test_calculate_bonds_with_wrong_method():
"""
Calculate bonds with an invalid method name and check the expected failure.
"""
molecule = mogli.read('examples/dna.xyz')[0]
with pytest.raises(ValueError):
molecule.calculate_bonds(method='invalid_method_name')
def test_calculate_bonds_with_radii_method():
"""
Calculate bonds with the radii method.
"""
molecule = mogli.read('examples/dna.xyz')[0]
molecule.calculate_bonds(method='radii')
assert molecule.bonds is not None
def test_calculate_bonds_with_constant_delta_method_with_default_param():
"""
Calculate bonds with the constant_delta method without providing a value
for param and check the expected failure.
"""
molecule = mogli.read('examples/dna.xyz')[0]
with pytest.raises(ValueError):
molecule.calculate_bonds(method='constant_delta')
assert molecule.bonds is None
def test_calculate_bonds_with_constant_delta_method_with_explicit_param():
"""
Calculate bonds with the constant_delta method.
"""
molecule = mogli.read('examples/dna.xyz')[0]
molecule.calculate_bonds(method='constant_delta', param=3.0)
assert molecule.bonds is not None
def test_calculate_bonds_empty():
"""
Calculate bonds in a way that should not produce any bonds.
"""
molecule = mogli.read('examples/dna.xyz')[0]
molecule.calculate_bonds(method='constant_delta', param=0.0)
assert molecule.bonds is not None
assert len(molecule.bonds) == 0
def test_calculate_bonds_memoization_with_other_method_or_param():
"""
Calculate bonds and check the function memoization with varying parameters.
"""
molecule = mogli.read('examples/dna.xyz')[0]
assert molecule.bonds is None
molecule.calculate_bonds(method='constant_delta', param=0.0)
bonds = molecule.bonds
molecule.calculate_bonds(method='radii', param=0.0)
assert molecule.bonds is not bonds
bonds = molecule.bonds
molecule.calculate_bonds(method='radii')
assert molecule.bonds is not bonds
bonds = molecule.bonds
molecule.calculate_bonds(method='radii', param=0.0)
assert molecule.bonds is not bonds
def test_calculate_bonds_memoization_with_default_param():
"""
    Calculate bonds and check the function memoization for default parameters.
"""
molecule = mogli.read('examples/dna.xyz')[0]
assert molecule.bonds is None
molecule.calculate_bonds()
bonds = molecule.bonds
molecule.calculate_bonds()
assert molecule.bonds is bonds
def test_calculate_bonds_memoization_with_explicit_param():
"""
    Calculate bonds and check the function memoization for given parameters.
"""
molecule = mogli.read('examples/dna.xyz')[0]
assert molecule.bonds is None
molecule.calculate_bonds(param=1.0)
bonds = molecule.bonds
molecule.calculate_bonds(param=1.0)
assert molecule.bonds is bonds
if __name__ == '__main__':
test_calculate_bonds_with_default_method()
test_calculate_bonds_with_wrong_method()
test_calculate_bonds_with_radii_method()
test_calculate_bonds_with_constant_delta_method_with_default_param()
test_calculate_bonds_with_constant_delta_method_with_explicit_param()
test_calculate_bonds_empty()
test_calculate_bonds_memoization_with_other_method_or_param()
test_calculate_bonds_memoization_with_default_param()
test_calculate_bonds_memoization_with_explicit_param()
| sciapp/mogli | tests/test_bonds.py | Python | mit | 4,010 |
#!/usr/bin/env python
# -*- coding: utf-8
# ----------------------------------------------------------------------
# Flask web service for Golfred
# ----------------------------------------------------------------------
# Ivan Vladimir Meza-Ruiz/ ivanvladimir at turing.iimas.unam.mx
# 2015/IIMAS/UNAM
# ----------------------------------------------------------------------
from __future__ import print_function
from flask import Flask, request, Blueprint
from flask_sqlalchemy import SQLAlchemy
from flask.ext.triangle import Triangle
import golfred
import argparse
from rdflib import Graph
g = Graph()
g.parse('golfred/golfred.nt',format="nt")
app = Flask('golfred')
Triangle(app)
app.config.from_pyfile('server.cfg')
db = SQLAlchemy(app)
# Loading blueprints
from golfred_api import api
app.register_blueprint(api)
from golfred_web import web
app.register_blueprint(web)
| rcln/Golfred | golem/server/golfred/__init__.py | Python | gpl-2.0 | 881 |
import unittest
import pycap.protocol, pycap.constants
class ARPTest(unittest.TestCase):
def testCreate(self):
shw = '00:03:93:44:a9:92'
sp = '192.168.0.2'
thw = '00:50:ba:8f:c4:5f'
tp = '192.168.0.1'
arp = pycap.protocol.arp(shw, thw, sp, tp, pycap.constants.arp.ARPOP_REQUEST)
self.assertEqual(arp.sourcehardware, shw)
self.assertEqual(arp.targethardware, thw)
self.assertEqual(arp.sourceprotocol, sp)
self.assertEqual(arp.targetprotocol, tp)
self.assertEqual(arp.operation, pycap.constants.arp.ARPOP_REQUEST)
if __name__ == "__main__":
unittest.main()
| Aliced3645/DataCenterMarketing | pycap-0.1.6/test/test_arp.py | Python | apache-2.0 | 655 |
#!/usr/bin/python
#coding: utf-8
#Author: asher
#Date: 2017/10/20
#Purpose: get dell server info from api.dell.com
import ConfigParser
import time
import datetime
import requests
import fileinput
import sys
import os
import re
import Queue
from multiprocessing import Pool
import threading
import codecs
reload(sys)
#sys.setdefaultencoding( "utf-8" )
sys.setdefaultencoding( "GB2312" )
import json
#fileName = sys.argv[1]
def gereneData():
"""
    Put the reading of the common data into one function, so it does not have to be repeated in every function.
"""
global apikey,stgfile,newfile,alljson,bigdicts
fileName = os.path.abspath(__file__)
binPath = os.path.dirname(os.path.realpath(__file__))
basePath = os.path.dirname(binPath)
confPath = basePath + '/config/'
conf = ConfigParser.ConfigParser()
conf.read("%s/dell.ini" % confPath)
#####
apikey = conf.get("dell","apikey")
alljson = conf.get("dell","alljson")
####dell api
stgfile = conf.get("dell","stgfile")
newfile = conf.get("dell","newfile")
def getDellExpires(stg):
"""
    This function returns a dict containing the server model, the ship (start) date, the warranty
    end date, the service type, and how many days of warranty remain.
    The remaining time is, of course, calculated relative to the time of the query.
"""
    ## load the global variables
gereneData()
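    # Sketch of the dict assembled below (example values are made up):
    #   {'MachineDescription': 'PowerEdge R620',
    #    'StartDate': '2013-05-01 00:00:00',
    #    'EndDate': '2016-05-01 23:59:59',
    #    'ServiceLevelDescription': 'Next Business Day',
    #    'RemainDays': '120 '}
    # Fields that are missing from the API response fall back to the string 'None'.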
dicts = {}
payload = {"ID":""}
payload['ID'] = stg
api_url = 'https://sandbox.api.dell.com/support/assetinfo/v4/getassetwarranty'
headers = {"Content-Type":"application/x-www-form-urlencoded",
"apikey":"%s" % apikey,
"accept":"application/json"}
try:
r = requests.post(api_url, headers=headers, data=payload).json()
StartDate = r['AssetWarrantyResponse'][0]['AssetEntitlementData'][0]['StartDate']
MachineDescription = r['AssetWarrantyResponse'][0]['AssetHeaderData']['MachineDescription']
ServiceLevelDescription = r['AssetWarrantyResponse'][0]['AssetEntitlementData'][0]['ServiceLevelDescription']
EndDate = r['AssetWarrantyResponse'][0]['AssetEntitlementData'][0]['EndDate']
if MachineDescription:
dicts['MachineDescription'] = MachineDescription
else:
dicts['MachineDescription'] = 'None'
if EndDate:
            ## calculate the remaining warranty time
###get warranty time
timestg = EndDate.split('T')
expiretime = ' '.join(timestg)
dicts['EndDate'] = expiretime
nowtime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
expire1 = datetime.datetime.strptime(expiretime,"%Y-%m-%d %H:%M:%S")
nowtime1 = datetime.datetime.strptime(nowtime,"%Y-%m-%d %H:%M:%S")
remaintime = str(expire1 - nowtime1)
avlTime = remaintime.split('days')[0]
dicts['RemainDays'] = avlTime
else:
dicts['EndDate'] = 'None'
if ServiceLevelDescription:
dicts['ServiceLevelDescription'] = ServiceLevelDescription
else:
dicts['ServiceLevelDescription'] = 'None'
if StartDate:
timestg = StartDate.split('T')
starttime = ' '.join(timestg)
dicts['StartDate'] = starttime
else:
dicts['StartDate'] = 'None'
return dicts
except:
##if stg is wrong, return nothing
pass
def doGetDellExpires(stg):
#stg1 = '7TQMKY1'
if getDellExpires(stg):
bigdicts[stg] = getDellExpires(stg)
#if Adicts:
# print Adicts['StartDate']
# print Adicts['EndDate']
# print Adicts['RemainDays']
# print Adicts
def writefile(stgfile,newfile,bigdicts):
"""
    stgfile lines use the format "ip,server model,service tag,..."; the warranty info fetched from the Dell API is written into the output file.
"""
with open(newfile,'w') as ff:
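        # Illustrative transformation of one input line (values are made up):
        #   in : 10.0.0.1,PowerEdge R620,ABC1234,rest,of,line
        #   out: 10.0.0.1,PowerEdge R620,ABC1234,StartDate,EndDate,RemainDays,rest,of,line
        # i.e. the three warranty fields are spliced in right after the service tag.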
with open(stgfile,'r') as f:
for fi in f.readlines():
lines = fi.split(',')
if len(lines) < 5:
                    print fi + ' is ' + ' wrong'
continue
stg = lines[2]
if bigdicts[stg]:
if bigdicts[stg]['EndDate']:
endtime = bigdicts[stg]['EndDate']
else:
endtime = 'no'
if bigdicts[stg]['StartDate']:
starttime = bigdicts[stg]['StartDate']
else:
starttime = 'no'
if bigdicts[stg]['RemainDays']:
RemainDays = bigdicts[stg]['RemainDays']
else:
RemainDays = 'no'
lines.insert(3,RemainDays)
lines.insert(3,endtime)
lines.insert(3,starttime)
newlines = ','.join(lines)
ff.write("%s\n" % newlines.strip().encode("GB2312"))
def writedict2json(bigdicts,alljson):
"""
    Write the dict to the JSON file.
bigdicts,alljson
"""
gereneData()
#print json.dumps(bigdicts,sort_keys=True, indent=4)
#print json.dumps(bigdicts,indent=4)
# print dicts
olddicts = {}
newdicts = {}
if os.path.isfile(alljson):
ff = open(alljson)
olddicts = json.load(ff)
#print bigdicts
for k in olddicts.keys():
if olddicts[k]:
pass
else:
del olddicts[k]
# print olddicts
    # merge the dicts and drop the duplicate entries
#newdicts = dict(olddicts, **bigdicts)
newdicts.update(olddicts)
for k in bigdicts.keys():
if bigdicts[k]:
pass
else:
del bigdicts[k]
newdicts.update(bigdicts)
# print newdicts
with open(alljson,'w') as f:
f.write(json.dumps(newdicts,indent=8))
f.write('\n')
def multiGetinfo():
"""
    Fetch the info concurrently.
"""
    ### load the global variables
gereneData()
stgs = []
global bigdicts
bigdicts = {}
#172.30.211.49,PowerEdge R620,36LMKY1,DiskNU:5,RAIDLOG:OK,IDRACLOG:OK
#172.30.187.84,PowerEdge R620,9THDZX1,10.131.174.11,开放,game1,完美国际,无锡国际,0|Failed|558.38 GB|HDD|SEAGATE|S0M0708J,IDRACLOG:OK
    ### read the previously saved info so the Dell API is not called again for known service tags
olddicts = {}
if os.path.isfile(alljson):
ff = open(alljson)
olddicts = json.load(ff)
for k in olddicts.keys():
if olddicts[k]:
pass
#print olddicts[k]
else:
del olddicts[k]
stgkeys = olddicts.keys()
with open(stgfile,'r') as f:
for fi in f.readlines():
lines = fi.split(',')
if len(lines) < 5:
print fi + ' is ' + ' wrong'
continue
stg = fi.split(',')[2]
if stg in stgkeys:
if olddicts[stg]:
bigdicts[stg] = olddicts[stg]
else:
bigdicts[stg] = {}
doGetDellExpires(stg)
else:
print stg
bigdicts[stg] = {}
doGetDellExpires(stg)
# T_threads.append(threading.Thread(target=doGetDellExpires,args=(stg,)))
# for i in range(len(T_threads)):
# T_threads[i].start()
# print bigdicts
# writefile(stgfile,newfile,bigdicts)
writedict2json(bigdicts,alljson)
if __name__ == '__main__':
# stg1 = '6K98JY1'
## stg1 = '7TQMKY1'
# Adicts = getDellExpires(stg1)
# if Adicts:
# print Adicts['StartDate']
# print Adicts['EndDate']
# print Adicts['RemainDays']
# print Adicts
# else:
# print 'error'
#
# gereneData()
multiGetinfo()
| lichengshuang/createvhost | python/asher/getcmdbinfo/bin/getWarranty.py | Python | apache-2.0 | 7,752 |
try:
import ujson as json
except:
import json
def main(j, args, params, tags, tasklet):
    # macro puts obj info as params on doc; when shown (used as a label), it shows the content of the obj in a nicely structured code block
nid = args.getTag('id')
gid = args.getTag('gid')
if not nid or not gid:
params.result = ('Node "id" and "gid" must be passed.', args.doc)
return params
gid = int(gid)
osis = j.core.portal.active.osis
node = osis.get('system', 'node', '%s_%s' % (gid, nid))
grid = {'name': 'N/A'}
if osis.exists('system', 'grid', gid):
grid = osis.get('system', 'grid', gid)
if not node:
params.result = ('Node with and id %s_%s not found' % (gid, nid), args.doc)
return params
def objFetchManipulate(id):
#obj is a dict
node["ipaddr"]=", ".join(node["ipaddr"])
node["roles"]=", ".join(node["roles"])
r=""
for mac in node["netaddr"].keys():
dev,ip=node["netaddr"][mac]
r+="|%-15s | %-20s | %s| \n"%(dev,mac,ip)
node["netaddr"]=r
node['gridname'] = grid['name']
return node
push2doc=j.apps.system.contentmanager.extensions.macrohelper.push2doc
return push2doc(args,params,objFetchManipulate)
def match(j, args, params, tags, tasklet):
return True
| Jumpscale/jumpscale6_core | apps/gridportal/base/Grid/.macros/wiki/node_getdata/1_node.py | Python | bsd-2-clause | 1,344 |
from sa_tools.base.magic import MagicMixin
from sa_tools.inbox import Inbox
from sa_tools.session import SASession
from sa_tools.index import Index
import os
import pickle
import sys
def py_ver() -> str:
return str(sys.version_info.major)
class APSession(object):
def __init__(self, username: str, passwd: str=None, save_session: bool=False, *args, **kwargs):
self.username = username
self.passwd = passwd
self._session_bak = \
'.' + username.replace(' ', '_') + py_ver() + '.bak'
self.session = self._get_session(save_session=save_session)
del passwd
del self.passwd
def _get_session(self, save_session: bool=True) -> SASession:
backup_exists = os.path.exists(self._session_bak)
# session = None
if backup_exists:
session = self._load_session()
else:
session = SASession(self.username, self.passwd)
if save_session:
self._save_session(session)
return session
def _load_session(self) -> None:
with open(self._session_bak, 'rb') as old_session:
print("Loading from backup: " + self._session_bak)
session = pickle.load(old_session)
return session
def _save_session(self, session: SASession) -> None:
with open(self._session_bak, 'wb') as session_file:
pickle.dump(session, session_file)
class AwfulPy(APSession, MagicMixin):
def __init__(self, username, *args, **kwargs):
super().__init__(username, *args, **kwargs)
self.index = Index(self.session)
self.inbox = Inbox(self.session)
self.name = "awful.py"
self.version = "v0.2014.08.24"
def __repr__(self):
info = '[' + self.name + ' ' + self.version + '] '
acct = 'Logged in as ' + self.username
login_time = ' on ' + self.session.login_time
return info + acct + login_time | thismachinechills/awful.py | awful.py | Python | gpl-3.0 | 1,950 |
from Screens.Screen import Screen
from Screens.InputBox import InputBox
from Screens.MessageBox import MessageBox
from Screens.VirtualKeyBoard import VirtualKeyBoard
from Screens.Console import Console
from Components.FileList import FileList
from Components.ActionMap import ActionMap
from Components.Label import Label
from Tools.LoadPixmap import LoadPixmap
from Tools.Directories import fileExists
from Components.ConfigList import ConfigListScreen
from Components.config import getConfigListEntry, ConfigYesNo, NoSave, config, ConfigFile, ConfigNothing, ConfigSelection
from Components.MultiContent import MultiContentEntryText, MultiContentEntryPixmapAlphaTest
from os import system, remove as os_remove
from nemesisTool import ListboxE1, GetSkinPath, ListboxE4, nemesisTool
from nemesisConsole import nemesisConsole
from nemesisShowPanel import nemesisShowPanel
from enigma import eTimer
from Tools.Directories import fileExists
t = nemesisTool()
configfile = ConfigFile()
def checkDev():
try:
mydev = []
f = open('/proc/mounts', 'r')
for line in f.readlines():
if (line.find('/cf') != -1):
mydev.append(('/media/cf/','COMPACT FLASH'))
if (line.find('/media/usb') != -1):
mydev.append(('/media/usb/','USB PEN'))
if (line.find('/hdd') != -1):
mydev.append(('/media/hdd/','HARD DISK'))
f.close()
if mydev:
return mydev
except:
return None
class NUtility(Screen):
__module__ = __name__
skin = """
<screen position="80,95" size="560,430">
<widget name="title" position="10,5" size="320,55" font="Regular;28" foregroundColor="#ff2525" backgroundColor="transpBlack" transparent="1"/>
<widget name="list" position="10,10" size="540,340" scrollbarMode="showOnDemand" />
<widget name="key_red" position="0,510" size="560,20" zPosition="1" font="Regular;22" valign="center" foregroundColor="#0064c7" backgroundColor="#9f1313" transparent="1" />
</screen>"""
def __init__(self, session):
Screen.__init__(self, session)
self.list = []
self.menuList = [
('Services',_('Start/Stop Services'),'icons/enigma.png'),
('Module',_('Manage Kernel Modules'),'icons/module.png'),
('Ssetup',_('Manage Startup Services'),'icons/log.png'),
('Slog',_('View Services Logs'),'icons/setup.png'),
('Ccommand',_('Execute commands'),'icons/terminal.png'),
('NUserScript',_('Execute Users Scripts'),'icons/user.png'),
('NSwap',_('Manage Swap File'),'icons/swapsettings.png'),
('Csave',_('Save Enigma Setting'),'icons/save.png')
]
self["title"] = Label(_("System Utility"))
self['list'] = ListboxE1(self.list)
self["key_red"] = Label(_("Exit"))
self['actions'] = ActionMap(['WizardActions','ColorActions'],
{
'ok': self.KeyOk,
"red": self.close,
'back': self.close
})
self.saveConfTimer = eTimer()
self.saveConfTimer.timeout.get().append(self.saveConf)
self.onShown.append(self.setWindowTitle)
self.onLayoutFinish.append(self.updateList)
def setWindowTitle(self):
self.setTitle(_("System Utility"))
def KeyOk(self):
self.sel = self["list"].getCurrent()[0]
if (self.sel == "Services"):
self.session.open(NServices)
elif (self.sel == "Module"):
self.session.open(NModule)
elif (self.sel == "Ssetup"):
self.session.open(NServicesSetup)
elif (self.sel == "Slog"):
self.session.open(NServicesLog)
elif (self.sel == "Ccommand"):
self.session.open(NCommand)
elif (self.sel == "NUserScript"):
self.session.open(NUserScript)
elif (self.sel == "NSwap"):
if checkDev() == None:
msg = _('No device for swap found!')
confBox = self.session.open(MessageBox, msg, MessageBox.TYPE_INFO)
confBox.setTitle(_("Swap Error"))
else:
self.session.open(NSwap)
elif (self.sel == "Csave"):
msg = _('Saving Enigma Setting\nPlease Wait...')
self.confBox = self.session.open(MessageBox, msg, MessageBox.TYPE_INFO)
self.confBox.setTitle(_("Saving"))
self.saveConfTimer.start(50, False)
def saveConf(self):
self.saveConfTimer.stop()
configfile.save()
self.confBox.close()
def updateList(self):
del self.list[:]
skin_path = GetSkinPath()
for men in self.menuList:
res = [men[0]]
res.append(MultiContentEntryText(pos=(50, 5), size=(300, 32), font=0, text=men[1]))
res.append(MultiContentEntryPixmapAlphaTest(pos=(5, 1), size=(34, 34), png=LoadPixmap(skin_path + men[2])))
self.list.append(res)
self['list'].l.setList(self.list)
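# Command launcher: shows "description:command" pairs read from
# /etc/custom_command and runs the selected (or a manually entered) command
# in a Console window.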
class NCommand(Screen):
__module__ = __name__
skin = """
<screen position="80,95" size="560,430">
<widget name="title" position="10,5" size="320,55" font="Regular;28" foregroundColor="#ff2525" backgroundColor="transpBlack" transparent="1"/>
<widget name="list" position="10,65" size="540,340" scrollbarMode="showOnDemand"/>
<widget name="key_red" position="0,510" size="280,20" zPosition="1" font="Regular;22" valign="center" foregroundColor="#0064c7" backgroundColor="#9f1313" transparent="1" />
<widget name="key_yellow" position="280,510" size="280,20" zPosition="1" font="Regular;22" valign="center" foregroundColor="#bab329" backgroundColor="#9f1313" transparent="1" />
</screen>"""
def __init__(self, session):
Screen.__init__(self, session)
self.list = []
self["title"] = Label(_("Execute commands"))
self['list'] = ListboxE4(self.list)
self["key_red"] = Label(_("Exit"))
self["key_yellow"] = Label(_("Custom"))
self['actions'] = ActionMap(['WizardActions','ColorActions'],
{
'ok': self.KeyOk,
"red": self.close,
"yellow": self.openCustom,
'back': self.close
})
self.onLayoutFinish.append(self.updateList)
self.onShown.append(self.setWindowTitle)
def setWindowTitle(self):
self.setTitle(_("Execute Commands"))
def KeyOk(self):
cmd = self["list"].getCurrent()[0]
self.runCommand(cmd)
def updateList(self):
del self.list[:]
if fileExists('/etc/custom_command'):
f = open('/etc/custom_command', 'r')
for line in f.readlines():
a = line.split(":")
res = [a[1].strip()]
res.append(MultiContentEntryText(pos=(0, 0), size=(340, 25), font=0, text=a[0].strip()))
self.list.append(res)
else:
res = ["None"]
res.append(MultiContentEntryText(pos=(0, 0), size=(340, 25), font=0, text=_("File /etc/custom_command not found!")))
self.list.append(res)
self['list'].l.setList(self.list)
def openCustom(self):
if config.nemesis.usevkeyboard.value:
self.session.openWithCallback(self.runCommand, VirtualKeyBoard, title = (_("Enter command to run:")), text = "")
else:
self.session.openWithCallback(self.runCommand,InputBox, title = _("Enter command to run:"), windowTitle = _("Execute Commands"), text="")
def runCommand(self, cmd):
if cmd is not None:
self.session.open(Console, title = (_('Execute command: ')) + cmd, cmdlist = [cmd])
class NUserScript(Screen):
__module__ = __name__
skin = """
<screen position="80,95" size="560,430">
<widget name="title" position="10,5" size="320,55" font="Regular;28" foregroundColor="#ff2525" backgroundColor="transpBlack" transparent="1"/>
<widget name="list" position="10,65" size="540,340" scrollbarMode="showOnDemand"/>
<widget name="key_red" position="0,510" size="510,20" zPosition="1" font="Regular;22" valign="center" foregroundColor="#0064c7" backgroundColor="#9f1313" transparent="1" />
</screen>"""
def __init__(self, session):
Screen.__init__(self, session)
self.list = []
self["title"] = Label(_("Execute Users Scripts"))
self['list'] = ListboxE4(self.list)
self["key_red"] = Label(_("Exit"))
self['actions'] = ActionMap(['WizardActions','ColorActions'],
{
'ok': self.KeyOk,
"red": self.close,
'back': self.close
})
self.onLayoutFinish.append(self.updateList)
self.onShown.append(self.setWindowTitle)
def setWindowTitle(self):
		self.setTitle(_("Execute Users Scripts"))
def KeyOk(self):
cmd = self["list"].getCurrent()[0]
if cmd:
self.runCommand("/usr/script/" + cmd + "_user.sh")
def updateList(self):
del self.list[:]
filelist = FileList("/usr/script", matchingPattern = "_user.sh")
for x in filelist.getFileList():
if x[0][1] != True:
scriptName = t.getScriptName(x[0][0][:-8])
res = [x[0][0][:-8]]
res.append(MultiContentEntryText(pos=(0, 0), size=(340, 25), font=0, text=scriptName))
self.list.append(res)
if len(self.list) == 0:
res = ["None"]
res.append(MultiContentEntryText(pos=(0, 0), size=(340, 25), font=0, text=_("No Users Script Found!")))
self.list.append(res)
self['list'].l.setList(self.list)
def runCommand(self, cmd):
if cmd is not None:
self.session.open(Console, title = (_('Execute script: ')) + cmd, cmdlist = [cmd])
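# Service control: each known service is looked up in the output of "ps -ef"
# to decide whether it is running, and OK starts or stops it through its
# init script.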
class NServices(Screen):
__module__ = __name__
skin = """
<screen position="80,95" size="560,430">
<widget name="title" position="10,5" size="320,55" font="Regular;28" foregroundColor="#ff2525" backgroundColor="transpBlack" transparent="1"/>
<widget name="list" position="10,65" size="540,340" scrollbarMode="showOnDemand" />
<widget name="key_red" position="0,510" size="280,20" zPosition="1" font="Regular;22" valign="center" foregroundColor="#0064c7" backgroundColor="#9f1313" transparent="1" />
<widget name="key_yellow" position="280,510" size="280,20" zPosition="1" font="Regular;22" valign="center" foregroundColor="#bab329" backgroundColor="#9f1313" transparent="1" />
</screen>"""
def __init__(self, session):
Screen.__init__(self, session)
self.list = []
self.servicesList = [
('nfs','/etc/init.d/nfsserver','[nfsd]','NFS Server'),
('smb','/etc/init.d/samba','/usr/sbin/smbd','Samba'),
('autofs','/etc/init.d/autofs','/usr/sbin/automount','Automount'),
('vpn','/etc/init.d/openvpn','/usr/sbin/openvpn','OpenVPN'),
			('ipupdate','/etc/init.d/ipupdate','/usr/bin/ez-ipupdate','IpUpdate'),
('inadyn','/etc/init.d/inadyn','inadyn','InaDyn'),
('ssh','/etc/init.d/dropbear','dropbear','Dropbear (SSH)'),
('vsftpd','/etc/init.d/vsftpd','/usr/sbin/vsftpd','FTP Server'),
('crond','/etc/init.d/busybox-cron','/usr/sbin/crond','Crontab')
]
self.servicestatus = {}
self["title"] = Label(_("Manage Services"))
self["key_red"] = Label(_("Exit"))
self["key_yellow"] = Label(_("Setup"))
self['list'] = ListboxE1(self.list)
self['actions'] = ActionMap(['WizardActions','ColorActions'],
{
'ok': self.KeyOk,
"yellow": self.openSetting,
"red": self.close,
'back': self.close
})
self.onShown.append(self.setWindowTitle)
self.onLayoutFinish.append(self.updateList)
def setWindowTitle(self):
self.setTitle(_("Manage services"))
def openSetting(self):
self.session.open(NServicesSetup)
def KeyOk(self):
ser = self['list'].getCurrent()[0]
if ser:
for s in self.servicesList:
if s[0] == ser:
cmd = {True:s[1] + ' stop',False:s[1] + ' start'}[self.servicestatus.get(s[0])]
self.session.openWithCallback(self.executedScript, nemesisConsole, cmd, _('Execute command: ') + cmd)
def executedScript(self, *answer):
self.updateList()
def readStatus(self):
for ser in self.servicesList:
self.servicestatus[ser[0]] = False
system("ps -ef > /tmp/status.log")
f = open('/tmp/status.log', 'r')
for line in f.readlines():
for ser in self.servicesList:
if (line.find(ser[2]) != -1):
self.servicestatus[ser[0]] = True
f.close()
def updateList(self):
self.readStatus()
del self.list[:]
skin_path = GetSkinPath() + 'menu/'
for ser in self.servicesList:
res = [ser[0]]
res.append(MultiContentEntryText(pos=(5, 5), size=(250, 32), font=0, text={False: _('Start'),True: _('Stop')}[self.servicestatus.get(ser[0])] + ' ' + ser[3]))
png = LoadPixmap({ True:skin_path + 'menu_on.png',False:skin_path + 'menu_off.png' }[self.servicestatus.get(ser[0])])
res.append(MultiContentEntryPixmapAlphaTest(pos=(260, 6), size=(80, 23), png=png))
self.list.append(res)
self['list'].l.setList(self.list)
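# Kernel module manager: loads/unloads the selected module with modprobe,
# detects loaded modules from "lsmod" and records them in /etc/nemesis.modules.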
class NModule(Screen):
__module__ = __name__
skin = """
<screen position="80,95" size="560,430">
<widget name="title" position="10,5" size="320,55" font="Regular;28" foregroundColor="#ff2525" backgroundColor="transpBlack" transparent="1"/>
<widget name="list" position="10,65" size="540,340" scrollbarMode="showOnDemand" />
<widget name="key_red" position="0,510" size="280,20" zPosition="1" font="Regular;22" valign="center" foregroundColor="#0064c7" backgroundColor="#9f1313" transparent="1" />
</screen>"""
def __init__(self, session):
Screen.__init__(self, session)
self.modules = [
('sr_mod','USB CD/DVD'),
('usbhid','USB Human Int. Device'),
('ftdi_sio','USB Serial (FTDI Smargo)'),
('pl2303','USB Serial (PL2303)'),
('tun','TUN (OpenVPN)'),
('rt73','WLAN Usb Adapter RT73'),
('zd1211b','WLAN Usb Adapter ZD1211B'),
('isofs','ISOFS (CD/DVD)'),
('cdfs','CDFS (Audio-CD)'),
('udf','UDF (CD/DVD)'),
('ntfs','NTFS (Windows)'),
('smbfs','SMBFS (Windows)')
]
self.modstatus = {}
self.list = []
self["title"] = Label(_("Manage Modules"))
self["key_red"] = Label(_("Exit"))
self['list'] = ListboxE1(self.list)
self['actions'] = ActionMap(['WizardActions','ColorActions'],
{
'ok': self.KeyOk,
"red": self.close,
'back': self.close
})
self.onShown.append(self.setWindowTitle)
self.onLayoutFinish.append(self.updateList)
def setWindowTitle(self):
self.setTitle(_("Manage Modules"))
def KeyOk(self):
sel = self['list'].getCurrent()[0]
if sel:
cmd = "modprobe " + {True:'-rv ',False:'-v '}[self.modstatus.get(sel)] + sel
self.session.openWithCallback(self.executedScript, nemesisConsole, cmd, _('Execute command: ') + sel)
def executedScript(self, *answer):
self.updateList()
def saveStatus(self):
out = open('/etc/nemesis.modules', 'w')
for mod in self.modules:
if self.modstatus.get(mod[0]):
out.write(mod[0] + '\n')
out.close()
def readStatus(self):
for mod in self.modules:
self.modstatus[mod[0]] = False
system("lsmod > /tmp/status.log")
f = open('/tmp/status.log', 'r')
for line in f.readlines():
for mod in self.modules:
if (line.find(mod[0]) != -1):
self.modstatus[mod[0]] = True
f.close()
self.saveStatus()
def updateList(self):
self.readStatus()
del self.list[:]
skin_path = GetSkinPath()
for mod in self.modules:
res = [mod[0]]
res.append(MultiContentEntryText(pos=(5, 5), size=(250, 32), font=0, text=mod[1]))
png = LoadPixmap({ True:skin_path + 'menu/menu_on.png',False:skin_path + 'menu/menu_off.png' }[self.modstatus.get(mod[0])])
res.append(MultiContentEntryPixmapAlphaTest(pos=(260, 6), size=(80, 23), png=png))
self.list.append(res)
self['list'].l.setList(self.list)
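# Startup services: one yes/no entry per service; saving creates or removes
# the corresponding SXX symlinks in the /etc/rc*.d runlevel directories.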
class NServicesSetup(Screen, ConfigListScreen):
__module__ = __name__
skin = """
<screen position="330,160" size="620,440" title="Manage Startup">
<eLabel position="0,0" size="620,2" backgroundColor="grey" zPosition="5"/>
<widget name="config" position="20,20" size="580,330" scrollbarMode="showOnDemand" />
<widget name="conn" position="20,350" size="580,30" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#6565ff" />
<eLabel position="0,399" size="620,2" backgroundColor="grey" zPosition="5"/>
<widget name="canceltext" position="20,400" zPosition="1" size="290,40" font="Regular;20" halign="center" valign="center" foregroundColor="red" transparent="1" />
<widget name="oktext" position="310,400" zPosition="1" size="290,40" font="Regular;20" halign="center" valign="center" foregroundColor="green" transparent="1" />
</screen>"""
def __init__(self, session):
Screen.__init__(self, session)
self.list = []
		self.initPath = ['/etc/rc2.d','/etc/rc3.d','/etc/rc4.d','/etc/rc5.d']
self.servicesList = [
('nfsserver','S20nfsserver',_('Activate NFS Server at boot?')),
('samba','S20samba',_('Activate Samba Server at boot?')),
('autofs','S21autofs',_('Activate Automount at boot?')),
('openvpn','S30openvpn',_('Activate OpenVPN at boot?')),
('ipupdate','S20ipupdate',_('Activate IpUpdate at boot?')),
('inadyn','S30inadyn',_('Activate InaDyn at boot?')),
('dropbear','S10dropbear',_('Activate Dropbear (SSH) at boot?')),
('vsftpd','S20vsftpd',_('Activate FTP Server at boot?')),
('busybox-cron','S99cron',_('Activate Crontab at boot?'))
]
self.serviceconfig = {}
ConfigListScreen.__init__(self, self.list)
self["oktext"] = Label(_("OK"))
self["canceltext"] = Label(_("Exit"))
self['conn'] = Label("")
self['conn'].hide()
self['actions'] = ActionMap(['WizardActions', 'ColorActions'],
{
"red": self.close,
"back": self.close,
"green": self.saveSetting
})
self.onShown.append(self.setWindowTitle)
self.onLayoutFinish.append(self.loadSetting)
def setWindowTitle(self):
self.setTitle(_("Manage Startup Services"))
def loadSetting(self):
del self.list[:]
for s in self.servicesList:
self.serviceconfig[s[0]] = NoSave(ConfigYesNo(default = False))
self.list.append(getConfigListEntry(s[2], self.serviceconfig[s[0]]))
if fileExists('/etc/rc3.d/' + s[1]):
self.serviceconfig[s[0]].value = True
self['config'].list = self.list
self['config'].l.setList(self.list)
def saveSetting(self):
self['conn'].show()
self['conn'].setText(_('Saving Setting. Please wait...'))
self.activityTimer = eTimer()
self.activityTimer.timeout.get().append(self.saveConf)
self.activityTimer.start(300, False)
def saveConf(self):
self.activityTimer.stop()
for p in self.initPath:
for s in self.servicesList:
system({True:'ln -s ../init.d/%s %s/%s' % (s[0],p,s[1]),False:'rm -f %s/%s' % (p,s[1])}[self.serviceconfig[s[0]].value])
self.close()
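# Log viewer: opens the selected service log in nemesisShowPanel; the yellow
# button opens the deleteLog screen to remove log files.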
class NServicesLog(Screen):
__module__ = __name__
skin = """
<screen position="80,95" size="560,430" title="Addons">
<widget name="title" position="10,5" size="320,55" font="Regular;28" foregroundColor="#ff2525" backgroundColor="transpBlack" transparent="1"/>
<widget name="list" position="10,10" size="540,340" scrollbarMode="showOnDemand" />
<widget name="key_red" position="0,510" size="560,20" zPosition="1" font="Regular;22" valign="center" foregroundColor="#0064c7" backgroundColor="#9f1313" transparent="1" />
<widget name="key_yellow" position="280,510" size="280,20" zPosition="1" font="Regular;22" valign="center" foregroundColor="#bab329" backgroundColor="#9f1313" transparent="1" />
</screen>"""
def __init__(self, session):
Screen.__init__(self, session)
self.list = []
self.logsList = [
('inadyn',config.inadyn.log.value.strip() +'/inadyn.log',_('Show InaDyn Log')),
('smb','/var/log/log.smbd',_('Show SMB Server Log')),
('nmb','/var/log/log.nmbd',_('Show NMB Log')),
('vsftpd','/var/log/vsftpd.log',_('Show FTP Server Log')),
('openvpn','/etc/openvpn/openvpn.log',_('Show OpenVPN Log'))
]
self["title"] = Label(_("Services Logs"))
self['list'] = ListboxE1(self.list)
self["key_red"] = Label(_("Exit"))
self["key_yellow"] = Label(_("Clear log"))
self.updateList()
self['actions'] = ActionMap(['WizardActions','ColorActions'],
{
'ok': self.KeyOk,
"yellow": self.deleteLog,
"red": self.close,
'back': self.close
})
self.onShown.append(self.setWindowTitle)
def setWindowTitle(self):
self.setTitle(_("Services Logs"))
def KeyOk(self):
log = self["list"].getCurrent()[0]
if log:
for l in self.logsList:
if l[0] == log:
cmd = l
self.session.open(nemesisShowPanel, cmd[1] ,cmd[0] + _(' logged info'))
def deleteLog(self):
self.session.open(deleteLog)
def updateList(self):
del self.list[:]
skin_path = GetSkinPath()
for log in self.logsList:
res = [log[0]]
res.append(MultiContentEntryText(pos=(50, 5), size=(300, 32), font=0, text=log[2]))
res.append(MultiContentEntryPixmapAlphaTest(pos=(5, 1), size=(34, 34), png=LoadPixmap(skin_path + 'icons/log.png')))
self.list.append(res)
self['list'].l.setList(self.list)
class deleteLog(Screen, ConfigListScreen):
__module__ = __name__
skin = """
<screen position="330,160" size="620,440" title="Delete log files">
<eLabel position="0,0" size="620,2" backgroundColor="grey" zPosition="5"/>
<widget name="config" position="20,20" size="580,330" scrollbarMode="showOnDemand" />
<widget name="conn" position="20,350" size="580,30" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#6565ff" />
<eLabel position="0,399" size="620,2" backgroundColor="grey" zPosition="5"/>
<widget name="canceltext" position="20,400" zPosition="1" size="290,40" font="Regular;20" halign="center" valign="center" foregroundColor="red" transparent="1" />
<widget name="oktext" position="310,400" zPosition="1" size="290,40" font="Regular;20" halign="center" valign="center" foregroundColor="green" transparent="1" />
</screen>"""
def __init__(self, session):
Screen.__init__(self, session)
self.list = []
self.logsList = [
('inadyn',config.inadyn.log.value.strip() +'/inadyn.log',_('Delete InaDyn log file?')),
('smb','/var/log/log.smbd',_('Delete SMB log file?')),
('nmb','/var/log/log.nmbd',_('Delete NMB log file?')),
('vsftpd','/var/log/vsftpd.log',_('Delete FTP log file?')),
('openvpn','/etc/openvpn/openvpn.log',_('Delete OpenVPN log file?')),
('enigma','/hdd/*.log',_('Delete Enigma Crash log file?'))
]
self.logconfig = {}
ConfigListScreen.__init__(self, self.list)
self["oktext"] = Label(_("Delete"))
self["canceltext"] = Label(_("Exit"))
self['conn'] = Label("")
self['conn'].hide()
self['actions'] = ActionMap(['WizardActions', 'ColorActions'],
{
"red": self.close,
"back": self.close,
"green": self.delLog
})
self.onShown.append(self.setWindowTitle)
self.onLayoutFinish.append(self.loadSetting)
def setWindowTitle(self):
self.setTitle(_("Delete Log Files"))
def loadSetting(self):
del self.list[:]
for l in self.logsList:
self.logconfig[l[0]] = NoSave(ConfigYesNo(default = False))
self.list.append(getConfigListEntry(l[2], self.logconfig[l[0]]))
self['config'].list = self.list
self['config'].l.setList(self.list)
def delLog(self):
self['conn'].show()
self['conn'].setText(_('Deleting log files. Please wait...'))
self.activityTimer = eTimer()
self.activityTimer.timeout.get().append(self.DLog)
self.activityTimer.start(300, False)
def DLog(self):
self.activityTimer.stop()
for l in self.logsList:
if self.logconfig[l[0]].value:
system("rm -f " + l[1])
self.close()
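# Swap file manager: reads the current state from /proc/swaps and, on save,
# either creates and activates a swap file (dd + mkswap + swapon) or
# deactivates and removes it (swapoff + rm) on the chosen device.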
class NSwap(Screen, ConfigListScreen):
__module__ = __name__
skin = """
<screen position="330,160" size="620,440">
<eLabel position="0,0" size="620,2" backgroundColor="grey" zPosition="5"/>
<widget name="config" position="20,20" size="580,330" scrollbarMode="showOnDemand" />
<widget name="conn" position="20,350" size="580,30" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#6565ff" />
<eLabel position="0,399" size="620,2" backgroundColor="grey" zPosition="5"/>
<widget name="canceltext" position="20,400" zPosition="1" size="290,40" font="Regular;20" halign="center" valign="center" foregroundColor="red" transparent="1" />
<widget name="oktext" position="310,400" zPosition="1" size="290,40" font="Regular;20" halign="center" valign="center" foregroundColor="green" transparent="1" />
</screen>"""
def __init__(self, session):
Screen.__init__(self, session)
self.list = []
ConfigListScreen.__init__(self, self.list)
self["oktext"] = Label(_("Save"))
self["canceltext"] = Label(_("Exit"))
self['conn'] = Label("")
self['conn'].hide()
self.active = False
self.loc = ''
self.size = 0
self.activityTimer = eTimer()
self['actions'] = ActionMap(['WizardActions', 'ColorActions'],
{
"red": self.close,
"back": self.close,
"green": self.saveSwap
})
self.loadSetting()
self.onShown.append(self.setWindowTitle)
def setWindowTitle(self):
self.setTitle(_("Manage Swap File"))
def loadSetting(self):
self.mydev = checkDev()
mystat = self.findSwap()
del self.list[:]
self.loc = self.mydev[0][0]
self.size = 32768
if mystat != None:
self.active = True
self.loc = mystat[0]
self.size = mystat[1] + 8
self.swap_active = NoSave(ConfigYesNo(default = self.active))
self.list.append(getConfigListEntry(_('Activate Swap File?'), self.swap_active))
self.swap_size = NoSave(ConfigSelection(default = self.size, choices =[
(8192,'8 MB'), (16384,'16 MB'), (32768,'32 MB'),
(65536,'64 MB'), (131072,'128 MB'), (262144,'256 MB')]))
self.list.append(getConfigListEntry(_('Swap file size'), self.swap_size))
self.swap_location = NoSave(ConfigSelection(default = self.loc, choices = self.mydev))
self.list.append(getConfigListEntry(_('Swap file location'), self.swap_location))
self['config'].list = self.list
self['config'].l.setList(self.list)
def saveSwap(self):
self['conn'].show()
self['conn'].setText(_('Saving swap config. Please wait...'))
self.activityTimer.timeout.get().append(self.Dsave)
self.activityTimer.start(500, False)
def Dsave(self):
self.activityTimer.stop()
swapfile = self.swap_location.value.strip() + 'swapfile'
cmd = ''
if (self.swap_active.value) and (not self.active):
cmd += "echo 'Creating swap file...'"
cmd += ' && '
cmd += 'dd if=/dev/zero of=' + swapfile + ' bs=1024 count=' + str(self.swap_size.value)
cmd += ' && '
cmd += "echo 'Creating swap device...'"
cmd += ' && '
cmd += 'mkswap ' + swapfile
cmd += ' && '
cmd += "echo 'Activating swap device...'"
cmd += ' && '
cmd += 'swapon ' + swapfile
self.session.openWithCallback(self.scriptReturn, nemesisConsole, cmd, _('Creating Swap file...'))
elif (not self.swap_active.value) and (self.active):
cmd += "echo 'Dectivating swap device...'"
cmd += ' && '
cmd += 'swapoff ' + swapfile
cmd += ' && '
cmd += "echo 'Removing swap file...'"
cmd += ' && '
cmd += 'rm -f ' + swapfile
self.session.openWithCallback(self.scriptReturn, nemesisConsole, cmd, _('Deleting Swap file...'))
else:
self['conn'].setText(_('Nothing to do!'))
def scriptReturn(self, *answer):
if answer[0] == nemesisConsole.EVENT_DONE:
self['conn'].setText(_('Swap process completed successfully!'))
else:
self['conn'].setText(_('Swap process killed by user!'))
self.loadSetting()
def findSwap(self):
try:
myswap = []
f = open('/proc/swaps', 'r')
for line in f.readlines():
if (line.find('/swapfile') != -1):
myswap = line.strip().split()
f.close()
if myswap:
return '/media/' + myswap[0].split("/")[2] + "/", int(myswap[2])
except:
return None
| kingvuplus/nn-gui | lib/python/Nemesis/nemesisUtility.py | Python | gpl-2.0 | 26,671 |
#!/usr/bin/env python3
"""
Copyright (C) 2016 Markus Doering
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from lxml import etree
from lxml import html
import json
import requests
import os
import io
import pprint
from urllib.parse import urljoin
import logging
LOGGER = logging.getLogger(__name__)
class Question:
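    """A single catalog question: its number, question text, image resources and answers."""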
def __init__(self, number: int):
self.number = int(number)
self.text = ""
self.resources = []
self.answers = []
def dictify(self):
return {"type": "question", "number": self.number, "text": self.text,
"resources": list(map(lambda x: x.dictify(), self.resources)),
"answers": self.answers}
def __str__(self):
return str(self.dictify())
class Image:
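    """An image referenced by a question; resolves the absolute source URL and the local target path."""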
def __init__(self, base, src, alt="", title="", basedir=None):
self.base = base
self.alt = alt
self.title = title
self.basedir = basedir
self.src = urljoin(self.base, src)
name = self.src.split("/")[-1]
if self.basedir is not None:
self.target = os.path.join(self.basedir, name)
else:
self.target = name
self.name = name
def dictify(self):
return {"type": "image", "remote_addr": self.src,
"local_addr": self.target}
def __str__(self):
return str(self.dictify())
class StateMachine:
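    """Walks the parsed HTML tree, collecting questions (top-level <ol>), answers (nested <ol>) and images into a catalog dict."""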
def __init__(self, basedir=None, download=False):
self._d = dict()
self._current_question = None
self._depth = 0
self._download = download
self._url = None
self._basedir = basedir if basedir is not None else "."
def store_catalog(self, mode="json"):
catalog = self.catalog(mode=mode)
with open(os.path.join(self._basedir, "catalog.json"), "w") as f:
f.write(catalog)
def catalog(self, mode="json"):
if mode == "python":
stream = io.StringIO()
pprint.pprint(self._d, stream)
return stream.getvalue()
elif mode == "json":
return json.dumps(self._d, sort_keys=True, indent=2)
else:
raise NotImplementedError("unknown mode '{}'".format(mode))
def set_url(self, url):
self._url = url
def parse(self, root: etree.Element):
if len(root) == 0:
return
for child in root:
self.handle_element(child)
def handle_element(self, elem: etree.Element):
if elem.tag == "ol":
self.handle_ol(elem)
elif elem.tag == "li":
self.handle_li(elem)
elif elem.tag == "img":
self.handle_img(elem)
else:
self.handle_other(elem)
def handle_ol(self, ol: etree.Element):
self._depth += 1
n = int(ol.get("start"))
if self._depth == 1:
# this is a new question
self._current_question = Question(int(n))
self.parse(ol)
if self._depth == 1:
dict_ = self._current_question.dictify()
self._d[dict_["number"]] = dict_
self._depth -= 1
def handle_li(self, li: etree.Element):
if self._depth == 1:
self._current_question.text = get_text(li)
elif self._depth == 2:
self._current_question.answers.append(get_text(li))
self.parse(li)
def handle_img(self, img: etree.Element):
if self._depth < 1:
            # we don't want those images
return
img = Image(self._url, img.get("src"), alt=img.get("alt"),
title=img.get("title"), basedir=self._basedir)
if self._download:
req = requests.get(img.src)
if req.status_code == 200:
with open(img.target, "wb") as out_file:
out_file.write(req.content)
else:
LOGGER.error("Error {} while accessing {}".format(
req.status_code, img.src))
self._current_question.resources.append(img)
def handle_other(self, other: etree.Element):
self.parse(other)
def get_text(node: etree.Element, stream: io.StringIO=None):
"""
Extract text from this node and all children until an <ol> occurs
"""
if stream is None:
start = True
stream = io.StringIO()
else:
start = False
def to_xml(s: str):
s = "" if s is None else s
return s.encode('ascii', 'xmlcharrefreplace').decode()
stream.write(to_xml(node.text))
for child in node:
if child.tag == "ol":
break
get_text(child, stream=stream)
if start:
# we are done, return the buffered string
return stream.getvalue()
else:
# we are in a child, append our tail to the total string
stream.write(to_xml(node.tail))
def main():
import argparse
logging.basicConfig()
LOGGER.setLevel(logging.INFO)
parser = argparse.ArgumentParser()
parser.add_argument("-o", "--outputdir", action="store",
default="data",
help="directory to store crawled questions")
parser.add_argument("-d", "--download", action="store_true",
default=False, help="download images")
parser.add_argument("-m", "--mode", action="store",
default="json", help="output format (python/json)")
args = parser.parse_args()
sm = StateMachine(download=args.download, basedir=args.outputdir)
urls = ["https://www.elwis.de/Freizeitschifffahrt/fuehrerscheininformationen/Fragenkatalog-See/Basisfragen/index.html",
"https://www.elwis.de/Freizeitschifffahrt/fuehrerscheininformationen/Fragenkatalog-See/See/index.html"]
for url in urls:
LOGGER.info("Processing {}".format(url))
req = requests.get(url)
if req.status_code != 200:
LOGGER.error("Error processing {}".format(url))
else:
tree = html.parse(io.BytesIO(req.content))
sm.set_url(url)
sm.parse(tree.getroot())
sm.store_catalog(mode=args.mode)
if __name__ == "__main__":
main()
| burgerdev/sbf-tools | crawler/crawl.py | Python | gpl-3.0 | 6,728 |
import datetime
from django.conf import settings
from django.core.cache import cache
from django.utils.translation import ugettext_lazy as _
from rest_framework import exceptions
from rest_framework.authentication import TokenAuthentication
EXPIRE_DAYS = getattr(settings, 'REST_FRAMEWORK_TOKEN_EXPIRE_DAYS', 1)
class ExpiringTokenAuthentication(TokenAuthentication):
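    """Token authentication that also rejects (and deletes) tokens older than EXPIRE_DAYS."""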
def authenticate_credentials(self, key):
# cache_user = cache.get('token_' + key)
# if cache_user:
# return (cache_user, key)
model = self.get_model()
try:
token = model.objects.get(key=key)
except model.DoesNotExist:
raise exceptions.AuthenticationFailed('Invalid token')
if not token.user.is_active:
raise exceptions.AuthenticationFailed('User inactive or deleted')
time_now = datetime.datetime.now()
token.created = token.created.replace(tzinfo=None)
# token.delete()
if token.created < time_now - datetime.timedelta(days=EXPIRE_DAYS):
token.delete()
raise exceptions.AuthenticationFailed('Token has expired')
if token:
cache.set('token_' + key, token.user, EXPIRE_DAYS * 24 * 60 * 60)
return (token.user, token)
| ZhangDubhe/Tropical-Cyclone-Information-System | TyphoonApi/TyphoonApi/authentication.py | Python | mit | 1,284 |
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright © 2016-2018 Cyril Desjouy <[email protected]>
#
# This file is part of cpyvke
#
# cpyvke is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# cpyvke is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with cpyvke. If not, see <http://www.gnu.org/licenses/>.
#
#
# Creation Date : Wed Nov 9 10:03:04 2016
# Last Modified : mar. 10 avril 2018 20:54:45 CEST
"""
-----------
DOCSTRING
@author: Cyril Desjouy
"""
import os
import sys
import locale
import logging
import argparse
import time
from jupyter_client import find_connection_file
from logging.handlers import RotatingFileHandler
from .curseswin.app import InitApp
from .curseswin.mainwin import MainWin
from .utils.config import cfg_setup
from .utils.kernel import connect_kernel, print_kernel_list
from .utils.kd import kd_status
from .utils.sockets import SocketManager
from .utils.term_colors import RED, RESET
locale.setlocale(locale.LC_ALL, '')
code = locale.getpreferredencoding()
def init_cf(lockfile):
""" Init connection file. """
with open(lockfile, 'r') as f:
kid = f.readline()
return find_connection_file(kid)
def with_daemon(lockfile, pidfile, cmd):
""" Launch daemon. """
os.system(cmd)
while os.path.exists(pidfile) is False:
time.sleep(0.1)
return init_cf(lockfile)
def no_lock_exit():
""" If no kd5.lock ! """
message = '{}Error :{}\tCannot find kd5.lock !\n\tFixing issues shutting down kd5...\n'
sys.stderr.write(message.format(RED, RESET))
os.system('kd5 stop')
sys.stderr.write("You can now restart cpyvke!\n")
sys.exit(1)
def parse_args(lockfile, pidfile):
""" Parse Arguments. """
parser = argparse.ArgumentParser()
parser.add_argument("-L", "--list", help="List all kernels",
action="store_true")
parser.add_argument("integer", help="Start up with existing kernel. \
INTEGER is the id of the connection file. \
INTEGER can also be the keyword 'last' for 'last kernel'",
nargs='?')
args = parser.parse_args()
pid = kd_status(pidfile)
if args.list:
print_kernel_list()
sys.exit(0)
elif os.path.exists(lockfile) and pid:
try:
cf = init_cf(lockfile)
except OSError:
sys.stderr.write('lockfile points to an unknown connection file.\n')
sys.stderr.write("Try 'kd5 stop'\n")
sys.exit(1)
if args.integer:
message = 'Daemon is already running. Dropping argument {}\n'
sys.stderr.write(message.format(args.integer))
time.sleep(1.5)
elif not os.path.exists(lockfile) and pid:
no_lock_exit()
elif args.integer == 'last' and not os.path.exists(lockfile):
no_lock_exit()
elif args.integer == 'last' and os.path.exists(lockfile):
cmd = 'kd5 last'
cf = with_daemon(lockfile, pidfile, cmd)
elif args.integer:
try:
find_connection_file(str(args.integer))
except OSError:
message = '{}Error :{}\tCannot find kernel id. {} !\n\tExiting\n'
sys.stderr.write(message.format(RED, RESET, args.integer))
sys.exit(1)
else:
cmd = 'kd5 start ' + str(args.integer)
cf = with_daemon(lockfile, pidfile, cmd)
else:
cmd = 'kd5 start'
cf = with_daemon(lockfile, pidfile, cmd)
return args, cf
def main(args=None):
""" Launch cpyvke. """
# Parse Config
cfg = cfg_setup()
config = cfg.run()
# Define Paths
logdir = os.path.expanduser('~') + '/.cpyvke/'
lockfile = logdir + 'kd5.lock'
pidfile = logdir + 'kd5.pid'
logfile = logdir + 'cpyvke.log'
# Logger
logger = logging.getLogger("cpyvke")
logger.setLevel(logging.DEBUG)
# create the logging file handler
handler = RotatingFileHandler(logfile, maxBytes=10*1024*1024,
backupCount=5)
logmsg = '%(asctime)s :: %(name)s :: %(threadName)s :: %(levelname)s :: %(message)s'
formatter = logging.Formatter(logmsg, datefmt='%Y-%m-%d - %H:%M:%S')
handler.setFormatter(formatter)
logger.addHandler(handler)
# Parse arguments
args, cf = parse_args(lockfile, pidfile)
# Init kernel
km, kc = connect_kernel(cf)
# Init Curses App
sock = SocketManager(config, logger)
app = InitApp(kc, cf, config, sock)
# Run App
logger.info('cpyvke started')
main_curse = MainWin(app, sock, logger)
main_curse.display()
if __name__ == "__main__":
main()
| ipselium/cpyvke | cpyvke/cpyvke.py | Python | gpl-3.0 | 5,093 |
from nose.tools import * # noqa
from modularodm import Q
from website.prereg import prereg_landing_page as landing_page
from website.prereg.utils import drafts_for_user, get_prereg_schema
from website.project.model import ensure_schemas, MetaSchema
from tests.base import OsfTestCase
from tests import factories
class TestPreregLandingPage(OsfTestCase):
def setUp(self):
super(TestPreregLandingPage, self).setUp()
ensure_schemas()
self.user = factories.UserFactory()
def test_no_projects(self):
assert_equal(
landing_page(user=self.user),
{
'has_projects': False,
'has_draft_registrations': False,
'campaign_long': 'Prereg Challenge',
'campaign_short': 'prereg',
}
)
def test_has_project(self):
factories.ProjectFactory(creator=self.user)
assert_equal(
landing_page(user=self.user),
{
'has_projects': True,
'has_draft_registrations': False,
'campaign_long': 'Prereg Challenge',
'campaign_short': 'prereg',
}
)
def test_has_project_and_draft_registration(self):
prereg_schema = MetaSchema.find_one(
Q('name', 'eq', 'Prereg Challenge')
)
factories.DraftRegistrationFactory(
initiator=self.user,
registration_schema=prereg_schema
)
assert_equal(
landing_page(user=self.user),
{
'has_projects': True,
'has_draft_registrations': True,
'campaign_long': 'Prereg Challenge',
'campaign_short': 'prereg',
}
)
def test_drafts_for_user_omits_registered(self):
prereg_schema = MetaSchema.find_one(
Q('name', 'eq', 'Prereg Challenge') &
Q('schema_version', 'eq', 2)
)
d1 = factories.DraftRegistrationFactory(
initiator=self.user,
registration_schema=prereg_schema
)
d2 = factories.DraftRegistrationFactory(
initiator=self.user,
registration_schema=prereg_schema
)
d3 = factories.DraftRegistrationFactory(
initiator=self.user,
registration_schema=prereg_schema
)
d1.registered_node = factories.ProjectFactory()
d1.save()
drafts = drafts_for_user(self.user, 'prereg')
for d in drafts:
assert_in(d._id, (d2._id, d3._id))
assert_not_equal(d._id, d1._id)
class TestPreregUtils(OsfTestCase):
def setUp(self):
super(TestPreregUtils, self).setUp()
ensure_schemas()
def test_get_prereg_schema_returns_prereg_metaschema(self):
schema = get_prereg_schema()
assert_is_instance(schema, MetaSchema)
assert_equal(schema.name, 'Prereg Challenge')
def test_get_prereg_schema_can_return_erpc_metaschema(self):
schema = get_prereg_schema('erpc')
assert_is_instance(schema, MetaSchema)
assert_equal(schema.name, 'Election Research Preacceptance Competition')
def test_get_prereg_schema_raises_error_for_invalid_campaign(self):
with assert_raises(ValueError):
get_prereg_schema(campaign='invalid')
| wearpants/osf.io | tests/test_prereg.py | Python | apache-2.0 | 3,366 |
"""__init__.py."""
| ciarams87/PyU4V | PyU4V/tools/__init__.py | Python | mit | 20 |
# -*- config:utf-8 -*-
"""
Copyright 2015 Airbridge
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from flask import Blueprint
from flask.ext.restful import Api, Resource
security = Blueprint('security', __name__)
api = Api(security)
class SecurityApi(Resource):
def get(self):
return {'module': 'security'}
api.add_resource(SecurityApi, '/security')
| lucasb/airbridge | api/security/controllers.py | Python | apache-2.0 | 893 |
import frappe
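# Patch: make existing Web Page View records use '/' as the homepage path instead of an empty string.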
def execute():
frappe.reload_doc('website', 'doctype', 'web_page_view', force=True)
frappe.db.sql("""UPDATE `tabWeb Page View` set path='/' where path=''""")
| frappe/frappe | frappe/patches/v13_0/set_path_for_homepage_in_web_page_view.py | Python | mit | 175 |
from segmentation import SimilarityIndex, BRAINSTalairach, BRAINSTalairachMask
from utilities import HistogramMatchingFilter
from classify import BRAINSPosteriorToContinuousClass
| grlee77/nipype | nipype/interfaces/semtools/brains/__init__.py | Python | bsd-3-clause | 179 |
from twilio.rest import TwilioRestClient
# Twilio phone number goes here. Grab one at https://twilio.com/try-twilio
# and use the E.164 format, for example: "+12025551234"
TWILIO_PHONE_NUMBER = ""
# list of one or more phone numbers to dial, in "+19732644210" format
DIAL_NUMBERS = ["",]
# URL location of TwiML instructions for how to handle the phone call
TWIML_INSTRUCTIONS_URL = \
"http://static.fullstackpython.com/phone-calls-python.xml"
# replace the placeholder values with your Account SID and Auth Token
# found on the Twilio Console: https://www.twilio.com/console
client = TwilioRestClient("ACxxxxxxxxxx", "yyyyyyyyyy")
def dial_numbers(numbers_list):
"""Dials one or more phone numbers from a Twilio phone number."""
for number in numbers_list:
print("Dialing " + number)
# set the method to "GET" from default POST because Amazon S3 only
# serves GET requests on files. Typically POST would be used for apps
client.calls.create(to=number, from_=TWILIO_PHONE_NUMBER,
url=TWIML_INSTRUCTIONS_URL, method="GET")
if __name__ == "__main__":
dial_numbers(DIAL_NUMBERS)
| mattmakai/python-twilio-example-apps | no-framework/phone-calls/phone_calls.py | Python | mit | 1,159 |
# -*- coding: utf-8 -*-
"""
***************************************************************************
GridDataMetrics.py
---------------------
Date : October 2013
Copyright : (C) 2013 by Alexander Bruy
Email : alexander dot bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Alexander Bruy'
__date__ = 'October 2013'
__copyright__ = '(C) 2013, Alexander Bruy'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from qgis.PyQt.QtGui import QIcon
from qgis.core import (QgsRasterFileWriter,
QgsProcessing,
QgsProcessingParameterDefinition,
QgsProcessingParameterFeatureSource,
QgsProcessingParameterEnum,
QgsProcessingParameterField,
QgsProcessingParameterNumber,
QgsProcessingParameterString,
QgsProcessingParameterRasterDestination)
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.algs.gdal.GdalUtils import GdalUtils
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class GridDataMetrics(GdalAlgorithm):
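    """Processing algorithm that wraps 'gdal_grid', interpolating point data with one of the GDAL data-metrics algorithms (minimum, maximum, range, count, average distance)."""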
INPUT = 'INPUT'
Z_FIELD = 'Z_FIELD'
METRIC = 'METRIC'
RADIUS_1 = 'RADIUS_1'
RADIUS_2 = 'RADIUS_2'
MIN_POINTS = 'MIN_POINTS'
ANGLE = 'ANGLE'
NODATA = 'NODATA'
OPTIONS = 'OPTIONS'
DATA_TYPE = 'DATA_TYPE'
OUTPUT = 'OUTPUT'
TYPES = ['Byte', 'Int16', 'UInt16', 'UInt32', 'Int32', 'Float32', 'Float64', 'CInt16', 'CInt32', 'CFloat32', 'CFloat64']
def __init__(self):
super().__init__()
def initAlgorithm(self, config=None):
self.metrics = ((self.tr('Minimum'), 'minimum'),
(self.tr('Maximum'), 'maximum'),
(self.tr('Range'), 'range'),
(self.tr('Count'), 'count'),
(self.tr('Average distance'), 'average_distance'),
(self.tr('Average distance between points'), 'average_distance_pts'))
self.addParameter(QgsProcessingParameterFeatureSource(self.INPUT,
self.tr('Point layer'),
[QgsProcessing.TypeVectorPoint]))
z_field_param = QgsProcessingParameterField(self.Z_FIELD,
self.tr('Z value from field'),
None,
self.INPUT,
QgsProcessingParameterField.Numeric,
optional=True)
z_field_param.setFlags(z_field_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
self.addParameter(z_field_param)
self.addParameter(QgsProcessingParameterEnum(self.METRIC,
self.tr('Data metric to use'),
options=[i[0] for i in self.metrics],
allowMultiple=False,
defaultValue=0))
self.addParameter(QgsProcessingParameterNumber(self.RADIUS_1,
self.tr('The first radius of search ellipse'),
type=QgsProcessingParameterNumber.Double,
minValue=0.0,
defaultValue=0.0))
self.addParameter(QgsProcessingParameterNumber(self.RADIUS_2,
self.tr('The second radius of search ellipse'),
type=QgsProcessingParameterNumber.Double,
minValue=0.0,
defaultValue=0.0))
self.addParameter(QgsProcessingParameterNumber(self.ANGLE,
self.tr('Angle of search ellipse rotation in degrees (counter clockwise)'),
type=QgsProcessingParameterNumber.Double,
minValue=0.0,
maxValue=360.0,
defaultValue=0.0))
self.addParameter(QgsProcessingParameterNumber(self.MIN_POINTS,
self.tr('Minimum number of data points to use'),
type=QgsProcessingParameterNumber.Integer,
minValue=0,
defaultValue=0))
self.addParameter(QgsProcessingParameterNumber(self.NODATA,
self.tr('NODATA marker to fill empty points'),
type=QgsProcessingParameterNumber.Double,
defaultValue=0.0))
options_param = QgsProcessingParameterString(self.OPTIONS,
self.tr('Additional creation parameters'),
defaultValue='',
optional=True)
options_param.setFlags(options_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
options_param.setMetadata({
'widget_wrapper': {
'class': 'processing.algs.gdal.ui.RasterOptionsWidget.RasterOptionsWidgetWrapper'}})
self.addParameter(options_param)
dataType_param = QgsProcessingParameterEnum(self.DATA_TYPE,
self.tr('Output data type'),
self.TYPES,
allowMultiple=False,
defaultValue=5)
dataType_param.setFlags(dataType_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
self.addParameter(dataType_param)
self.addParameter(QgsProcessingParameterRasterDestination(self.OUTPUT,
self.tr('Interpolated (data metrics)')))
def name(self):
return 'griddatametrics'
def displayName(self):
return self.tr('Grid (Data metrics)')
def icon(self):
return QIcon(os.path.join(pluginPath, 'images', 'gdaltools', 'grid.png'))
def group(self):
return self.tr('Raster analysis')
def groupId(self):
return 'rasteranalysis'
def commandName(self):
return 'gdal_grid'
def getConsoleCommands(self, parameters, context, feedback, executing=True):
ogrLayer, layerName = self.getOgrCompatibleSource(self.INPUT, parameters, context, feedback, executing)
arguments = ['-l']
arguments.append(layerName)
fieldName = self.parameterAsString(parameters, self.Z_FIELD, context)
if fieldName:
arguments.append('-zfield')
arguments.append(fieldName)
params = self.metrics[self.parameterAsEnum(parameters, self.METRIC, context)][1]
params += ':radius1={}'.format(self.parameterAsDouble(parameters, self.RADIUS_1, context))
params += ':radius2={}'.format(self.parameterAsDouble(parameters, self.RADIUS_2, context))
params += ':angle={}'.format(self.parameterAsDouble(parameters, self.ANGLE, context))
params += ':min_points={}'.format(self.parameterAsInt(parameters, self.MIN_POINTS, context))
params += ':nodata={}'.format(self.parameterAsDouble(parameters, self.NODATA, context))
arguments.append('-a')
arguments.append(params)
arguments.append('-ot')
arguments.append(self.TYPES[self.parameterAsEnum(parameters, self.DATA_TYPE, context)])
out = self.parameterAsOutputLayer(parameters, self.OUTPUT, context)
arguments.append('-of')
arguments.append(QgsRasterFileWriter.driverForExtension(os.path.splitext(out)[1]))
options = self.parameterAsString(parameters, self.OPTIONS, context)
if options:
arguments.extend(GdalUtils.parseCreationOptions(options))
arguments.append(ogrLayer)
arguments.append(out)
return [self.commandName(), GdalUtils.escapeAndJoin(arguments)]
| dgoedkoop/QGIS | python/plugins/processing/algs/gdal/GridDataMetrics.py | Python | gpl-2.0 | 9,473 |
"""Store :func:`.message`."""
from typing import Optional, Union
from praw import const
from praw.models import Message, Redditor, Subreddit
def message(
self,
to: Union[Redditor, Subreddit, str],
title: str,
body: str,
from_sr: Optional[Union[Subreddit, str]] = None,
) -> Message:
"""Abstract function for sending out a message via string.
.. note:: Adds attribute ``message`` to :class:`praw.Reddit`.
:param to: Destination of the message.
:param title: The subject line of the message.
:param body: The body of the message.
    :param from_sr: A Subreddit instance or string to send the message from.
By default the message originates from the user.
:returns: An instance of :class:`praw.models.Message`.
Example code:
.. code-block:: python
import prawdditions.patch
reddit = praw.Reddit(client_id='CLIENT_ID',
client_secret="CLIENT_SECRET",
password='PASSWORD',
user_agent='USERAGENT', username='USERNAME')
prawdditions.patch.patch()
reddit.message('username','title','body')
"""
dest = str(to)
if isinstance(to, Subreddit): # Subreddits need to be prefixed with `/r/`
dest = "/r/" + dest
data = {"subject": title, "text": body, "to": dest}
if from_sr:
data["from_sr"] = str(from_sr)
return self.post(const.API_PATH["compose"], data=data)
| praw-dev/prawdditions | prawdditions/patch/message.py | Python | bsd-2-clause | 1,472 |
# Copyright 2013 OpenStack Foundation
# Copyright (C) 2013 Yahoo! Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import argparse
import json
import os
import six
import subprocess
import tempfile
import testtools
import mock
from glanceclient import exc
from glanceclient import shell
import glanceclient.v1.client as client
import glanceclient.v1.images
import glanceclient.v1.shell as v1shell
from tests import utils
if six.PY3:
import io
file_type = io.IOBase
else:
file_type = file
fixtures = {
'/v1/images/96d2c7e1-de4e-4612-8aa2-ba26610c804e': {
'PUT': (
{
'Location': 'http://fakeaddress.com:9292/v1/images/'
'96d2c7e1-de4e-4612-8aa2-ba26610c804e',
'Etag': 'f8a2eeee2dc65b3d9b6e63678955bd83',
'X-Openstack-Request-Id':
'req-b645039d-e1c7-43e5-b27b-2d18a173c42b',
'Date': 'Mon, 29 Apr 2013 10:24:32 GMT'
},
json.dumps({
'image': {
'status': 'active', 'name': 'testimagerename',
'deleted': False,
'container_format': 'ami',
'created_at': '2013-04-25T15:47:43',
'disk_format': 'ami',
'updated_at': '2013-04-29T10:24:32',
'id': '96d2c7e1-de4e-4612-8aa2-ba26610c804e',
'min_disk': 0,
'protected': False,
'min_ram': 0,
'checksum': 'f8a2eeee2dc65b3d9b6e63678955bd83',
'owner': '1310db0cce8f40b0987a5acbe139765a',
'is_public': True,
'deleted_at': None,
'properties': {
'kernel_id': '1b108400-65d8-4762-9ea4-1bf6c7be7568',
'ramdisk_id': 'b759bee9-0669-4394-a05c-fa2529b1c114'
},
'size': 25165824
}
})
),
'HEAD': (
{
'x-image-meta-id': '96d2c7e1-de4e-4612-8aa2-ba26610c804e',
'x-image-meta-status': 'active'
},
None
),
'GET': (
{
'x-image-meta-status': 'active',
'x-image-meta-owner': '1310db0cce8f40b0987a5acbe139765a',
'x-image-meta-name': 'cirros-0.3.1-x86_64-uec',
'x-image-meta-container_format': 'ami',
'x-image-meta-created_at': '2013-04-25T15:47:43',
'etag': 'f8a2eeee2dc65b3d9b6e63678955bd83',
'location': 'http://fakeaddress.com:9292/v1/images/'
'96d2c7e1-de4e-4612-8aa2-ba26610c804e',
'x-image-meta-min_ram': '0',
'x-image-meta-updated_at': '2013-04-25T15:47:43',
'x-image-meta-id': '96d2c7e1-de4e-4612-8aa2-ba26610c804e',
'x-image-meta-property-ramdisk_id':
'b759bee9-0669-4394-a05c-fa2529b1c114',
'date': 'Mon, 29 Apr 2013 09:25:17 GMT',
'x-image-meta-property-kernel_id':
'1b108400-65d8-4762-9ea4-1bf6c7be7568',
'x-openstack-request-id':
'req-842735bf-77e8-44a7-bfd1-7d95c52cec7f',
'x-image-meta-deleted': 'False',
'x-image-meta-checksum': 'f8a2eeee2dc65b3d9b6e63678955bd83',
'x-image-meta-protected': 'False',
'x-image-meta-min_disk': '0',
'x-image-meta-size': '25165824',
'x-image-meta-is_public': 'True',
'content-type': 'text/html; charset=UTF-8',
'x-image-meta-disk_format': 'ami',
},
None
)
},
'/v1/images/44d2c7e1-de4e-4612-8aa2-ba26610c444f': {
'PUT': (
{
'Location': 'http://fakeaddress.com:9292/v1/images/'
'44d2c7e1-de4e-4612-8aa2-ba26610c444f',
'Etag': 'f8a2eeee2dc65b3d9b6e63678955bd83',
'X-Openstack-Request-Id':
'req-b645039d-e1c7-43e5-b27b-2d18a173c42b',
'Date': 'Mon, 29 Apr 2013 10:24:32 GMT'
},
json.dumps({
'image': {
'status': 'queued', 'name': 'testimagerename',
'deleted': False,
'container_format': 'ami',
'created_at': '2013-04-25T15:47:43',
'disk_format': 'ami',
'updated_at': '2013-04-29T10:24:32',
'id': '44d2c7e1-de4e-4612-8aa2-ba26610c444f',
'min_disk': 0,
'protected': False,
'min_ram': 0,
'checksum': 'f8a2eeee2dc65b3d9b6e63678955bd83',
'owner': '1310db0cce8f40b0987a5acbe139765a',
'is_public': True,
'deleted_at': None,
'properties': {
'kernel_id':
'1b108400-65d8-4762-9ea4-1bf6c7be7568',
'ramdisk_id':
'b759bee9-0669-4394-a05c-fa2529b1c114'
},
'size': 25165824
}
})
),
'HEAD': (
{
'x-image-meta-id': '44d2c7e1-de4e-4612-8aa2-ba26610c444f',
'x-image-meta-status': 'queued'
},
None
),
'GET': (
{
'x-image-meta-status': 'queued',
'x-image-meta-owner': '1310db0cce8f40b0987a5acbe139765a',
'x-image-meta-name': 'cirros-0.3.1-x86_64-uec',
'x-image-meta-container_format': 'ami',
'x-image-meta-created_at': '2013-04-25T15:47:43',
'etag': 'f8a2eeee2dc65b3d9b6e63678955bd83',
'location': 'http://fakeaddress.com:9292/v1/images/'
'44d2c7e1-de4e-4612-8aa2-ba26610c444f',
'x-image-meta-min_ram': '0',
'x-image-meta-updated_at': '2013-04-25T15:47:43',
'x-image-meta-id': '44d2c7e1-de4e-4612-8aa2-ba26610c444f',
'x-image-meta-property-ramdisk_id':
'b759bee9-0669-4394-a05c-fa2529b1c114',
'date': 'Mon, 29 Apr 2013 09:25:17 GMT',
'x-image-meta-property-kernel_id':
'1b108400-65d8-4762-9ea4-1bf6c7be7568',
'x-openstack-request-id':
'req-842735bf-77e8-44a7-bfd1-7d95c52cec7f',
'x-image-meta-deleted': 'False',
'x-image-meta-checksum': 'f8a2eeee2dc65b3d9b6e63678955bd83',
'x-image-meta-protected': 'False',
'x-image-meta-min_disk': '0',
'x-image-meta-size': '25165824',
'x-image-meta-is_public': 'True',
'content-type': 'text/html; charset=UTF-8',
'x-image-meta-disk_format': 'ami',
},
None
)
}
}
class ShellInvalidEndpointandParameterTest(utils.TestCase):
# Patch os.environ to avoid required auth info.
def setUp(self):
"""Run before each test."""
super(ShellInvalidEndpointandParameterTest, self).setUp()
self.old_environment = os.environ.copy()
os.environ = {
'OS_USERNAME': 'username',
'OS_PASSWORD': 'password',
'OS_TENANT_ID': 'tenant_id',
'OS_TOKEN_ID': 'test',
'OS_AUTH_URL': 'http://127.0.0.1:5000/v2.0/',
'OS_AUTH_TOKEN': 'pass',
'OS_IMAGE_API_VERSION': '1',
'OS_REGION_NAME': 'test',
'OS_IMAGE_URL': 'http://is.invalid'}
self.shell = shell.OpenStackImagesShell()
def tearDown(self):
super(ShellInvalidEndpointandParameterTest, self).tearDown()
os.environ = self.old_environment
def run_command(self, cmd):
self.shell.main(cmd.split())
def assert_called(self, method, url, body=None, **kwargs):
return self.shell.cs.assert_called(method, url, body, **kwargs)
def assert_called_anytime(self, method, url, body=None):
return self.shell.cs.assert_called_anytime(method, url, body)
def test_image_list_invalid_endpoint(self):
self.assertRaises(
exc.CommunicationError, self.run_command, 'image-list')
def test_image_create_invalid_endpoint(self):
self.assertRaises(
exc.CommunicationError,
self.run_command, 'image-create')
def test_image_delete_invalid_endpoint(self):
self.assertRaises(
exc.CommunicationError,
self.run_command, 'image-delete <fake>')
def test_image_download_invalid_endpoint(self):
self.assertRaises(
exc.CommunicationError,
self.run_command, 'image-download <fake>')
def test_members_list_invalid_endpoint(self):
self.assertRaises(
exc.CommunicationError,
self.run_command, 'member-list --image-id fake')
def test_image_show_invalid_endpoint(self):
self.assertRaises(
exc.CommunicationError,
self.run_command, 'image-show --human-readable <IMAGE_ID>')
def test_member_create_invalid_endpoint(self):
self.assertRaises(
exc.CommunicationError,
self.run_command,
'member-create --can-share <IMAGE_ID> <TENANT_ID>')
def test_member_delete_invalid_endpoint(self):
self.assertRaises(
exc.CommunicationError,
self.run_command,
'member-delete <IMAGE_ID> <TENANT_ID>')
@mock.patch('sys.stderr')
def test_image_create_invalid_size_parameter(self, __):
self.assertRaises(
SystemExit,
self.run_command, 'image-create --size 10gb')
@mock.patch('sys.stderr')
def test_image_create_invalid_ram_parameter(self, __):
self.assertRaises(
SystemExit,
self.run_command, 'image-create --min-ram 10gb')
@mock.patch('sys.stderr')
def test_image_create_invalid_min_disk_parameter(self, __):
self.assertRaises(
SystemExit,
self.run_command, 'image-create --min-disk 10gb')
@mock.patch('sys.stderr')
def test_image_update_invalid_size_parameter(self, __):
self.assertRaises(
SystemExit,
self.run_command, 'image-update --size 10gb')
@mock.patch('sys.stderr')
def test_image_update_invalid_min_disk_parameter(self, __):
self.assertRaises(
SystemExit,
self.run_command, 'image-update --min-disk 10gb')
@mock.patch('sys.stderr')
def test_image_update_invalid_ram_parameter(self, __):
self.assertRaises(
SystemExit,
self.run_command, 'image-update --min-ram 10gb')
@mock.patch('sys.stderr')
def test_image_list_invalid_min_size_parameter(self, __):
self.assertRaises(
SystemExit,
self.run_command, 'image-list --size-min 10gb')
@mock.patch('sys.stderr')
def test_image_list_invalid_max_size_parameter(self, __):
self.assertRaises(
SystemExit,
self.run_command, 'image-list --size-max 10gb')
class ShellStdinHandlingTests(testtools.TestCase):
def _fake_update_func(self, *args, **kwargs):
'''Function to replace glanceclient.images.update,
to determine the parameters that would be supplied with the update
request
'''
# Store passed in args
self.collected_args = (args, kwargs)
# Return the first arg, which is an image,
# as do_image_update expects this.
return args[0]
def setUp(self):
super(ShellStdinHandlingTests, self).setUp()
self.api = utils.FakeAPI(fixtures)
self.gc = client.Client("http://fakeaddress.com")
self.gc.images = glanceclient.v1.images.ImageManager(self.api)
# Store real stdin, so it can be restored in tearDown.
self.real_sys_stdin_fd = os.dup(0)
# Replace stdin with a FD that points to /dev/null.
dev_null = open('/dev/null')
self.dev_null_fd = dev_null.fileno()
os.dup2(dev_null.fileno(), 0)
# Replace the image update function with a fake,
# so that we can tell if the data field was set correctly.
self.real_update_func = self.gc.images.update
self.collected_args = []
self.gc.images.update = self._fake_update_func
def tearDown(self):
"""Restore stdin and gc.images.update to their pretest states."""
super(ShellStdinHandlingTests, self).tearDown()
def try_close(fd):
try:
os.close(fd)
except OSError:
# Already closed
pass
# Restore stdin
os.dup2(self.real_sys_stdin_fd, 0)
# Close duplicate stdin handle
try_close(self.real_sys_stdin_fd)
# Close /dev/null handle
try_close(self.dev_null_fd)
# Restore the real image update function
self.gc.images.update = self.real_update_func
def _do_update(self, image='96d2c7e1-de4e-4612-8aa2-ba26610c804e'):
"""call v1/shell's do_image_update function."""
v1shell.do_image_update(
self.gc, argparse.Namespace(
image=image,
name='testimagerename',
property={},
purge_props=False,
human_readable=False,
file=None,
progress=False
)
)
def test_image_update_closed_stdin(self):
"""Supply glanceclient with a closed stdin, and perform an image
update to an active image. Glanceclient should not attempt to read
stdin.
"""
# NOTE(hughsaunders) Close stdin, which is repointed to /dev/null by
# setUp()
os.close(0)
self._do_update()
self.assertTrue(
'data' not in self.collected_args[1]
or self.collected_args[1]['data'] is None
)
def test_image_update_data_is_read_from_file(self):
"""Ensure that data is read from a file."""
try:
# NOTE(hughsaunders) Create a tmpfile, write some data to it and
# set it as stdin
f = open(tempfile.mktemp(), 'w+')
f.write('Some Data')
f.flush()
f.seek(0)
os.dup2(f.fileno(), 0)
self._do_update('44d2c7e1-de4e-4612-8aa2-ba26610c444f')
self.assertTrue('data' in self.collected_args[1])
self.assertIsInstance(self.collected_args[1]['data'], file_type)
self.assertEqual('Some Data',
self.collected_args[1]['data'].read())
finally:
try:
f.close()
os.remove(f.name)
except Exception:
pass
def test_image_update_data_is_read_from_pipe(self):
"""Ensure that data is read from a pipe."""
try:
# NOTE(hughsaunders): Setup a pipe, duplicate it to stdin
# ensure it is read.
process = subprocess.Popen(['/bin/echo', 'Some Data'],
stdout=subprocess.PIPE)
os.dup2(process.stdout.fileno(), 0)
self._do_update('44d2c7e1-de4e-4612-8aa2-ba26610c444f')
self.assertTrue('data' in self.collected_args[1])
self.assertIsInstance(self.collected_args[1]['data'], file_type)
self.assertEqual('Some Data\n',
self.collected_args[1]['data'].read())
finally:
try:
process.stdout.close()
except OSError:
pass
| alexpilotti/python-glanceclient | tests/v1/test_shell.py | Python | apache-2.0 | 16,564 |
from django.shortcuts import get_object_or_404
from django.core.urlresolvers import reverse
from django.http import Http404, HttpResponseRedirect
from django.contrib.auth.decorators import login_required
from django.conf import settings
from django.template.loader import render_to_string
from django.template import Template, Context
from django.db.models.expressions import F
from ixc_common.decorators import render_to, resolve_or_404
from ixc_common.shortcuts import JSONResponse
from utils import resolve_object_or_404, can_modify_collection, can_be_ajax
from models import Collection, CollectionException, Relation
from forms import CollectionForm, RelationForm
"""
NOTE: decorator @can_modify_collection
alter the function arguments;
after it is applied,
collection _is_ the actual Collection instance
other NOTE:
Most of the views can act both as a normal views and as AJAX ones
non-javascript views functionality may not be ready at this stage
(GET requests ignored, templates missing/incorrect, etc.)
"""
@render_to('favourites/permission_denied.html')
def permission_denied(request):
return {}
@render_to('favourites/public_collection_details.html')
def public_collection_details(request, collection):
"""
View the public collection
"""
return locals()
@render_to('favourites/collection_details.html')
@login_required
def collections_index(request):
"""
Show the default collection
"""
if request.user.is_anonymous():
        raise Http404()
return {"collection": request.user.collection_set.get(default=True), "editable": True}
@render_to('favourites/collection_details.html')
@resolve_or_404(Collection, pk_arg="collection_id", instance_arg="collection")
def collection_details(request, collection):
if collection.owner != request.user:
if not collection.is_public:
return HttpResponseRedirect(reverse("favourites.permission_denied"))
return public_collection_details(request, collection)
editable = True
return locals()
@render_to('favourites/remove_collection.html')
@can_be_ajax
@can_modify_collection
def remove_collection(request, collection, is_ajax):
if request.method == "POST":
if 'cancel' in request.POST:
return HttpResponseRedirect(reverse("favourites.collection_details", args=[collection.pk]))
collection.delete()
url = reverse("favourites.collections_index")
if is_ajax:
return JSONResponse({"action": "redirect", "url": url})
return HttpResponseRedirect(url)
else:
return locals()
@render_to('favourites/edit_collection.html')
@can_be_ajax
@can_modify_collection
def edit_collection(request, collection, is_ajax):
form = CollectionForm
if request.method == "POST":
form = form(request.POST, instance=collection)
if is_ajax:
if not form.is_valid():
return JSONResponse({"action": "show_errors", "form": form.as_ul()})
else:
form.save()
return JSONResponse({"details": render_to_string("favourites/includes/meta_collection.html", locals())})
else:
if not form.is_valid():
return locals()
else:
if not 'cancel' in request.POST:
form.save()
return HttpResponseRedirect(reverse("favourites.collection_details", args=[collection.pk]))
form = form(instance=collection)
if is_ajax:
return JSONResponse({"action": "render_form", "form": form.as_ul()})
else:
return locals()
@render_to('favourites/edit_item.html')
@can_be_ajax
@can_modify_collection
@resolve_or_404(Relation, pk_arg="relation_id", instance_arg="relation")
def edit_item(request, collection, relation, is_ajax):
form = RelationForm
if request.method == "POST":
form = form(request.POST, instance=relation)
if is_ajax:
if not form.is_valid():
return JSONResponse({"action": "show_errors", "form": form.as_ul()})
else:
form.save()
item = {"favourites_meta_info": relation}
return JSONResponse({"details": render_to_string("favourites/includes/meta.html", locals())})
else:
if not form.is_valid():
return locals()
if not 'cancel' in request.POST:
form.save()
return HttpResponseRedirect(reverse("favourites.collection_details", args=[collection.pk]))
form = form(instance=relation)
if is_ajax:
return JSONResponse({"action": "render_form", "form": form.as_ul()})
else:
return locals()
@can_modify_collection
@can_be_ajax
def swap_items(request, collection, is_ajax):
try:
from_ = int(request.REQUEST.get("from"))
to = int(request.REQUEST.get("to"))
relation_id = int(request.REQUEST.get("relation_id"))
moved_r = Relation.objects.get(pk=relation_id)
nearby_r = Relation.objects.filter(collection=collection).order_by('importance')[to]
if to > from_:
Relation.objects.filter(collection=collection).filter(importance__gt=nearby_r.importance).update(importance=F('importance') + 1)
moved_r.importance = nearby_r.importance + 1
moved_r.save()
else:
Relation.objects.filter(collection=collection).filter(importance__lt=nearby_r.importance).update(importance=F('importance') - 1)
moved_r.importance = nearby_r.importance - 1
moved_r.save()
return JSONResponse({"status": "OK"})
except (KeyError, ValueError, Relation.DoesNotExist):
raise Http404()
_links_template = Template("{% load favourites %}{% favourites_links item %}")
def _show_links(item, user):
c = Context({"item": item, "favourites_media_showed": True, "user": user, "visible": True})
return _links_template.render(c)
@render_to('favourites/add_to_new_collection.html')
@can_be_ajax
@login_required
def add_to_new_collection(request, model_name, item_pk, is_ajax):
item = resolve_object_or_404(model_name, item_pk)
try:
        collection = Collection.objects.create_from(item, owner=request.user)
        url = reverse("favourites.collection_details", args=[collection.pk])
        if request.method == "POST" and is_ajax:
            return JSONResponse({"status": "OK", "action": "redirect", "url": url})
        else:
            return HttpResponseRedirect(url)
except (CollectionException):
raise Http404()
@render_to('favourites/add_to_collection.html')
@can_be_ajax
@can_modify_collection
def add_to_collection(request, collection, model_name, item_pk, is_ajax):
item = resolve_object_or_404(model_name, item_pk)
try:
collection.add_item(item)
if request.method == "POST" and is_ajax:
return JSONResponse({"status": "OK", "html": _show_links(item, request.user)})
else:
return HttpResponseRedirect(reverse("favourites.collection_details", args=[collection.pk]))
except (CollectionException):
raise Http404()
@render_to('favourites/remove_from_collection.html')
@can_be_ajax
@can_modify_collection
@resolve_or_404(Relation, pk_arg="relation_id", instance_arg="relation")
def remove_from_collection(request, collection, relation, is_ajax):
relation.delete()
if is_ajax and request.method == "POST":
return JSONResponse({"status": "OK", "html": _show_links(relation.get_item(), request.user)})
else:
return HttpResponseRedirect(reverse("favourites.collection_details", args=[collection.pk])) | gregplaysguitar/glamkit | glamkit/incubated/favourites/views.py | Python | bsd-3-clause | 7,793 |
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from . import models
def post_init_hook(cr, registry):
from odoo import api, SUPERUSER_ID
env = api.Environment(cr, SUPERUSER_ID, {})
# emulate updating existing field to website-dependent one
env.cr.execute("ALTER TABLE test_website_dependent ADD COLUMN foo VARCHAR")
env.cr.execute("ALTER TABLE test_website_dependent ADD COLUMN user_id INTEGER")
| yelizariev/addons-yelizariev | web_website/__init__.py | Python | lgpl-3.0 | 443 |
import matplotlib
matplotlib.use('agg')
import pygrib
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap, shiftgrid,cm
import numpy as np
from pylab import rcParams
import pylab as pyl
from matplotlib.colors import LinearSegmentedColormap
from objects import coltbls
from objects.gribMap import GribMap
from subprocess import call, STDOUT
import concurrent.futures
import os
import gc
import multiprocessing as mp
from objects.asyncPool import AsyncPool
# See: /home/vagrant/GEMPAK7/gempak/tables/stns/geog.tbl
# !------------------------------------------------------------------------------
# !G GEOG NAME CENLAT CENLON LLLAT LLLON URLAT URLON PROJ
# !(8) (18) (8) (8) (8) (8) (8) (8) (30)
# !------------------------------------------------------------------------------
# NC NORTH CAROLINA 35.50 -79.25 30.00 -87.25 41.00 -71.25 NPS
# WA WASHINGTON 47.25 -120.00 41.75 -128.00 52.75 -112.00 NPS
# WWE WINTER WX AREA 36.00 -78.00 18.00 -106.00 54.00 -50.00 NPS (LABELED EASTUS)
# OK OKLAHOMA 35.75 -97.25 30.25 -105.25 41.25 -89.25 NPS
# MA MASSACHUSETTS 42.25 -72.25 36.75 -80.25 47.75 -64.25 NPS (LABELED NEUS)
# CENTUS CENTRAL US 36.15 -91.20 24.70 -105.40 47.60 -77.00 STR/90;-95;0
# TATL TROPICAL ATLANTIC 15.00 -50.00 -10.00 -90.00 35.00 -15.00 MER
# EPAC E PACIFIC 40.00 -130.60 12.00 -134.00 75.00 -110.00 STR/90;-100;1
# CHIFA CHICAGO FA AREA 42.00 -93.00 34.00 -108.00 50.00 -75.00 LCC
# CA CALIFORNIA 37.00 -119.75 31.50 -127.75 42.50 -111.75 NPS
# WSIG PACIFIC 38.00 -160.00 18.00 155.00 58.00 -115.00 CED
# setup north polar stereographic basemap.
# The longitude lon_0 is at 6-o'clock, and the
# latitude circle boundinglat is tangent to the edge
# of the map at lon_0. Default value of lat_ts
# (latitude of true scale) is pole.
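# For reference (illustrative only; the GribMap wrappers below build the real
# regional maps), a bare Basemap call matching the description above would be:
#   m = Basemap(projection='npstere', boundinglat=20, lon_0=-100, resolution='l')
# lon_0 sits at 6 o'clock and the boundinglat circle is tangent to the map edge.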
class Grib2Plot:
'''''
    Initialize the grib2Plot class with everything we need!
@return void
'''''
def __init__(self, constants, model):
self.regionMaps = {
# CENLAT CENLON LLLAT LLLON URLAT URLON PROJ
# NC NORTH CAROLINA 35.50 -79.25 30.00 -87.25 41.00 -71.25 laea
"CONUS": GribMap(llcrnrlat=19,urcrnrlat=50,\
llcrnrlon=-119,urcrnrlon=-56, \
resolution='l',projection="stere",\
lat_ts=50,lat_0=90,lon_0=-100., fix_aspect=False, \
borderX=61., borderY=40.), \
"CENTUS" : GribMap(llcrnrlat=24.70,urcrnrlat=47.60,\
llcrnrlon=-105.40,urcrnrlon=-77.00, \
resolution='l',projection="lcc",\
lat_ts=20,lat_0=36.15,lon_0=-91.20, lat_1=36.15, fix_aspect=False, \
borderX=127.), \
# # CHIFA CHICAGO FA AREA 42.00 -93.00 34.00 -108.00 50.00 -75.00 LCC
"CHIFA" : GribMap(llcrnrlat=34.00,urcrnrlat=50.00,\
llcrnrlon=-108.00,urcrnrlon=-75.00, \
resolution='l',projection="laea",\
lat_ts=20,lat_0=42.00,lon_0=-93.00, fix_aspect=False, \
borderY=76., hasDoubleYBorder=True), \
"NEUS" : GribMap(llcrnrlat=42.25,urcrnrlat=47.75,\
llcrnrlon=-80.25,urcrnrlon=-64.25, \
resolution='l',projection="laea",\
lat_ts=20,lat_0=42.25,lon_0=-72.25, fix_aspect=False, \
borderX=91.), \
# CUSTOM: 30; -85; 18.00;-92.00;54.00;-40.00
"EASTUS" : GribMap(llcrnrlat=18.00,urcrnrlat=54.00,\
llcrnrlon=-92.00,urcrnrlon=-40.00, \
resolution='l',projection="lcc",\
lat_ts=50,lat_0=30.00,lon_0=-85.00, lat_1=30.00, fix_aspect=False, \
borderX=213.), \
"NC" : GribMap(llcrnrlat=30.00,urcrnrlat=41.00,\
llcrnrlon=-87.25,urcrnrlon=-71.25, \
resolution='l',projection="laea",\
lat_ts=20,lat_0=36.00,lon_0=-78.00, fix_aspect=False, \
borderX=35.), \
"WA" : GribMap(llcrnrlat=41.75,urcrnrlat=52.75,\
llcrnrlon=-128.00,urcrnrlon=-112.00, \
resolution='l',projection="laea",\
lat_ts=50,lat_0=47.25,lon_0=-120.00, fix_aspect=False, \
borderX=135.), \
"OK" : GribMap(llcrnrlat=30.25,urcrnrlat=41.25,\
llcrnrlon=-105.25,urcrnrlon=-89.25, \
resolution='l',projection="laea",\
lat_ts=50,lat_0=35.75,lon_0=-97.25, fix_aspect=False, \
borderX=37.), \
"CA" : GribMap(llcrnrlat=31.50,urcrnrlat=42.50,\
llcrnrlon= -127.75,urcrnrlon=-111.75, \
resolution='l',projection="laea",\
lat_ts=50,lat_0=37.00,lon_0= -119.75, fix_aspect=False, \
borderX=45.) \
}
self.nonLAEAprojections = ['CENTUS', 'CONUS','EASTUS']
self.globalModelGrids = ['gfs']
self.borderPadding = {}
self.constants = constants
self.isPng = ['CONUS']
self.snowSum = None
self.snowSum12 = None
self.snowSum24 = None
self.snowSum72 = None
self.snowSum120 = None
# Run state
self.runTime = model.runTime
self.times = model.modelTimes
self.modelDataPath = constants.dataDirEnv
self.model = model.getName()
self.gribVars = ['swem','500mbT', '2mT','precip','850mbT']
# Cache for all loaded grib data.
self.cache = {}
self.preloadData()
return
# Preloads all data.
# times to preload can be overridden.
def preloadData(self, times = None):
if not times:
times = self.times
for time in times:
skip = False
if time not in self.cache:
self.cache[time] = {}
g2file = self.getGrib2File(self.modelDataPath, self.runTime, self.model, time)
for var in self.gribVars:
if var not in self.cache[time]:
self.cache[time][var] = None
else:
skip = True
# Skip preloading if already loaded.
print "LOADING TIME: " + time
try:
grbs=pygrib.open(g2file)
grbs.seek(0)
except Exception, e:
print "Failure on loading grib file = " + g2file
continue
pass
if skip:
print "Skipping: " + time + " -> Already loaded or not found."
continue
try:
self.cache[time]['swem'] = grbs.select(name='Water equivalent of accumulated snow depth', typeOfLevel='surface', level=0)[0]
except Exception, e:
print e
pass
try:
self.cache[time]['500mbT'] = grbs.select(name='Temperature', typeOfLevel='isobaricInhPa', level=500)[0]
except Exception, e:
print e
pass
try:
self.cache[time]['850mbT'] = grbs.select(name='Temperature', typeOfLevel='isobaricInhPa', level=850)[0]
except Exception, e:
print e
pass
try:
self.cache[time]['2mT'] = grbs.select(name='2 metre temperature', typeOfLevel='heightAboveGround', level=2)[0]
except Exception, e:
print e
pass
try:
self.cache[time]['precip'] = grbs.select(name='Total Precipitation', level=0)[0]
except Exception, e:
print e
pass
if grbs is not None:
grbs.close()
return
    # Wraps plot2mTemp in its own process. Isolates any possible memory leaks.
def plot2mMPTemp(self, model, times, runTime, modelDataPath):
ap = AsyncPool(6)
for time in times:
ap.doJob(self.plot2mTempMP,(model, time, runTime, modelDataPath))
ap.join()
return
    # Wraps doSnowPlot in its own process. Isolates any possible memory leaks.
    # Executes two processes at a time, and waits for them to finish before continuing.
    # This gives a little concurrency, and ensures memory is released,
    # as opposed to using a Pool (where processes are kept alive until the pool is closed).
def doPlotMP(self, method, argList, maxWorkers = 2):
'''''
Do AT MOST two processes, two at a time. Otherwise do 1 process, one at a time.
'''''
# Make sure that maxWorkers <= 2.
if maxWorkers > 2:
raise ValueError('Only max two workers allowed for doPlotMP. Pass 1 or 2 for maxWorkers param.')
for i in xrange(0,len(argList),maxWorkers):
# Process 1.
try:
args = argList[i]
p = mp.Process(target=method, args=args)
p.start()
except Exception, e:
print e
pass
# Process 2.
if maxWorkers > 1:
try:
args2 = argList[i+1]
p2 = mp.Process(target=method, args=args2)
p2.start()
except Exception, e:
print e
pass
# Join Process 1.
try:
p.join()
except Exception, e:
pass
# Join Process 2.
if maxWorkers > 1:
try:
p2.join()
except Exception, e:
pass
return
    # Wraps doSnowAccumulations in its own process. Isolates any possible memory leaks.
def doSnowPlotAccumMP(self, runTime, region, model, times, gribmap, modelDataPath ,previousTime):
args = (runTime, region, model, times, gribmap, modelDataPath ,previousTime)
p = mp.Process(target=self.doSnowAccumulations, args=args)
p.start()
p.join()
return
def plot2mTempMP(self, zargs):
(model, time, runTime, modelDataPath) = zargs
level = "sfc"
variable = "tmpf"
baseDir = "data"
imgDir = baseDir+"/"+ model+"/"+runTime+"/"+level+"/"+variable
call("mkdir -p " + imgDir, shell=True)
#g2File = self.getGrib2File(modelDataPath, runTime, model, time)
# Get grib2 data.
gTemp2m = self.cache[time]['2mT']
if gTemp2m is None:
return
for region, gribmap in self.regionMaps.items():
borderWidth = 0.
borderBottom = 0.
fig, borderWidth, borderBottom = self.getRegionFigure(gribmap)
m = gribmap.getBaseMap()
borderBottomCmd = "" # Reset bottom border.
convertExtension = ".gif"
if region in self.isPng:
convertExtension = ".png"
tempFileName = "init_" + model + "_" + level + "_" + variable + "_f" + time + ".png"
saveFileName = imgDir + "/" + region +"_f" + time + convertExtension
temp2m = gTemp2m.values
grbs = None
# Convert Kelvin to (F)
temp2m = ((temp2m- 273.15)* 1.8000) + 32.00
lat, lon = gTemp2m.latlons()
borderBottomCmd = ""
if borderBottom > 1.0:
if gribmap.hasDoubleYBorder:
borderBottomCmd = " -bordercolor none -border 0x" + str(int(borderBottom))
else:
borderBottomCmd = " -gravity south -splice 0x" + str(int(borderBottom))
#if model == "gfs" and region is not "CONUS":
# GFS model (and some others) come with (0 - 360) Longitudes.
# This must be converted to (-180 - 180) when using Mercator.
# lon = self.convertLon360to180(lon, temp2m)
# TURNING OFF MESHGRID FOR GFS FOR NOW. DAMN SHAPE BUG yo.
# if model == 'gfs'and region != 'merc':
# x = np.arange(-180, 180.5, 1.0).reshape((361,720))
# y = np.arange(-90, 91, 1.0).reshape((361,720))
# x,y = np.meshgrid(x,y)
# x,y = m(x,y)
# else:
x,y = m(lon,lat)
ax = fig.add_axes([1,1,1,1], axisbg='k') # This needs to be here or else the figsize*DPI calc will not work!
# I have no idea why. Just a Matplotlib quirk I guess.
colorMap = coltbls.sftemp()
# TURNING OFF MESHGRID FOR GFS FOR NOW. DAMN SHAPE BUG yo.
if region == 'CONUS' and model != 'gfs':
cs = m.pcolormesh(x, y, temp2m, cmap=colorMap, vmin=-25, vmax=115)
else:
CLEVELS= [(c*5)-25 for c in range(29)]
cs = m.contourf(x,y,temp2m,CLEVELS,cmap=colorMap, vmin=-25, vmax=115)
# m.drawcoastlines()
m.drawmapboundary()
# Overlay 32 degree isotherm
cc = m.contour(x,y,temp2m, [32], cmap=plt.cm.winter, vmin=32, vmax=32)
# m.drawstates()
# m.drawcountries()
# m.drawcoastlines()
# m.drawparallels(np.arange(-90.,120.,30.),labels=[1,0,0,0]) # 19.00;-119.00;50.00;-56.00
# m.drawmeridians(np.arange(-180.,180.,60.),labels=[0,0,0,1])
# FOR SETTING COLOR BARS!!!!!
# cb = plt.colorbar(cs, orientation='vertical', ticks=[(c*10)-25 for c in range(29)])
# axes_obj = plt.getp(ax,'axes') #get the axes' property handler
# plt.setp(plt.getp(axes_obj, 'yticklabels'), color='w') #set yticklabels color
# plt.setp(plt.getp(axes_obj, 'xticklabels'), color='w') #set xticklabels color
# plt.setp(plt.getp(cb.ax.axes, 'yticklabels'), color='w') # set colorbar
# cb.ax.yaxis.set_tick_params(color='w') #set colorbar ticks color
# fig.set_facecolor('black')
# cb.outline.set_edgecolor('white')
# END COLORBARS
# PNG optimization
# pngquant -o lossy.png --force --quality=70-80 input.png
# optipng -o1 -strip all -out out.png -clobber input.png
#print "convert -background none "+ tempFileName + " " + borderBottomCmd + " -bordercolor none -border " + str(int(borderWidth)) + "x0 " + saveFileName
#print "pngquant -o "+ os.getcwd()+ "/" + tempFileName + " --force --quality=70-80 "+ os.getcwd()+ "/" + tempFileName
fig.savefig(tempFileName, dpi=200, bbox_inches='tight', pad_inches=0, facecolor=fig.get_facecolor())
call("pngquant -o "+ tempFileName + " --force --quality=50-65 "+ tempFileName, shell=True)
#call("optipng -o2 -strip all -out " + tempFileName + " -clobber " + tempFileName, shell=True)
print "convert -background none "+ tempFileName + " " + borderBottomCmd + " -bordercolor none -border " + str(int(borderWidth)) + "x0 " + saveFileName
call("convert -background none "+ tempFileName + " " + borderBottomCmd + " -bordercolor none -border " + str(int(borderWidth)) + "x0 " + saveFileName, shell=True)
call("rm " + tempFileName, shell=True)
cc = None
cs = None
fig.clf()
plt.clf()
gc.collect()
# Try everything. lol.
fig.clf()
plt.close()
plt.close(fig.number)
fig = None
cs = None
gc.collect()
return ""
def plotSnowFall(self, model, times, runTime, modelDataPath, previousTime):
accumArgList = []
hourArgList = []
snowPrevTime = previousTime
# args for accumulations.
for region,gribmap in self.regionMaps.items():
accumArgList.append((runTime, region, model, times, gribmap, modelDataPath ,previousTime))
for time in times:
hourArgList.append((runTime, model, time, modelDataPath ,snowPrevTime))
snowPrevTime = time
maxWorkers = 2
# The GFS model run is big, and has lots of files.
# Matplotlib will flip out and suck up memory, so only allow one process at a time.
if model == 'gfs':
maxWorkers = 1
ap = AsyncPool(6)
try:
# Do multiprocessing -> snow plots.
            #self.doPlotMP(self.doSnowPlot, hourArgList, maxWorkers)
            snowPrevTime = previousTime  # reset: the hourArgList loop above advanced it to the last hour
            for time in times:
ap.doJob(self.doSnowPlot,(runTime, model, time, modelDataPath ,snowPrevTime))
snowPrevTime = time
ap.join()
except Exception, e:
print e
pass
try:
# Do multiprocessing -> snowfall accumulations.
self.doPlotMP(self.doSnowAccumulations, accumArgList, maxWorkers)
except Exception, e:
print e
pass
return
def plotPrecip(self, model, times, runTime, modelDataPath):
argList = []
# nam.t18z.awip3281.tm00.grib2
for region,gribmap in self.regionMaps.items():
argList.append((runTime, region, model, times, gribmap, modelDataPath))
maxWorkers = 2
# The GFS model run is big, and has lots of files.
# Matplotlib will flip out and suck up memory, so only allow one process at a time.
if model == 'gfs':
maxWorkers = 1
try:
# Do multiprocessing -> snowfall accumulations.
self.doPlotMP(self.doAccumPrecipPlotMP, argList, maxWorkers)
except Exception, e:
print e
pass
return
def doSnowPlot(self, zargs):
(runTime, model, time, modelDataPath ,previousTime) = zargs
previous = previousTime
level = "sfc"
variable = "snow"
baseDir = "data"
imgDir = baseDir+"/"+ model+"/"+runTime+"/"+level+"/"+variable
imgDirAccumTotal = baseDir+"/"+ model+"/"+runTime+"/"+level+"/"+variable + "_accum"
call("mkdir -p " + imgDir, shell=True)
call("mkdir -p " + imgDirAccumTotal, shell=True)
# If the inital timestep (0th hour) is in the times set.
# self.hasInitialTime = 0 in map(int, times)
# # dont do anything on the 0th hour (if it's the only time being processed)
# if self.hasInitialTime and len(times) <= 1:
# print "Passed 0th Hour only... skipping snowfall stuff"
# return
# skip the 0th hour.
if int(time) == 0:
return
#for time in times:
for region,gribmap in self.regionMaps.items():
borderWidth = 0.
borderBottom = 0.
fig, borderWidth, borderBottom = self.getRegionFigure(gribmap)
startFile = self.getGrib2File(modelDataPath, runTime, model, previous)
endFile = self.getGrib2File(modelDataPath, runTime, model, time)
tempFileName = "init_" + region + "_" + model + "_" + level + "_" + variable + "_f" + time + ".png"
saveFileName = imgDir + "/" + region +"_f" + time + ".gif"
borderBottomCmd = "" # Reset bottom border.
print "Region: " + region
print "TIME: " + time
print "START FILE: " + startFile
print "END FILE: " + endFile
# if int(time) == 3:
# # skip third hour.
# return
skip = False
grbSwemPrevious = self.cache[previous]['swem']
grbSwemCurrent = self.cache[time]['swem']
grbT500 = self.cache[time]['500mbT']
grbT850 = self.cache[time]['850mbT']
grbT2m = self.cache[time]['2mT']
# Check to make sure data is set.
if grbSwemPrevious is None:
previous = time
skip = True
if grbSwemCurrent is None or grbT500 is None or grbT850 is None or grbT2m is None:
skip = True
# try:
# grbs=pygrib.open(startFile)
# grbs.seek(0)
# grbSwemPrevious = grbs.select(name='Water equivalent of accumulated snow depth', typeOfLevel='surface', level=0)[0]
# grbs.close()
# except Exception, e:
# print "Failure on loading grib [START] file = " + startFile
# print "Region" + region
# print "Model" + model
# print e
# previous = time
# # DO Increment previous time in the case where previous time has missing data.
# # So if previous=33h and time = 36hr, and 33h has missing data:
# # The next step would be previous=36hr and time=39hr: total = 39h - 36hr
# skip = True
# pass
# try:
# grbs=pygrib.open(endFile)
# grbs.seek(0)
# grbSwemCurrent = grbs.select(name='Water equivalent of accumulated snow depth', typeOfLevel='surface', level=0)[0]
# grbT500 = grbs.select(name='Temperature', typeOfLevel='isobaricInhPa', level=500)[0]
# grbT850 = grbs.select(name='Temperature', typeOfLevel='isobaricInhPa', level=850)[0]
# grbT2m = grbs.select(name='2 metre temperature', typeOfLevel='heightAboveGround', level=2)[0]
# grbs.close()
# except Exception, e:
# print "Failure on loading grib [END] file = " + endFile
# print "Region" + region
# print "Model" + model
# print e
# skip = True
# # DONT Increment previous time in the case of missing data.
# # ie. if 33h and 36 have missing data, the next increment
# # will try 39h - 33h = difference.
# pass
if skip == True:
print "Skipping Hour: " + time
continue
data = {}
# Subset data for global grids...
# Very strange bug.
if model in self.globalModelGrids and region in self.nonLAEAprojections:
data['500'],lat, lon = grbT500.data(lat1=20,lat2=75,lon1=220,lon2=320)
data['850'],lat, lon = grbT850.data(lat1=20,lat2=75,lon1=220,lon2=320)
data['2'],lat, lon = grbT2m.data(lat1=20,lat2=75,lon1=220,lon2=320)
data['swemCurr'],lat, lon = grbSwemCurrent.data(lat1=20,lat2=75,lon1=220,lon2=320)
data['swemPrev'],lat, lon = grbSwemPrevious.data(lat1=20,lat2=75,lon1=220,lon2=320)
else:
data['500'] = grbT500.values
data['850'] = grbT850.values
data['2'] = grbT2m.values
data['swemCurr'] = grbSwemCurrent.values
data['swemPrev'] = grbSwemPrevious.values
lat, lon = grbT2m.latlons()
d = np.maximum(data['850'],data['2'])
d = np.maximum(d, data['500'])
dmax = np.where(d >=271.16, d, np.nan)
dmin = np.where(d <271.16, d, np.nan)
# np.nan should propagate. Otherwise you end up with (12 + 2*(271.16 - 0)) = (really fucking big). Instead we just want np.nan.
dmin = (12 + (271.16 - dmin))
dmax = (12 + 2*(271.16 - dmax))
dmin = np.nan_to_num(dmin)
dmax = np.nan_to_num(dmax)
# A fix for weird grids ie. CONUS for gfs model.
# This fixes strange graphical.
# if model in self.globalModelGrids and region in self.nonLAEAprojections:
# dmin[dmin > 40] = 0 # Filter grid. I can't quite understand why this is nesc.
# # It is certainly a bug with matplotlib.
# # 40 = 12 + (271.16 - X) -> X = -22 degrees F.
# # Limits the calculation to -22F...
dtot = dmin + dmax # Total Snow water equivalent ratios
swemAccum = data['swemCurr'] - data['swemPrev']
swemAccum = np.where(swemAccum > 0, swemAccum, 0)
# Truncate negative values to 0.
swemAccum = swemAccum.clip(0)
dtot = dtot.clip(0)
#snow = swemAccum/25.4 * 10
snow = (swemAccum*dtot)/25.4
print '-----------------------------------'
print "MEAN " + str(np.mean(snow))
print "-----------------------------------"
# A fix for weird grids ie. CONUS for gfs model.
# This fixes strange graphical.
# if model in self.globalModelGrids and region in self.nonLAEAprojections:
# snow[snow < .25] = 0 # Keep small values out of total accumulation calc.
# # Also. might fix a crazy bug. We will see.
# median = np.median(snow)
# if median > 0:
# # In theory the median should be 0.
# # If the data glitches out for some reason,
# # the median will NOT be 0. So therefore,
# # set all values where value == median = 0
# snow[snow == median] = 0
# print "CURRENT MEDIAN = " + str(median)
# print "FORCING MEDIAN 0!!!!!"
m = gribmap.getBaseMap()
if borderBottom > 1.0:
if gribmap.hasDoubleYBorder:
borderBottomCmd = " -border 0x" + str(int(borderBottom))
else:
borderBottomCmd = " -gravity south -splice 0x" + str(int(borderBottom))
#if model == "gfs" and proj == 'merc':
# GFS model (and some others) come with (0 - 360) Longitudes.
# This must be converted to (-180 - 180) when using Mercator.
# lon = self.convertLon360to180(lon, data['2'])
x,y = m(lon,lat)
# x = np.arange(-180, 180.5, 1.0)
# y = np.arange(-90, 91, 1.0)
# x,y = np.meshgrid(x,y)
# x,y = m(x,y)
ax = fig.add_axes([1,1,1,1],axisbg='k') # This needs to be here or else the figsize*DPI calc will not work!
                                                        # I have no idea why. Just a Matplotlib quirk I guess.
SNOWP_LEVS = [0.25,0.5,0.75,1,1.5,2,2.5,3,4,5,6,8,10,12,14,16,18]
# print snow
cs = plt.contourf(x,y,snow, SNOWP_LEVS, extend='max',cmap=coltbls.snow2())
#cs = plt.imshow(data['2'], cmap='RdBu', vmin=data['2'].min(), vmax=data['2'].max(), extent=[x.min(), x.max(), y.min(), y.max()])
# m = Basemap(llcrnrlat=19,urcrnrlat=50,\
# llcrnrlon=-119,urcrnrlon=-56, \
# resolution='l',projection='stere',\
# lat_ts=50,lat_0=90,lon_0=-100., fix_aspect=False)
#cs = plt.imshow(data['2'], cmap='RdBu', vmin=data['2'].min(), vmax=data['2'].max(), extent=[x.min(), x.max(), y.min(), y.max()])
#cs = m.pcolormesh(x,y,swemAccum,shading='flat',cmap=plt.cm.jet)
#cs = m.contourf(x,y,snow,15,cmap=plt.cm.jet)
#cb = plt.colorbar(cs, orientation='vertical')
#m.drawcoastlines()
#m.fillcontinents()
m.drawmapboundary()
#fig.savefig(tempFileName, dpi=200, bbox_inches='tight', pad_inches=0,facecolor=fig.get_facecolor())
#m.drawstates()
#m.drawcountries()
# m.drawparallels(np.arange(-90.,120.,30.),labels=[1,0,0,0]) # 19.00;-119.00;50.00;-56.00
# m.drawmeridians(np.arange(-180.,180.,60.),labels=[0,0,0,1])
# FOR SETTING COLOR BARS!!!!!
#cb = plt.colorbar(cs, orientation='vertical')
# cb.outline.set_color('white')
# axes_obj = plt.getp(ax,'axes') #get the axes' property handler
# plt.setp(plt.getp(axes_obj, 'yticklabels'), color='w') #set yticklabels color
# plt.setp(plt.getp(axes_obj, 'xticklabels'), color='w') #set xticklabels color
# plt.setp(plt.getp(cb.ax.axes, 'yticklabels'), color='w') # set colorbar
# # yticklabels color
##### two new lines ####
# cb.outline.set_color('w') #set colorbar box color
# cb.ax.yaxis.set_tick_params(color='w') #set colorbar ticks color
##### two new lines ####
# fig.set_facecolor('black')
# END COLORBARS
#print "convert -background none "+ tempFileName + " " + borderBottomCmd + " -transparent '#000000' -matte -bordercolor none -border " + str(int(borderWidth)) + "x0 " + borderBottomCmd + " " + saveFileName
fig.savefig(tempFileName, dpi=200, bbox_inches='tight', pad_inches=0,facecolor=fig.get_facecolor())
call("convert -background none "+ tempFileName + " " + borderBottomCmd + " -transparent '#000000' -matte -bordercolor none -border " + str(int(borderWidth)) + "x0 " + saveFileName, shell=True)
call("rm " + tempFileName, shell=True)
fig.clf()
plt.close()
plt.close(fig.number)
fig = None
cs = None
gc.collect()
return
def doAccumPrecipPlotMP(self, runTime, region, model, times, gribmap, modelDataPath):
level = "sfc"
variable = "precip"
baseDir = "data"
imgDir = baseDir+"/"+ model+"/"+runTime+"/"+level+"/"+variable
imgDirAccumTotal = baseDir+"/"+ model+"/"+runTime+"/"+level+"/"+variable + "_accum"
call("mkdir -p " + imgDir, shell=True)
call("mkdir -p " + imgDirAccumTotal, shell=True)
# If the inital timestep (0th hour) is in the times set.
self.hasInitialTime = 0 in map(int, times)
# dont do anything on the 0th hour (if it's the only time being processed)
if self.hasInitialTime and len(times) <= 1:
print "Passed 0th Hour only... skipping precipitation stuff"
return
precipSum = None
precipSum12 = None
precipSum24 = None
# precipSum72 = None
# precipSum120 = None
fig, borderWidth, borderBottom = self.getRegionFigure(gribmap)
for time in times:
# skip the 0th hour.
if int(time) == 0:
continue
# Only get every 6th hour of the GFS.
# This is because GFS stores 6 hour accums.
if model == 'gfs' and (int(time) % 6) != 0:
continue
# Only do every 3rd hour 0-3hr, 3-6hr, etc.
# FOR NAM4km only.
if model == 'nam4km' and (int(time) % 3) != 0:
continue
g2File = self.getGrib2File(modelDataPath, runTime, model, time)
print "Region: " + region
print "TIME: " + time
variableAccum = variable + "_accum"
tempFileName = "init_" + region + "_" + model + "_" + level + "_" + variable + "_f" + time + ".png"
saveFileName = imgDir + "/" + region +"_f" + time + ".gif"
accumTmpFileName = "init_" + region + "_" + model + "_" + level + "_" + variableAccum + "_f" + time + ".png"
accumSaveFileName = imgDir + "_accum" + "/" + region +"_f" + time + ".gif"
borderBottomCmd = "" # Reset bottom border.
skip = False
precipgrb = self.cache[time]['precip']
if precipgrb is None:
skip = True
if skip == True:
print "Skipping Hour: " + time
continue
# Subset data for global grids...
# Very strange bug.
if model in self.globalModelGrids: # and region in self.nonLAEAprojections
precip,lat, lon = precipgrb.data(lat1=20,lat2=75,lon1=220,lon2=320)
else:
precip = precipgrb.values
lat, lon = precipgrb.latlons()
precip = precip/25.4
if int(time) > 3:
# Set Hour accumulation
if precipSum is None:
precipSum = precip
else:
precipSum += precip
print "MAX PRECIP: " + str(np.max(precip))
# 120 hour accum.
# if snowSum120 is None:
# precipSum120 = precip
# else:
# precipSum120 += precip
# # 72 hour accum.
# if snowSum72 is None:
# precipSum72 = precip
# else:
# precipSum72 += precip
# 24 hour accum
if precipSum24 is None:
precipSum24 = precip
else:
precipSum24 += precip
# 12 hour accum
if precipSum12 is None:
precipSum12 = precip
else:
precipSum12 += precip
m = gribmap.getBaseMap()
if borderBottom > 1.0:
if gribmap.hasDoubleYBorder:
borderBottomCmd = " -border 0x" + str(int(borderBottom))
else:
borderBottomCmd = " -gravity south -splice 0x" + str(int(borderBottom))
x,y = m(lon,lat)
PRECIP_LEVS = [0.1, 0.25,0.5,1, 1.5, 2, 2.5,3, 3.5,4, 4.5,5,6,8,10,12,14,16]
fig.clf()
if precipSum is not None:
print "---------------------------------------------------------------------------"
print "--------------Drawing precip Accum plot for time: " + time + "---------------"
print "--------------SAVING TO: " + accumSaveFileName
print "----------------------------------------------------------------------------"
ax = fig.add_axes([1,1,1,1],axisbg='k')
cs = plt.contourf(x,y,precipSum, PRECIP_LEVS, extend='max',cmap=coltbls.reflect_ncdc())
m.drawmapboundary()
# FOR SETTING COLOR BARS!!!!!
# cb = plt.colorbar(cs, orientation='vertical')
# axes_obj = plt.getp(ax,'axes') #get the axes' property handler
# plt.setp(plt.getp(axes_obj, 'yticklabels'), color='w') #set yticklabels color
# plt.setp(plt.getp(axes_obj, 'xticklabels'), color='w') #set xticklabels color
# plt.setp(plt.getp(cb.ax.axes, 'yticklabels'), color='w') # set colorbar
# cb.ax.yaxis.set_tick_params(color='w') #set colorbar ticks color
# fig.set_facecolor('black')
# cb.outline.set_edgecolor('white')
# END COLORBARS
fig.savefig(accumTmpFileName, dpi=200, bbox_inches='tight', pad_inches=0,facecolor=fig.get_facecolor())
call("convert -background none "+ accumTmpFileName + " " + borderBottomCmd + " -transparent '#000000' -matte -bordercolor none -border " + str(int(borderWidth)) + "x0 " + accumSaveFileName, shell=True)
call("rm " + accumTmpFileName, shell=True)
fig.clf()
# if int(time) % 120 == 0 and int(time) > 0:
# # do plot
# #save to model/precip120/*
# imgDir120 = imgDir + "120"
# call("mkdir -p " + imgDir120, shell=True)
# tempFileName = "init_" + region + "_" + model + "_" + level + "_" + variable + "120" + "_f" + time + ".png"
# saveFileName = imgDir120 + "/" + region +"_f" + time + ".gif"
# ax = fig.add_axes([1,1,1,1],axisbg='k')
# cs = plt.contourf(x, y, precip1120, PRECIP_LEVS, extend='max', cmap=coltbls.precip1())
# m.drawmapboundary()
# fig.savefig(tempFileName, dpi=200, bbox_inches='tight', pad_inches=0,facecolor=fig.get_facecolor())
# call("convert -background none "+ tempFileName + " " + borderBottomCmd + " -transparent '#000000' -matte -bordercolor none -border " + str(int(borderWidth)) + "x0 " + saveFileName, shell=True)
# call("rm " + tempFileName, shell=True)
# precipSum120 = None
# fig.clf()
# if int(time) % 72 == 0 and int(time) > 0:
# # do plot
# #save to model/snow72/*
# imgDir72 = imgDir + "72"
# call("mkdir -p " + imgDir72, shell=True)
# tempFileName = "init_" + region + "_" + model + "_" + level + "_" + variable + "72" + "_f" + time + ".png"
# saveFileName = imgDir72 + "/" + region +"_f" + time + ".gif"
# ax = fig.add_axes([1,1,1,1],axisbg='k')
# cs = plt.contourf(x, y, precipSum72, PRECIP_LEVS, extend='max', cmap=coltbls.precip1())
# m.drawmapboundary()
# fig.savefig(tempFileName, dpi=200, bbox_inches='tight', pad_inches=0,facecolor=fig.get_facecolor())
# call("convert -background none "+ tempFileName + " " + borderBottomCmd + " -transparent '#000000' -matte -bordercolor none -border " + str(int(borderWidth)) + "x0 " + saveFileName, shell=True)
# call("rm " + tempFileName, shell=True)
# precipSum72 = None
# fig.clf()
if int(time) % 24 == 0 and int(time) > 0:
# do plot
#save to model/precip24/*
imgDir24 = imgDir + "24"
call("mkdir -p " + imgDir24, shell=True)
tempFileName = "init_" + region + "_" + model + "_" + level + "_" + variable + "24" + "_f" + time + ".png"
saveFileName = imgDir24 + "/" + region +"_f" + time + ".gif"
ax = fig.add_axes([1,1,1,1],axisbg='k')
cs = plt.contourf(x, y, precipSum24, PRECIP_LEVS, extend='max', cmap=coltbls.reflect_ncdc())
m.drawmapboundary()
fig.savefig(tempFileName, dpi=200, bbox_inches='tight', pad_inches=0,facecolor=fig.get_facecolor())
call("convert -background none "+ tempFileName + " " + borderBottomCmd + " -transparent '#000000' -matte -bordercolor none -border " + str(int(borderWidth)) + "x0 " + saveFileName, shell=True)
call("rm " + tempFileName, shell=True)
precipSum24 = None
fig.clf()
if int(time) % 12 == 0 and int(time) > 0:
# do plot
#save to model/precip12/*
imgDir12 = imgDir + "12"
call("mkdir -p " + imgDir12, shell=True)
tempFileName = "init_" + region + "_" + model + "_" + level + "_" + variable + "12" + "_f" + time + ".png"
saveFileName = imgDir12 + "/" + region +"_f" + time + ".gif"
ax = fig.add_axes([1,1,1,1],axisbg='k')
cs = plt.contourf(x, y, precipSum12, PRECIP_LEVS, extend='max', cmap=coltbls.reflect_ncdc())
m.drawmapboundary()
fig.savefig(tempFileName, dpi=200, bbox_inches='tight', pad_inches=0,facecolor=fig.get_facecolor())
call("convert -background none "+ tempFileName + " " + borderBottomCmd + " -transparent '#000000' -matte -bordercolor none -border " + str(int(borderWidth)) + "x0 " + saveFileName, shell=True)
call("rm " + tempFileName, shell=True)
precipSum12 = None
fig.clf()
plt.close()
plt.close(fig.number)
fig = None
cs = None
gc.collect()
return
def doSnowAccumulations(self, runTime, region, model, times, gribmap, modelDataPath ,previousTime):
previous = previousTime
level = "sfc"
variable = "snow"
baseDir = "data"
imgDir = baseDir+"/"+ model+"/"+runTime+"/"+level+"/"+variable
imgDirAccumTotal = baseDir+"/"+ model+"/"+runTime+"/"+level+"/"+variable + "_accum"
call("mkdir -p " + imgDir, shell=True)
call("mkdir -p " + imgDirAccumTotal, shell=True)
# If the inital timestep (0th hour) is in the times set.
self.hasInitialTime = 0 in map(int, times)
# dont do anything on the 0th hour (if it's the only time being processed)
if self.hasInitialTime and len(times) <= 1:
print "Passed 0th Hour only... skipping snowfall stuff"
return
snowSum = None
snowSum12 = None
snowSum24 = None
snowSum72 = None
snowSum120 = None
fig, borderWidth, borderBottom = self.getRegionFigure(gribmap)
for time in times:
# skip the 0th hour.
if int(time) == 0:
continue
startFile = self.getGrib2File(modelDataPath, runTime, model, previous)
endFile = self.getGrib2File(modelDataPath, runTime, model, time)
print "Region: " + region
print "TIME: " + time
print "START FILE: " + startFile
print "END FILE: " + endFile
variableAccum = variable + "_accum"
tempFileName = "init_" + region + "_" + model + "_" + level + "_" + variable + "_f" + time + ".png"
saveFileName = imgDir + "/" + region +"_f" + time + ".gif"
accumTmpFileName = "init_" + region + "_" + model + "_" + level + "_" + variableAccum + "_f" + time + ".png"
accumSaveFileName = imgDir + "_accum" + "/" + region +"_f" + time + ".gif"
borderBottomCmd = "" # Reset bottom border.
# if int(time) == 3:
# # skip third hour.
# return
skip = False
grbSwemPrevious = self.cache[previous]['swem']
grbSwemCurrent = self.cache[time]['swem']
grbT500 = self.cache[time]['500mbT']
grbT850 = self.cache[time]['850mbT']
grbT2m = self.cache[time]['2mT']
# Check to make sure data is set.
if grbSwemPrevious is None:
previous = time
skip = True
if grbSwemCurrent is None or grbT500 is None or grbT850 is None or grbT2m is None:
skip = True
# try:
# grbs=pygrib.open(startFile)
# grbs.seek(0)
# grbSwemPrevious = grbs.select(name='Water equivalent of accumulated snow depth', typeOfLevel='surface', level=0)[0]
# grbs.close()
# except Exception, e:
# print "Failure on loading grib [START] file = " + startFile
# print "Region" + region
# print "Model" + model
# print e
# skip = True
# previous = time
# # DO Increment previous time in the case where previous time has missing data.
# # So if previous=33h and time = 36hr, and 33h has missing data:
# # The next step would be previous=36hr and time=39hr: total = 39h - 36hr
# pass
# try:
# grbs=pygrib.open(endFile)
# grbs.seek(0)
# grbSwemCurrent = grbs.select(name='Water equivalent of accumulated snow depth', typeOfLevel='surface', level=0)[0]
# grbT500 = grbs.select(name='Temperature', typeOfLevel='isobaricInhPa', level=500)[0]
# grbT850 = grbs.select(name='Temperature', typeOfLevel='isobaricInhPa', level=850)[0]
# grbT2m = grbs.select(name='2 metre temperature', typeOfLevel='heightAboveGround', level=2)[0]
# grbs.close()
# except Exception, e:
# print "Failure on loading grib [END] file = " + endFile
# print "Region" + region
# print "Model" + model
# print e
# skip = True
# # DONT Increment previous time in the case of missing data.
# # ie. if 33h and 36 have missing data, the next increment
# # will try 39h - 33h = difference.
# pass
if skip == True:
print "Skipping Hour: " + time
continue
data = {}
# Subset data for global grids...
# Very strange bug.
if model in self.globalModelGrids and region in self.nonLAEAprojections:
data['500'],lat, lon = grbT500.data(lat1=20,lat2=75,lon1=220,lon2=320)
data['850'],lat, lon = grbT850.data(lat1=20,lat2=75,lon1=220,lon2=320)
data['2'],lat, lon = grbT2m.data(lat1=20,lat2=75,lon1=220,lon2=320)
data['swemCurr'],lat, lon = grbSwemCurrent.data(lat1=20,lat2=75,lon1=220,lon2=320)
data['swemPrev'],lat, lon = grbSwemPrevious.data(lat1=20,lat2=75,lon1=220,lon2=320)
else:
data['500'] = grbT500.values
data['850'] = grbT850.values
data['2'] = grbT2m.values
data['swemCurr'] = grbSwemCurrent.values
data['swemPrev'] = grbSwemPrevious.values
lat, lon = grbT2m.latlons()
d = np.maximum(data['850'],data['2'])
d = np.maximum(d, data['500'])
dmax = np.where(d >=271.16, d, np.nan)
dmin = np.where(d <271.16, d, np.nan)
# np.nan should propagate. Otherwise you end up with (12 + 2*(271.16 - 0)) = (really fucking big). Instead we just want np.nan.
dmin = (12 + (271.16 - dmin))
dmax = (12 + 2*(271.16 - dmax))
dmin = np.nan_to_num(dmin)
dmax = np.nan_to_num(dmax)
dtot = dmin + dmax # Total Snow water equivalent ratios
swemAccum = data['swemCurr'] - data['swemPrev']
swemAccum = np.where(swemAccum > 0, swemAccum, 0)
# Truncate negative values to 0.
swemAccum = swemAccum.clip(0)
dtot = dtot.clip(0)
#snow = swemAccum/25.4 * 10
snow = (swemAccum*dtot)/25.4
print '-----------------------------------'
print "MEAN " + str(np.mean(snow))
print "-----------------------------------"
if int(time) > 3:
# Set Hour accumulation
if snowSum is None:
snowSum = snow
else:
snowSum += snow
print "MAX SNOW: " + str(np.max(snow))
# 120 hour accum.
if snowSum120 is None:
snowSum120 = snow
else:
snowSum120 += snow
# 72 hour accum.
if snowSum72 is None:
snowSum72 = snow
else:
snowSum72 += snow
# 24 hour accum
if snowSum24 is None:
snowSum24 = snow
else:
snowSum24 += snow
# 12 hour accum
if snowSum12 is None:
snowSum12 = snow
else:
snowSum12 += snow
m = gribmap.getBaseMap()
if borderBottom > 1.0:
if gribmap.hasDoubleYBorder:
borderBottomCmd = " -border 0x" + str(int(borderBottom))
else:
borderBottomCmd = " -gravity south -splice 0x" + str(int(borderBottom))
x,y = m(lon,lat)
SNOWP_LEVS = [0.25,0.5,0.75,1,1.5,2,2.5,3,4,5,6,8,10,12,14,16,18]
fig.clf()
if snowSum is not None:
print "---------------------------------------------------------------------------"
print "--------------Drawing snow Accum plot for time: " + time + "---------------"
print "--------------SAVING TO: " + accumSaveFileName
print "----------------------------------------------------------------------------"
ax = fig.add_axes([1,1,1,1],axisbg='k')
cs = plt.contourf(x,y,snowSum, SNOWP_LEVS, extend='max',cmap=coltbls.snow2())
m.drawmapboundary()
fig.savefig(accumTmpFileName, dpi=200, bbox_inches='tight', pad_inches=0,facecolor=fig.get_facecolor())
call("convert -background none "+ accumTmpFileName + " " + borderBottomCmd + " -transparent '#000000' -matte -bordercolor none -border " + str(int(borderWidth)) + "x0 " + accumSaveFileName, shell=True)
call("rm " + accumTmpFileName, shell=True)
fig.clf()
if int(time) % 120 == 0 and int(time) > 0:
# do plot
#save to model/snow120/*
imgDir120 = imgDir + "120"
call("mkdir -p " + imgDir120, shell=True)
tempFileName = "init_" + region + "_" + model + "_" + level + "_" + variable + "120" + "_f" + time + ".png"
saveFileName = imgDir120 + "/" + region +"_f" + time + ".gif"
ax = fig.add_axes([1,1,1,1],axisbg='k')
cs = plt.contourf(x, y, snowSum120, SNOWP_LEVS, extend='max', cmap=coltbls.snow2())
m.drawmapboundary()
fig.savefig(tempFileName, dpi=200, bbox_inches='tight', pad_inches=0,facecolor=fig.get_facecolor())
call("convert -background none "+ tempFileName + " " + borderBottomCmd + " -transparent '#000000' -matte -bordercolor none -border " + str(int(borderWidth)) + "x0 " + saveFileName, shell=True)
call("rm " + tempFileName, shell=True)
snowSum120 = None
fig.clf()
if int(time) % 72 == 0 and int(time) > 0:
# do plot
#save to model/snow72/*
imgDir72 = imgDir + "72"
call("mkdir -p " + imgDir72, shell=True)
tempFileName = "init_" + region + "_" + model + "_" + level + "_" + variable + "72" + "_f" + time + ".png"
saveFileName = imgDir72 + "/" + region +"_f" + time + ".gif"
ax = fig.add_axes([1,1,1,1],axisbg='k')
cs = plt.contourf(x, y, snowSum72, SNOWP_LEVS, extend='max', cmap=coltbls.snow2())
m.drawmapboundary()
fig.savefig(tempFileName, dpi=200, bbox_inches='tight', pad_inches=0,facecolor=fig.get_facecolor())
call("convert -background none "+ tempFileName + " " + borderBottomCmd + " -transparent '#000000' -matte -bordercolor none -border " + str(int(borderWidth)) + "x0 " + saveFileName, shell=True)
call("rm " + tempFileName, shell=True)
snowSum72 = None
fig.clf()
if int(time) % 24 == 0 and int(time) > 0:
# do plot
#save to model/snow24/*
imgDir24 = imgDir + "24"
call("mkdir -p " + imgDir24, shell=True)
tempFileName = "init_" + region + "_" + model + "_" + level + "_" + variable + "24" + "_f" + time + ".png"
saveFileName = imgDir24 + "/" + region +"_f" + time + ".gif"
ax = fig.add_axes([1,1,1,1],axisbg='k')
cs = plt.contourf(x, y, snowSum24, SNOWP_LEVS, extend='max', cmap=coltbls.snow2())
m.drawmapboundary()
fig.savefig(tempFileName, dpi=200, bbox_inches='tight', pad_inches=0,facecolor=fig.get_facecolor())
call("convert -background none "+ tempFileName + " " + borderBottomCmd + " -transparent '#000000' -matte -bordercolor none -border " + str(int(borderWidth)) + "x0 " + saveFileName, shell=True)
call("rm " + tempFileName, shell=True)
snowSum24 = None
fig.clf()
if int(time) % 12 == 0 and int(time) > 0:
# do plot
#save to model/snow12/*
imgDir12 = imgDir + "12"
call("mkdir -p " + imgDir12, shell=True)
tempFileName = "init_" + region + "_" + model + "_" + level + "_" + variable + "12" + "_f" + time + ".png"
saveFileName = imgDir12 + "/" + region +"_f" + time + ".gif"
ax = fig.add_axes([1,1,1,1],axisbg='k')
cs = plt.contourf(x, y, snowSum12, SNOWP_LEVS, extend='max', cmap=coltbls.snow2())
m.drawmapboundary()
fig.savefig(tempFileName, dpi=200, bbox_inches='tight', pad_inches=0,facecolor=fig.get_facecolor())
call("convert -background none "+ tempFileName + " " + borderBottomCmd + " -transparent '#000000' -matte -bordercolor none -border " + str(int(borderWidth)) + "x0 " + saveFileName, shell=True)
call("rm " + tempFileName, shell=True)
snowSum12 = None
fig.clf()
previous = time
plt.close()
plt.close(fig.number)
fig = None
cs = None
gc.collect()
return
def getAccumulationStartTime(self, divisor, time):
quotient = int(int(time)/divisor) # 24 / 24 = 1 (quotient)
startInt = (quotient-1) * divisor # (1-1) * 24 = 0 (Start time if hour = 24)
startTime = time
if startInt < 100:
if startInt < 10:
startTime= "00" + str(startInt)
else:
startTime= "0" + str(startInt)
return startTime
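    # Worked example (assumed typical inputs): getAccumulationStartTime(24, "048")
    # gives quotient 2, startInt 24, and returns "024", the zero-padded start hour
    # of the 24h window ending at f048. Note that when startInt >= 100 the code
    # above falls through and returns `time` unchanged.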
# Take a ndarray of Longitudes, and shift E/W to (-180-180) range.
def convertLon360to180(self, lons, data):
loncopy = lons.copy()
for i,j in enumerate(lons):
for n,l in enumerate(j):
#data,loncopy[i] = shiftgrid(-180., data, j,start=False)
if l >= 180:
loncopy[i][n]=loncopy[i][n]-360.
return loncopy
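    # Worked example (illustrative): an input longitude array of
    # [[350., 10.], [181., 179.]] comes back as [[-10., 10.], [-179., 179.]];
    # values below 180 are left untouched and `data` itself is not modified.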
def getGrib2File(self, modelDataPath, runTime, model, time):
g2file = ""
if model == 'nam':
time = time[-2:]
runHour = runTime[-2:]
g2file = modelDataPath + model + "/" + "nam.t" + runHour + "z.awip32"+ time +".tm00.grib2"
# elif model == 'gfs':
# g2file = modelDataPath + model + "/" + "gfs.t" + runHour + "z.pgrb2full.0p50.f"+ time
elif model == 'gfs':
runHour = runTime[-2:]
# gfs.t18z.pgrb2.0p25.f009
g2file = modelDataPath + model + "/" + "gfs.t" + runHour + "z.pgrb2.0p25.f"+ time
elif model == 'nam4km':
runHour = runTime[-2:]
time = time[-2:]
# nam.t00z.conusnest.hiresf03.tm00.grib2
g2file = modelDataPath + model + "/" + "nam.t" + runHour + "z.conusnest.hiresf"+ time +".tm00.grib2"
elif model == 'hrrr':
runHour = runTime[-2:]
time = time[-2:]
# hrrr.t00z.wrfnatf03.grib2
g2file = modelDataPath + model + "/" + "hrrr.t" + runHour + "z.wrfnatf"+ time +".grib2"
return g2file
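    # Worked example (assumed inputs): getGrib2File("/data/", "2016011512", "nam", "012")
    # returns "/data/nam/nam.t12z.awip3212.tm00.grib2"; the other models follow
    # the same pattern with their own filename templates.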
def getRegionFigure(self, gribmap):
frameWidth = 6.402
frameHieght = 5.121
fig = None
borderTop = 0
borderWidth = gribmap.getBorderX() # In Pixels. Should match that generated by Gempak.
borderBottom = gribmap.getBorderY()
if gribmap.hasDoubleYBorder:
borderTop = borderBottom
if int(borderBottom) > 0 or int(borderTop) > 0:
# Top and bottom borders may be different.
frameHieght = frameHieght - ((borderBottom + borderTop)/200.)
if int(borderWidth) > 0:
frameWidth = frameWidth - ((borderWidth*2.)/200.)
fig = plt.figure(figsize=(frameWidth, frameHieght), dpi = 200)
return (fig, borderWidth, borderBottom) | cacraig/wxDataGetters | wxdatagetters/generators/grib2Plot.py | Python | mit | 50,115 |
#!/usr/bin/env python2
# ./builder.py -I/usr/local/include/siconos
# mechanics
# ./builder.py -I/usr/local/include/siconos --target=mechanics
# control
# ./builder.py -I/usr/local/include/siconos --target=control
# we use pygccxml from Roman Yakovenko.
# http://sourceforge.net/projects/pygccxml/
import sys
import os
import getopt
import re
import itertools
from pygccxml import parser
from pygccxml import declarations
from pygccxml.parser import COMPILATION_MODE
from builder_common import *
(include_paths,
siconos_namespace,
targets,
generated_file,
source_dir,
generated_header,
build_path) = parse_args()
all_headers = get_headers(targets)
def is_serializable(something):
return 'serializable' in [_c_.name for _c_ in
something.typedefs(allow_empty=True)]
def name(t):
if isinstance(t, declarations.class_t):
return t.name
elif isinstance(t, declarations.typedef_t):
return t.decl_string[2:] # remove ::
def replace_by_typedef(some_type):
if str(some_type) in typedef:
rep_typedef = typedef[str(some_type)]
if not '<' in rep_typedef: # replace only if not a template
return rep_typedef
return str(some_type)
# main loop
if 'xml_generator_configuration_t' in dir(parser):
config = parser.xml_generator_configuration_t(include_paths=include_paths,
ignore_gccxml_output=False,
keep_xml=True)
else:
config = parser.config_t(include_paths=include_paths, ignore_gccxml_output=True)
decls = parser.parse(all_headers, config, compilation_mode=COMPILATION_MODE.ALL_AT_ONCE)
global_ns = declarations.get_global_namespace(decls)
# classes in siconos_namespace
class_names = dict()
# class name of classes with a least a base (for the boost archive
# registration)
with_base = []
# a typedef table to replace templated class by their typedefs in
# macros call
typedef = dict()
for t in global_ns.typedefs():
typedef[str(t._type)] = name(t)
with open(generated_file, 'a') as dest_file:
write_header(dest_file, ' '.join(sys.argv), generated_header)
write_includes(dest_file, all_headers)
for type_ in filter(lambda c: c.parent.name == siconos_namespace,
itertools.chain(
global_ns.classes(), global_ns.typedefs())):
is_typedef = False
if isinstance(type_, declarations.class_t):
class_names[declarations.full_name(type_)] = type_
class_ = type_
elif isinstance(type_, declarations.typedef_t):
try:
is_typedef = True
class_ = class_names['::' + str(type_.type)]
            except KeyError:
class_ = None
# with the serializable tag
# (could not find friend functions with pygccxml)
if class_ is not None and \
is_serializable(class_) and \
(is_typedef or not
declarations.templates.is_instantiation(class_.name)):
if not unwanted(class_.name):
if not class_.is_abstract:
with_base.append(
(class_.name,
get_priority(class_.name, source_dir,
type_.location.file_name,
type_.location.line)))
# print registration macros depending on inheritance
if class_.bases == []:
dest_file.write(
'SICONOS_IO_REGISTER({0},\n'.format(name(type_)))
else:
serializable_bases = \
reduce(lambda r, b:
r + [b.related_class]
if is_serializable(b.related_class)
and
b.related_class.parent.name == siconos_namespace
else r, class_.bases, [])
if len(serializable_bases) > 0:
dest_file.write(
'SICONOS_IO_REGISTER_WITH_BASES({0},{1},\n'
.format(name(type_), ''.join(['({0})'
.format(replace_by_typedef(c.name))
for c in serializable_bases])))
else:
dest_file.write('SICONOS_IO_REGISTER({0},\n'
.format(name(type_)))
variables = [v.name
for v in filter(lambda x: not 'void'
in str(x._get_type()),
class_.variables(
allow_empty=True))]
dest_file.write('{0})\n'
.format('\n'
.join(' ({0})'
.format(vn)
for vn in
filter(lambda x: not unwanted(x),
variables))))
# filtering is not correct at this point
# some unwanted classes are necessary
# (the ones in SiconosFull.hpp) others not (xml)
# some leads to compilation errors.
write_register_with_bases(dest_file, with_base)
write_footer(dest_file)
| siconos/siconos-deb | io/tools/builder_refactored.py | Python | apache-2.0 | 5,529 |
from django.conf import settings
import os
SITES_DIR = os.path.join(os.path.dirname(__file__), 'sites')
DEFAULT_HOST = 'www.corp-umbrella-site.com'
HOSTNAME_REDIRECTS = {
'aboutfood.com': 'www.about-food.com',
'about-food.net': 'www.about-food.com',
'meats.about-food.com': 'meat.about-food.com',
'fruits.about-food.com': 'fruit.about-food.com',
'vegetable.about-food.com': 'vegetables.about-food.com',
'diary.about-food.com': 'dairy.about-food.com',
'restaurant.about-food.com': 'restaurants.about-food.com',
'dining.about-food.com': 'restaurants.about-food.com',
'carnes.sobre-comida.com.br': 'carne.sobre-comida.com.br',
'frutas.sobre-comida.com.br': 'fruta.sobre-comida.com.br',
'legume.sobre-comida.com.br': 'legumes.sobre-comida.com.br',
'leites.sobre-comida.com.br': 'leite.sobre-comida.com.br',
'about-games.com': 'about.gam.es'
}
DEV_HOSTNAMES = {
'cus.dev': 'corp-umbrella-site.com',
'af.dev': 'about-food.com',
'res.af.dev': 'restaurants.about-food.com',
'sc.dev': 'sobre-comida.com.br',
'ag.dev': 'about.gam.es'
}
class TestSettings(object):
"""
Temporarily modifies django.conf.settings to use test settings
"""
SITES_DIR = None
DEFAULT_HOST = None
HOSTNAME_REDIRECTS = None
DEV_HOSTNAMES = None
def __init__(self):
"""
Modifies django's settings for this test environment
"""
self._copy_in()
self._set_test_settings()
def _copy_in(self):
try:
self.SITES_DIR = settings.SITES_DIR
except AttributeError:
pass
try:
self.DEFAULT_HOST = settings.DEFAULT_HOST
except AttributeError:
pass
try:
self.HOSTNAME_REDIRECTS = settings.HOSTNAME_REDIRECTS
except AttributeError:
pass
try:
self.DEV_HOSTNAMES = settings.DEV_HOSTNAMES
except AttributeError:
pass
def _set_test_settings(self):
settings.SITES_DIR = SITES_DIR
settings.DEFAULT_HOST = DEFAULT_HOST
settings.HOSTNAME_REDIRECTS = HOSTNAME_REDIRECTS
settings.DEV_HOSTNAMES = DEV_HOSTNAMES
def revert(self):
"""
reverts django.conf.settings back to what they were
"""
if self.SITES_DIR:
settings.SITES_DIR = self.SITES_DIR
else:
delattr(settings, 'SITES_DIR')
if self.DEFAULT_HOST:
settings.DEFAULT_HOST = self.DEFAULT_HOST
else:
delattr(settings, 'DEFAULT_HOST')
if self.HOSTNAME_REDIRECTS:
settings.HOSTNAME_REDIRECTS = self.HOSTNAME_REDIRECTS
else:
delattr(settings, 'HOSTNAME_REDIRECTS')
if self.DEV_HOSTNAMES:
settings.DEV_HOSTNAMES = self.DEV_HOSTNAMES
else:
            delattr(settings, 'DEV_HOSTNAMES')
 | ajrbyers/django-dynamicsites | dynamicsites/test/settings.py | Python | gpl-3.0 | 2,994 |
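# A minimal usage sketch (hypothetical test case, not part of the original
# module): TestSettings is applied in setUp and reverted in tearDown so the
# modified django.conf.settings only exist for the duration of each test.
import unittest
from dynamicsites.test.settings import TestSettings

class HostnameRedirectTests(unittest.TestCase):
    def setUp(self):
        self.test_settings = TestSettings()

    def tearDown(self):
        self.test_settings.revert()

    def test_redirect_map(self):
        from django.conf import settings
        self.assertEqual(settings.HOSTNAME_REDIRECTS['aboutfood.com'],
                         'www.about-food.com')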
from django.core.files.storage import get_storage_class
from storages.backends.s3boto import S3BotoStorage
class ForceAuthS3BotoStorage(S3BotoStorage):
def __init__(self, *args, **kwargs):
kwargs['querystring_auth'] = True
super(ForceAuthS3BotoStorage, self).__init__(*args, **kwargs)
class CachedS3BotoStorage(S3BotoStorage):
"""
S3 storage backend that saves the files locally, too.
"""
def __init__(self, *args, **kwargs):
super(CachedS3BotoStorage, self).__init__(*args, **kwargs)
self.local_storage = get_storage_class(
"compressor.storage.CompressorFileStorage")()
def save(self, name, content):
name = super(CachedS3BotoStorage, self).save(name, content)
self.local_storage._save(name, content)
return name
| LilithWittmann/froide | froide/helper/storage_utils.py | Python | mit | 812 |
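# A minimal settings sketch (assumed values, not part of froide itself) showing
# how these backends are typically wired: CachedS3BotoStorage as the
# django-compressor storage so compressed assets are also kept locally, and
# ForceAuthS3BotoStorage wherever signed URLs must always be used.
COMPRESS_STORAGE = 'froide.helper.storage_utils.CachedS3BotoStorage'
DEFAULT_FILE_STORAGE = 'froide.helper.storage_utils.ForceAuthS3BotoStorage'
AWS_STORAGE_BUCKET_NAME = 'my-bucket'  # placeholder bucket name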
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
import pytest
from . import config
from .. import test_sim_verilator_opentitan as ot
@pytest.fixture(params=config.TEST_APPS_SELFCHECKING,
ids=lambda param: param['name'])
def app_selfchecking(request, bin_dir):
""" A self-checking device application for Verilator simulation
Returns:
        A tuple (bin_path, verilator_extra_args)
"""
app_config = request.param
if 'name' not in app_config:
raise RuntimeError("Key 'name' not found in TEST_APPS_SELFCHECKING")
if 'targets' in app_config and 'sim_verilator' not in app_config['targets']:
pytest.skip("Test %s skipped on Verilator." % app_config['name'])
if 'binary_name' in app_config:
binary_name = app_config['binary_name']
else:
binary_name = app_config['name']
if 'verilator_extra_args' in app_config:
verilator_extra_args = app_config['verilator_extra_args']
else:
verilator_extra_args = []
# Allow tests to optionally specify their subdir within the project.
test_dir = app_config.get('test_dir', 'sw/device/tests')
test_filename = binary_name + '_sim_verilator.64.scr.vmem'
bin_path = bin_dir / test_dir / test_filename
assert bin_path.is_file()
return (bin_path, verilator_extra_args)
def test_apps_selfchecking(tmp_path, bin_dir, app_selfchecking):
"""
    Run a self-checking application on an English Breakfast Verilator simulation
The ROM is initialized with the default boot ROM, the flash is initialized
with |app_selfchecking|.
Self-checking applications are expected to return PASS or FAIL in the end.
"""
sim_path = bin_dir / "hw/top_englishbreakfast/Vchip_englishbreakfast_verilator"
rom_vmem_path = (bin_dir /
"sw/device/lib/testing/test_rom/test_rom_sim_verilator.32.vmem")
sim = ot.VerilatorSimOpenTitan(sim_path, rom_vmem_path, None, tmp_path)
sim.run(app_selfchecking[0], extra_sim_args=app_selfchecking[1])
ot.assert_selfchecking_test_passes(sim)
sim.terminate()
| lowRISC/opentitan | test/systemtest/englishbreakfast/test_sim_verilator.py | Python | apache-2.0 | 2,189 |
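# A sketch of what a single entry in config.TEST_APPS_SELFCHECKING is expected
# to look like, based only on the keys the fixture above reads; the values are
# illustrative, not taken from the real config module.
EXAMPLE_SELFCHECKING_APP = {
    'name': 'uart_smoketest',         # required; also used as the pytest id
    'binary_name': 'uart_smoketest',  # optional; defaults to 'name'
    'targets': ['sim_verilator'],     # optional; test is skipped if 'sim_verilator' is absent
    'verilator_extra_args': [],       # optional extra arguments for the simulator
    'test_dir': 'sw/device/tests',    # optional subdir within the project
}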
#!/usr/bin/env python
# Requirements
# - pyvmomi >= 6.0.0.2016.4
# TODO:
# * more jq examples
# * optional folder hierarchy
"""
$ jq '._meta.hostvars[].config' data.json | head
{
"alternateguestname": "",
"instanceuuid": "5035a5cd-b8e8-d717-e133-2d383eb0d675",
"memoryhotaddenabled": false,
"guestfullname": "Red Hat Enterprise Linux 7 (64-bit)",
"changeversion": "2016-05-16T18:43:14.977925Z",
"uuid": "4235fc97-5ddb-7a17-193b-9a3ac97dc7b4",
"cpuhotremoveenabled": false,
"vpmcenabled": false,
"firmware": "bios",
"""
from __future__ import print_function
import argparse
import atexit
import datetime
import getpass
import jinja2
import os
import six
import ssl
import sys
import uuid
from collections import defaultdict
from six.moves import configparser
from time import time
HAS_PYVMOMI = False
try:
from pyVmomi import vim
from pyVim.connect import SmartConnect, Disconnect
HAS_PYVMOMI = True
except ImportError:
pass
try:
import json
except ImportError:
import simplejson as json
hasvcr = False
try:
import vcr
hasvcr = True
except ImportError:
pass
class VMwareMissingHostException(Exception):
pass
class VMWareInventory(object):
__name__ = 'VMWareInventory'
guest_props = False
instances = []
debug = False
load_dumpfile = None
write_dumpfile = None
maxlevel = 1
lowerkeys = True
config = None
cache_max_age = None
cache_path_cache = None
cache_path_index = None
server = None
port = None
username = None
password = None
host_filters = []
groupby_patterns = []
if (sys.version_info > (3, 0)):
safe_types = [int, bool, str, float, None]
else:
safe_types = [int, long, bool, str, float, None]
iter_types = [dict, list]
bad_types = ['Array', 'disabledMethod', 'declaredAlarmState']
skip_keys = ['declaredalarmstate',
'disabledmethod',
'dynamicproperty',
'dynamictype',
'environmentbrowser',
'managedby',
'parent',
'childtype']
# translation table for attributes to fetch for known vim types
if not HAS_PYVMOMI:
vimTable = {}
else:
vimTable = {
vim.Datastore: ['_moId', 'name'],
vim.ResourcePool: ['_moId', 'name'],
}
def _empty_inventory(self):
return {"_meta" : {"hostvars" : {}}}
def __init__(self, load=True):
self.inventory = self._empty_inventory()
if load:
# Read settings and parse CLI arguments
self.parse_cli_args()
self.read_settings()
# Check the cache
cache_valid = self.is_cache_valid()
# Handle Cache
if self.args.refresh_cache or not cache_valid:
self.do_api_calls_update_cache()
else:
self.debugl('loading inventory from cache')
self.inventory = self.get_inventory_from_cache()
def debugl(self, text):
if self.args.debug:
try:
text = str(text)
except UnicodeEncodeError:
text = text.encode('ascii','ignore')
print('%s %s' % (datetime.datetime.now(), text))
def show(self):
# Data to print
self.debugl('dumping results')
data_to_print = None
if self.args.host:
data_to_print = self.get_host_info(self.args.host)
elif self.args.list:
# Display list of instances for inventory
data_to_print = self.inventory
return json.dumps(data_to_print, indent=2)
def is_cache_valid(self):
''' Determines if the cache files have expired, or if it is still valid '''
valid = False
if os.path.isfile(self.cache_path_cache):
mod_time = os.path.getmtime(self.cache_path_cache)
current_time = time()
if (mod_time + self.cache_max_age) > current_time:
valid = True
return valid
def do_api_calls_update_cache(self):
''' Get instances and cache the data '''
instances = self.get_instances()
self.instances = instances
self.inventory = self.instances_to_inventory(instances)
self.write_to_cache(self.inventory, self.cache_path_cache)
def write_to_cache(self, data, cache_path):
''' Dump inventory to json file '''
        with open(cache_path, 'wb') as f:
f.write(json.dumps(data))
def get_inventory_from_cache(self):
''' Read in jsonified inventory '''
jdata = None
with open(self.cache_path_cache, 'rb') as f:
jdata = f.read()
return json.loads(jdata)
def read_settings(self):
''' Reads the settings from the vmware_inventory.ini file '''
scriptbasename = __file__
scriptbasename = os.path.basename(scriptbasename)
scriptbasename = scriptbasename.replace('.py', '')
defaults = {'vmware': {
'server': '',
'port': 443,
'username': '',
'password': '',
'validate_certs': True,
'ini_path': os.path.join(os.path.dirname(__file__), '%s.ini' % scriptbasename),
'cache_name': 'ansible-vmware',
'cache_path': '~/.ansible/tmp',
'cache_max_age': 3600,
'max_object_level': 1,
'alias_pattern': '{{ config.name + "_" + config.uuid }}',
'host_pattern': '{{ guest.ipaddress }}',
'host_filters': '{{ guest.gueststate == "running" }}',
'groupby_patterns': '{{ guest.guestid }},{{ "templates" if config.template else "guests"}}',
'lower_var_keys': True }
}
if six.PY3:
config = configparser.ConfigParser()
else:
config = configparser.SafeConfigParser()
# where is the config?
vmware_ini_path = os.environ.get('VMWARE_INI_PATH', defaults['vmware']['ini_path'])
vmware_ini_path = os.path.expanduser(os.path.expandvars(vmware_ini_path))
config.read(vmware_ini_path)
# apply defaults
for k,v in defaults['vmware'].iteritems():
if not config.has_option('vmware', k):
config.set('vmware', k, str(v))
# where is the cache?
self.cache_dir = os.path.expanduser(config.get('vmware', 'cache_path'))
if self.cache_dir and not os.path.exists(self.cache_dir):
os.makedirs(self.cache_dir)
# set the cache filename and max age
cache_name = config.get('vmware', 'cache_name')
self.cache_path_cache = self.cache_dir + "/%s.cache" % cache_name
self.debugl('cache path is %s' % self.cache_path_cache)
self.cache_max_age = int(config.getint('vmware', 'cache_max_age'))
# mark the connection info
self.server = os.environ.get('VMWARE_SERVER', config.get('vmware', 'server'))
self.debugl('server is %s' % self.server)
self.port = int(os.environ.get('VMWARE_PORT', config.get('vmware', 'port')))
self.username = os.environ.get('VMWARE_USERNAME', config.get('vmware', 'username'))
self.debugl('username is %s' % self.username)
self.password = os.environ.get('VMWARE_PASSWORD', config.get('vmware', 'password'))
self.validate_certs = os.environ.get('VMWARE_VALIDATE_CERTS', config.get('vmware', 'validate_certs'))
if self.validate_certs in ['no', 'false', 'False', False]:
self.validate_certs = False
else:
self.validate_certs = True
self.debugl('cert validation is %s' % self.validate_certs)
# behavior control
self.maxlevel = int(config.get('vmware', 'max_object_level'))
self.debugl('max object level is %s' % self.maxlevel)
self.lowerkeys = config.get('vmware', 'lower_var_keys')
if type(self.lowerkeys) != bool:
if str(self.lowerkeys).lower() in ['yes', 'true', '1']:
self.lowerkeys = True
else:
self.lowerkeys = False
self.debugl('lower keys is %s' % self.lowerkeys)
self.host_filters = list(config.get('vmware', 'host_filters').split(','))
self.debugl('host filters are %s' % self.host_filters)
self.groupby_patterns = list(config.get('vmware', 'groupby_patterns').split(','))
self.debugl('groupby patterns are %s' % self.groupby_patterns)
# Special feature to disable the brute force serialization of the
        # virtual machine objects. The key name for these properties does not
# matter because the values are just items for a larger list.
if config.has_section('properties'):
self.guest_props = []
for prop in config.items('properties'):
self.guest_props.append(prop[1])
# save the config
self.config = config
def parse_cli_args(self):
''' Command line argument processing '''
parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on PyVmomi')
parser.add_argument('--debug', action='store_true', default=False,
help='show debug info')
parser.add_argument('--list', action='store_true', default=True,
help='List instances (default: True)')
parser.add_argument('--host', action='store',
help='Get all the variables about a specific instance')
parser.add_argument('--refresh-cache', action='store_true', default=False,
help='Force refresh of cache by making API requests to VSphere (default: False - use cache files)')
parser.add_argument('--max-instances', default=None, type=int,
help='maximum number of instances to retrieve')
self.args = parser.parse_args()
def get_instances(self):
''' Get a list of vm instances with pyvmomi '''
instances = []
kwargs = {'host': self.server,
'user': self.username,
'pwd': self.password,
'port': int(self.port) }
if hasattr(ssl, 'SSLContext') and not self.validate_certs:
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.verify_mode = ssl.CERT_NONE
kwargs['sslContext'] = context
instances = self._get_instances(kwargs)
return instances
def _get_instances(self, inkwargs):
''' Make API calls '''
instances = []
si = SmartConnect(**inkwargs)
self.debugl('retrieving all instances')
if not si:
print("Could not connect to the specified host using specified "
"username and password")
return -1
atexit.register(Disconnect, si)
content = si.RetrieveContent()
# Create a search container for virtualmachines
self.debugl('creating containerview for virtualmachines')
container = content.rootFolder
viewType = [vim.VirtualMachine]
recursive = True
containerView = content.viewManager.CreateContainerView(container, viewType, recursive)
children = containerView.view
for child in children:
# If requested, limit the total number of instances
if self.args.max_instances:
if len(instances) >= (self.args.max_instances):
break
instances.append(child)
self.debugl("%s total instances in container view" % len(instances))
if self.args.host:
instances = [x for x in instances if x.name == self.args.host]
instance_tuples = []
for instance in sorted(instances):
if self.guest_props != False:
ifacts = self.facts_from_proplist(instance)
else:
ifacts = self.facts_from_vobj(instance)
instance_tuples.append((instance, ifacts))
self.debugl('facts collected for all instances')
return instance_tuples
def instances_to_inventory(self, instances):
''' Convert a list of vm objects into a json compliant inventory '''
self.debugl('re-indexing instances based on ini settings')
inventory = self._empty_inventory()
inventory['all'] = {}
inventory['all']['hosts'] = []
last_idata = None
total = len(instances)
for idx,instance in enumerate(instances):
# make a unique id for this object to avoid vmware's
# numerous uuid's which aren't all unique.
thisid = str(uuid.uuid4())
idata = instance[1]
# Put it in the inventory
inventory['all']['hosts'].append(thisid)
inventory['_meta']['hostvars'][thisid] = idata.copy()
inventory['_meta']['hostvars'][thisid]['ansible_uuid'] = thisid
# Make a map of the uuid to the alias the user wants
name_mapping = self.create_template_mapping(inventory,
self.config.get('vmware', 'alias_pattern'))
# Make a map of the uuid to the ssh hostname the user wants
host_mapping = self.create_template_mapping(inventory,
self.config.get('vmware', 'host_pattern'))
# Reset the inventory keys
for k,v in name_mapping.iteritems():
if not host_mapping or not k in host_mapping:
continue
# set ansible_host (2.x)
try:
inventory['_meta']['hostvars'][k]['ansible_host'] = host_mapping[k]
# 1.9.x backwards compliance
inventory['_meta']['hostvars'][k]['ansible_ssh_host'] = host_mapping[k]
except Exception as e:
continue
if k == v:
continue
# add new key
inventory['all']['hosts'].append(v)
inventory['_meta']['hostvars'][v] = inventory['_meta']['hostvars'][k]
# cleanup old key
inventory['all']['hosts'].remove(k)
inventory['_meta']['hostvars'].pop(k, None)
self.debugl('pre-filtered hosts:')
for i in inventory['all']['hosts']:
self.debugl(' * %s' % i)
# Apply host filters
for hf in self.host_filters:
if not hf:
continue
self.debugl('filter: %s' % hf)
filter_map = self.create_template_mapping(inventory, hf, dtype='boolean')
for k,v in filter_map.iteritems():
if not v:
# delete this host
inventory['all']['hosts'].remove(k)
inventory['_meta']['hostvars'].pop(k, None)
self.debugl('post-filter hosts:')
for i in inventory['all']['hosts']:
self.debugl(' * %s' % i)
# Create groups
for gbp in self.groupby_patterns:
groupby_map = self.create_template_mapping(inventory, gbp)
for k,v in groupby_map.iteritems():
if v not in inventory:
inventory[v] = {}
inventory[v]['hosts'] = []
if k not in inventory[v]['hosts']:
inventory[v]['hosts'].append(k)
return inventory
def create_template_mapping(self, inventory, pattern, dtype='string'):
''' Return a hash of uuid to templated string from pattern '''
mapping = {}
for k,v in inventory['_meta']['hostvars'].iteritems():
t = jinja2.Template(pattern)
newkey = None
try:
newkey = t.render(v)
newkey = newkey.strip()
except Exception as e:
self.debugl(e)
if not newkey:
continue
elif dtype == 'integer':
newkey = int(newkey)
elif dtype == 'boolean':
if newkey.lower() == 'false':
newkey = False
elif newkey.lower() == 'true':
newkey = True
elif dtype == 'string':
pass
mapping[k] = newkey
return mapping
def facts_from_proplist(self, vm):
'''Get specific properties instead of serializing everything'''
rdata = {}
for prop in self.guest_props:
self.debugl('getting %s property for %s' % (prop, vm.name))
key = prop
if self.lowerkeys:
key = key.lower()
if not '.' in prop:
# props without periods are direct attributes of the parent
rdata[key] = getattr(vm, prop)
else:
# props with periods are subkeys of parent attributes
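        # e.g. a property list entry such as 'config.name' (illustrative) walks
        # vm.config.name and stores the value under rdata['config']['name']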
parts = prop.split('.')
total = len(parts) - 1
# pointer to the current object
val = None
# pointer to the current result key
lastref = rdata
for idx,x in enumerate(parts):
# if the val wasn't set yet, get it from the parent
if not val:
val = getattr(vm, x)
else:
# in a subkey, get the subprop from the previous attrib
try:
val = getattr(val, x)
except AttributeError as e:
self.debugl(e)
# lowercase keys if requested
if self.lowerkeys:
x = x.lower()
# change the pointer or set the final value
if idx != total:
if x not in lastref:
lastref[x] = {}
lastref = lastref[x]
else:
lastref[x] = val
return rdata
def facts_from_vobj(self, vobj, level=0):
''' Traverse a VM object and return a json compliant data structure '''
# pyvmomi objects are not yet serializable, but may be one day ...
# https://github.com/vmware/pyvmomi/issues/21
# WARNING:
# Accessing an object attribute will trigger a SOAP call to the remote.
# Increasing the attributes collected or the depth of recursion greatly
# increases runtime duration and potentially memory+network utilization.
if level == 0:
try:
self.debugl("get facts for %s" % vobj.name)
except Exception as e:
self.debugl(e)
rdata = {}
methods = dir(vobj)
methods = [str(x) for x in methods if not x.startswith('_')]
methods = [x for x in methods if not x in self.bad_types]
methods = [x for x in methods if not x.lower() in self.skip_keys]
methods = sorted(methods)
for method in methods:
# Attempt to get the method, skip on fail
try:
methodToCall = getattr(vobj, method)
except Exception as e:
continue
# Skip callable methods
if callable(methodToCall):
continue
if self.lowerkeys:
method = method.lower()
rdata[method] = self._process_object_types(
methodToCall,
thisvm=vobj,
inkey=method
)
return rdata
def _process_object_types(self, vobj, thisvm=None, inkey=None, level=0):
''' Serialize an object '''
rdata = {}
if vobj is None:
rdata = None
elif type(vobj) in self.vimTable:
rdata = {}
for key in self.vimTable[type(vobj)]:
rdata[key] = getattr(vobj, key)
elif issubclass(type(vobj), str) or isinstance(vobj, str):
if vobj.isalnum():
rdata = vobj
else:
rdata = vobj.decode('ascii', 'ignore')
elif issubclass(type(vobj), bool) or isinstance(vobj, bool):
rdata = vobj
elif issubclass(type(vobj), int) or isinstance(vobj, int):
rdata = vobj
elif issubclass(type(vobj), float) or isinstance(vobj, float):
rdata = vobj
elif issubclass(type(vobj), long) or isinstance(vobj, long):
rdata = vobj
elif issubclass(type(vobj), list) or issubclass(type(vobj), tuple):
rdata = []
try:
vobj = sorted(vobj)
except Exception as e:
pass
for idv, vii in enumerate(vobj):
if (level+1 <= self.maxlevel):
vid = self._process_object_types(
vii,
thisvm=thisvm,
inkey=inkey+'['+str(idv)+']',
level=(level+1)
)
if vid:
rdata.append(vid)
elif issubclass(type(vobj), dict):
pass
elif issubclass(type(vobj), object):
methods = dir(vobj)
methods = [str(x) for x in methods if not x.startswith('_')]
methods = [x for x in methods if not x in self.bad_types]
methods = [x for x in methods if not x.lower() in self.skip_keys]
methods = sorted(methods)
for method in methods:
# Attempt to get the method, skip on fail
try:
methodToCall = getattr(vobj, method)
except Exception as e:
continue
if callable(methodToCall):
continue
if self.lowerkeys:
method = method.lower()
if (level+1 <= self.maxlevel):
rdata[method] = self._process_object_types(
methodToCall,
thisvm=thisvm,
inkey=inkey+'.'+method,
level=(level+1)
)
else:
pass
return rdata
def get_host_info(self, host):
''' Return hostvars for a single host '''
if host in self.inventory['_meta']['hostvars']:
return self.inventory['_meta']['hostvars'][host]
elif self.args.host and self.inventory['_meta']['hostvars']:
# check if the machine has the name requested
keys = self.inventory['_meta']['hostvars'].keys()
match = None
for k,v in self.inventory['_meta']['hostvars'].iteritems():
if self.inventory['_meta']['hostvars'][k]['name'] == self.args.host:
match = k
break
if match:
return self.inventory['_meta']['hostvars'][match]
else:
raise VMwareMissingHostException('%s not found' % host)
else:
raise VMwareMissingHostException('%s not found' % host)
if __name__ == "__main__":
# Run the script
print(VMWareInventory().show())
| kaarolch/ansible | contrib/inventory/vmware_inventory.py | Python | gpl-3.0 | 23,625 |
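# A small illustration (not part of the inventory script itself) of how the ini
# patterns are evaluated: each pattern is a Jinja2 template rendered against a
# host's collected facts, exactly as create_template_mapping() does above. The
# facts dict below is made up.
import jinja2
facts = {'config': {'name': 'web01', 'uuid': '4235fc97'},
         'guest': {'ipaddress': '10.0.0.5', 'gueststate': 'running'}}
alias = jinja2.Template('{{ config.name + "_" + config.uuid }}').render(facts)
keep = jinja2.Template('{{ guest.gueststate == "running" }}').render(facts)
print(alias)  # web01_4235fc97
print(keep)   # True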
"""myproject URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Import the include() function: from django.conf.urls import url, include
3. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import url, include
from django.contrib import admin
from django.conf import settings
urlpatterns = [
url(r'^admin/', admin.site.urls),
]
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
| greven/vagrant-django | project_name/urls.py | Python | bsd-3-clause | 993 |
from util import fibonacci
def solve():
result = 0
for i in fibonacci(1, 2):
if i > 4000000:
return result
elif i % 2 == 0:
result += i
if __name__ == '__main__':
print solve()
| elemel/project-euler | src/2.py | Python | mit | 243 |
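# An equivalent stand-alone version (no project util module needed): sum the
# even Fibonacci terms that do not exceed four million with a plain loop.
def even_fib_sum(limit=4000000):
    a, b = 1, 2
    total = 0
    while a <= limit:
        if a % 2 == 0:
            total += a
        a, b = b, a + b
    return total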
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name' : 'Account Tax Cash Basis',
'version' : '1.1',
'author' : 'OpenERP SA',
'summary': 'Allow to have cash basis on tax',
'sequence': 4,
'description': """
Add an option on taxes to allow them to be cash based, meaning that during reconciliation, if there is a tax with
cash basis involved, a new journal entry will be created containing those tax values.
""",
'category' : 'Accounting & Finance',
'website': 'https://www.odoo.com/page/accounting',
'depends' : ['account'],
'data': [
'views/tax_cash_basis_view.xml',
],
'installable': True,
'auto_install': False,
}
| minhphung171093/GreenERP_V9 | openerp/addons/account_tax_cash_basis/__openerp__.py | Python | gpl-3.0 | 740 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
tab_to_fastq
Description: Convert a tab file to a fastq file
tab_to_fastq.py -i input.tab -o output.fq
-----------------------------------------------------------------------
Author: This software is written and maintained by Pierre Pericard
([email protected])
Created: 2016-04-12
Last Modified: 2016-04-12
Licence: GNU GPL 3.0
Copyright 2016 Pierre Pericard
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import argparse
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Convert a tab file to a fastq file')
    parser.add_argument('-i', '--input_tab', metavar='INTAB',
                        type=argparse.FileType('r'), default='-',
                        help='input tab file')
    parser.add_argument('-o', '--output_fastq', metavar='OUTFASTQ',
                        type=argparse.FileType('w'), default='-',
                        help='output fastq file')
args = parser.parse_args()
for tab in (l.strip().split('\t') for l in args.input_tab if l.strip()):
        header = tab[0]
seq = tab[1]
qual = tab[2]
args.output_fastq.write('@{0}\n{1}\n+\n{2}\n'.format(header, seq, qual))
| bonsai-team/matam | scripts/tab_to_fastq.py | Python | agpl-3.0 | 1,806 |
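# Worked example of the conversion performed above: one tab-separated record
# (header, sequence, quality) becomes one four-line FASTQ record.
tab_line = 'read_1\tACGT\tIIII'
header, seq, qual = tab_line.strip().split('\t')
fastq_record = '@{0}\n{1}\n+\n{2}\n'.format(header, seq, qual)
assert fastq_record == '@read_1\nACGT\n+\nIIII\n'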
from CellCycle.ChainModule.ListThread import *
from start import loadSettings
from start import loadLogger
from CellCycle.MemoryModule.calculateSon import calculateSonId
def add_check():
currentProfile = {"profile_name": "alessandro_fazio", "key_pair": "AWSCellCycle", "branch": "ListUtilities"}
settings_to_launch = loadSettings(currentProfile=currentProfile)
logger_to_launch = loadLogger(settings_to_launch)
n1 = Node("1", "172.10.1.1", settings_to_launch.getIntPort(), settings_to_launch.getExtPort(), "0", "19")
n2 = Node("2", "172.10.1.2", settings_to_launch.getIntPort(), settings_to_launch.getExtPort(), "20", "39")
n3 = Node("3", "172.10.1.3", settings_to_launch.getIntPort(), settings_to_launch.getExtPort(), "40", "59")
n4 = Node("4", "172.10.1.4", settings_to_launch.getIntPort(), settings_to_launch.getExtPort(), "60", "79")
n5 = Node("5", "172.10.1.5", settings_to_launch.getIntPort(), settings_to_launch.getExtPort(), "80", "99")
i3 = ListThread(master_of_master=n1, master=n2, myself=n3, slave=n4, slave_of_slave=n5, logger=logger_to_launch,
settings=settings_to_launch, name=n3.id)
i4 = ListThread(master_of_master=n2, master=n3, myself=n4, slave=n5, slave_of_slave=n1, logger=logger_to_launch,
settings=settings_to_launch, name=n4.id)
i5 = ListThread(master_of_master=n3, master=n4, myself=n5, slave=n1, slave_of_slave=n2, logger=logger_to_launch,
settings=settings_to_launch, name=n5.id)
i1 = ListThread(master_of_master=n4, master=n5, myself=n1, slave=n2, slave_of_slave=n3, logger=logger_to_launch,
settings=settings_to_launch, name=n1.id)
i2 = ListThread(master_of_master=n5, master=n1, myself=n2, slave=n3, slave_of_slave=n4, logger=logger_to_launch,
settings=settings_to_launch, name=n2.id)
# pretend that we add the new node
m_o = MemoryObject(n1, n2, n3, n4, n5)
new_min_max_key = keyCalcToCreateANewNode(m_o).newNode
new_node_id_to_add = str(calculateSonId(float(n3.id), float(n4.id)))
new_node_instance_to_add = Node(new_node_id_to_add, None, settings_to_launch.getIntPort(),
settings_to_launch.getExtPort(),
new_min_max_key.min_key, new_min_max_key.max_key)
'''
logger_to_launch.debug("########## BEFORE ADD ############")
i1.print_relatives()
i2.print_relatives()
i3.print_relatives()
i4.print_relatives()
i5.print_relatives()
'''
logger_to_launch.debug("########## AFTER ADD #############")
i4.change_added_keys_to(n3.id)
i4.test_update(source_id=n3.id, target_relative_id=n4.id, node_to_add=new_node_instance_to_add)
i4.change_parents_from_list()
i5.change_added_keys_to(n3.id)
i5.test_update(source_id=n3.id, target_relative_id=n4.id, node_to_add=new_node_instance_to_add)
i5.change_parents_from_list()
i1.change_added_keys_to(n3.id)
i1.test_update(source_id=n3.id, target_relative_id=n4.id, node_to_add=new_node_instance_to_add)
i1.change_parents_from_list()
i2.change_added_keys_to(n3.id)
i2.test_update(source_id=n3.id, target_relative_id=n4.id, node_to_add=new_node_instance_to_add)
i2.change_parents_from_list()
i3.change_added_keys_to(n3.id)
i3.test_update(source_id=n3.id, target_relative_id=n4.id, node_to_add=new_node_instance_to_add)
i3.change_parents_from_list()
i1.print_relatives()
i2.print_relatives()
i3.print_relatives()
i4.print_relatives()
i5.print_relatives()
def dead_check():
currentProfile = {"profile_name": "alessandro_fazio", "key_pair": "AWSCellCycle", "branch": "ListUtilities"}
settings_to_launch = loadSettings(currentProfile=currentProfile)
logger_to_launch = loadLogger(settings_to_launch)
n1 = Node("1", "172.10.1.1", settings_to_launch.getIntPort(), settings_to_launch.getExtPort(), "12", "19")
n2 = Node("2", "172.10.1.2", settings_to_launch.getIntPort(), settings_to_launch.getExtPort(), "20", "39")
n3 = Node("3", "172.10.1.3", settings_to_launch.getIntPort(), settings_to_launch.getExtPort(), "40", "59")
n4 = Node("4", "172.10.1.4", settings_to_launch.getIntPort(), settings_to_launch.getExtPort(), "60", "79")
n5 = Node("5", "172.10.1.5", settings_to_launch.getIntPort(), settings_to_launch.getExtPort(), "80", "11")
i3 = ListThread(master_of_master=n1, master=n2, myself=n3, slave=n4, slave_of_slave=n5, logger=logger_to_launch,
settings=settings_to_launch, name=n3.id)
i4 = ListThread(master_of_master=n2, master=n3, myself=n4, slave=n5, slave_of_slave=n1, logger=logger_to_launch,
settings=settings_to_launch, name=n4.id)
i5 = ListThread(master_of_master=n3, master=n4, myself=n5, slave=n1, slave_of_slave=n2, logger=logger_to_launch,
settings=settings_to_launch, name=n5.id)
i1 = ListThread(master_of_master=n4, master=n5, myself=n1, slave=n2, slave_of_slave=n3, logger=logger_to_launch,
settings=settings_to_launch, name=n1.id)
'''
logger_to_launch.debug("########## BEFORE ADD ############")
i1.print_relatives()
i2.print_relatives()
i3.print_relatives()
i4.print_relatives()
i5.print_relatives()
'''
logger_to_launch.debug("########## AFTER DEAD #############")
i4.change_dead_keys_to(n3.id)
i4.test_remove(target_id=n2.id, source_id=n3.id, target_relative_id=n1.id)
i4.change_parents_from_list()
i5.change_dead_keys_to(n3.id)
i5.test_remove(target_id=n2.id, source_id=n3.id, target_relative_id=n1.id)
i5.change_parents_from_list()
i1.change_dead_keys_to(n3.id)
i1.test_remove(target_id=n2.id, source_id=n3.id, target_relative_id=n1.id)
i1.change_parents_from_list()
i3.change_dead_keys_to(n3.id)
i3.test_remove(target_id=n2.id, source_id=n3.id, target_relative_id=n1.id)
i3.change_parents_from_list()
i1.print_relatives()
i3.print_relatives()
i4.print_relatives()
i5.print_relatives()
logger_to_launch.debug("this is the ip found {}".format((i1.node_list.find_memory_key(0)).target.ip))
| AQuadroTeam/CellsCycle | doc/tests/list-communication/KeyTest.py | Python | mit | 6,180 |
import numpy as np
import restrictedBoltzmannMachine as rbm
from batchtrainer import *
from activationfunctions import *
from common import *
from debug import *
from trainingoptions import *
import theano
from theano import tensor as T
from theano.tensor.shared_randomstreams import RandomStreams
theanoFloat = theano.config.floatX
DEBUG = False
class MiniBatchTrainer(BatchTrainer):
def __init__(self, input, inputLabels, nrLayers, initialWeights, initialBiases,
activationFunction, classificationActivationFunction,
visibleDropout, hiddenDropout,
adversarial_training, adversarial_epsilon, adversarial_coefficient):
self.input = input
self.inputLabels = inputLabels
# If we should use adversarial training or not
self.adversarial_training = adversarial_training
self.adversarial_coefficient = adversarial_coefficient
self.adversarial_epsilon = adversarial_epsilon
self.visibleDropout = visibleDropout
self.hiddenDropout = hiddenDropout
self.activationFunction = activationFunction
self.classificationActivationFunction = classificationActivationFunction
# Let's initialize the fields
# The weights and biases, make them shared variables
self.weights = []
self.biases = []
nrWeights = nrLayers - 1
self.nrWeights = nrWeights
for i in xrange(nrWeights):
w = theano.shared(value=np.asarray(initialWeights[i],
dtype=theanoFloat),
name='W')
self.weights.append(w)
b = theano.shared(value=np.asarray(initialBiases[i],
dtype=theanoFloat),
name='b')
self.biases.append(b)
# Set the parameters of the object
# Do not set more than this, these will be used for differentiation in the
# gradient
params = self.weights + self.biases
# Required for momentum
# The updates that were performed in the last batch
# It is important that the order in which we add the oldUpdates is the same
# as which we add the params
oldUpdates = []
for i in xrange(nrWeights):
oldDw = theano.shared(value=np.zeros(shape=initialWeights[i].shape,
dtype=theanoFloat),
name='oldDw')
oldUpdates.append(oldDw)
for i in xrange(nrWeights):
oldDb = theano.shared(value=np.zeros(shape=initialBiases[i].shape,
dtype=theanoFloat),
name='oldDb')
oldUpdates.append(oldDb)
# Rmsprop
# The old mean that were performed in the last batch
oldMeanSquares = []
for i in xrange(nrWeights):
oldDw = theano.shared(value=np.zeros(shape=initialWeights[i].shape,
dtype=theanoFloat),
name='oldDw')
oldMeanSquares.append(oldDw)
for i in xrange(nrWeights):
oldDb = theano.shared(value=np.zeros(shape=initialBiases[i].shape,
dtype=theanoFloat),
name='oldDb')
oldMeanSquares.append(oldDb)
# Initialize the super class
super(MiniBatchTrainer, self).__init__(params, oldUpdates, oldMeanSquares)
# Create a theano random number generator
# Required to sample units for dropout
self.theanoRng = RandomStreams(seed=np.random.randint(1, 1000))
self.output = self.forwardPass(self.input)
if self.adversarial_training:
# TODO(mihaela): move this to the BatchTrainer superclass?
# This would require moving the forward functionality there
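      # Fast gradient sign adversarial examples: perturb the input by
      # adversarial_epsilon * sign(dError/dInput) and forward the perturbed
      # batch as well, so that cost() below can mix clean and adversarial errors.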
error = T.sum(self.costFun(self.output, self.inputLabels))
grad_error = T.grad(error, self.input)
adversarial_input = self.input + self.adversarial_epsilon * T.sgn(grad_error)
self.adversarial_output = self.forwardPass(adversarial_input)
def forwardPass(self, x):
# Sample from the visible layer
# Get the mask that is used for the visible units
if self.visibleDropout in [1.0, 1]:
currentLayerValues = x
else:
dropoutMask = self.theanoRng.binomial(n=1, p=self.visibleDropout,
size=x.shape,
dtype=theanoFloat)
currentLayerValues = x * dropoutMask
for stage in xrange(self.nrWeights -1):
w = self.weights[stage]
b = self.biases[stage]
linearSum = T.dot(currentLayerValues, w) + b
# dropout: give the next layer only some of the units from this layer
if self.hiddenDropout in [1.0, 1]:
currentLayerValues = self.activationFunction.deterministic(linearSum)
else:
dropoutMaskHidden = self.theanoRng.binomial(n=1, p=self.hiddenDropout,
size=linearSum.shape,
dtype=theanoFloat)
currentLayerValues = dropoutMaskHidden * self.activationFunction.deterministic(linearSum)
# Last layer operations, no dropout in the output
w = self.weights[self.nrWeights - 1]
b = self.biases[self.nrWeights - 1]
linearSum = T.dot(currentLayerValues, w) + b
currentLayerValues = self.classificationActivationFunction.deterministic(linearSum)
return currentLayerValues
  def costFun(self, x, y):
    return T.nnet.categorical_crossentropy(x, y)
# TODO: do I still need to pass the y?
def cost(self, y):
output_error = self.costFun(self.output, y)
if self.adversarial_training:
adversarial_error = self.costFun(self.adversarial_output, y)
alpha = self.adversarial_coefficient
return alpha * output_error + (1.0 - alpha) * adversarial_error
else:
return output_error
class ClassifierBatch(object):
def __init__(self, input, nrLayers, weights, biases,
visibleDropout, hiddenDropout,
activationFunction, classificationActivationFunction):
self.input = input
self.classificationWeights = classificationWeightsFromTestWeights(weights,
visibleDropout=visibleDropout,
hiddenDropout=hiddenDropout)
nrWeights = nrLayers - 1
currentLayerValues = input
for stage in xrange(nrWeights -1):
w = self.classificationWeights[stage]
b = biases[stage]
linearSum = T.dot(currentLayerValues, w) + b
currentLayerValues = activationFunction.deterministic(linearSum)
self.lastHiddenActivations = currentLayerValues
w = self.classificationWeights[nrWeights - 1]
b = biases[nrWeights - 1]
linearSum = T.dot(currentLayerValues, w) + b
currentLayerValues = classificationActivationFunction.deterministic(linearSum)
self.output = currentLayerValues
def cost(self, y):
return T.nnet.categorical_crossentropy(self.output, y)
""" Class that implements a deep belief network, for classification """
class DBN(object):
"""
Arguments:
nrLayers: the number of layers of the network. In case of discriminative
          training, also contains the classification layer
type: integer
layerSizes: the sizes of the individual layers.
type: list of integers of size nrLayers
binary: is binary data used
type: bool
activationFunction: the activation function used for the forward pass in the network
type: ActivationFunction (see module activationfunctions)
rbmActivationFunctionVisible: the activation function used for the visible layer in the
stacked RBMs during pre training
type: ActivationFunction (see module activationfunctions)
rbmActivationFunctionHidden: the activation function used for the hidden layer in the
stacked RBMs during pre training
type: ActivationFunction (see module activationfunctions)
classificationActivationFunction: the activation function used for the classification layer
type: ActivationFunction (see module activationfunctions)
unsupervisedLearningRate: learning rate for pretraining
type: float
supervisedLearningRate: learning rate for discriminative training
type: float
nesterovMomentum: if true, nesterov momentum is used for discriminative training
type: bool
rbmNesterovMomentum: if true, nesterov momentum is used for pretraining
type: bool
momentumFactorForLearningRate: if true, the learning rate is multiplied by 1 - momentum
for parameter updates
type: bool
momentumMax: the maximum value momentum is allowed to increase to
type: float
   momentumMaxRbm: the maximum value momentum is allowed to increase to in training RBMs
type: float
momentumForEpochFunction: the function used to increase momentum during training
type: python function (for examples see module common)
rmsprop: if true, rmsprop is used for training
type: bool
   rmspropRbm: if true, rmsprop is used for training RBMs
type: bool
miniBatchSize: the number of instances to be used in a mini batch during training
type: int
hiddenDropout: the dropout used for the hidden layers during discriminative training
type: float
visibleDropout: the dropout used for the visible layers during discriminative training
type: float
   rbmHiddenDropout: the dropout used for the hidden layer of the stacked rbms during pre training.
Unless you are using multiple pre-training epochs, set this to be 1. If you want the
hidden activation to be sparse, use sparsity constraints instead.
type: float
rbmVisibleDropout: the dropout used for the stacked rbms during pre training.
type: float
weightDecayL1: regularization parameter for L1 weight decay
type: float
weightDecayL2: regularization parameter for L2 weight decay
type: float
   adversarial_training: if true, adversarial training is used during fine tuning
type: boolean
adversarial_coefficient: The coefficient used to define the cost function in case
adversarial training is used.
the cost function will be:
adversarial_coefficient * Cost(params, x, y) +
(1 - adversarial_coefficient) * Cost(params, x + adversarial_epsilon * sign(grad (Cost(params, x, y)), y)
Defaults to 0.5.
type: float
adversarial_epsilon: Used to define the cost function during training in case
adversarial training is used.
Guideline for how to set this field:
adversarial_epsilon should be set to the maximal difference in two input fields that is not perceivable
by the input storing data structure.
Eg: with MNIST, we set the input values to be between 0 and 1, from the original input which had
values between 0 and 255.
So if the difference between two inputs were to be less than 1/255 in all pixels, we want the network
to not assign different classes to them, because our structure would not even distinguish between them.
Hence for MNIST we set adversarial_epsilon = 1 / 255
See: https://drive.google.com/file/d/0B64011x02sIkX0poOGVyZDI4dUU/view
for the original paper and more details
type: float
firstRBMheuristic: if true, we use a heuristic that the first rbm should have a
learning rate 10 times bigger than the learning rate obtained using
CV with DBN for the unsupervisedLearningRate. The learning rate is capped to 1.0.
type: bool
sparsityConstraintRbm: if true, sparsity regularization is used for training the RBMs
type: bool
sparsityRegularizationRbm: the regularization parameter for the sparsity constraints.
      if sparsityConstraintRbm is False, it is ignored
type: float
sparsityTragetRbm: the target sparsity for the hidden units in the RBMs
type: float
preTrainEpochs: the number of pre training epochs
type: int
initialInputShape: the initial shape of input data (it had to be vectorized to be made an input)
type: tuple of ints
nameDataset: the name of the dataset
type: string
"""
def __init__(self, nrLayers, layerSizes,
binary,
activationFunction=Sigmoid(),
rbmActivationFunctionVisible=Sigmoid(),
rbmActivationFunctionHidden=Sigmoid(),
classificationActivationFunction=Softmax(),
unsupervisedLearningRate=0.01,
supervisedLearningRate=0.05,
nesterovMomentum=True,
rbmNesterovMomentum=True,
momentumFactorForLearningRate=True,
momentumFactorForLearningRateRBM=True,
momentumMax=0.9,
momentumMaxRbm=0.05,
momentumForEpochFunction=getMomentumForEpochLinearIncrease,
rmsprop=True,
rmspropRbm=True,
miniBatchSize=10,
hiddenDropout=0.5,
visibleDropout=0.8,
rbmHiddenDropout=0.5,
rbmVisibleDropout=1,
weightDecayL1=0.0001,
weightDecayL2=0.0001,
firstRBMheuristic=False,
sparsityConstraintRbm=False,
sparsityRegularizationRbm=None,
sparsityTragetRbm=None,
adversarial_training=False,
adversarial_coefficient=0.5,
adversarial_epsilon=1.0/255,
preTrainEpochs=1,
initialInputShape=None,
nameDataset=''):
self.nrLayers = nrLayers
self.layerSizes = layerSizes
print "creating network with " + str(self.nrLayers) + " and layer sizes", str(self.layerSizes)
assert len(layerSizes) == nrLayers
self.hiddenDropout = hiddenDropout
self.visibleDropout = visibleDropout
self.rbmHiddenDropout = rbmHiddenDropout
self.rbmVisibleDropout = rbmVisibleDropout
self.miniBatchSize = miniBatchSize
self.supervisedLearningRate = supervisedLearningRate
self.unsupervisedLearningRate = unsupervisedLearningRate
self.nesterovMomentum = nesterovMomentum
self.rbmNesterovMomentum = rbmNesterovMomentum
self.rmsprop = rmsprop
self.rmspropRbm = rmspropRbm
self.weightDecayL1 = weightDecayL1
self.weightDecayL2 = weightDecayL2
self.preTrainEpochs = preTrainEpochs
self.activationFunction = activationFunction
self.rbmActivationFunctionHidden = rbmActivationFunctionHidden
self.rbmActivationFunctionVisible = rbmActivationFunctionVisible
self.classificationActivationFunction = classificationActivationFunction
self.momentumFactorForLearningRate = momentumFactorForLearningRate
self.momentumMax = momentumMax
self.momentumMaxRbm = momentumMaxRbm
self.momentumForEpochFunction = momentumForEpochFunction
self.binary = binary
self.firstRBMheuristic = firstRBMheuristic
self.momentumFactorForLearningRateRBM = momentumFactorForLearningRateRBM
self.sparsityRegularizationRbm = sparsityRegularizationRbm
self.sparsityConstraintRbm = sparsityConstraintRbm
self.sparsityTragetRbm = sparsityTragetRbm
# If we should use adversarial training or not
# For more details on adversarial training see
# https://drive.google.com/file/d/0B64011x02sIkX0poOGVyZDI4dUU/view
self.adversarial_training = adversarial_training
self.adversarial_coefficient = adversarial_coefficient
self.adversarial_epsilon = adversarial_epsilon
self.nameDataset = nameDataset
print "hidden dropout in DBN", hiddenDropout
print "visible dropout in DBN", visibleDropout
print "using adversarial training"
def __getstate__(self):
odict = self.__dict__.copy() # copy the dict since we change it
kept = ['x', 'classifier']
for key in self.__dict__:
if key not in kept:
del odict[key]
return odict
def __setstate__(self, dict):
self.__dict__.update(dict) # update attributes
def __getinitargs__():
return None
def pretrain(self, data, unsupervisedData):
nrRbms = self.nrLayers - 2
self.weights = []
self.biases = []
self.generativeBiases = []
currentData = data
if unsupervisedData is not None:
print "adding unsupervisedData"
currentData = np.vstack((currentData, unsupervisedData))
print "pre-training with a data set of size", len(currentData)
lastRbmBiases = None
lastRbmTrainWeights = None
dropoutList = [self.visibleDropout] + [self.hiddenDropout] * (self.nrLayers -1)
for i in xrange(nrRbms):
# If the RBM can be initialized from the previous one,
# do so, by using the transpose of the already trained net
if i > 0 and self.layerSizes[i+1] == self.layerSizes[i-1] and type(self.rbmActivationFunctionVisible) == type(self.rbmActivationFunctionHidden):
print "compatible rbms: initializing rbm number " + str(i) + "with the trained weights of rbm " + str(i-1)
initialWeights = lastRbmTrainWeights.T
initialBiases = lastRbmBiases
else:
initialWeights = None
initialBiases = None
if i == 0 and self.firstRBMheuristic:
print "different learning rate for the first rbm"
# Do not let the learning rate be bigger than 1
unsupervisedLearningRate = min(self.unsupervisedLearningRate * 10, 1.0)
else:
unsupervisedLearningRate = self.unsupervisedLearningRate
net = rbm.RBM(self.layerSizes[i], self.layerSizes[i+1],
learningRate=unsupervisedLearningRate,
visibleActivationFunction=self.rbmActivationFunctionVisible,
hiddenActivationFunction=self.rbmActivationFunctionHidden,
hiddenDropout=self.rbmHiddenDropout,
visibleDropout=self.rbmVisibleDropout,
rmsprop=self.rmspropRbm,
momentumMax=self.momentumMaxRbm,
momentumFactorForLearningRate=self.momentumFactorForLearningRateRBM,
nesterov=self.rbmNesterovMomentum,
initialWeights=initialWeights,
initialBiases=initialBiases,
trainingEpochs=self.preTrainEpochs,
sparsityConstraint=self.sparsityConstraintRbm,
sparsityTraget=self.sparsityTragetRbm,
sparsityRegularization=self.sparsityRegularizationRbm)
net.train(currentData)
# Use the test weights from the rbm, the ones the correspond to the incoming
# weights for the hidden units
# Then you have to divide by the dropout
self.weights += [net.testWeights[1] / dropoutList[i]]
# Only add the biases for the hidden unit
b = net.biases[1]
lastRbmBiases = net.biases
# Do not take the test weight, take the training ones
# because you will continue training with them
lastRbmTrainWeights = net.weights
self.biases += [b]
self.generativeBiases += [net.biases[0]]
# Let's update the current representation given to the next RBM
currentData = net.hiddenRepresentation(currentData)
# Average activation
print "average activation after rbm pretraining"
print currentData.mean()
# This depends if you have generative or not
# Initialize the last layer of weights to zero if you have
# a discriminative net
lastLayerWeights = np.zeros(shape=(self.layerSizes[-2], self.layerSizes[-1]),
dtype=theanoFloat)
lastLayerBiases = np.zeros(shape=(self.layerSizes[-1]),
dtype=theanoFloat)
self.weights += [lastLayerWeights]
self.biases += [lastLayerBiases]
assert len(self.weights) == self.nrLayers - 1
assert len(self.biases) == self.nrLayers - 1
# For sklearn compatibility
def fit(self, data, labels, maxEpochs, validation=True, percentValidation=0.05,
unsupervisedData=None, trainingIndices=None):
return self.train(data, labels, maxEpochs, validation, percentValidation, unsupervisedData, trainingIndices)
"""
Choose a percentage (percentValidation) of the data given to be
validation data, used for early stopping of the model.
"""
def train(self, data, labels, maxEpochs, validation=True, percentValidation=0.05,
unsupervisedData=None, trainingIndices=None):
# Required if the user wants to record on what indices they tested the dataset on
self.trainingIndices = trainingIndices
# Do a small check to see if the data is in between (0, 1)
# if we claim we have binary stochastic units
if self.binary:
mins = data.min(axis=1)
maxs = data.max(axis=1)
assert np.all(mins >=0.0) and np.all(maxs < 1.0 + 1e-8)
else:
# We are using gaussian visible units so we need to scale the data
# TODO: NO: pass in a scale argument
if isinstance(self.rbmActivationFunctionVisible, Identity):
print "scaling input data"
data = scale(data)
if unsupervisedData is not None:
mins = unsupervisedData.min(axis=1)
maxs = unsupervisedData.max(axis=1)
      assert np.all(mins >= 0.0) and np.all(maxs < 1.0 + 1e-8)
print "shuffling training data"
data, labels = shuffle(data, labels)
if validation:
nrInstances = len(data)
validationIndices = np.random.choice(xrange(nrInstances),
                                           int(percentValidation * nrInstances))
trainingIndices = list(set(xrange(nrInstances)) - set(validationIndices))
trainingData = data[trainingIndices, :]
trainingLabels = labels[trainingIndices, :]
validationData = data[validationIndices, :]
validationLabels = labels[validationIndices, :]
self._trainWithGivenValidationSet(trainingData, trainingLabels,
validationData, validationLabels, maxEpochs,
unsupervisedData)
else:
trainingData = data
trainingLabels = labels
self.trainNoValidation(trainingData, trainingLabels, maxEpochs,
unsupervisedData)
#TODO: if this is used from outside, you have to scale the data as well
# and also the validation data
# Could be a good idea to use validation data from a different set?
def _trainWithGivenValidationSet(self, data, labels,
validationData,
validationLabels,
maxEpochs,
unsupervisedData=None):
sharedData = theano.shared(np.asarray(data, dtype=theanoFloat))
sharedLabels = theano.shared(np.asarray(labels, dtype=theanoFloat))
self.pretrain(data, unsupervisedData)
self.nrMiniBatchesTrain = max(len(data) / self.miniBatchSize, 1)
self.miniBatchValidateSize = min(len(validationData), self.miniBatchSize * 10)
    self.nrMiniBatchesValidate = max(len(validationData) / self.miniBatchValidateSize, 1)
sharedValidationData = theano.shared(np.asarray(validationData, dtype=theanoFloat))
sharedValidationLabels = theano.shared(np.asarray(validationLabels, dtype=theanoFloat))
    # Does backprop for the data and at the end sets the weights
self.fineTune(sharedData, sharedLabels, True,
sharedValidationData, sharedValidationLabels, maxEpochs)
def trainNoValidation(self, data, labels, maxEpochs, unsupervisedData):
sharedData = theano.shared(np.asarray(data, dtype=theanoFloat))
sharedLabels = theano.shared(np.asarray(labels, dtype=theanoFloat))
self.pretrain(data, unsupervisedData)
self.nrMiniBatchesTrain = max(len(data) / self.miniBatchSize, 1)
    # Does backprop for the data and at the end sets the weights
self.fineTune(sharedData, sharedLabels, False, None, None, maxEpochs)
"""Fine tunes the weigths and biases using backpropagation.
data and labels are shared
Arguments:
    data: The data used for training and fine tuning
data has to be a theano variable for it to work in the current version
labels: A numpy nd array. Each label should be transformed into a binary
base vector before passed into this function.
miniBatch: The number of instances to be used in a miniBatch
epochs: The number of epochs to use for fine tuning
"""
def fineTune(self, data, labels, validation, validationData, validationLabels,
maxEpochs):
print "supervisedLearningRate"
print self.supervisedLearningRate
batchLearningRate = self.supervisedLearningRate / self.miniBatchSize
batchLearningRate = np.float32(batchLearningRate)
# Let's build the symbolic graph which takes the data trough the network
# allocate symbolic variables for the data
# index of a mini-batch
miniBatchIndex = T.lscalar()
# momentum = T.fscalar()
# The mini-batch data is a matrix
x = T.matrix('x', dtype=theanoFloat)
# labels[start:end] this needs to be a matrix because we output probabilities
y = T.matrix('y', dtype=theanoFloat)
batchTrainer = MiniBatchTrainer(input=x, inputLabels=y, nrLayers=self.nrLayers,
activationFunction=self.activationFunction,
classificationActivationFunction=self.classificationActivationFunction,
initialWeights=self.weights,
initialBiases=self.biases,
visibleDropout=self.visibleDropout,
hiddenDropout=self.hiddenDropout,
adversarial_training=self.adversarial_training,
adversarial_coefficient=self.adversarial_coefficient,
adversarial_epsilon=self.adversarial_epsilon)
classifier = ClassifierBatch(input=x, nrLayers=self.nrLayers,
activationFunction=self.activationFunction,
classificationActivationFunction=self.classificationActivationFunction,
visibleDropout=self.visibleDropout,
hiddenDropout=self.hiddenDropout,
weights=batchTrainer.weights,
biases=batchTrainer.biases)
# TODO: remove training error from this
# the error is the sum of the errors in the individual cases
trainingError = T.sum(batchTrainer.cost(y))
# also add some regularization costs
error = trainingError
for w in batchTrainer.weights:
error += self.weightDecayL1 * T.sum(abs(w)) + self.weightDecayL2 * T.sum(w ** 2)
self.trainingOptions = TrainingOptions(self.miniBatchSize, self.supervisedLearningRate, self.momentumMax, self.rmsprop,
self.nesterovMomentum, self.momentumFactorForLearningRate)
trainModel = batchTrainer.makeTrainFunction(x, y, data, labels, self.trainingOptions)
if not self.nesterovMomentum:
theano.printing.pydotprint(trainModel)
trainingErrorNoDropout = theano.function(
inputs=[miniBatchIndex],
outputs=T.mean(classifier.cost(y)),
givens={
x: data[miniBatchIndex * self.miniBatchSize:(miniBatchIndex + 1) * self.miniBatchSize],
y: labels[miniBatchIndex * self.miniBatchSize:(miniBatchIndex + 1) * self.miniBatchSize]})
if validation:
# Let's create the function that validates the model!
validateModel = theano.function(inputs=[miniBatchIndex],
outputs=T.mean(classifier.cost(y)),
givens={
x: validationData[miniBatchIndex * self.miniBatchValidateSize:(miniBatchIndex + 1) * self.miniBatchValidateSize],
y: validationLabels[miniBatchIndex * self.miniBatchValidateSize:(miniBatchIndex + 1) * self.miniBatchValidateSize]})
self.trainModelPatience(trainModel, validateModel, maxEpochs, trainingErrorNoDropout)
else:
if validationData is not None or validationLabels is not None:
raise Exception(("You provided validation data but requested a train method "
"that does not need validation"))
self.trainLoopModelFixedEpochs(batchTrainer, trainModel, maxEpochs)
# Set up the weights in the dbn object
self.x = x
self.classifier = classifier
self.weights = map(lambda x: x.get_value(), batchTrainer.weights)
self.biases = map(lambda x: x.get_value(), batchTrainer.biases)
self.classificationWeights = classificationWeightsFromTestWeights(self.weights,
visibleDropout=self.visibleDropout,
hiddenDropout=self.hiddenDropout)
def trainLoopModelFixedEpochs(self, batchTrainer, trainModel, maxEpochs):
# trainingErrors = []
epochTrainingErrors = []
try:
for epoch in xrange(maxEpochs):
print "epoch " + str(epoch)
momentum = self.momentumForEpochFunction(self.momentumMax, epoch)
s = 0
for batchNr in xrange(self.nrMiniBatchesTrain):
trainError = trainModel(batchNr, momentum) / self.miniBatchSize
s += trainError
s = s / self.nrMiniBatchesTrain
print "training error " + str(trainError)
epochTrainingErrors += [s]
except KeyboardInterrupt:
print "you have interrupted training"
print "we will continue testing with the state of the network as it is"
# plotTraningError(epochTrainingError)
print "number of epochs"
print epoch + 1
def trainLoopWithValidation(self, trainModel, validateModel, maxEpochs):
lastValidationError = np.inf
count = 0
epoch = 0
validationErrors = []
trainingErrors = []
try:
while epoch < maxEpochs and count < 8:
print "epoch " + str(epoch)
momentum = self.momentumForEpochFunction(self.momentumMax, epoch)
s = 0
for batchNr in xrange(self.nrMiniBatchesTrain):
trainingErrorBatch = trainModel(batchNr, momentum) / self.miniBatchSize
s += trainingErrorBatch
trainingErrors += [s / self.nrMiniBatchesTrain]
meanValidations = map(validateModel, xrange(self.nrMiniBatchesValidate))
meanValidation = sum(meanValidations) / len(meanValidations)
validationErrors += [meanValidation]
if meanValidation > lastValidationError:
count +=1
else:
count = 0
lastValidationError = meanValidation
epoch +=1
except KeyboardInterrupt:
print "you have interrupted training"
print "we will continue testing with the state of the network as it is"
plotTrainingAndValidationErros(trainingErrors, validationErrors)
print "number of epochs"
print epoch + 1
# A very greedy approach to training
  # A milder version would be to actually take 3 consecutive ones
  # that give the best average (to ensure you are not in a lucky place)
# and take the best of them
def trainModelGetBestWeights(self, batchTrainer, trainModel, validateModel, maxEpochs):
bestValidationError = np.inf
validationErrors = []
trainingErrors = []
bestWeights = None
bestBiases = None
bestEpoch = 0
for epoch in xrange(maxEpochs):
print "epoch " + str(epoch)
momentum = self.momentumForEpochFunction(self.momentumMax, epoch)
for batchNr in xrange(self.nrMiniBatchesTrain):
trainingErrorBatch = trainModel(batchNr, momentum) / self.miniBatchSize
trainingErrors += [trainingErrorBatch]
meanValidations = map(validateModel, xrange(self.nrMiniBatchesValidate))
meanValidation = sum(meanValidations) / len(meanValidations)
validationErrors += [meanValidation]
if meanValidation < bestValidationError:
bestValidationError = meanValidation
# Save the weights which are the best ones
bestWeights = batchTrainer.weights
bestBiases = batchTrainer.biases
bestEpoch = epoch
# If we have improved at all during training
# not sure if things work well like this with theano stuff
# maybe I need an update
if bestWeights is not None and bestBiases is not None:
batchTrainer.weights = bestWeights
batchTrainer.biases = bestBiases
plotTrainingAndValidationErros(trainingErrors, validationErrors)
print "number of epochs"
print epoch
print "best epoch"
print bestEpoch
def trainModelPatience(self, trainModel, validateModel, maxEpochs, trainNoDropout):
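    """Patience-based early stopping: always run at least `patience` minibatch
    iterations, extend the patience whenever the validation error improves, and
    stop training once the iteration count exceeds the patience."""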
bestValidationError = np.inf
epoch = 0
doneTraining = False
    patience = 10 * self.nrMiniBatchesTrain # do at least 10 passes through the data no matter what
patienceIncrease = 2 # Increase our patience up to patience * patienceIncrease
validationErrors = []
trainingErrors = []
trainingErrorNoDropout = []
try:
while (epoch < maxEpochs) and not doneTraining:
# while (epoch < maxEpochs):
# Train the net with all data
print "epoch " + str(epoch)
momentum = self.momentumForEpochFunction(self.momentumMax, epoch)
for batchNr in xrange(self.nrMiniBatchesTrain):
iteration = epoch * self.nrMiniBatchesTrain + batchNr
trainingErrorBatch = trainModel(batchNr, momentum) / self.miniBatchSize
meanValidations = map(validateModel, xrange(self.nrMiniBatchesValidate))
meanValidation = sum(meanValidations) / len(meanValidations)
          if meanValidation < bestValidationError:
            # We improved on the best validation error so far, so extend the patience window
            print "increasing patience"
            patience = max(patience, iteration * patienceIncrease)
            bestValidationError = meanValidation
validationErrors += [meanValidation]
trainingErrors += [trainingErrorBatch]
trainingErrorNoDropout += [trainNoDropout(batchNr)]
if patience <= iteration:
doneTraining = True
epoch += 1
except KeyboardInterrupt:
print "you have interrupted training"
print "we will continue testing with the state of the network as it is"
plotTrainingAndValidationErros(trainingErrors, validationErrors)
print "number of epochs"
print epoch
def classify(self, dataInstaces):
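    """Build a theano function for the forward pass and return the output layer
    activations together with the predicted class label for each instance."""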
dataInstacesConverted = theano.shared(np.asarray(dataInstaces, dtype=theanoFloat))
classifyFunction = theano.function(
inputs=[],
outputs=self.classifier.output,
updates={},
givens={self.x: dataInstacesConverted}
)
lastLayers = classifyFunction()
return lastLayers, np.argmax(lastLayers, axis=1)
# For compatibility with sklearn
def predict(self, dataInstaces):
return self.classify(dataInstaces)
"""The speed of this function could be improved but since it is never called
during training and it is for illustrative purposes that should not be a problem. """
def sample(self, nrSamples):
nrRbms = self.nrLayers - 2
    # Create random samples of the size of the top hidden layer
if self.binary:
samples = np.random.rand(nrSamples, self.layerSizes[-2])
else:
samples = np.random.randint(255, size=(nrSamples, self.layerSizes[-2]))
# You have to do it in decreasing order
for i in xrange(nrRbms -1, 0, -1):
# If the network can be initialized from the previous one,
# do so, by using the transpose of the already trained net
      weights = self.classificationWeights[i-1].T
biases = np.array([self.biases[i-1], self.generativeBiases[i-1]])
net = rbm.RBM(self.layerSizes[i], self.layerSizes[i-1],
learningRate=self.unsupervisedLearningRate,
visibleActivationFunction=self.rbmActivationFunctionVisible,
hiddenActivationFunction=self.rbmActivationFunctionHidden,
hiddenDropout=1.0,
visibleDropout=1.0,
rmsprop=True, # TODO: argument here as well?
nesterov=self.rbmNesterovMomentum,
                    initialWeights=weights,
initialBiases=biases)
      # Do 20 steps of Gibbs sampling for the top RBM
print samples.shape
print biases.shape
print biases[1].shape
if i == nrRbms - 1:
samples = net.reconstruct(samples, cdSteps=20)
      # Do a pass through the net
samples = net.hiddenRepresentation(samples)
return samples
"""The speed of this function could be improved but since it is never called
during training and it is for illustrative purposes that should not be a problem. """
def getHiddenActivations(self, data):
nrRbms = self.nrLayers - 2
activations = data
activationsList = []
    # Propagate the data upwards through the stack, layer by layer
for i in xrange(nrRbms):
      # Build an RBM for this layer from the already trained weights and biases
      weights = self.classificationWeights[i]
biases = np.array([self.generativeBiases[i], self.biases[i]])
net = rbm.RBM(self.layerSizes[i], self.layerSizes[i+1],
learningRate=self.unsupervisedLearningRate,
visibleActivationFunction=self.rbmActivationFunctionVisible,
hiddenActivationFunction=self.rbmActivationFunctionHidden,
hiddenDropout=1.0,
visibleDropout=1.0,
rmsprop=True, # TODO: argument here as well?
nesterov=self.rbmNesterovMomentum,
                    initialWeights=weights,
initialBiases=biases)
      # Do a pass through the net
activations = net.hiddenRepresentation(activations)
activationsList += [activations]
return activationsList
def hiddenActivations(self, data):
dataInstacesConverted = theano.shared(np.asarray(data, dtype=theanoFloat))
classifyFunction = theano.function(
inputs=[],
outputs=self.classifier.output,
updates={},
givens={self.x: dataInstacesConverted})
classifyFunction()
return self.classifier.lastHiddenActivations
def classificationWeightsFromTestWeights(weights, visibleDropout, hiddenDropout):
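  """Rescale weights trained with dropout for use without dropout at test time:
  the first layer is scaled by the visible dropout factor and the remaining
  layers by the hidden dropout factor."""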
classificationWeights = [visibleDropout * weights[0]]
classificationWeights += map(lambda x: x * hiddenDropout, weights[1:])
return classificationWeights
| Warvito/pydeeplearn | code/lib/deepbelief.py | Python | bsd-3-clause | 38,804 |
import datetime
from django.conf import settings
from django.core.management.base import BaseCommand
from django.template.loader import render_to_string
from ...models import CorporateMember
from home.tasks import mail_task
class Command(BaseCommand):
def handle(self, *args, **options):
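        # Email every active corporate member whose membership expires exactly
        # 30 days from today, using the renewal template below.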
thirty_days_from_now = datetime.date.today() + datetime.timedelta(days=30)
for member in CorporateMember.objects.filter(inactive=False):
if member.get_expiry_date() == thirty_days_from_now:
mail_task(
'Expiring Conservation Technology Solutions Membership for %s' % member.display_name,
render_to_string('members/corporate_member_renewal_email.txt', {
'contact_name': member.contact_name,
'member_name': member.display_name,
'expiry_date': member.get_expiry_date(),
'renewal_link': member.get_renewal_link(),
}),
settings.DEFAULT_FROM_EMAIL,
[
settings.DEFAULT_FROM_EMAIL,
member.contact_email,
'[email protected]'
],
)
| cts-admin/cts | cts/members/management/commands/send_renewal_emails.py | Python | gpl-3.0 | 1,296 |
#coding=utf-8
import sys,platform
sys.path.append("../")
if 'twisted.internet.reactor' not in sys.modules:
if platform.system() == "Linux":
from twisted.internet import epollreactor
epollreactor.install()
else:
from twisted.internet import iocpreactor
iocpreactor.install()
import logging
from logging.handlers import TimedRotatingFileHandler
from twisted.internet import reactor
from twisted.python import log
from common import daemon
import clientfactory
import config
import random
import time
import redishelper
import clientmanager
def MainStop():
redishelper.instance.stop()
def MainRun(isdaemon):
random.seed(time.time())
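    # Configure a daily-rotating file log, bridge Twisted's logging into the
    # standard logging module, and add a console handler when not daemonized.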
logging.getLogger().setLevel(config.instance.log_level)
handler = TimedRotatingFileHandler(filename=config.instance.log_file,when='D',interval=1)
handler.setLevel(config.instance.log_level)
formatter = logging.Formatter(config.instance.log_format)
handler.setFormatter(formatter)
logging.getLogger().addHandler(handler)
log.PythonLoggingObserver().start()
if not isdaemon:
handler = logging.StreamHandler()
handler.setLevel(config.instance.log_level)
formatter = logging.Formatter(config.instance.log_format)
handler.setFormatter(formatter)
logging.getLogger().addHandler(handler)
redishelper.instance.start()
clientmanager.instance.start()
    # Set up the socket listener
clientfactory.instance.start(config.instance.server_ip, config.instance.server_port, config.instance.max_client)
logging.info(u"服务器管理服务器启动成功!")
reactor.run()
logging.info(u"服务器管理服务器停止运行!")
MainStop()
def Run():
daemon.run(config.instance.server_pid,MainRun)
if __name__ == "__main__":
Run()
| xiexiangwei/xGame | servermanager/main.py | Python | apache-2.0 | 1,801 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
CNB Matrix Module - Approuve
'''
import random
from cnb.cnbMatrixModule import CNBMatrixModule
class CNBMMApprouve(CNBMatrixModule):
"""
"""
name = 'approuve'
usage = ''
    desc = 'Approuve an affirmation when the bot detects 4x "k" messages'
enProcessCmd = False
enProcessPattern = True
_nbKMsg = 0
IRC_CHAT, IRC_GROUPCHAT = 'privmsg', 'pubmsg'
XMPP_CHAT, XMPP_GROUPCHAT = 'chat', 'groupchat'
KKK_REPLY = ['Approuved', 'I approuve this message', 'I agree', 'Yarrr', 'Certainement.']
KKK_MSG = ['k', 'kk', 'kkk', 'ok', 'okay', 'oki', 'confirmed', 'agree', 'i agree', 'indeed', \
'y', 'yes', 'yep', 'yup', 'yarrr', 'affirmative', 'certainement.', 'oui', "D'accord"]
KKK_NB_MAX = 4
def __init__(self,log):
CNBMatrixModule.__init__(self,log)
def __del__(self):
pass
def checkPattern(self,oMsg):
if str(oMsg.text).lower() in self.KKK_MSG \
and ((oMsg.protocol.startswith('xmpp') and oMsg.type == self.XMPP_GROUPCHAT)\
or (oMsg.protocol.startswith('irc') and oMsg.type == self.IRC_GROUPCHAT)):
return True
else:
return False
def processPattern(self,oMsg):
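        # Count consecutive "agreement" messages; once more than KKK_NB_MAX have
        # been seen, reply with a random approval and restart the counter.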
self._nbKMsg = self._nbKMsg + 1
self.log.debug('NbKMsg: ' + str(self._nbKMsg))
if self._nbKMsg > self.KKK_NB_MAX:
self._nbKMsg = 1
return self.KKK_REPLY[random.randint(1, len(self.KKK_REPLY)-1)]
else:
return ''
| hackfestca/cnb | cnb/modAvailable/CNBMMApprouve.py | Python | gpl-3.0 | 1,571 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010-2011 OpenStack, LLC
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests for database migrations. This test case reads the configuration
file test_migrations.conf for database connection settings
to use in the tests. For each connection found in the config file,
the test case runs a series of test cases to ensure that migrations work
properly both upgrading and downgrading, and that no data loss occurs
if possible.
"""
import commands
import ConfigParser
import os
import urlparse
from migrate.versioning import repository
import sqlalchemy
import nova.db.migration as migration
import nova.db.sqlalchemy.migrate_repo
from nova.db.sqlalchemy.migration import versioning_api as migration_api
from nova.openstack.common import log as logging
from nova import test
LOG = logging.getLogger(__name__)
def _mysql_get_connect_string(user="openstack_citest",
passwd="openstack_citest",
database="openstack_citest"):
"""
    Try to get a connection with a very specific set of values, if we get
these then we'll run the mysql tests, otherwise they are skipped
"""
return "mysql://%(user)s:%(passwd)s@localhost/%(database)s" % locals()
def _is_mysql_avail(user="openstack_citest",
passwd="openstack_citest",
database="openstack_citest"):
try:
connect_uri = _mysql_get_connect_string(
user=user, passwd=passwd, database=database)
engine = sqlalchemy.create_engine(connect_uri)
connection = engine.connect()
except Exception:
# intentionally catch all to handle exceptions even if we don't
# have mysql code loaded at all.
return False
else:
connection.close()
return True
def _have_mysql():
present = os.environ.get('NOVA_TEST_MYSQL_PRESENT')
if present is None:
return _is_mysql_avail()
return present.lower() in ('', 'true')
class TestMigrations(test.TestCase):
"""Test sqlalchemy-migrate migrations"""
DEFAULT_CONFIG_FILE = os.path.join(os.path.dirname(__file__),
'test_migrations.conf')
# Test machines can set the NOVA_TEST_MIGRATIONS_CONF variable
# to override the location of the config file for migration testing
CONFIG_FILE_PATH = os.environ.get('NOVA_TEST_MIGRATIONS_CONF',
DEFAULT_CONFIG_FILE)
MIGRATE_FILE = nova.db.sqlalchemy.migrate_repo.__file__
REPOSITORY = repository.Repository(
os.path.abspath(os.path.dirname(MIGRATE_FILE)))
def setUp(self):
super(TestMigrations, self).setUp()
self.snake_walk = False
self.test_databases = {}
# Load test databases from the config file. Only do this
# once. No need to re-run this on each test...
LOG.debug('config_path is %s' % TestMigrations.CONFIG_FILE_PATH)
if os.path.exists(TestMigrations.CONFIG_FILE_PATH):
cp = ConfigParser.RawConfigParser()
try:
cp.read(TestMigrations.CONFIG_FILE_PATH)
defaults = cp.defaults()
for key, value in defaults.items():
self.test_databases[key] = value
self.snake_walk = cp.getboolean('walk_style', 'snake_walk')
except ConfigParser.ParsingError, e:
self.fail("Failed to read test_migrations.conf config "
"file. Got error: %s" % e)
else:
self.fail("Failed to find test_migrations.conf config "
"file.")
self.engines = {}
for key, value in self.test_databases.items():
self.engines[key] = sqlalchemy.create_engine(value)
# We start each test case with a completely blank slate.
self._reset_databases()
def tearDown(self):
# We destroy the test data store between each test case,
# and recreate it, which ensures that we have no side-effects
# from the tests
self._reset_databases()
# remove these from the list so they aren't used in the migration tests
if "mysqlcitest" in self.engines:
del self.engines["mysqlcitest"]
if "mysqlcitest" in self.test_databases:
del self.test_databases["mysqlcitest"]
super(TestMigrations, self).tearDown()
def _reset_databases(self):
def execute_cmd(cmd=None):
status, output = commands.getstatusoutput(cmd)
LOG.debug(output)
self.assertEqual(0, status)
for key, engine in self.engines.items():
conn_string = self.test_databases[key]
conn_pieces = urlparse.urlparse(conn_string)
if conn_string.startswith('sqlite'):
# We can just delete the SQLite database, which is
# the easiest and cleanest solution
db_path = conn_pieces.path.strip('/')
if os.path.exists(db_path):
os.unlink(db_path)
# No need to recreate the SQLite DB. SQLite will
# create it for us if it's not there...
elif conn_string.startswith('mysql'):
# We can execute the MySQL client to destroy and re-create
# the MYSQL database, which is easier and less error-prone
# than using SQLAlchemy to do this via MetaData...trust me.
database = conn_pieces.path.strip('/')
loc_pieces = conn_pieces.netloc.split('@')
host = loc_pieces[1]
auth_pieces = loc_pieces[0].split(':')
user = auth_pieces[0]
password = ""
if len(auth_pieces) > 1:
if auth_pieces[1].strip():
password = "-p\"%s\"" % auth_pieces[1]
sql = ("drop database if exists %(database)s; "
"create database %(database)s;") % locals()
cmd = ("mysql -u \"%(user)s\" %(password)s -h %(host)s "
"-e \"%(sql)s\"") % locals()
execute_cmd(cmd)
elif conn_string.startswith('postgresql'):
database = conn_pieces.path.strip('/')
loc_pieces = conn_pieces.netloc.split('@')
host = loc_pieces[1]
auth_pieces = loc_pieces[0].split(':')
user = auth_pieces[0]
password = ""
if len(auth_pieces) > 1:
password = auth_pieces[1].strip()
# note(boris-42): This file is used for authentication
# without password prompt.
createpgpass = ("echo '*:*:*:%(user)s:%(password)s' > "
"~/.pgpass && chmod 0600 ~/.pgpass" % locals())
execute_cmd(createpgpass)
# note(boris-42): We must create and drop database, we can't
                # drop database which we have connected to, so for such
# operations there is a special database template1.
sqlcmd = ("psql -w -U %(user)s -h %(host)s -c"
" '%(sql)s' -d template1")
sql = ("drop database if exists %(database)s;") % locals()
droptable = sqlcmd % locals()
execute_cmd(droptable)
sql = ("create database %(database)s;") % locals()
createtable = sqlcmd % locals()
execute_cmd(createtable)
def test_walk_versions(self):
"""
Walks all version scripts for each tested database, ensuring
that there are no errors in the version scripts for each engine
"""
for key, engine in self.engines.items():
self._walk_versions(engine, self.snake_walk)
def test_mysql_connect_fail(self):
"""
Test that we can trigger a mysql connection failure and we fail
gracefully to ensure we don't break people without mysql
"""
if _is_mysql_avail(user="openstack_cifail"):
self.fail("Shouldn't have connected")
def test_mysql_innodb(self):
"""
Test that table creation on mysql only builds InnoDB tables
"""
if not _have_mysql():
self.skipTest("mysql not available")
# add this to the global lists to make reset work with it, it's removed
# automatically in tearDown so no need to clean it up here.
connect_string = _mysql_get_connect_string()
engine = sqlalchemy.create_engine(connect_string)
self.engines["mysqlcitest"] = engine
self.test_databases["mysqlcitest"] = connect_string
# build a fully populated mysql database with all the tables
self._reset_databases()
self._walk_versions(engine, False, False)
uri = _mysql_get_connect_string(database="information_schema")
connection = sqlalchemy.create_engine(uri).connect()
# sanity check
total = connection.execute("SELECT count(*) "
"from information_schema.TABLES "
"where TABLE_SCHEMA='openstack_citest'")
self.assertTrue(total.scalar() > 0, "No tables found. Wrong schema?")
noninnodb = connection.execute("SELECT count(*) "
"from information_schema.TABLES "
"where TABLE_SCHEMA='openstack_citest' "
"and ENGINE!='InnoDB' "
"and TABLE_NAME!='migrate_version'")
count = noninnodb.scalar()
self.assertEqual(count, 0, "%d non InnoDB tables created" % count)
def _walk_versions(self, engine=None, snake_walk=False, downgrade=True):
# Determine latest version script from the repo, then
# upgrade from 1 through to the latest, with no data
# in the databases. This just checks that the schema itself
# upgrades successfully.
# Place the database under version control
migration_api.version_control(engine, TestMigrations.REPOSITORY,
migration.INIT_VERSION)
self.assertEqual(migration.INIT_VERSION,
migration_api.db_version(engine,
TestMigrations.REPOSITORY))
migration_api.upgrade(engine, TestMigrations.REPOSITORY,
migration.INIT_VERSION + 1)
LOG.debug('latest version is %s' % TestMigrations.REPOSITORY.latest)
for version in xrange(migration.INIT_VERSION + 2,
TestMigrations.REPOSITORY.latest + 1):
# upgrade -> downgrade -> upgrade
self._migrate_up(engine, version)
if snake_walk:
self._migrate_down(engine, version)
self._migrate_up(engine, version)
if downgrade:
# Now walk it back down to 0 from the latest, testing
# the downgrade paths.
for version in reversed(
xrange(migration.INIT_VERSION + 2,
TestMigrations.REPOSITORY.latest + 1)):
# downgrade -> upgrade -> downgrade
self._migrate_down(engine, version)
if snake_walk:
self._migrate_up(engine, version)
self._migrate_down(engine, version)
def _migrate_down(self, engine, version):
migration_api.downgrade(engine,
TestMigrations.REPOSITORY,
version)
self.assertEqual(version,
migration_api.db_version(engine,
TestMigrations.REPOSITORY))
def _migrate_up(self, engine, version):
migration_api.upgrade(engine,
TestMigrations.REPOSITORY,
version)
self.assertEqual(version,
migration_api.db_version(engine,
TestMigrations.REPOSITORY))
def test_migration_146(self):
name = 'name'
az = 'custom_az'
def _145_check():
agg = aggregates.select(aggregates.c.id == 1).execute().first()
self.assertEqual(name, agg.name)
self.assertEqual(az, agg.availability_zone)
for key, engine in self.engines.items():
migration_api.version_control(engine, TestMigrations.REPOSITORY,
migration.INIT_VERSION)
migration_api.upgrade(engine, TestMigrations.REPOSITORY, 145)
metadata = sqlalchemy.schema.MetaData()
metadata.bind = engine
aggregates = sqlalchemy.Table('aggregates', metadata,
autoload=True)
aggregates.insert().values(id=1, availability_zone=az,
aggregate_name=1, name=name).execute()
_145_check()
migration_api.upgrade(engine, TestMigrations.REPOSITORY, 146)
aggregate_metadata = sqlalchemy.Table('aggregate_metadata',
metadata, autoload=True)
metadata = aggregate_metadata.select(aggregate_metadata.c.
aggregate_id == 1).execute().first()
self.assertEqual(az, metadata['value'])
migration_api.downgrade(engine, TestMigrations.REPOSITORY, 145)
_145_check()
def test_migration_147(self):
az = 'test_zone'
host1 = 'compute-host1'
host2 = 'compute-host2'
def _146_check():
service = services.select(services.c.id == 1).execute().first()
self.assertEqual(az, service.availability_zone)
self.assertEqual(host1, service.host)
service = services.select(services.c.id == 2).execute().first()
self.assertNotEqual(az, service.availability_zone)
service = services.select(services.c.id == 3).execute().first()
self.assertEqual(az, service.availability_zone)
self.assertEqual(host2, service.host)
for key, engine in self.engines.items():
migration_api.version_control(engine, TestMigrations.REPOSITORY,
migration.INIT_VERSION)
migration_api.upgrade(engine, TestMigrations.REPOSITORY, 146)
metadata = sqlalchemy.schema.MetaData()
metadata.bind = engine
#populate service table
services = sqlalchemy.Table('services', metadata,
autoload=True)
services.insert().values(id=1, host=host1,
binary='nova-compute', topic='compute', report_count=0,
availability_zone=az).execute()
services.insert().values(id=2, host='sched-host',
binary='nova-scheduler', topic='scheduler', report_count=0,
availability_zone='ignore_me').execute()
services.insert().values(id=3, host=host2,
binary='nova-compute', topic='compute', report_count=0,
availability_zone=az).execute()
_146_check()
migration_api.upgrade(engine, TestMigrations.REPOSITORY, 147)
# check aggregate metadata
aggregate_metadata = sqlalchemy.Table('aggregate_metadata',
metadata, autoload=True)
aggregate_hosts = sqlalchemy.Table('aggregate_hosts',
metadata, autoload=True)
metadata = aggregate_metadata.select(aggregate_metadata.c.
aggregate_id == 1).execute().first()
self.assertEqual(az, metadata['value'])
self.assertEqual(aggregate_hosts.select(
aggregate_hosts.c.aggregate_id == 1).execute().
first().host, host1)
blank = [h for h in aggregate_hosts.select(
aggregate_hosts.c.aggregate_id == 2).execute()]
self.assertEqual(blank, [])
migration_api.downgrade(engine, TestMigrations.REPOSITORY, 146)
_146_check()
| fajoy/nova | nova/tests/test_migrations.py | Python | apache-2.0 | 16,891 |
#!/usr/bin/python
# -*- coding: iso-8859-1 -*-
import subprocess,sys,re
from numpy import array
def find_and_replace(pattern, dest_str, repl_str):
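    """Replace whole-token occurrences of `pattern` in `dest_str`: a match only
    counts when it is followed by a non-word character or the end of the string."""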
re_comp = re.compile(r'%s(?=\W|$)'%(pattern)) # not followed by alphanumeric [a-zA-Z0-9_] or (|) is at end of string ($)
return re_comp.sub(repl_str, dest_str) # with correct user parameters if used
class model_builder:
"""receive model information, process to build system.h file and compile the model so that it is an executable C program. provides function for simulating the model as well. """
def __init__(self, _m, _path, T_END):
self.para_dict = dict([(p,nr) for nr, p in enumerate(_m.parameters)]) # parameter names with parameter indices
# self.build_system_file_C(_m, _path, T_END)
# self.compile_model(_path)
# self.build_system_file_python(_m, _path)
def build_system_file_C(self,_m, _path, T_END):
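        """Write the C header 'system.h' describing the ODE system.
        Illustrative sketch of the generated file for a two-species model:
            #define NEQ 2 // number of equations
            #define NK 2 // number of parameters
            ...
            void system(int *neq, double *t, double *y, double *ydot)
            {
              ydot[0] = y[0]*( k[0] - k[1]*y[1] );
              ...
            }
        """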
f = open(_path+'C/system.h','w')
f.write('#include <math.h>\n')
f.write('#ifndef SYSTEM_H_INCLUDED\n#define SYSTEM_H_INCLUDED\n\n')
f.write('#define NEQ %s // number of equations \n'%( len(_m.species) ) )
f.write('#define NK %s // number of parameters \n'%( len(_m.parameters) ) )
f.write('#define T_START 0.0 // start time \n' )
f.write('#define T_END %s // end time \n\n'%( T_END ) ) # specify end time here, steps will be T_END - T_START
f.write('double y_start[NEQ]; // init conditions vector, can also be set explicitly, e.g. double y_start[] = { 10.0, 10.0 }; \n\n' )
f.write('double k[NK]; // parameter vector \n\n' )
f.write('// equation system \n' )
f.write('void system(int *neq, double *t, double *y, double *ydot) \n' )
f.write('{\n')
for index,species in enumerate(_m.species):
new_diff_equation = _m.diff_equations[species]
for p in self.para_dict:
new_diff_equation = new_diff_equation.replace(p,'k[%s]'%(self.para_dict[p])) # replace parameter names with parameter indices -> k[index]
for ind,spe in enumerate(_m.species):
new_diff_equation = new_diff_equation.replace(spe,'y[%s]'%(ind)) # replace species names with indices -> y[index]
            f.write(' ydot[%s] =%s;\n'%(index,new_diff_equation)) # e.g. ydot[0] = y[0]*( k[0] - k[1]*y[1] );
f.write('}\n\n')
f.write('#endif // SYSTEM_H_INCLUDED')
f.close()
return None
def compile_model(self, _path):
""" need libs (odepack, gfortran) and system.h file in the same directory """
# cmd = g++ main.cpp system.h -omodel.out -lodepack -L./ -lgfortran
process = subprocess.Popen(["g++", _path+"C/main.cpp", _path+"C/system.h", "-o"+_path+"C/model.out", "-L"+_path+"C/", "-lodepack", "-lgfortran"], stderr=subprocess.PIPE)
stdout,stderr = process.communicate()
if stderr: print 'ERROR: ', stderr
return None
def simulate(self,_m,current_s,current_p, _path):
""" simulate C model """
process = subprocess.Popen(["./"+_path+ "C/model.out"] + [str(current_p[p]) for p in _m.parameters] + [str(s) for s in current_s], stdout=subprocess.PIPE) # get error with stderr=subprocess.PIPE
my_out_list=[i.replace(' \n','') for i in process.stdout.readlines()] # get rid of newlines
my_out_list=array([[float(i) for i in line.split(' ')] for line in my_out_list]) # split and convert from string to float
return my_out_list
def build_user_eq_system_python(self,_m, _path):
f= open(_path+'user_eq_system.py','w')
f.write('#!/usr/bin/python\n# -*- coding: iso-8859-1 -*-\n\n')
f.write('from numpy import array\n\n')
f.write('def dY_dt(y,t,k):\n')
f.write(' return array([\n')
for index,species in enumerate(_m.species):
new_diff_equation = _m.diff_equations[species]
for ind, spe in enumerate(_m.species):
new_diff_equation = find_and_replace(pattern=spe, dest_str=new_diff_equation, repl_str='Y[%s]'%(ind))
for ind, p in enumerate(_m.parameters):
new_diff_equation = find_and_replace(pattern=p, dest_str=new_diff_equation, repl_str='p[%s]'%(ind))
            new_diff_equation = new_diff_equation.strip() # get rid of leading and trailing \n\t or ' '
if index == len(_m.species)-1:
f.write(' %s\n'%new_diff_equation)
else:
f.write(' %s,\n'%new_diff_equation)
f.write(' ])')
f.close()
return None
def build_merged_system_file_python(self,_m, _path):
#read in v5 equation system
f= open(_path+'v5_eq_system_w_modifiers.py','r')
a=f.readlines()
f.close()
v5_species=dict([('mCLN12','Y[0]'), ('Cln12','Y[1]'), ('B_R','Y[2]'), ('B_Am','Y[3]'), ('B_Ad','Y[4]'), ('mB_R','Y[5]'), ('mB_A','Y[6]')])
v5_species_list=['mCLN12','Cln12','B_R','B_Am','B_Ad','mB_R','mB_A']
v5_params=dict([('k_d1','p[0]'),('k_d2','p[1]'),('k_p1','p[2]'),('k_R','p[3]'),('k_Am','p[4]'),('k_Ad','p[5]'),('growth','p[6]'),('A_t','p[7]'),('V_t','p[8]')])
global_eq_modifiers = ['k__mCLN12','k__Cln12_plus','k__Cln12_minus','k__B_R','k__B_Am','k__B_Ad','k__mB_R','k__mB_A']
# check for v5_species, defined in user_model:
v5_add_equations=dict() # put additional user def equations in here
v5_user_species=list(set(v5_species).intersection(set(_m.species_names.values()))) # v5 species with add def by user
v5_user_species_name_id= dict([(sname, sid) for sid, sname in _m.species_names.items() if sname in v5_user_species]) # dict of key: name, value: id of all v5 species that have been defined as species by user
        v5_user_pars=list(set(v5_params).intersection(set(_m.parameter_names.values()))) # v5 params used by user
        v5_user_pars_name_id= dict([(pname, pid) for pid, pname in _m.parameter_names.items() if pname in v5_user_pars]) # dict of key: name, value: id of all v5 params that have been used by user
for sname, sid in v5_user_species_name_id.items():
if _m.diff_equations[sid]:
v5_add_equations[sname] = _m.diff_equations[sid]
_m.species.remove(sid) # delete v5_species from model_species_list
del _m.diff_equations[sid] # delete diff_eq of v5_species
del _m.species_values[sid] # delete value of v5_species
del _m.species_names[sid] # delete name of v5_species
self.n_para_dict = dict([(p,nr+len(v5_params)) for nr, p in enumerate(_m.parameters)]) # parameter names with v5 updated parameter indices, so new parameters start at p[9]
self.n_spe_dict = dict([(s,nr+len(v5_species)) for nr, s in enumerate(_m.species)]) # species names with v5 updated species indices, so new equations start with Y[7]
def convert_equation(convert_str):
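            """Rewrite an expression from the user's SBML model: drop the
            compartment factor, map v5 species/parameter ids to their fixed
            Y[i]/p[i] slots and user-defined ones to the shifted indices."""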
convert_str = convert_str.replace( 'compartment_1 * ', '' )
for sname, sid in v5_user_species_name_id.items():
convert_str = find_and_replace(pattern=sid, dest_str=convert_str, repl_str=v5_species[sname])
for pname, pid in v5_user_pars_name_id.items():
convert_str = find_and_replace(pattern=pid, dest_str=convert_str, repl_str=v5_params[pname])
for spe in _m.species:
convert_str = find_and_replace(pattern=spe, dest_str=convert_str, repl_str='Y[%s]'%(self.n_spe_dict[spe]))
for p in _m.parameters:
convert_str = find_and_replace(pattern=p, dest_str=convert_str, repl_str='p[%s]'%(self.n_para_dict[p]))
return convert_str
f= open(_path+'merged_eq_system.py','w')
for i in a[:8]: f.write(i) # write the first always equal lines
for eq,eq_name in zip(a[8:-1], v5_species_list): # the equation part, to be modified in the following
            new_eq = eq.replace(',','') # get rid of the trailing comma
new_eq=new_eq.strip()
if eq_name in v5_add_equations:
new_eq = new_eq+convert_equation(v5_add_equations[eq_name]) # add reactions to v5_species if user specified
for modifier in global_eq_modifiers:
if modifier in new_eq: # if the modifier is in the equation
if modifier in _m.parameter_names.values(): # if global system modifier is specified
pid = [pid for pid, pname in _m.parameter_names.items() if pname == modifier][0] # get id of parameter that has been specified to be a global modifier
if _m.rules and pid in _m.rules:
global_eq_modifier = _m.rules[pid] # if it is a rule, replace by rule equation
# print global_eq_modifier, '1'
for i in range(5): # check 5 levels deep if there are parameters that need to be replaced by rules
for rule_key in _m.rules:
m1 = re.compile(r'%s$'%(rule_key)) # at the end of the string
m2 = re.compile(r'%s(?=\W)'%(rule_key)) # not followed by another digit
global_eq_modifier = m1.sub(_m.rules[rule_key],global_eq_modifier)
global_eq_modifier = m2.sub('( '+_m.rules[rule_key]+' )',global_eq_modifier)
global_eq_modifier = convert_equation(global_eq_modifier)
else:
global_eq_modifier = _m.parameter_values[pid] # if not a rule, set to parameter value
else:
global_eq_modifier = 1.0 # or if not specified, set to 1
new_eq = new_eq.replace(modifier,str(global_eq_modifier))
            f.write(' '+new_eq+',\n') # write the modified equation with a trailing comma and newline
# and now append the new equation for user species as specified in SBML file:
for index,species in enumerate(_m.species):
new_diff_equation = _m.diff_equations[species]
new_diff_equation = convert_equation(new_diff_equation)
            new_diff_equation = new_diff_equation.strip() # get rid of leading and trailing \n\t or ' '
if index == len(_m.species)-1:
f.write(' %s\n'%new_diff_equation)
else:
f.write(' %s,\n'%new_diff_equation)
f.write(' ])')
f.close()
return None
def build_merged_eq_sys(self,_m, _path):
#read in core equation system
f= open(_path+'core_eq_system_w_modifiers.py','r')
a=f.readlines()
f.close()
core_species=dict([('mCLN','Y[0]'), ('Cln','Y[1]'), ('B_R','Y[2]'), ('B_Am','Y[3]'), ('B_Ad','Y[4]'), ('mB_R','Y[5]'), ('mB_A','Y[6]'), ('mCLB','Y[7]'), ('Clb','Y[8]')])
core_species_list=['mCLN','Cln','B_R','B_Am','B_Ad','mB_R','mB_A','mCLB','Clb']
core_params=dict([('k_d1','p[0]'),('k_p1','p[1]'),('k_d2','p[2]'),('k_R','p[3]'),('k_Am','p[4]'),('k_Ad','p[5]'),('growth','p[6]'),('k_d3','p[7]'),('k_p2','p[8]'),('k_d4','p[9]'),('A_t','p[10]'),('V_t','p[11]')])
global_eq_modifiers = ['k__mCLN','k__Cln_plus','k__Cln_minus','k__B_R','k__B_Am','k__B_Ad','k__mB_R','k__mB_A','k__mCLB','k__Clb_plus','k__Clb_minus']
# check for core_species, defined in user_model:
core_add_equations=dict() # put additional user def equations in here
core_user_species=list(set(core_species).intersection(set(_m.species_names.values()))) # core species with add def by user
core_user_species_name_id=dict([(sname, sid) for sid, sname in _m.species_names.items() if sname in core_user_species]) # dict of key: name, value: id of all core species that have been defined as species by user
core_user_pars=list(set(core_params).intersection(set(_m.parameter_names.values()))) # core pars used by user
core_user_pars_name_id= dict([(pname, pid) for pid, pname in _m.parameter_names.items() if pname in core_user_pars]) # dict of key: name, value: id of all core pars that have been used by user
for sname, sid in core_user_species_name_id.items():
if _m.diff_equations[sid] and _m.diff_equations[sid]!='0':
# diff eqs of empty or constant species are initialized with 'dX/dt=0'. The 0 should not be added to our core diff eqs
core_add_equations[sname] = _m.diff_equations[sid]
_m.species.remove(sid) # delete core_species from model_species_list
del _m.diff_equations[sid] # delete diff_eq of core_species
del _m.species_values[sid] # delete value of core_species
del _m.species_names[sid] # delete name of core_species
self.n_para_dict = dict([(p,nr+len(core_params)) for nr, p in enumerate(_m.parameters)]) # parameter names with core updated parameter indices, so new parameters start at p[12]
self.n_spe_dict = dict([(s,nr+len(core_species)) for nr, s in enumerate(_m.species)]) # species names with core updated species indices, so new equations start with Y[9]
def convert_equation(convert_str):
convert_str = convert_str.replace( 'compartment_1 * ', '' )
for sname, sid in core_user_species_name_id.items():
convert_str = find_and_replace(pattern=sid, dest_str=convert_str, repl_str=core_species[sname])
for pname, pid in core_user_pars_name_id.items():
convert_str = find_and_replace(pattern=pid, dest_str=convert_str, repl_str=core_params[pname])
for spe in _m.species:
convert_str = find_and_replace(pattern=spe, dest_str=convert_str, repl_str='Y[%s]'%(self.n_spe_dict[spe]))
for p in _m.parameters:
convert_str = find_and_replace(pattern=p, dest_str=convert_str, repl_str='p[%s]'%(self.n_para_dict[p]))
return convert_str
f= open(_path+'merged_eq_system.py','w')
for i in a[:7]: f.write(i) # write the first always equal lines
for eq,eq_name in zip(a[7:-1], core_species_list): # the equation part, to be modified in the following
            new_eq = eq.replace(',','') # get rid of the trailing comma
new_eq=new_eq.strip() # and leading, trailing whitespaces, tabs or newlines
if eq_name in core_add_equations:
new_eq = new_eq+convert_equation(core_add_equations[eq_name]) # add reactions to core_species if user specified
for modifier in global_eq_modifiers:
if modifier in new_eq: # if the modifier is in the equation
if modifier in _m.parameter_names.values(): # if global system modifier is specified
pid = [pid for pid, pname in _m.parameter_names.items() if pname == modifier][0] # get id of parameter that has been specified to be a global modifier
if _m.rules and pid in _m.rules:
global_eq_modifier = _m.rules[pid] # if it is a rule, replace by rule equation
# print global_eq_modifier, '1'
for i in range(5): # check 5 levels deep if there are parameters that need to be replaced by rules
for rule_key in _m.rules:
m1 = re.compile(r'%s$'%(rule_key)) # at the end of the string
m2 = re.compile(r'%s(?=\W)'%(rule_key)) # not followed by another digit
global_eq_modifier = m1.sub(_m.rules[rule_key],global_eq_modifier)
global_eq_modifier = m2.sub('( '+_m.rules[rule_key]+' )',global_eq_modifier)
global_eq_modifier = convert_equation(global_eq_modifier)
else:
global_eq_modifier = _m.parameter_values[pid] # if not a rule, set to parameter value
else:
global_eq_modifier = 1.0 # or if not specified, set to 1
new_eq = new_eq.replace(modifier,str(global_eq_modifier))
            f.write(' '+new_eq+',\n') # write the modified equation with a trailing comma and newline
# and now append the new equation for user species as specified in SBML file:
for index,species in enumerate(_m.species):
new_diff_equation = _m.diff_equations[species]
new_diff_equation = convert_equation(new_diff_equation)
            new_diff_equation = new_diff_equation.strip() # get rid of leading and trailing \n\t or ' '
if index == len(_m.species)-1:
f.write(' %s\n'%new_diff_equation)
else:
f.write(' %s,\n'%new_diff_equation)
f.write(' ])')
f.close()
return None
#Y[0]=mCLN, Y[1]=Cln, Y[2]=B_R, Y[3]=B_Am, Y[4]=B_Ad, Y[5]=mB_R, Y[6]=mB_A, Y[7]=mCLB, Y[8]=Clb
#p[0]=k_d1, p[1]=k_p1, p[2]=k_d2, p[3]=k_R, p[4]=k_Am, p[5]=k_Ad, p[6]=growth, p[7]=A_t, p[8]=V_t, p[9]=k_d3, p[10]=k_p2, p[11]=k_d4
#k_d1 = destruction rate of mCLN
#k_p1 = production rate of Cln
#k_d2 = destruction rate of Cln
#k_d3 = destruction rate of mCLB
#k_p2 = production rate of Clb
#k_d4 = destruction rate of Clb | thomasspiesser/MYpop | model_builder.py | Python | mit | 17,412 |
import os
import re
from smtplib import SMTPException
from django import forms
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.contrib.auth import forms as auth_forms
from django.contrib.auth.tokens import default_token_generator
from django.forms.util import ErrorList
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _, ugettext_lazy as _lazy
import commonware.log
from olympia import amo
from olympia.accounts.views import fxa_error_message
from olympia.amo.fields import ReCaptchaField, HttpHttpsOnlyURLField
from olympia.users import notifications as email
from olympia.amo.urlresolvers import reverse
from olympia.amo.utils import clean_nl, has_links, slug_validator
from olympia.lib import happyforms
from olympia.translations import LOCALES
from . import tasks
from .models import (
UserProfile, UserNotification, BlacklistedName, BlacklistedEmailDomain,
BlacklistedPassword)
from .widgets import (
NotificationsSelectMultiple, RequiredCheckboxInput, RequiredEmailInput,
RequiredInputMixin, RequiredTextarea)
log = commonware.log.getLogger('z.users')
admin_re = re.compile('(?=.*\d)(?=.*[a-zA-Z])')
class PasswordMixin:
min_length = 8
error_msg = {
'min_length': _lazy('Must be %s characters or more.') % min_length}
@classmethod
def widget(cls, **kw):
attrs = {
'class': 'password-strength',
'data-min-length': cls.min_length,
}
if kw.pop('required', False):
attrs.update(RequiredInputMixin.required_attrs)
return forms.PasswordInput(attrs=attrs, **kw)
def clean_password(self, field='password', instance='instance'):
data = self.cleaned_data[field]
if not data:
return data
user = getattr(self, instance, None)
if user and user.pk and user.needs_tougher_password:
if not admin_re.search(data):
raise forms.ValidationError(_('Letters and numbers required.'))
if BlacklistedPassword.blocked(data):
raise forms.ValidationError(_('That password is not allowed.'))
return data
class AuthenticationForm(auth_forms.AuthenticationForm):
username = forms.CharField(max_length=75, widget=RequiredEmailInput)
password = forms.CharField(max_length=255,
min_length=PasswordMixin.min_length,
error_messages=PasswordMixin.error_msg,
widget=PasswordMixin.widget(render_value=False,
required=True))
rememberme = forms.BooleanField(required=False)
recaptcha = ReCaptchaField()
recaptcha_shown = forms.BooleanField(widget=forms.HiddenInput,
required=False)
def __init__(self, request=None, use_recaptcha=False, *args, **kw):
super(AuthenticationForm, self).__init__(*args, **kw)
if not use_recaptcha or not settings.NOBOT_RECAPTCHA_PRIVATE_KEY:
del self.fields['recaptcha']
def clean(self):
# We want an explicit error message for old accounts with a too
# short password, see bug 1067673 for details.
if ('password' in self.errors and 'password' in self.data and
1 < len(self.data['password']) < PasswordMixin.min_length):
msg = _('As part of our new password policy, your password must '
'be %s characters or more. Please update your password by '
'<a href="%s">issuing a password reset</a>.'
) % (PasswordMixin.min_length,
reverse('password_reset_form'))
self._errors['password'] = ErrorList([mark_safe(msg)])
# Only clean the form (username and password) if recaptcha is ok.
if 'recaptcha' in self.errors:
return {}
return super(AuthenticationForm, self).clean()
class PasswordResetForm(auth_forms.PasswordResetForm):
email = forms.EmailField(widget=RequiredEmailInput)
def __init__(self, *args, **kwargs):
self.request = kwargs.pop('request', None)
super(PasswordResetForm, self).__init__(*args, **kwargs)
def clean_email(self):
email = self.cleaned_data['email']
self.users_cache = UserProfile.objects.filter(email__iexact=email)
try:
if self.users_cache.get().fxa_migrated():
raise forms.ValidationError(
_('You must recover your password through Firefox '
'Accounts. Try logging in instead.'))
except UserProfile.DoesNotExist:
pass
return email
def save(self, **kw):
if not self.users_cache:
log.info("Unknown email used for password reset: {email}".format(
**self.cleaned_data))
return
for user in self.users_cache:
if user.needs_tougher_password:
log.info(
u'Password reset email sent for privileged user (%s)'
% user)
else:
log.info(
u'Password reset email sent for user (%s)'
% user)
try:
# Django calls send_mail() directly and has no option to pass
# in fail_silently, so we have to catch the SMTP error ourselves
self.base_save(**kw)
except SMTPException, e:
log.error("Failed to send mail for (%s): %s" % (user, e))
# Copypaste from superclass
def base_save(
self, domain_override=None,
subject_template_name='registration/password_reset_subject.txt',
email_template_name='registration/password_reset_email.html',
use_https=False, token_generator=default_token_generator,
from_email=None, request=None, html_email_template_name=None):
"""
Generates a one-use only link for resetting password and sends to the
user.
"""
from django.core.mail import send_mail
from django.contrib.auth import get_user_model
from django.contrib.sites.models import get_current_site
from django.template import loader
from django.utils.encoding import force_bytes
from django.utils.http import urlsafe_base64_encode
UserModel = get_user_model()
email = self.cleaned_data["email"]
active_users = UserModel._default_manager.filter(
email__iexact=email,
# we use "deleted" instead of "is_active"
deleted=False)
for user in active_users:
# Make sure that no email is sent to a user that actually has
# a password marked as unusable
if not user.has_usable_password():
continue
if not domain_override:
current_site = get_current_site(request)
site_name = current_site.name
domain = current_site.domain
else:
site_name = domain = domain_override
c = {
'email': user.email,
'domain': domain,
'site_name': site_name,
'uid': urlsafe_base64_encode(force_bytes(user.pk)),
'user': user,
'token': token_generator.make_token(user),
'protocol': 'https' if use_https else 'http',
}
subject = loader.render_to_string(subject_template_name, c)
# Email subject *must not* contain newlines
subject = ''.join(subject.splitlines())
email = loader.render_to_string(email_template_name, c)
if html_email_template_name:
html_email = loader.render_to_string(
html_email_template_name, c)
else:
html_email = None
send_mail(
subject, email, from_email, [user.email],
html_message=html_email)
class SetPasswordForm(auth_forms.SetPasswordForm, PasswordMixin):
new_password1 = forms.CharField(label=_lazy(u'New password'),
min_length=PasswordMixin.min_length,
error_messages=PasswordMixin.error_msg,
widget=PasswordMixin.widget(required=True))
def __init__(self, *args, **kwargs):
self.request = kwargs.pop('request', None)
super(SetPasswordForm, self).__init__(*args, **kwargs)
def clean_new_password1(self):
return self.clean_password(field='new_password1', instance='user')
def save(self, **kw):
# Three different loggers? :(
amo.log(amo.LOG.CHANGE_PASSWORD, user=self.user)
log.info(u'User (%s) changed password with reset form' % self.user)
super(SetPasswordForm, self).save(**kw)
class UserDeleteForm(forms.Form):
email = forms.CharField(max_length=255, required=True,
widget=RequiredEmailInput)
confirm = forms.BooleanField(required=True, widget=RequiredCheckboxInput)
def __init__(self, *args, **kwargs):
self.request = kwargs.pop('request', None)
super(UserDeleteForm, self).__init__(*args, **kwargs)
self.fields['email'].widget.attrs['placeholder'] = (
self.request.user.email)
def clean_email(self):
user_email = self.request.user.email
if not user_email == self.cleaned_data['email']:
raise forms.ValidationError(_('Email must be {email}.').format(
                email=user_email))
        return self.cleaned_data['email']
def clean(self):
amouser = self.request.user
if amouser.is_developer:
# This is tampering because the form isn't shown on the page if the
# user is a developer
log.warning(u'[Tampering] Attempt to delete developer account (%s)'
% self.request.user)
raise forms.ValidationError("")
class UsernameMixin:
def clean_username(self):
name = self.cleaned_data['username']
if not name:
if self.instance.has_anonymous_username():
name = self.instance.username
else:
name = self.instance.anonymize_username()
# All-digits usernames are disallowed since they can be
# confused for user IDs in URLs. (See bug 862121.)
if name.isdigit():
raise forms.ValidationError(
_('Usernames cannot contain only digits.'))
slug_validator(
name, lower=False,
message=_('Enter a valid username consisting of letters, numbers, '
'underscores or hyphens.'))
if BlacklistedName.blocked(name):
raise forms.ValidationError(_('This username cannot be used.'))
# FIXME: Bug 858452. Remove this check when collation of the username
# column is changed to case insensitive.
if (UserProfile.objects.exclude(id=self.instance.id)
.filter(username__iexact=name).exists()):
raise forms.ValidationError(_('This username is already in use.'))
return name
class UserRegisterForm(happyforms.ModelForm, UsernameMixin, PasswordMixin):
"""
For registering users. We're not building off
d.contrib.auth.forms.UserCreationForm because it doesn't do a lot of the
details here, so we'd have to rewrite most of it anyway.
"""
username = forms.CharField(max_length=50, required=False)
email = forms.EmailField(widget=RequiredEmailInput)
display_name = forms.CharField(label=_lazy(u'Display Name'), max_length=50,
required=False)
location = forms.CharField(label=_lazy(u'Location'), max_length=100,
required=False)
occupation = forms.CharField(label=_lazy(u'Occupation'), max_length=100,
required=False)
password = forms.CharField(max_length=255,
min_length=PasswordMixin.min_length,
error_messages=PasswordMixin.error_msg,
widget=PasswordMixin.widget(render_value=False,
required=True))
password2 = forms.CharField(max_length=255,
widget=PasswordMixin.widget(render_value=False,
required=True))
recaptcha = ReCaptchaField()
homepage = HttpHttpsOnlyURLField(label=_lazy(u'Homepage'), required=False)
class Meta:
model = UserProfile
fields = ('username', 'display_name', 'location', 'occupation',
'password', 'password2', 'recaptcha', 'homepage', 'email')
def __init__(self, *args, **kwargs):
instance = kwargs.get('instance')
if instance and instance.has_anonymous_username():
kwargs.setdefault('initial', {})
kwargs['initial']['username'] = ''
super(UserRegisterForm, self).__init__(*args, **kwargs)
if not settings.NOBOT_RECAPTCHA_PRIVATE_KEY:
del self.fields['recaptcha']
errors = {'invalid': _('This URL has an invalid format. '
'Valid URLs look like '
'http://example.com/my_page.')}
self.fields['homepage'].error_messages = errors
def clean_email(self):
d = self.cleaned_data['email'].split('@')[-1]
if BlacklistedEmailDomain.blocked(d):
raise forms.ValidationError(_('Please use an email address from a '
'different provider to complete '
'your registration.'))
return self.cleaned_data['email']
def clean_display_name(self):
name = self.cleaned_data['display_name']
if BlacklistedName.blocked(name):
raise forms.ValidationError(_('This display name cannot be used.'))
return name
def clean(self):
super(UserRegisterForm, self).clean()
data = self.cleaned_data
# Passwords
p1 = data.get('password')
p2 = data.get('password2')
        # If p1 is invalid because it's blocked, this message is nonsensical.
if p1 and p1 != p2:
msg = _('The passwords did not match.')
self._errors['password2'] = ErrorList([msg])
if p2:
del data['password2']
return data
class UserEditForm(UserRegisterForm, PasswordMixin):
oldpassword = forms.CharField(
max_length=255, required=False,
widget=forms.PasswordInput(render_value=False))
password = forms.CharField(max_length=255, required=False,
min_length=PasswordMixin.min_length,
error_messages=PasswordMixin.error_msg,
widget=PasswordMixin.widget(render_value=False))
password2 = forms.CharField(max_length=255, required=False,
widget=forms.PasswordInput(render_value=False))
photo = forms.FileField(label=_lazy(u'Profile Photo'), required=False)
notifications = forms.MultipleChoiceField(
choices=[],
widget=NotificationsSelectMultiple,
initial=email.NOTIFICATIONS_DEFAULT,
required=False)
lang = forms.TypedChoiceField(label=_lazy(u'Default locale'),
choices=LOCALES)
def __init__(self, *args, **kwargs):
self.request = kwargs.pop('request', None)
super(UserEditForm, self).__init__(*args, **kwargs)
if not self.instance.lang and self.request:
self.initial['lang'] = self.request.LANG
if self.instance:
default = dict((i, n.default_checked) for i, n
in email.NOTIFICATIONS_BY_ID.items())
user = dict((n.notification_id, n.enabled) for n
in self.instance.notifications.all())
default.update(user)
# Add choices to Notification.
choices = email.NOTIFICATIONS_CHOICES
if not self.instance.is_developer:
choices = email.NOTIFICATIONS_CHOICES_NOT_DEV
if self.instance.fxa_migrated():
self.fields['email'].required = False
self.fields['email'].widget = forms.EmailInput(
attrs={'readonly': 'readonly'})
self.fields['email'].help_text = fxa_error_message(
_(u'Firefox Accounts users cannot currently change their '
u'email address.'))
# Append a "NEW" message to new notification options.
saved = self.instance.notifications.values_list('notification_id',
flat=True)
self.choices_status = {}
for idx, label in choices:
self.choices_status[idx] = idx not in saved
self.fields['notifications'].choices = choices
self.fields['notifications'].initial = [i for i, v
in default.items() if v]
self.fields['notifications'].widget.form_instance = self
# TODO: We should inherit from a base form not UserRegisterForm
if self.fields.get('recaptcha'):
del self.fields['recaptcha']
class Meta:
model = UserProfile
exclude = ('password', 'picture_type', 'last_login', 'fxa_id',
'read_dev_agreement')
def clean(self):
data = self.cleaned_data
amouser = self.request.user
# Passwords
p1 = data.get("password")
p2 = data.get("password2")
if p1 or p2:
if not amouser.check_password(data["oldpassword"]):
msg = _("Wrong password entered!")
self._errors["oldpassword"] = ErrorList([msg])
del data["oldpassword"]
super(UserEditForm, self).clean()
return data
def clean_email(self):
email = self.cleaned_data.get('email')
if self.instance.fxa_migrated():
if not email or email == self.instance.email:
return self.instance.email
else:
raise forms.ValidationError(_(u'Email cannot be changed.'))
else:
return email
def clean_photo(self):
photo = self.cleaned_data['photo']
if not photo:
return
if photo.content_type not in ('image/png', 'image/jpeg'):
raise forms.ValidationError(
_('Images must be either PNG or JPG.'))
if photo.size > settings.MAX_PHOTO_UPLOAD_SIZE:
raise forms.ValidationError(
_('Please use images smaller than %dMB.' %
(settings.MAX_PHOTO_UPLOAD_SIZE / 1024 / 1024 - 1)))
return photo
def clean_bio(self):
bio = self.cleaned_data['bio']
normalized = clean_nl(unicode(bio))
if has_links(normalized):
# There's some links, we don't want them.
raise forms.ValidationError(_('No links are allowed.'))
return bio
def save(self, log_for_developer=True):
u = super(UserEditForm, self).save(commit=False)
data = self.cleaned_data
photo = data['photo']
if photo:
u.picture_type = 'image/png'
tmp_destination = u.picture_path + '__unconverted'
with storage.open(tmp_destination, 'wb') as fh:
for chunk in photo.chunks():
fh.write(chunk)
tasks.resize_photo.delay(tmp_destination, u.picture_path,
set_modified_on=[u])
if data['password']:
u.set_password(data['password'])
log.info(u'User (%s) changed their password' % u.username)
if log_for_developer:
amo.log(amo.LOG.CHANGE_PASSWORD)
for (i, n) in email.NOTIFICATIONS_BY_ID.items():
enabled = n.mandatory or (str(i) in data['notifications'])
UserNotification.update_or_create(
user=u, notification_id=i, update={'enabled': enabled})
log.debug(u'User (%s) updated their profile' % u)
u.save()
return u
class BaseAdminUserEditForm(object):
def changed_fields(self):
"""Returns changed_data ignoring these fields."""
return (set(self.changed_data) -
set(['admin_log', 'notifications', 'photo',
'password', 'password2', 'oldpassword']))
def changes(self):
"""A dictionary of changed fields, old, new. Hides password."""
details = dict([(k, (self.initial[k], self.cleaned_data[k]))
for k in self.changed_fields()])
if 'password' in self.changed_data:
details['password'] = ['****', '****']
return details
def clean_anonymize(self):
if (self.cleaned_data['anonymize'] and
self.changed_fields() != set(['anonymize'])):
raise forms.ValidationError(_('To anonymize, enter a reason for'
' the change but do not change any'
' other field.'))
return self.cleaned_data['anonymize']
class AdminUserEditForm(BaseAdminUserEditForm, UserEditForm):
"""This is the form used by admins to edit users' info."""
admin_log = forms.CharField(required=True, label='Reason for change',
widget=RequiredTextarea(attrs={'rows': 4}))
confirmationcode = forms.CharField(required=False, max_length=255,
label='Confirmation code')
notes = forms.CharField(required=False, label='Notes',
widget=forms.Textarea(attrs={'rows': 4}))
anonymize = forms.BooleanField(required=False)
def save(self, *args, **kw):
profile = super(AdminUserEditForm, self).save(log_for_developer=False)
if self.cleaned_data['anonymize']:
amo.log(amo.LOG.ADMIN_USER_ANONYMIZED, self.instance,
self.cleaned_data['admin_log'])
profile.anonymize() # This also logs
else:
amo.log(amo.LOG.ADMIN_USER_EDITED, self.instance,
self.cleaned_data['admin_log'], details=self.changes())
log.info('Admin edit user: %s changed fields: %s' %
(self.instance, self.changed_fields()))
if 'password' in self.changes():
log.info(
'admin requested password reset (%s for %s)'
% (self.request.user.username, self.instance.username))
return profile
class BlacklistedNameAddForm(forms.Form):
"""Form for adding blacklisted names in bulk fashion."""
names = forms.CharField(widget=forms.Textarea(
attrs={'cols': 40, 'rows': 16}))
def clean_names(self):
names = self.cleaned_data['names'].strip()
if not names:
raise forms.ValidationError(
_('Please enter at least one name to blacklist.'))
names = os.linesep.join(
[s.strip() for s in names.splitlines() if s.strip()])
return names
class BlacklistedEmailDomainAddForm(forms.Form):
"""Form for adding blacklisted user e-mail domains in bulk fashion."""
domains = forms.CharField(
widget=forms.Textarea(attrs={'cols': 40, 'rows': 16}))
def clean(self):
super(BlacklistedEmailDomainAddForm, self).clean()
data = self.cleaned_data
if 'domains' in data:
l = filter(None, [s.strip() for s in data['domains'].splitlines()])
data['domains'] = os.linesep.join(l)
if not data.get('domains', ''):
msg = 'Please enter at least one e-mail domain to blacklist.'
self._errors['domains'] = ErrorList([msg])
return data
| Prashant-Surya/addons-server | src/olympia/users/forms.py | Python | bsd-3-clause | 24,188 |
# Copyright (C) 2012,2013,2016
# Max Planck Institute for Polymer Research
# Copyright (C) 2008,2009,2010,2011
# Max-Planck-Institute for Polymer Research & Fraunhofer SCAI
#
# This file is part of ESPResSo++.
#
# ESPResSo++ is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo++ is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# -*- coding: iso-8859-1 -*-
def createCubic(N, rho, perfect=True, RNG=None):
"""
Initializes particles on the sites of a simple cubic lattice.
By setting ``perfect=False``
the particle positions will be given random displacements
with a magnitude of one-tenth the lattice spacing.
"""
if RNG == None:
import random
cubes = []
for i in xrange(100):
cubes.append(i**3)
if(cubes.count(N) != 1):
print '\nWARNING: num_particles is not a perfect cube. Initial'
print ' configuration may be inhomogeneous.\n'
L = (N / rho)**(1.0/3.0)
a = int(N**(1.0/3.0))
if(a**3 < N):
a = a + 1
lattice_spacing = L / a
def rnd(magn_):
if RNG == None:
rand = random.random()
else :
rand = RNG()
return magn_ * (2.0 * rand - 1.0)
# magnitude of random displacements
magn = 0.0 if perfect else lattice_spacing / 10.0
ct = 0
x = []
y = []
z = []
for i in xrange(a):
for j in xrange(a):
for k in xrange(a):
if(ct < N):
x.append(0.5 * lattice_spacing + i * lattice_spacing + rnd(magn))
y.append(0.5 * lattice_spacing + j * lattice_spacing + rnd(magn))
z.append(0.5 * lattice_spacing + k * lattice_spacing + rnd(magn))
ct += 1
return x, y, z, L, L, L
# TODO: implement checks for a wrong number of particles, a slightly non-ideal lattice, etc.
def createDiamond(N, rho, perfect=True, RNG=None):
"""
Initializes particles on the sites of a diamond lattice.
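    Returns the list of particle positions (``Real3D``) and the box edge lengths.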
"""
from espressopp import Real3D
#L = (N / 8.0 / rho)**(1.0/3.0)
L = (N / rho)**(1.0/3.0)
num_per_edge = int( (N/8.0)**(1.0/3.0) )
if(8.0*num_per_edge**3 < N):
num_per_edge = num_per_edge + 1
#print 'num_per_site= ', num_per_edge
a = L / num_per_edge
#print 'a= ', a
#print 'a1= ', (1.0 / rho)**(1.0/3.0)
pos = []
# in general structure is shifted relative to (0,0,0)
R0 = Real3D(0.125 * a, 0.125 * a, 0.125 * a)
R1 = Real3D(0.25 * a, 0.25 * a, 0.25 * a)
a11 = a * Real3D(1,0,0)
a22 = a * Real3D(0,1,0)
a33 = a * Real3D(0,0,1)
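    # a1, a2, a3 are the fcc primitive translations of the cubic cell; R1 shifts
    # the second atom of the two-atom diamond basis by a quarter of the body diagonal.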
a1 = 0.5 * a * Real3D(0,1,1)
a2 = 0.5 * a * Real3D(1,0,1)
a3 = 0.5 * a * Real3D(1,1,0)
for i in xrange(num_per_edge):
for j in xrange(num_per_edge):
for k in xrange(num_per_edge):
Rijk = R0 + i*a11 + j*a22 + k*a33
pos.append(Rijk)
pos.append(Rijk+a1)
pos.append(Rijk+a2)
pos.append(Rijk+a3)
pos.append(Rijk+R1)
pos.append(Rijk+a1+R1)
pos.append(Rijk+a2+R1)
pos.append(Rijk+a3+R1)
'''
L1 = L-0.01
pos.append( Real3D(0.01, 0.01, 0.01) )
pos.append( Real3D(L1, 0.01, 0.01) )
pos.append( Real3D(0.01, L1, 0.01) )
pos.append( Real3D(0.01, 0.01, L1) )
pos.append( Real3D(0.01, L1, L1) )
pos.append( Real3D(L1, L1, 0.01) )
pos.append( Real3D(L1, 0.01, L1) )
pos.append( Real3D(L1, L1, L1) )
'''
return pos, L, L, L
| govarguz/espressopp | src/tools/lattice.py | Python | gpl-3.0 | 3,791 |
from __future__ import unicode_literals
try:
from unittest import mock
except ImportError:
import mock
import threading
from channels import Channel, route, DEFAULT_CHANNEL_LAYER
from channels.asgi import channel_layers
from channels.tests import ChannelTestCase
from channels.worker import Worker, WorkerGroup
from channels.exceptions import ConsumeLater
from channels.signals import worker_ready
class PatchedWorker(Worker):
"""Worker with specific numbers of loops"""
def get_termed(self):
if not self.__iters:
return True
self.__iters -= 1
return False
def set_termed(self, value):
self.__iters = value
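    # Worker.run() checks `termed` on every loop iteration, so assigning
    # `worker.termed = n` lets the worker process exactly n iterations and stop.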
termed = property(get_termed, set_termed)
class WorkerTests(ChannelTestCase):
"""
    Tests that the worker's channel filtering and message consumption work correctly.
"""
def test_channel_filters(self):
"""
Tests that the include/exclude logic works
"""
# Include
worker = Worker(None, only_channels=["yes.*", "maybe.*"])
self.assertEqual(
worker.apply_channel_filters(["yes.1", "no.1"]),
["yes.1"],
)
self.assertEqual(
worker.apply_channel_filters(["yes.1", "no.1", "maybe.2", "yes"]),
["yes.1", "maybe.2"],
)
# Exclude
worker = Worker(None, exclude_channels=["no.*", "maybe.*"])
self.assertEqual(
worker.apply_channel_filters(["yes.1", "no.1", "maybe.2", "yes"]),
["yes.1", "yes"],
)
# Both
worker = Worker(None, exclude_channels=["no.*"], only_channels=["yes.*"])
self.assertEqual(
worker.apply_channel_filters(["yes.1", "no.1", "maybe.2", "yes"]),
["yes.1"],
)
def test_run_with_consume_later_error(self):
# consumer with ConsumeLater error at first call
def _consumer(message, **kwargs):
_consumer._call_count = getattr(_consumer, '_call_count', 0) + 1
if _consumer._call_count == 1:
raise ConsumeLater()
Channel('test').send({'test': 'test'}, immediately=True)
channel_layer = channel_layers[DEFAULT_CHANNEL_LAYER]
channel_layer.router.add_route(route('test', _consumer))
old_send = channel_layer.send
channel_layer.send = mock.Mock(side_effect=old_send) # proxy 'send' for counting
worker = PatchedWorker(channel_layer)
worker.termed = 2 # first loop with error, second with sending
worker.run()
self.assertEqual(getattr(_consumer, '_call_count', None), 2)
self.assertEqual(channel_layer.send.call_count, 1)
def test_normal_run(self):
consumer = mock.Mock()
Channel('test').send({'test': 'test'}, immediately=True)
channel_layer = channel_layers[DEFAULT_CHANNEL_LAYER]
channel_layer.router.add_route(route('test', consumer))
old_send = channel_layer.send
channel_layer.send = mock.Mock(side_effect=old_send) # proxy 'send' for counting
worker = PatchedWorker(channel_layer)
worker.termed = 2
worker.run()
self.assertEqual(consumer.call_count, 1)
self.assertEqual(channel_layer.send.call_count, 0)
class WorkerGroupTests(ChannelTestCase):
"""
Test threaded workers.
"""
def setUp(self):
self.channel_layer = channel_layers[DEFAULT_CHANNEL_LAYER]
self.worker = WorkerGroup(self.channel_layer, n_threads=4)
self.subworkers = self.worker.workers
def test_subworkers_created(self):
self.assertEqual(len(self.subworkers), 3)
def test_subworkers_no_sigterm(self):
for wrk in self.subworkers:
self.assertFalse(wrk.signal_handlers)
def test_ready_signals_sent(self):
self.in_signal = 0
def handle_signal(sender, *args, **kwargs):
self.in_signal += 1
worker_ready.connect(handle_signal)
WorkerGroup(self.channel_layer, n_threads=4)
self.worker.ready()
self.assertEqual(self.in_signal, 4)
def test_sigterm_handler(self):
threads = []
for wkr in self.subworkers:
t = threading.Thread(target=wkr.run)
t.start()
threads.append(t)
self.worker.sigterm_handler(None, None)
for t in threads:
t.join()
| linuxlewis/channels | channels/tests/test_worker.py | Python | bsd-3-clause | 4,367 |
import boto.ec2
from upscale import config
config = config.config
access = config['ec2']['access-key']
key = config['ec2']['secret-key']
region = config['ec2']['region']
vpcid = config['ec2']['vpc-id']
ec2_conn = boto.ec2.connect_to_region(region, aws_access_key_id=access, aws_secret_access_key=key)
import pprint
from collections import namedtuple
def list():
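    """Return every EC2 instance in the configured VPC as a Host namedtuple."""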
Host = namedtuple('Host', ['id', 'name', 'private_ip', 'ip'])
# public_dns_name
hosts = []
for reservation in ec2_conn.get_all_instances(filters={'vpc-id': vpcid}):
for instance in reservation.instances:
hosts.append(Host(id= instance.id, name = instance.private_dns_name, private_ip = instance.private_ip_address, ip=instance.ip_address))
return hosts
| nl5887/upscale | upscale/api/hosts.py | Python | apache-2.0 | 730 |
# flake8: noqa: F403, F405
from tests.core import mock
from trakt import Trakt
from datetime import datetime
from dateutil.tz import tzutc
from hamcrest import *
from httmock import HTTMock
def test_movies():
with HTTMock(mock.fixtures, mock.unknown):
with Trakt.configuration.auth('mock', 'mock'):
value = Trakt['users/me/lists/movies'].get()
# Validate movies list
assert_that(value, has_properties({
'name': 'Movies',
'description': None,
'likes': 0,
'allow_comments': True,
'display_numbers': False,
'updated_at': datetime(2015, 6, 22, 2, 25, tzinfo=tzutc()),
'comment_count': 0,
'item_count': 2,
'privacy': 'private'
}))
def test_shows():
with HTTMock(mock.fixtures, mock.unknown):
with Trakt.configuration.auth('mock', 'mock'):
value = Trakt['users/me/lists/shows'].get()
# Validate shows list
assert_that(value, has_properties({
'name': 'Shows',
'description': None,
'likes': 0,
'allow_comments': True,
'display_numbers': False,
'updated_at': datetime(2015, 6, 22, 2, 25, tzinfo=tzutc()),
'comment_count': 0,
'item_count': 3,
'privacy': 'private'
}))
| fuzeman/trakt.py | tests/users/lists/test_get.py | Python | mit | 1,290 |
import os
from spinspy import local_data
def isdim(dim):
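    """Return True if a grid file exists for the given dimension name (e.g. 'x')."""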
    return os.path.isfile('{0:s}{1:s}grid'.format(local_data.path, dim))
| bastorer/SPINSpy | spinspy/isdim.py | Python | mit | 178 |
from distutils.core import setup
setup(
name = 'markdown-attr-plus',
version = '0.3.0',
description = 'Markdown Extension to extend attr_list extension to add extra syntax.',
author = 'Claire Charron',
author_email = '[email protected]',
packages = ['markdown_attr_plus'],
url = 'https://github.com/Undeterminant/markdown-attr-plus',
license = 'CC0'
)
| Undeterminant/markdown-attr-plus | setup.py | Python | cc0-1.0 | 368 |
print("hello world".index("ll"))
print("hello world".index("ll", None))
print("hello world".index("ll", 1))
print("hello world".index("ll", 1, None))
print("hello world".index("ll", None, None))
print("hello world".index("ll", 1, -1))
try:
print("hello world".index("ll", 1, 1))
except ValueError:
print("Raised ValueError")
else:
print("Did not raise ValueError")
try:
print("hello world".index("ll", 1, 2))
except ValueError:
print("Raised ValueError")
else:
print("Did not raise ValueError")
try:
print("hello world".index("ll", 1, 3))
except ValueError:
print("Raised ValueError")
else:
print("Did not raise ValueError")
print("hello world".index("ll", 1, 4))
print("hello world".index("ll", 1, 5))
print("hello world".index("ll", -100))
print("0000".index('0'))
print("0000".index('0', 0))
print("0000".index('0', 1))
print("0000".index('0', 2))
print("0000".index('0', 3))
try:
print("0000".index('0', 4))
except ValueError:
print("Raised ValueError")
else:
print("Did not raise ValueError")
try:
print("0000".index('0', 5))
except ValueError:
print("Raised ValueError")
else:
print("Did not raise ValueError")
try:
print("0000".index('-1', 3))
except ValueError:
print("Raised ValueError")
else:
print("Did not raise ValueError")
try:
print("0000".index('1', 3))
except ValueError:
print("Raised ValueError")
else:
print("Did not raise ValueError")
try:
print("0000".index('1', 4))
except ValueError:
print("Raised ValueError")
else:
print("Did not raise ValueError")
try:
print("0000".index('1', 5))
except ValueError:
print("Raised ValueError")
else:
print("Did not raise ValueError")
| pfalcon/micropython | tests/basics/string_index.py | Python | mit | 1,712 |
"""Test OpenZWave Websocket API."""
from unittest.mock import patch
from openzwavemqtt.const import (
ATTR_CODE_SLOT,
ATTR_LABEL,
ATTR_OPTIONS,
ATTR_POSITION,
ATTR_VALUE,
ValueType,
)
from homeassistant.components.ozw.const import ATTR_CONFIG_PARAMETER
from homeassistant.components.ozw.lock import ATTR_USERCODE
from homeassistant.components.ozw.websocket_api import (
ATTR_IS_AWAKE,
ATTR_IS_BEAMING,
ATTR_IS_FAILED,
ATTR_IS_FLIRS,
ATTR_IS_ROUTING,
ATTR_IS_SECURITYV1,
ATTR_IS_ZWAVE_PLUS,
ATTR_NEIGHBORS,
ATTR_NODE_BASIC_STRING,
ATTR_NODE_BAUD_RATE,
ATTR_NODE_GENERIC_STRING,
ATTR_NODE_QUERY_STAGE,
ATTR_NODE_SPECIFIC_STRING,
ID,
NODE_ID,
OZW_INSTANCE,
PARAMETER,
SCHEMA,
TYPE,
VALUE,
)
from homeassistant.components.websocket_api.const import (
ERR_INVALID_FORMAT,
ERR_NOT_FOUND,
ERR_NOT_SUPPORTED,
)
from .common import MQTTMessage, setup_ozw
async def test_websocket_api(hass, generic_data, hass_ws_client, mqtt_mock):
"""Test the ozw websocket api."""
await setup_ozw(hass, fixture=generic_data)
client = await hass_ws_client(hass)
# Test instance list
await client.send_json({ID: 4, TYPE: "ozw/get_instances"})
msg = await client.receive_json()
assert len(msg["result"]) == 1
result = msg["result"][0]
assert result[OZW_INSTANCE] == 1
assert result["Status"] == "driverAllNodesQueried"
assert result["OpenZWave_Version"] == "1.6.1008"
# Test network status
await client.send_json({ID: 5, TYPE: "ozw/network_status"})
msg = await client.receive_json()
result = msg["result"]
assert result["Status"] == "driverAllNodesQueried"
assert result[OZW_INSTANCE] == 1
# Test node status
await client.send_json({ID: 6, TYPE: "ozw/node_status", NODE_ID: 32})
msg = await client.receive_json()
result = msg["result"]
assert result[OZW_INSTANCE] == 1
assert result[NODE_ID] == 32
assert result[ATTR_NODE_QUERY_STAGE] == "Complete"
assert result[ATTR_IS_ZWAVE_PLUS]
assert result[ATTR_IS_AWAKE]
assert not result[ATTR_IS_FAILED]
assert result[ATTR_NODE_BAUD_RATE] == 100000
assert result[ATTR_IS_BEAMING]
assert not result[ATTR_IS_FLIRS]
assert result[ATTR_IS_ROUTING]
assert not result[ATTR_IS_SECURITYV1]
assert result[ATTR_NODE_BASIC_STRING] == "Routing Slave"
assert result[ATTR_NODE_GENERIC_STRING] == "Binary Switch"
assert result[ATTR_NODE_SPECIFIC_STRING] == "Binary Power Switch"
assert result[ATTR_NEIGHBORS] == [1, 33, 36, 37, 39]
await client.send_json({ID: 7, TYPE: "ozw/node_status", NODE_ID: 999})
msg = await client.receive_json()
result = msg["error"]
assert result["code"] == ERR_NOT_FOUND
# Test node statistics
await client.send_json({ID: 8, TYPE: "ozw/node_statistics", NODE_ID: 39})
msg = await client.receive_json()
result = msg["result"]
assert result[OZW_INSTANCE] == 1
assert result[NODE_ID] == 39
assert result["send_count"] == 57
assert result["sent_failed"] == 0
assert result["retries"] == 1
assert result["last_request_rtt"] == 26
assert result["last_response_rtt"] == 38
assert result["average_request_rtt"] == 29
assert result["average_response_rtt"] == 37
assert result["received_packets"] == 3594
assert result["received_dup_packets"] == 12
assert result["received_unsolicited"] == 3546
# Test node metadata
await client.send_json({ID: 9, TYPE: "ozw/node_metadata", NODE_ID: 39})
msg = await client.receive_json()
result = msg["result"]
assert result["metadata"]["ProductPic"] == "images/aeotec/zwa002.png"
await client.send_json({ID: 10, TYPE: "ozw/node_metadata", NODE_ID: 999})
msg = await client.receive_json()
result = msg["error"]
assert result["code"] == ERR_NOT_FOUND
# Test network statistics
await client.send_json({ID: 11, TYPE: "ozw/network_statistics"})
msg = await client.receive_json()
result = msg["result"]
assert result["readCnt"] == 92220
assert result[OZW_INSTANCE] == 1
assert result["node_count"] == 5
# Test get nodes
await client.send_json({ID: 12, TYPE: "ozw/get_nodes"})
msg = await client.receive_json()
result = msg["result"]
assert len(result) == 5
assert result[2][ATTR_IS_AWAKE]
assert not result[1][ATTR_IS_FAILED]
# Test get config parameters
await client.send_json({ID: 13, TYPE: "ozw/get_config_parameters", NODE_ID: 39})
msg = await client.receive_json()
result = msg["result"]
assert len(result) == 8
for config_param in result:
assert config_param["type"] in (
ValueType.LIST.value,
ValueType.BOOL.value,
ValueType.INT.value,
ValueType.BYTE.value,
ValueType.SHORT.value,
ValueType.BITSET.value,
)
# Test set config parameter
config_param = result[0]
current_val = config_param[ATTR_VALUE]
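    # Pick an option value (and label) different from the current one so the
    # set_config_parameter calls below actually change the parameter.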
new_val = next(
option[0]
for option in config_param[SCHEMA][0][ATTR_OPTIONS]
if option[0] != current_val
)
new_label = next(
option[1]
for option in config_param[SCHEMA][0][ATTR_OPTIONS]
if option[1] != current_val and option[0] != new_val
)
await client.send_json(
{
ID: 14,
TYPE: "ozw/set_config_parameter",
NODE_ID: 39,
PARAMETER: config_param[ATTR_CONFIG_PARAMETER],
VALUE: new_val,
}
)
msg = await client.receive_json()
assert msg["success"]
await client.send_json(
{
ID: 15,
TYPE: "ozw/set_config_parameter",
NODE_ID: 39,
PARAMETER: config_param[ATTR_CONFIG_PARAMETER],
VALUE: new_label,
}
)
msg = await client.receive_json()
assert msg["success"]
# Test OZW Instance not found error
await client.send_json(
{ID: 16, TYPE: "ozw/get_config_parameters", OZW_INSTANCE: 999, NODE_ID: 1}
)
msg = await client.receive_json()
result = msg["error"]
assert result["code"] == ERR_NOT_FOUND
# Test OZW Node not found error
await client.send_json(
{
ID: 18,
TYPE: "ozw/set_config_parameter",
NODE_ID: 999,
PARAMETER: 0,
VALUE: "test",
}
)
msg = await client.receive_json()
result = msg["error"]
assert result["code"] == ERR_NOT_FOUND
# Test parameter not found
await client.send_json(
{
ID: 19,
TYPE: "ozw/set_config_parameter",
NODE_ID: 39,
PARAMETER: 45,
VALUE: "test",
}
)
msg = await client.receive_json()
result = msg["error"]
assert result["code"] == ERR_NOT_FOUND
# Test list value not found
await client.send_json(
{
ID: 20,
TYPE: "ozw/set_config_parameter",
NODE_ID: 39,
PARAMETER: config_param[ATTR_CONFIG_PARAMETER],
VALUE: "test",
}
)
msg = await client.receive_json()
result = msg["error"]
assert result["code"] == ERR_NOT_FOUND
# Test value type invalid
await client.send_json(
{
ID: 21,
TYPE: "ozw/set_config_parameter",
NODE_ID: 39,
PARAMETER: 3,
VALUE: 0,
}
)
msg = await client.receive_json()
result = msg["error"]
assert result["code"] == ERR_NOT_SUPPORTED
# Test invalid bitset format
await client.send_json(
{
ID: 22,
TYPE: "ozw/set_config_parameter",
NODE_ID: 39,
PARAMETER: 3,
VALUE: {ATTR_POSITION: 1, ATTR_VALUE: True, ATTR_LABEL: "test"},
}
)
msg = await client.receive_json()
result = msg["error"]
assert result["code"] == ERR_INVALID_FORMAT
# Test valid bitset format passes validation
await client.send_json(
{
ID: 23,
TYPE: "ozw/set_config_parameter",
NODE_ID: 39,
PARAMETER: 10000,
VALUE: {ATTR_POSITION: 1, ATTR_VALUE: True},
}
)
msg = await client.receive_json()
result = msg["error"]
assert result["code"] == ERR_NOT_FOUND
async def test_ws_locks(hass, lock_data, hass_ws_client, mqtt_mock):
"""Test lock websocket apis."""
await setup_ozw(hass, fixture=lock_data)
client = await hass_ws_client(hass)
await client.send_json(
{
ID: 1,
TYPE: "ozw/get_code_slots",
NODE_ID: 10,
}
)
msg = await client.receive_json()
assert msg["success"]
await client.send_json(
{
ID: 2,
TYPE: "ozw/set_usercode",
NODE_ID: 10,
ATTR_CODE_SLOT: 1,
ATTR_USERCODE: "1234",
}
)
msg = await client.receive_json()
assert msg["success"]
await client.send_json(
{
ID: 3,
TYPE: "ozw/clear_usercode",
NODE_ID: 10,
ATTR_CODE_SLOT: 1,
}
)
msg = await client.receive_json()
assert msg["success"]
async def test_refresh_node(
hass, generic_data, sent_messages, hass_ws_client, mqtt_mock
):
"""Test the ozw refresh node api."""
receive_message = await setup_ozw(hass, fixture=generic_data)
client = await hass_ws_client(hass)
# Send the refresh_node_info command
await client.send_json({ID: 9, TYPE: "ozw/refresh_node_info", NODE_ID: 39})
msg = await client.receive_json()
assert len(sent_messages) == 1
assert msg["success"]
# Receive a mock status update from OZW
message = MQTTMessage(
topic="OpenZWave/1/node/39/",
payload={"NodeID": 39, "NodeQueryStage": "initializing"},
)
message.encode()
receive_message(message)
# Verify we got expected data on the websocket
msg = await client.receive_json()
result = msg["event"]
assert result["type"] == "node_updated"
assert result["node_query_stage"] == "initializing"
# Send another mock status update from OZW
message = MQTTMessage(
topic="OpenZWave/1/node/39/",
payload={"NodeID": 39, "NodeQueryStage": "versions"},
)
message.encode()
receive_message(message)
# Send a mock status update for a different node
message = MQTTMessage(
topic="OpenZWave/1/node/35/",
payload={"NodeID": 35, "NodeQueryStage": "fake_shouldnt_be_received"},
)
message.encode()
receive_message(message)
# Verify we received the message for node 39 but not for node 35
msg = await client.receive_json()
result = msg["event"]
assert result["type"] == "node_updated"
assert result["node_query_stage"] == "versions"
async def test_refresh_node_unsubscribe(hass, generic_data, hass_ws_client, mqtt_mock):
"""Test unsubscribing the ozw refresh node api."""
await setup_ozw(hass, fixture=generic_data)
client = await hass_ws_client(hass)
with patch("openzwavemqtt.OZWOptions.listen") as mock_listen:
# Send the refresh_node_info command
await client.send_json({ID: 9, TYPE: "ozw/refresh_node_info", NODE_ID: 39})
await client.receive_json()
# Send the unsubscribe command
await client.send_json({ID: 10, TYPE: "unsubscribe_events", "subscription": 9})
await client.receive_json()
assert mock_listen.return_value.called
| mezz64/home-assistant | tests/components/ozw/test_websocket_api.py | Python | apache-2.0 | 11,571 |
#!/usr/bin/env python3
# Copyright 2020 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import sys
from ros_buildfarm.argument import add_argument_pulp_base_url
from ros_buildfarm.argument import add_argument_pulp_password
from ros_buildfarm.argument import add_argument_pulp_resource_record
from ros_buildfarm.argument import add_argument_pulp_task_timeout
from ros_buildfarm.argument import add_argument_pulp_username
from ros_buildfarm.common import Scope
from ros_buildfarm.pulp import format_pkg_ver
from ros_buildfarm.pulp import PulpRpmClient
def main(argv=sys.argv[1:]):
parser = argparse.ArgumentParser(
description='Upload package to pulp')
parser.add_argument(
'package_file',
nargs='+', metavar='FILE',
help='Package file paths to upload')
add_argument_pulp_base_url(parser)
add_argument_pulp_password(parser)
add_argument_pulp_task_timeout(parser)
add_argument_pulp_username(parser)
add_argument_pulp_resource_record(parser)
args = parser.parse_args(argv)
pulp_client = PulpRpmClient(
args.pulp_base_url, args.pulp_username, args.pulp_password,
task_timeout=args.pulp_task_timeout)
with Scope('SUBSECTION', 'upload package(s) to pulp'):
created_resources = []
for file_path in args.package_file:
print("Uploading '%s'." % file_path)
created_rpm = pulp_client.upload_pkg(file_path)
created_resources.append(created_rpm.pulp_href)
print('Created RPM resource: %s' % created_rpm.pulp_href)
print("Package '%s' version: %s" % (created_rpm.name, format_pkg_ver(created_rpm)))
if args.pulp_resource_record:
print("Saving upload record to '%s'." % args.pulp_resource_record)
with open(args.pulp_resource_record, 'w') as resource_record:
resource_record.write('PULP_RESOURCES=%s\n' % ' '.join(created_resources))
if __name__ == '__main__':
sys.exit(main())
| ros-infrastructure/ros_buildfarm | scripts/release/rpm/upload_package.py | Python | apache-2.0 | 2,542 |
"""mrr log merger tool
usage: $ python3 logmerger.py ${filename}
author: Hwan hee, Lee
email: [email protected]
"""
import sys
try:
filename = sys.argv[1]
f = open(filename, 'r')
result = {}
gaplist = list(range(0, 820, 20))
for gap in gaplist:
result[gap] = 0
lines = f.readlines()
for line in lines:
if line.endswith(']\n'):
res_key = int(line.split(' ')[0].replace(':', ''))
res_value = int(line.split(' ')[1].split('[')[0])
for gap in gaplist:
if res_key == gap:
result[gap] += res_value
    print('>>>>> Result of merging %s (unit: ms)' % (filename))
for k, v in sorted(result.items()):
print('%s:\t%s' % (k, v))
f.close()
except IndexError:
print('usage: $ python3 logmerger.py ${filename}') | naver/arcus-misc | mrr/logmerger.py | Python | apache-2.0 | 855 |
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Byteman(MavenPackage):
"""Byteman is a tool which makes it easy to trace, monitor and test
the behaviour of Java application and JDK runtime code."""
homepage = "https://www.jboss.org/byteman"
url = "https://github.com/bytemanproject/byteman/archive/4.0.12.tar.gz"
version('4.0.12', sha256='72fdc904d7b8df9e743fbb5ae84e51ffc81d32b6e0b0b80fc7ac165dd8c9c7c2')
version('4.0.11', sha256='8e4af6019702c8b22f354962f35f197f9ba2c8699235aac77ebd9263ac12261b')
version('4.0.10', sha256='1b3c9e66fc3f230e407904db1ac43eb5cd4c33620f0d0f9f6c0cb23e4d28784e')
version('4.0.9', sha256='4ffffa9e0bbc45d5c47d443dcae21191531e8b68ade9423d109d40826bf0bd2b')
version('4.0.8', sha256='f357d759c1dad52f4ae626d07fb2cf7c62855b7421723633d90ac49d83bd154b')
version('4.0.7', sha256='542d688c804cd7baa7efad59a94ef8e5d21cc81f3e897f31152c96a7df896aa5')
| LLNL/spack | var/spack/repos/builtin/packages/byteman/package.py | Python | lgpl-2.1 | 1,096 |
"""
AWGs
"""
from atom.api import Atom, List, Int, Float, Range, Enum, Bool, Constant, Str
from Instrument import Instrument
import enaml
from enaml.qt.qt_application import QtApplication
from instruments.AWGBase import AWGChannel, AWG, AWGDriver
from plugins import find_plugins
AWGList = []
# local plugin registration to enable access by AWGs.plugin
plugins = find_plugins(AWG, verbose=False)
for plugin in plugins:
if plugin not in AWGList:
AWGList.append(plugin)
if plugin.__name__ not in globals().keys():
globals().update({plugin.__name__: plugin})
print 'Registered Plugin {0}'.format(plugin.__name__)
if __name__ == "__main__":
with enaml.imports():
from AWGsViews import AWGView
awg = APS(label='BBNAPS1')
app = QtApplication()
view = AWGView(awg=awg)
view.show()
app.start()
| rmcgurrin/PyQLab | instruments/AWGs.py | Python | apache-2.0 | 872 |
from ..cmd import Cli
class VcsUpgrader(Cli):
"""
Base class for updating packages that are a version control repository on local disk
:param vcs_binary_paths:
The full filesystem path to the executable for the version control
system. May be set to None to allow the code to try and find it.
:param update_command:
The command to pass to the version control executable to update the
repository.
:param working_copy:
The local path to the working copy/package directory
:param cache_length:
        The length of time to cache if incoming changesets are available
"""
def __init__(self, vcs_binary_paths, update_command, working_copy, cache_length, debug):
self.update_command = update_command
self.working_copy = working_copy
self.cache_length = cache_length
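        # Binary discovery and command execution are handled by the Cli base class.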
super(VcsUpgrader, self).__init__(vcs_binary_paths, debug)
| herove/dotfiles | sublime/Packages/Package Control/package_control/upgraders/vcs_upgrader.py | Python | mit | 929 |
#!/usr/bin/env python
# -*- coding: ascii -*- pylint: skip-file
r"""
:Copyright:
Copyright 2006 - 2016
Andr\xe9 Malo or his licensors, as applicable
:License:
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===============
Build targets
===============
Build targets.
"""
if __doc__:
__doc__ = __doc__.encode('ascii').decode('unicode_escape')
__author__ = r"Andr\xe9 Malo".encode('ascii').decode('unicode_escape')
__docformat__ = "restructuredtext en"
import errno as _errno
import os as _os
import re as _re
import sys as _sys
from _setup import shell
from _setup import make
from _setup import term
from _setup.make import targets
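# Python 2/3 compatibility shims: text-mode open() with encoding, config-read
# keyword arguments and a reraise() helper with the right syntax for each version.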
if _sys.version_info[0] >= 3:
py3 = 1
cfgread = dict(encoding='utf-8')
def textopen(*args):
return open(*args, **cfgread)
exec ("def reraise(*e): raise e[1].with_traceback(e[2])")
else:
py3 = 0
try:
True
except NameError:
exec ("True = 1; False = 0")
textopen = open
cfgread = {}
exec ("def reraise(*e): raise e[0], e[1], e[2]")
class Target(make.Target):
def init(self):
self.dirs = {
'lib': '.',
'docs': 'docs',
'tests': 'tests',
'coverage': 'docs/coverage',
'tox': '.tox',
'apidoc': 'docs/apidoc',
'userdoc': 'docs/userdoc',
'userdoc_source': 'docs/_userdoc',
'userdoc_build': 'docs/_userdoc/_build',
'website': 'dist/website',
'_website': '_website', # source dir
'dist': 'dist',
'build': 'build',
'ebuild': '_pkg/ebuilds',
}
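        # Make sure the project's lib directory is the first entry on sys.path.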
libpath = shell.native(self.dirs['lib'])
if libpath != _sys.path[0]:
while libpath in _sys.path:
_sys.path.remove(libpath)
_sys.path.insert(0, libpath)
self.ebuild_files = {
'wolfe-beta.ebuild.in': 'wolfe-%(VERSION)s_beta%(REV)s.ebuild',
'wolfe.ebuild.in': 'wolfe-%(VERSION)s.ebuild',
}
Manifest = targets.Manifest
class Distribution(targets.Distribution):
def init(self):
self._dist = 'dist'
self._ebuilds = '_pkg/ebuilds'
self._changes = 'docs/CHANGES'
class Check(Target):
""" Check the python code """
NAME = "check"
DEPS = ["compile-quiet"]
def run(self):
from _setup.dev import analysis
term.green('Linting wolfe sources...')
res = analysis.pylint('pylintrc', 'wolfe')
if res == 2:
make.warn('pylint not found', self.NAME)
class Test(Target):
""" Test the code """
NAME = "test"
DEPS = ["nose-test"]
class NoseTest(Target):
""" Run the nose tests """
NAME = "nose-test"
DEPS = ["compile-quiet"]
def run(self):
if shell.spawn(
'nosetests',
'-c', 'package.cfg',
self.dirs['tests'], self.dirs['lib']):
raise RuntimeError('tests failed')
def clean(self, scm, dist):
term.green("Removing coverage files...")
shell.rm_rf(self.dirs['coverage'])
shell.rm_rf(self.dirs['tox'])
shell.rm('.coverage')
class Compile(Target):
""" Compile the python code """
NAME = "compile"
# DEPS = None
def run(self):
import setup
_old_argv = _sys.argv
try:
_sys.argv = ['setup.py', '-q', 'build']
if not self.HIDDEN:
_sys.argv.remove('-q')
setup.setup()
if 'java' not in _sys.platform.lower():
_sys.argv = [
'setup.py', '-q', 'install_lib', '--install-dir',
shell.native(self.dirs['lib']),
'--optimize', '2',
]
if not self.HIDDEN:
_sys.argv.remove('-q')
setup.setup()
finally:
_sys.argv = _old_argv
for name in shell.files("%s/wolfe" % self.dirs['lib'], '*.py'):
self.compile(name)
term.write("%(ERASE)s")
term.green("All files successfully compiled.")
def compile(self, name):
path = shell.native(name)
term.write("%(ERASE)s%(BOLD)s>>> Compiling %(name)s...%(NORMAL)s",
name=name)
from distutils import util
try:
from distutils import log
except ImportError:
util.byte_compile([path], verbose=0, force=True)
else:
log.set_verbosity(0)
util.byte_compile([path], force=True)
def clean(self, scm, dist):
term.green("Removing python byte code...")
for name in shell.dirs('.', '__pycache__'):
shell.rm_rf(name)
for name in shell.files('.', '*.py[co]'):
shell.rm(name)
for name in shell.files('.', '*$py.class'):
shell.rm(name)
term.green("Removing c extensions...")
for name in shell.files('.', '*.so'):
shell.rm(name)
for name in shell.files('.', '*.pyd'):
shell.rm(name)
shell.rm_rf(self.dirs['build'])
class CompileQuiet(Compile):
NAME = "compile-quiet"
HIDDEN = True
def clean(self, scm, dist):
pass
class Doc(Target):
""" Build the docs (api + user) """
NAME = "doc"
DEPS = ['apidoc', 'userdoc']
class ApiDoc(Target):
""" Build the API docs """
NAME = "apidoc"
DEPS = ['compile-quiet']
def run(self):
from _setup.dev import apidoc
apidoc.epydoc(
prepend=[
shell.native(self.dirs['lib']),
],
env={'WOLFE_NO_C_OVERRIDE': '1', 'EPYDOC_INSPECTOR': '1'}
)
def clean(self, scm, dist):
if scm:
term.green("Removing apidocs...")
shell.rm_rf(self.dirs['apidoc'])
class UserDoc(Target):
""" Build the user docs """
NAME = "userdoc"
# DEPS = None
def run(self):
from _setup.dev import userdoc
userdoc.sphinx(
build=shell.native(self.dirs['userdoc_build']),
source=shell.native(self.dirs['userdoc_source']),
target=shell.native(self.dirs['userdoc']),
)
def clean(self, scm, dist):
if scm:
term.green("Removing userdocs...")
shell.rm_rf(self.dirs['userdoc'])
shell.rm_rf(self.dirs['userdoc_build'])
class Website(Target):
""" Build the website """
NAME = "website"
DEPS = ["apidoc"]
def run(self):
from _setup.util import SafeConfigParser as parser
parser = parser()
parser.read('package.cfg', **cfgread)
strversion = parser.get('package', 'version.number')
shortversion = tuple(map(int, strversion.split('.')[:2]))
shell.rm_rf(self.dirs['_website'])
shell.cp_r(
self.dirs['userdoc_source'],
_os.path.join(self.dirs['_website'], 'src')
)
shell.rm_rf(_os.path.join(self.dirs['_website'], 'build'))
shell.rm_rf(self.dirs['website'])
_os.makedirs(self.dirs['website'])
filename = _os.path.join(
self.dirs['_website'], 'src', 'website_download.txt'
)
fp = textopen(filename)
try:
download = fp.read()
finally:
fp.close()
filename = _os.path.join(self.dirs['_website'], 'src', 'index.txt')
fp = textopen(filename)
try:
indexlines = fp.readlines()
finally:
fp.close()
fp = textopen(filename, 'w')
try:
for line in indexlines:
if line.startswith('.. placeholder: Download'):
line = download
fp.write(line)
finally:
fp.close()
shell.cp_r(
self.dirs['apidoc'],
_os.path.join(self.dirs['website'], 'doc-%d.%d' % shortversion)
)
shell.cp_r(
self.dirs['apidoc'],
_os.path.join(
self.dirs['_website'], 'src', 'doc-%d.%d' % shortversion
)
)
fp = textopen(_os.path.join(
self.dirs['_website'], 'src', 'conf.py'
), 'a')
try:
fp.write("\nepydoc = dict(wolfe=%r)\n" % (
_os.path.join(
shell.native(self.dirs['_website']),
"src",
"doc-%d.%d" % shortversion,
),
))
fp.write("\nexclude_trees.append(%r)\n" %
"doc-%d.%d" % shortversion)
finally:
fp.close()
from _setup.dev import userdoc
userdoc.sphinx(
build=shell.native(_os.path.join(self.dirs['_website'], 'build')),
source=shell.native(_os.path.join(self.dirs['_website'], 'src')),
target=shell.native(self.dirs['website']),
)
shell.rm(_os.path.join(self.dirs['website'], '.buildinfo'))
def clean(self, scm, dist):
if scm:
term.green("Removing website...")
shell.rm_rf(self.dirs['website'])
shell.rm_rf(self.dirs['_website'])
class PreCheck(Target):
""" Run clean, doc, check """
NAME = "precheck"
DEPS = ["clean", "doc", "check", "test"]
class SVNRelease(Target):
""" Release current version """
# NAME = "release"
DEPS = None
def run(self):
self._check_committed()
self._update_versions()
self._tag_release()
self.runner('dist', seen={})
def _tag_release(self):
""" Tag release """
from _setup.util import SafeConfigParser as parser
parser = parser()
parser.read('package.cfg', **cfgread)
strversion = parser.get('package', 'version.number')
isdev = parser.getboolean('package', 'version.dev')
revision = parser.getint('package', 'version.revision')
version = strversion
if isdev:
version += '-dev-r%d' % (revision,)
trunk_url = self._repo_url()
if not trunk_url.endswith('/trunk'):
rex = _re.compile(r'/branches/\d+(?:\.\d+)*\.[xX]$').search
match = rex(trunk_url)
if not match:
make.fail("Not in trunk or release branch!")
found = match.start(0)
else:
found = -len('/trunk')
release_url = trunk_url[:found] + '/releases/' + version
svn = shell.frompath('svn')
shell.spawn(
svn, 'copy', '-m', 'Release version ' + version, '--',
trunk_url, release_url,
echo=True,
)
def _update_versions(self):
""" Update versions """
self.runner('revision', 'version', seen={})
svn = shell.frompath('svn')
shell.spawn(
svn, 'commit', '-m', 'Pre-release: version update',
echo=True
)
def _repo_url(self):
""" Determine URL """
from xml.dom import minidom
svn = shell.frompath('svn')
info = minidom.parseString(
shell.spawn(svn, 'info', '--xml', stdout=True)
)
try:
url = info.getElementsByTagName('url')[0]
text = []
for node in url.childNodes:
if node.nodeType == node.TEXT_NODE:
text.append(node.data)
finally:
info.unlink()
return (''.decode('ascii')).join(text).encode('utf-8')
def _check_committed(self):
""" Check if everything is committed """
if not self._repo_url().endswith('/trunk'):
rex = _re.compile(r'/branches/\d+(?:\.\d+)*\.[xX]$').search
match = rex(self._repo_url())
if not match:
make.fail("Not in trunk or release branch!")
svn = shell.frompath('svn')
lines = shell.spawn(
svn, 'stat', '--ignore-externals',
stdout=True, env=dict(_os.environ, LC_ALL='C'),
).splitlines()
for line in lines:
if line.startswith('X'):
continue
make.fail("Uncommitted changes!")
class GitRelease(Target):
""" Release current version """
# NAME = "release"
DEPS = None
def run(self):
self._check_committed()
self._update_versions()
self._tag_release()
self.runner('dist', seen={})
def _tag_release(self):
""" Tag release """
from _setup.util import SafeConfigParser as parser
parser = parser()
parser.read('package.cfg', **cfgread)
strversion = parser.get('package', 'version.number')
isdev = parser.getboolean('package', 'version.dev')
revision = parser.getint('package', 'version.revision')
version = strversion
if isdev:
version += '-dev-r%d' % (revision,)
git = shell.frompath('git')
shell.spawn(
git, 'tag', '-a', '-m', 'Release version ' + version, '--',
version,
echo=True,
)
def _update_versions(self):
""" Update versions """
self.runner('revision', 'version', seen={})
git = shell.frompath('git')
shell.spawn(git, 'commit', '-a', '-m', 'Pre-release: version update',
echo=True)
def _check_committed(self):
""" Check if everything is committed """
git = shell.frompath('git')
lines = shell.spawn(
git, 'branch', '--color=never',
stdout=True, env=dict(_os.environ, LC_ALL='C')
).splitlines()
for line in lines:
if line.startswith('*'):
branch = line.split(None, 1)[1]
break
else:
make.fail("Could not determine current branch.")
if branch != 'master':
rex = _re.compile(r'^\d+(?:\.\d+)*\.[xX]$').match
match = rex(branch)
if not match:
make.fail("Not in master or release branch.")
lines = shell.spawn(
git, 'status', '--porcelain',
stdout=True, env=dict(_os.environ, LC_ALL='C'),
)
if lines:
make.fail("Uncommitted changes!")
class Release(GitRelease):
NAME = "release"
# DEPS = None
class SVNRevision(Target):
""" Insert the svn revision into all relevant files """
# NAME = "revision"
# DEPS = None
def run(self):
revision = self._revision()
self._revision_cfg(revision)
def _revision(self):
""" Find SVN revision """
rev = shell.spawn(shell.frompath('svnversion'), '.', stdout=True)
rev = rev.strip()
if ':' in rev:
rev = rev.split(':')[1]
try:
rev = int(rev)
except ValueError:
try:
rev = int(rev[:-1])
except ValueError:
make.fail("No clean revision found (%s)" % rev)
return rev
def _revision_cfg(self, revision):
""" Modify version in package.cfg """
filename = 'package.cfg'
fp = textopen(filename)
try:
initlines = fp.readlines()
finally:
fp.close()
fp = textopen(filename, 'w')
replaced = False
try:
for line in initlines:
if line.startswith('version.revision'):
line = 'version.revision = %d\n' % (revision,)
replaced = True
fp.write(line)
finally:
fp.close()
assert replaced, "version.revision not found in package.cfg"
class SimpleRevision(Target):
""" Update the revision number and insert into all relevant files """
# NAME = "revision"
# DEPS = None
def run(self):
self._revision_cfg()
def _revision_cfg(self):
""" Modify version in package.cfg """
filename = 'package.cfg'
fp = textopen(filename)
try:
initlines = fp.readlines()
finally:
fp.close()
fp = textopen(filename, 'w')
revision, replaced = None, False
try:
for line in initlines:
if line.startswith('version.revision'):
if revision is None:
revision = int(line.split('=', 1)[1].strip() or 0, 10)
revision += 1
line = 'version.revision = %d\n' % (revision,)
replaced = True
fp.write(line)
finally:
fp.close()
assert replaced, "version.revision not found in package.cfg"
GitRevision = SimpleRevision
class Revision(GitRevision):
""" Insert the revision into all relevant files """
NAME = "revision"
# DEPS = None
class Version(Target):
""" Insert the program version into all relevant files """
NAME = "version"
# DEPS = None
def run(self):
from _setup.util import SafeConfigParser as parser
parser = parser()
parser.read('package.cfg', **cfgread)
strversion = parser.get('package', 'version.number')
isdev = parser.getboolean('package', 'version.dev')
revision = parser.getint('package', 'version.revision')
self._version_init(strversion, isdev, revision)
self._version_userdoc(strversion, isdev, revision)
# self._version_download(strversion, isdev, revision)
self._version_changes(strversion, isdev, revision)
# parm = {'VERSION': strversion, 'REV': revision}
# for src, dest in self.ebuild_files.items():
# src = "%s/%s" % (self.dirs['ebuild'], src)
# dest = "%s/%s" % (self.dirs['ebuild'], dest % parm)
# term.green("Creating %(name)s...", name=dest)
# shell.cp(src, dest)
def _version_init(self, strversion, isdev, revision):
""" Modify version in __init__ """
filename = _os.path.join(self.dirs['lib'], 'wolfe', '__init__.py')
fp = textopen(filename)
try:
initlines = fp.readlines()
finally:
fp.close()
fp = textopen(filename, 'w')
replaced = False
try:
for line in initlines:
if line.startswith('__version__'):
line = '__version__ = (%r, %r, %r)\n' % (
strversion, isdev, revision
)
replaced = True
fp.write(line)
finally:
fp.close()
assert replaced, "__version__ not found in __init__.py"
def _version_changes(self, strversion, isdev, revision):
""" Modify version in changes """
filename = _os.path.join(shell.native(self.dirs['docs']), 'CHANGES')
if isdev:
strversion = "%s-dev-r%d" % (strversion, revision)
fp = textopen(filename)
try:
initlines = fp.readlines()
finally:
fp.close()
fp = textopen(filename, 'w')
try:
for line in initlines:
if line.rstrip() == "Changes with version":
line = "%s %s\n" % (line.rstrip(), strversion)
fp.write(line)
finally:
fp.close()
def _version_userdoc(self, strversion, isdev, revision):
""" Modify version in userdoc """
filename = _os.path.join(self.dirs['userdoc_source'], 'conf.py')
shortversion = '.'.join(strversion.split('.')[:2])
longversion = strversion
if isdev:
longversion = "%s-dev-r%d" % (strversion, revision)
fp = textopen(filename)
try:
initlines = fp.readlines()
finally:
fp.close()
replaced = 0
fp = textopen(filename, 'w')
try:
for line in initlines:
if line.startswith('version'):
line = 'version = %r\n' % shortversion
replaced |= 1
elif line.startswith('release'):
line = 'release = %r\n' % longversion
replaced |= 2
fp.write(line)
finally:
fp.close()
assert replaced & 3 != 0, "version/release not found in conf.py"
def _version_download(self, strversion, isdev, revision):
""" Modify version in website download docs """
filename = _os.path.join(
self.dirs['userdoc_source'], 'website_download.txt'
)
dllines, VERSION, PATH = [], strversion, ''
if isdev:
oldstable = []
hasstable = False
try:
fp = textopen(filename)
except IOError:
e = _sys.exc_info()[1]
if e.args[0] != _errno.ENOENT:
raise
else:
try:
for line in fp:
if line.startswith('.. begin stable'):
hasstable = True
oldstable.append(line)
finally:
fp.close()
if hasstable:
dllines = oldstable
else:
VERSION = "%s-dev-%s" % (strversion, revision)
PATH = 'dev/'
newdev = []
fp = textopen(filename + '.in')
try:
if dllines:
for line in fp:
if newdev:
newdev.append(line)
if line.startswith('.. end dev'):
break
elif line.startswith('.. begin dev'):
newdev.append(line)
else:
raise AssertionError("Incomplete dev marker")
else:
dllines = fp.readlines()
finally:
fp.close()
instable, indev = [], []
fp = textopen(filename, 'w')
try:
for line in dllines:
if instable:
instable.append(line)
if line.startswith('.. end stable'):
if not isdev:
res = (
''.join(instable)
.replace('@@VERSION@@', strversion)
.replace('@@PATH@@', '')
)
elif not hasstable:
res = ''
else:
res = ''.join(instable)
fp.write(res)
instable = []
elif indev:
indev.append(line)
if line.startswith('.. end dev'):
if isdev:
if newdev:
indev = newdev
fp.write(
''.join(indev)
.replace('@@DEVVERSION@@', "%s-dev-r%d" % (
strversion, revision
))
.replace('@@PATH@@', 'dev/')
)
else:
fp.write(''.join([indev[0], indev[-1]]))
indev = []
elif line.startswith('.. begin stable'):
instable.append(line)
elif line.startswith('.. begin dev'):
indev.append(line)
elif isdev and hasstable:
fp.write(line)
else:
fp.write(
line
.replace('@@VERSION@@', VERSION)
.replace('@@PATH@@', PATH)
)
finally:
fp.close()
def clean(self, scm, dist):
""" Clean versioned files """
if scm:
term.green("Removing generated ebuild files")
for name in shell.files(self.dirs['ebuild'], '*.ebuild'):
shell.rm(name)
make.main(name=__name__)
| ndparker/wolfe | make.py | Python | apache-2.0 | 24,565 |
#!/usr/bin/env python3
'''
kicad-footprint-generator is free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
kicad-footprint-generator is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with kicad-footprint-generator. If not, see < http://www.gnu.org/licenses/ >.
'''
import sys
import os
#sys.path.append(os.path.join(sys.path[0],"..","..","kicad_mod")) # load kicad_mod path
# export PYTHONPATH="${PYTHONPATH}<path to kicad-footprint-generator directory>"
sys.path.append(os.path.join(sys.path[0], "..", "..", "..")) # load parent path of KicadModTree
from math import sqrt
import argparse
import yaml
from helpers import *
from KicadModTree import *
sys.path.append(os.path.join(sys.path[0], "..", "..", "tools")) # load parent path of tools
from footprint_text_fields import addTextFields
series = "Micro-Fit_3.0"
series_long = 'Micro-Fit 3.0 Connector System'
manufacturer = 'Molex'
orientation = 'H'
number_of_rows = 1
datasheet = 'https://www.molex.com/pdm_docs/sd/436500300_sd.pdf'
#Molex part number
#n = number of circuits per row
part_code = "43650-{n:02}00"
alternative_codes = [
"43650-{n:02}01",
"43650-{n:02}02"
]
pins_per_row_range = range(2,13)
pitch = 3.0
drill = 1.02
peg_drill = 3.0
pad_to_pad_clearance = 1.5 # Voltage rating is up to 600V (http://www.molex.com/pdm_docs/ps/PS-43045.pdf)
max_annular_ring = 0.5
min_annular_ring = 0.15
pad_size = [pitch - pad_to_pad_clearance, drill + 2*max_annular_ring]
if pad_size[0] - drill < 2*min_annular_ring:
pad_size[0] = drill + 2*min_annular_ring
if pad_size[0] - drill > 2*max_annular_ring:
pad_size[0] = drill + 2*max_annular_ring
if pad_size[1] - drill < 2*min_annular_ring:
pad_size[1] = drill + 2*min_annular_ring
if pad_size[1] - drill > 2*max_annular_ring:
pad_size[1] = drill + 2*max_annular_ring
pad_shape=Pad.SHAPE_OVAL
if pad_size[1] == pad_size[0]:
pad_shape=Pad.SHAPE_CIRCLE
def generate_one_footprint(pins, configuration):
pins_per_row = pins
mpn = part_code.format(n=pins)
alt_mpn = [code.format(n=pins) for code in alternative_codes]
# handle arguments
orientation_str = configuration['orientation_options'][orientation]
footprint_name = configuration['fp_name_format_string'].format(man=manufacturer,
series=series,
mpn=mpn, num_rows=number_of_rows, pins_per_row=pins_per_row, mounting_pad = "",
pitch=pitch, orientation=orientation_str)
kicad_mod = Footprint(footprint_name)
kicad_mod.setDescription("Molex {:s}, {:s} (compatible alternatives: {:s}), {:d} Pins per row ({:s}), generated with kicad-footprint-generator".format(series_long, mpn, ', '.join(alt_mpn), pins_per_row, datasheet))
kicad_mod.setTags(configuration['keyword_fp_string'].format(series=series,
orientation=orientation_str, man=manufacturer,
entry=configuration['entry_direction'][orientation]))
########################## Dimensions ##############################
B = (pins_per_row-1)*pitch
A = B + 6.65
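    # B is the span between the first and last pin, A the overall body width
    # (letter names presumably follow the dimensions in the Molex drawing).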
    # Center the pins at half of the pitch
pad_row_1_y = 0
pad_row_2_y = pad_row_1_y + pitch
pad1_x = 0
    C = 1.7 + pitch*(pins-3)  # peg spacing; the first one must be 4.7 mm
body_edge={
'left':-3.325,
'right':A-3.325,
'top': -8.92
}
body_edge['bottom'] = body_edge['top'] + 9.90
############################# Pads ##################################
#
# Pegs
#
if pins_per_row == 2:
kicad_mod.append(Pad(at=[pitch/2, pad_row_1_y - 4.32], number="",
type=Pad.TYPE_NPTH, shape=Pad.SHAPE_CIRCLE, size=peg_drill,
drill=peg_drill, layers=Pad.LAYERS_NPTH))
elif pins_per_row == 3:
kicad_mod.append(Pad(at=[pitch, pad_row_1_y - 4.32], number="",
type=Pad.TYPE_NPTH, shape=Pad.SHAPE_CIRCLE, size=peg_drill,
drill=peg_drill, layers=Pad.LAYERS_NPTH))
else:
kicad_mod.append(Pad(at=[pad1_x + 2.15, pad_row_1_y - 4.32], number="",
type=Pad.TYPE_NPTH, shape=Pad.SHAPE_CIRCLE, size=peg_drill,
drill=peg_drill, layers=Pad.LAYERS_NPTH))
kicad_mod.append(Pad(at=[pad1_x + 2.15 + C, pad_row_1_y - 4.32], number="",
type=Pad.TYPE_NPTH, shape=Pad.SHAPE_CIRCLE, size=peg_drill,
drill=peg_drill, layers=Pad.LAYERS_NPTH))
#
# Add pads
#
optional_pad_params = {}
if configuration['kicad4_compatible']:
optional_pad_params['tht_pad1_shape'] = Pad.SHAPE_RECT
else:
optional_pad_params['tht_pad1_shape'] = Pad.SHAPE_ROUNDRECT
kicad_mod.append(PadArray(start=[pad1_x, pad_row_1_y], initial=1,
pincount=pins_per_row, increment=1, x_spacing=pitch, size=pad_size,
type=Pad.TYPE_THT, shape=pad_shape, layers=Pad.LAYERS_THT, drill=drill,
**optional_pad_params))
######################## Fabrication Layer ###########################
main_body_poly= [
{'x': body_edge['left'], 'y': body_edge['bottom']},
{'x': body_edge['left'], 'y': body_edge['top']+1},
{'x': body_edge['left']+1, 'y': body_edge['top']},
{'x': body_edge['right']-1, 'y': body_edge['top']},
{'x': body_edge['right'], 'y': body_edge['top']+1},
{'x': body_edge['right'], 'y': body_edge['bottom']},
{'x': body_edge['left'], 'y': body_edge['bottom']}
]
kicad_mod.append(PolygoneLine(polygone=main_body_poly,
width=configuration['fab_line_width'], layer="F.Fab"))
main_arrow_poly= [
{'x': -.75, 'y': body_edge['bottom']},
{'x': 0, 'y': 0},
{'x': 0.75, 'y': body_edge['bottom']}
]
kicad_mod.append(PolygoneLine(polygone=main_arrow_poly,
width=configuration['fab_line_width'], layer="F.Fab"))
######################## SilkS Layer ###########################
off = configuration['silk_fab_offset']
pad_silk_off = configuration['silk_line_width']/2 + configuration['silk_pad_clearance']
r_no_silk = max(pad_size)/2 + pad_silk_off # simplified to circle instead of oval
dy = abs(body_edge['bottom']) + off
pin_center_silk_x = 0 if dy >= r_no_silk else sqrt(r_no_silk**2-dy**2)
pin1_center_silk_x = pad_size[0]/2 + pad_silk_off # simplified to rectangle instead of rounded rect
poly_s_t= [
{'x': body_edge['left'] - off, 'y': body_edge['bottom'] + off},
{'x': body_edge['left'] - off, 'y': body_edge['top'] + 1 - off},
{'x': body_edge['left'] + 1 - off, 'y': body_edge['top'] - off},
{'x': body_edge['right'] - 1 + off, 'y': body_edge['top'] - off},
{'x': body_edge['right'] + off, 'y': body_edge['top'] + 1 - off},
{'x': body_edge['right'] + off, 'y': body_edge['bottom'] + off}
]
kicad_mod.append(PolygoneLine(polygone=poly_s_t,
width=configuration['silk_line_width'], layer="F.SilkS"))
if pin_center_silk_x == 0:
kicad_mod.append(Line(
start=[body_edge['left']-off, body_edge['bottom']],
end=[body_edge['right']-off, body_edge['bottom']],
layer="F.SilkS", width=configuration['silk_line_width']
))
else:
kicad_mod.append(Line(
start=[body_edge['left']-off, body_edge['bottom']+off],
end=[-pin1_center_silk_x, body_edge['bottom']+off],
layer="F.SilkS", width=configuration['silk_line_width']
))
kicad_mod.append(Line(
start=[body_edge['right']+off, body_edge['bottom']+off],
end=[(pins_per_row-1)*pitch + pin_center_silk_x, body_edge['bottom']+off],
layer="F.SilkS", width=configuration['silk_line_width']
))
kicad_mod.append(Line(
start=[pin1_center_silk_x, body_edge['bottom']+off],
end=[pitch - pin_center_silk_x, body_edge['bottom']+off],
layer="F.SilkS", width=configuration['silk_line_width']
))
for i in range(1, pins_per_row-1):
xl = i*pitch + pin_center_silk_x
xr = (i+1)*pitch - pin_center_silk_x
kicad_mod.append(Line(
start=[xl, body_edge['bottom']+off],
end=[xr, body_edge['bottom']+off],
layer="F.SilkS", width=configuration['silk_line_width']
))
######################## CrtYd Layer ###########################
CrtYd_offset = configuration['courtyard_offset']['connector']
CrtYd_grid = configuration['courtyard_grid']
poly_yd = [
{'x': roundToBase(body_edge['left'] - CrtYd_offset, CrtYd_grid), 'y': roundToBase(body_edge['bottom'] + CrtYd_offset, CrtYd_grid)},
{'x': roundToBase(body_edge['left'] - CrtYd_offset, CrtYd_grid), 'y': roundToBase(body_edge['top'] - CrtYd_offset, CrtYd_grid)},
{'x': roundToBase(body_edge['right'] + CrtYd_offset, CrtYd_grid), 'y': roundToBase(body_edge['top'] - CrtYd_offset, CrtYd_grid)},
{'x': roundToBase(body_edge['right'] + CrtYd_offset, CrtYd_grid), 'y': roundToBase(body_edge['bottom'] + CrtYd_offset, CrtYd_grid)},
{'x': roundToBase(body_edge['left'] - CrtYd_offset, CrtYd_grid), 'y': roundToBase(body_edge['bottom'] + CrtYd_offset, CrtYd_grid)}
]
kicad_mod.append(PolygoneLine(polygone=poly_yd,
layer='F.CrtYd', width=configuration['courtyard_line_width']))
######################### Text Fields ###############################
cy1 = roundToBase(body_edge['top'] - configuration['courtyard_offset']['connector'], configuration['courtyard_grid'])
cy2 = roundToBase(pad_size[1] + configuration['courtyard_offset']['connector'], configuration['courtyard_grid'])
addTextFields(kicad_mod=kicad_mod, configuration=configuration, body_edges=body_edge,
courtyard={'top':cy1, 'bottom':cy2}, fp_name=footprint_name, text_y_inside_position='top')
##################### Write to File and 3D ############################
model3d_path_prefix = configuration.get('3d_model_prefix','${KISYS3DMOD}/')
lib_name = configuration['lib_name_format_string'].format(series=series, man=manufacturer)
model_name = '{model3d_path_prefix:s}{lib_name:s}.3dshapes/{fp_name:s}.wrl'.format(
model3d_path_prefix=model3d_path_prefix, lib_name=lib_name, fp_name=footprint_name)
kicad_mod.append(Model(filename=model_name))
output_dir = '{lib_name:s}.pretty/'.format(lib_name=lib_name)
if not os.path.isdir(output_dir): #returns false if path does not yet exist!! (Does not check path validity)
os.makedirs(output_dir)
filename = '{outdir:s}{fp_name:s}.kicad_mod'.format(outdir=output_dir, fp_name=footprint_name)
file_handler = KicadFileHandler(kicad_mod)
file_handler.writeFile(filename)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='use confing .yaml files to create footprints.')
parser.add_argument('--global_config', type=str, nargs='?', help='the config file defining how the footprint will look like. (KLC)', default='../../tools/global_config_files/config_KLCv3.0.yaml')
parser.add_argument('--series_config', type=str, nargs='?', help='the config file defining series parameters.', default='../conn_config_KLCv3.yaml')
parser.add_argument('--kicad4_compatible', action='store_true', help='Create footprints kicad 4 compatible')
args = parser.parse_args()
with open(args.global_config, 'r') as config_stream:
try:
configuration = yaml.load(config_stream)
except yaml.YAMLError as exc:
print(exc)
with open(args.series_config, 'r') as config_stream:
try:
configuration.update(yaml.load(config_stream))
except yaml.YAMLError as exc:
print(exc)
configuration['kicad4_compatible'] = args.kicad4_compatible
for pincount in pins_per_row_range:
generate_one_footprint(pincount, configuration)
| SchrodingersGat/kicad-footprint-generator | scripts/Connector/Connector_Molex/conn_molex_micro-fit-3.0_tht_side_single_row.py | Python | gpl-3.0 | 12,160 |
#!/usr/bin/env python3
from distutils.core import setup
setup(
name='conflib',
version='1.0.0',
description='Manage configuration hierarchies',
author='Les Aker',
author_email='[email protected]',
url='https://github.com/akerl/conflib',
license='MIT License',
packages=['conflib'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
| akerl/conflib | setup.py | Python | mit | 750 |
# Copyright 2017 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import time
import uuid
from google.cloud import storage
import pytest
import storage_add_bucket_conditional_iam_binding
import storage_add_bucket_iam_member
import storage_remove_bucket_conditional_iam_binding
import storage_remove_bucket_iam_member
import storage_set_bucket_public_iam
import storage_view_bucket_iam_members
MEMBER = "group:[email protected]"
ROLE = "roles/storage.legacyBucketReader"
CONDITION_TITLE = "match-prefix"
CONDITION_DESCRIPTION = "Applies to objects matching a prefix"
CONDITION_EXPRESSION = (
'resource.name.startsWith("projects/_/buckets/bucket-name/objects/prefix-a-")'
)
@pytest.fixture(scope="module")
def bucket():
bucket = None
while bucket is None or bucket.exists():
storage_client = storage.Client()
bucket_name = "test-iam-{}".format(uuid.uuid4())
bucket = storage_client.bucket(bucket_name)
bucket.iam_configuration.uniform_bucket_level_access_enabled = True
storage_client.create_bucket(bucket)
yield bucket
time.sleep(3)
bucket.delete(force=True)
@pytest.fixture(scope="function")
def public_bucket():
    # The new projects don't allow making a bucket publicly accessible, so
    # we need to use the old main project for now.
original_value = os.environ['GOOGLE_CLOUD_PROJECT']
os.environ['GOOGLE_CLOUD_PROJECT'] = os.environ['MAIN_GOOGLE_CLOUD_PROJECT']
bucket = None
while bucket is None or bucket.exists():
storage_client = storage.Client()
bucket_name = "test-iam-{}".format(uuid.uuid4())
bucket = storage_client.bucket(bucket_name)
bucket.iam_configuration.uniform_bucket_level_access_enabled = True
storage_client.create_bucket(bucket)
yield bucket
time.sleep(3)
bucket.delete(force=True)
# Set the value back.
os.environ['GOOGLE_CLOUD_PROJECT'] = original_value
def test_view_bucket_iam_members(capsys, bucket):
storage_view_bucket_iam_members.view_bucket_iam_members(bucket.name)
assert re.match("Role: .*, Members: .*", capsys.readouterr().out)
def test_add_bucket_iam_member(bucket):
storage_add_bucket_iam_member.add_bucket_iam_member(bucket.name, ROLE, MEMBER)
policy = bucket.get_iam_policy(requested_policy_version=3)
assert any(
binding["role"] == ROLE and MEMBER in binding["members"]
for binding in policy.bindings
)
def test_add_bucket_conditional_iam_binding(bucket):
storage_add_bucket_conditional_iam_binding.add_bucket_conditional_iam_binding(
bucket.name,
ROLE,
CONDITION_TITLE,
CONDITION_DESCRIPTION,
CONDITION_EXPRESSION,
{MEMBER},
)
policy = bucket.get_iam_policy(requested_policy_version=3)
assert any(
binding["role"] == ROLE
and binding["members"] == {MEMBER}
and binding["condition"]
== {
"title": CONDITION_TITLE,
"description": CONDITION_DESCRIPTION,
"expression": CONDITION_EXPRESSION,
}
for binding in policy.bindings
)
def test_remove_bucket_iam_member(public_bucket):
storage_remove_bucket_iam_member.remove_bucket_iam_member(
public_bucket.name, ROLE, MEMBER)
policy = public_bucket.get_iam_policy(requested_policy_version=3)
assert not any(
binding["role"] == ROLE and MEMBER in binding["members"]
for binding in policy.bindings
)
def test_remove_bucket_conditional_iam_binding(bucket):
storage_remove_bucket_conditional_iam_binding.remove_bucket_conditional_iam_binding(
bucket.name, ROLE, CONDITION_TITLE, CONDITION_DESCRIPTION, CONDITION_EXPRESSION
)
policy = bucket.get_iam_policy(requested_policy_version=3)
condition = {
"title": CONDITION_TITLE,
"description": CONDITION_DESCRIPTION,
"expression": CONDITION_EXPRESSION,
}
assert not any(
(binding["role"] == ROLE and binding.get("condition") == condition)
for binding in policy.bindings
)
def test_set_bucket_public_iam(public_bucket):
# The test project has org policy restricting identities by domain.
# Testing "domain:google.com" instead of "allUsers"
storage_set_bucket_public_iam.set_bucket_public_iam(public_bucket.name, ["domain:google.com"])
policy = public_bucket.get_iam_policy(requested_policy_version=3)
assert any(
binding["role"] == "roles/storage.objectViewer"
and "domain:google.com" in binding["members"]
for binding in policy.bindings
)
| googleapis/python-storage | samples/snippets/iam_test.py | Python | apache-2.0 | 5,106 |
import sys
import socket
import signal
import weakref
import errno
import logging
import pyuv
logging.basicConfig(level=logging.DEBUG)
STOPSIGNALS = (signal.SIGINT, signal.SIGTERM)
NONBLOCKING = (errno.EAGAIN, errno.EWOULDBLOCK)
if sys.platform == "win32":
NONBLOCKING = NONBLOCKING + (errno.WSAEWOULDBLOCK,)
class Connection(object):
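    """Echo handler for a single accepted socket, driven by a pyuv Poll watcher."""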
def __init__(self, sock, address, loop):
self.sock = sock
self.address = address
self.sock.setblocking(0)
        self.buf = b""
self.watcher = pyuv.Poll(loop, self.sock.fileno())
self.watcher.start(pyuv.UV_READABLE, self.io_cb)
logging.debug("{0}: ready".format(self))
def reset(self, events):
self.watcher.start(events, self.io_cb)
def handle_error(self, msg, level=logging.ERROR, exc_info=True):
logging.log(level, "{0}: {1} --> closing".format(self, msg), exc_info=exc_info)
self.close()
def handle_read(self):
try:
buf = self.sock.recv(1024)
        except socket.error as err:
            if err.args[0] not in NONBLOCKING:
                self.handle_error("error reading from {0}".format(self.sock))
            # nothing was read (EAGAIN) or the socket errored out; buf is unbound here
            return
        if buf:
self.buf += buf
self.reset(pyuv.UV_READABLE | pyuv.UV_WRITABLE)
else:
self.handle_error("connection closed by peer", logging.DEBUG, False)
def handle_write(self):
try:
sent = self.sock.send(self.buf)
except socket.error as err:
if err.args[0] not in NONBLOCKING:
self.handle_error("error writing to {0}".format(self.sock))
        else:
self.buf = self.buf[sent:]
if not self.buf:
self.reset(pyuv.UV_READABLE)
def io_cb(self, watcher, revents, error):
if error is not None:
logging.error("Error in connection: %d: %s" % (error, pyuv.errno.strerror(error)))
return
if revents & pyuv.UV_READABLE:
self.handle_read()
elif revents & pyuv.UV_WRITABLE:
self.handle_write()
def close(self):
self.watcher.stop()
self.watcher = None
self.sock.close()
logging.debug("{0}: closed".format(self))
class Server(object):
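    """Listening echo server: accepts clients and hands each one off to a Connection."""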
def __init__(self, address):
self.sock = socket.socket()
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.sock.bind(address)
self.sock.setblocking(0)
self.address = self.sock.getsockname()
self.loop = pyuv.Loop.default_loop()
self.poll_watcher = pyuv.Poll(self.loop, self.sock.fileno())
        # "async" became a reserved word in Python 3.7, so use a different attribute name
        self.async_handle = pyuv.Async(self.loop, self.async_cb)
self.conns = weakref.WeakValueDictionary()
self.signal_watchers = set()
def handle_error(self, msg, level=logging.ERROR, exc_info=True):
logging.log(level, "{0}: {1} --> stopping".format(self, msg), exc_info=exc_info)
self.stop()
def signal_cb(self, handle, signum):
        self.async_handle.send()
def async_cb(self, handle):
handle.close()
self.stop()
def io_cb(self, watcher, revents, error):
try:
while True:
try:
sock, address = self.sock.accept()
except socket.error as err:
if err.args[0] in NONBLOCKING:
break
else:
raise
else:
self.conns[address] = Connection(sock, address, self.loop)
except Exception:
self.handle_error("error accepting a connection")
def start(self):
self.sock.listen(socket.SOMAXCONN)
self.poll_watcher.start(pyuv.UV_READABLE, self.io_cb)
for sig in STOPSIGNALS:
handle = pyuv.Signal(self.loop)
handle.start(self.signal_cb, sig)
self.signal_watchers.add(handle)
logging.debug("{0}: started on {0.address}".format(self))
self.loop.run()
logging.debug("{0}: stopped".format(self))
def stop(self):
self.poll_watcher.stop()
for watcher in self.signal_watchers:
watcher.stop()
self.signal_watchers.clear()
self.sock.close()
for conn in self.conns.values():
conn.close()
logging.debug("{0}: stopping".format(self))
if __name__ == "__main__":
server = Server(("127.0.0.1", 9876))
server.start()
| saghul/pyuv | examples/echo-server-poll.py | Python | mit | 4,446 |
# Copyright (C) 2015 Custodia Project Contributors - see LICENSE file
from custodia.httpd.consumer import HTTPConsumer
from custodia.httpd.server import HTTPError
from custodia.httpd.authorizers import HTTPAuthorizer
from custodia.message.formats import Validator
from custodia.message.common import UnknownMessageType
from custodia.message.common import UnallowedMessage
from custodia.store.interface import CSStoreError
from custodia.store.interface import CSStoreExists
from custodia import log
import json
import os
class Namespaces(HTTPAuthorizer):
def __init__(self, *args, **kwargs):
super(Namespaces, self).__init__(*args, **kwargs)
self.path = self.config.get('path', '/')
# warn if self.path does not end with '/' ?
def handle(self, request):
# First of all check we are in the right path
path = request.get('path', '/')
if not path.startswith(self.path):
return None
if 'remote_user' not in request:
return False
# At the moment we just have one namespace, the user's name
namespaces = [request['remote_user']]
# Check the request is in a valid namespace
trail = request.get('trail', [])
if len(trail) > 0 and trail[0] != namespaces[0]:
return False
request['default_namespace'] = namespaces[0]
return True
class Secrets(HTTPConsumer):
def __init__(self, *args, **kwargs):
super(Secrets, self).__init__(*args, **kwargs)
self.allowed_keytypes = ['simple']
if self.config and 'allowed_keytypes' in self.config:
kt = self.config['allowed_keytypes'].split()
self.allowed_keytypes = kt
self._validator = Validator(self.allowed_keytypes)
self._auditlog = log.audit_log(self.config)
def _db_key(self, trail):
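        # Map a request trail such as ['namespace', 'name'] to the internal store path 'keys/namespace/name'.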
if len(trail) < 2:
raise HTTPError(403)
return os.path.join('keys', *trail)
def _db_container_key(self, default, trail):
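        # Resolve the container key for a trail, falling back to the caller's default namespace for an empty trail.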
f = None
if len(trail) > 1:
f = self._db_key(trail)
elif len(trail) == 1 and trail[0] != '':
raise HTTPError(403)
elif default is None:
            # No default namespace, fail
raise HTTPError(403)
else:
# Use the default namespace
f = self._db_key([default, ''])
return f
def _parse(self, request, value, name):
return self._validator.parse(request, value, name)
def _parent_exists(self, default, trail):
# check that the containers exist
basename = self._db_container_key(trail[0], '')
try:
keylist = self.root.store.list(basename)
except CSStoreError:
raise HTTPError(500)
# create default namespace if it is the only missing piece
if keylist is None and len(trail) == 2 and default == trail[0]:
container = self._db_container_key(default, '')
self.root.store.set(container, '')
return True
# check if any parent is missing
for n in range(1, len(trail)):
c = self._db_key(trail[:n] + [''])
if c not in keylist:
return False
return True
def GET(self, request, response):
trail = request.get('trail', [])
if len(trail) == 0 or trail[-1] == '':
self._list(trail, request, response)
else:
self._get_key(trail, request, response)
def PUT(self, request, response):
trail = request.get('trail', [])
if len(trail) == 0 or trail[-1] == '':
raise HTTPError(405)
else:
self._set_key(trail, request, response)
def DELETE(self, request, response):
trail = request.get('trail', [])
if len(trail) == 0:
raise HTTPError(405)
if trail[-1] == '':
self._destroy(trail, request, response)
else:
self._del_key(trail, request, response)
def POST(self, request, response):
trail = request.get('trail', [])
if len(trail) > 0 and trail[-1] == '':
self._create(trail, request, response)
else:
raise HTTPError(405)
def _list(self, trail, request, response):
default = request.get('default_namespace', None)
basename = self._db_container_key(default, trail)
userfilter = request.get('query', dict()).get('filter', '')
try:
keylist = self.root.store.list(basename + userfilter)
if keylist is None:
raise HTTPError(404)
# remove the base container itself
output = list()
for k in keylist:
if k == basename:
continue
# strip away the internal prefix for storing keys
name = k[len('keys/'):]
output.append(name)
response['output'] = json.dumps(output)
except CSStoreError:
raise HTTPError(500)
def _create(self, trail, request, response):
default = request.get('default_namespace', None)
basename = self._db_container_key(None, trail)
try:
ok = self._parent_exists(default, trail[:-1])
if not ok:
raise HTTPError(404)
self.root.store.set(basename, '')
except CSStoreExists:
raise HTTPError(409)
except CSStoreError:
raise HTTPError(500)
response['code'] = 201
def _destroy(self, trail, request, response):
basename = self._db_container_key(None, trail)
try:
keylist = self.root.store.list(basename)
if keylist is None:
raise HTTPError(404)
if basename not in keylist:
# uh ?
raise HTTPError(409)
if len(keylist) != 1:
raise HTTPError(409)
ret = self.root.store.cut(basename)
except CSStoreError:
raise HTTPError(500)
if ret is False:
raise HTTPError(404)
response['code'] = 204
def _client_name(self, request):
if 'remote_user' in request:
return request['remote_user']
elif 'creds' in request:
creds = request['creds']
return '<pid={pid:d} uid={uid:d} gid={gid:d}>'.format(**creds)
else:
return 'Unknown'
def _audit(self, ok, fail, fn, trail, request, response):
action = fail
client = self._client_name(request)
key = '/'.join(trail)
try:
fn(trail, request, response)
action = ok
finally:
self._auditlog.key_access(action, client, key)
def _get_key(self, trail, request, response):
self._audit(log.AUDIT_GET_ALLOWED, log.AUDIT_GET_DENIED,
self._int_get_key, trail, request, response)
def _int_get_key(self, trail, request, response):
# default to simple
query = request.get('query', '')
if len(query) == 0:
query = {'type': 'simple', 'value': ''}
try:
name = '/'.join(trail)
msg = self._parse(request, query, name)
except Exception as e:
raise HTTPError(406, str(e))
key = self._db_key(trail)
try:
output = self.root.store.get(key)
if output is None:
raise HTTPError(404)
response['output'] = msg.reply(output)
except CSStoreError:
raise HTTPError(500)
def _set_key(self, trail, request, response):
self._audit(log.AUDIT_SET_ALLOWED, log.AUDIT_SET_DENIED,
self._int_set_key, trail, request, response)
def _int_set_key(self, trail, request, response):
content_type = request.get('headers',
dict()).get('Content-Type', '')
if content_type.split(';')[0].strip() != 'application/json':
raise HTTPError(400, 'Invalid Content-Type')
body = request.get('body')
if body is None:
raise HTTPError(400)
value = bytes(body).decode('utf-8')
try:
name = '/'.join(trail)
msg = self._parse(request, json.loads(value), name)
except UnknownMessageType as e:
raise HTTPError(406, str(e))
except UnallowedMessage as e:
raise HTTPError(406, str(e))
except Exception as e:
raise HTTPError(400, str(e))
        # must call _db_key first as access control is done here for now,
        # otherwise users would be able to probe containers in namespaces
        # they do not have access to.
key = self._db_key(trail)
try:
default = request.get('default_namespace', None)
ok = self._parent_exists(default, trail)
if not ok:
raise HTTPError(404)
ok = self.root.store.set(key, msg.payload)
except CSStoreExists:
raise HTTPError(409)
except CSStoreError:
raise HTTPError(500)
response['code'] = 201
def _del_key(self, trail, request, response):
self._audit(log.AUDIT_DEL_ALLOWED, log.AUDIT_DEL_DENIED,
self._int_del_key, trail, request, response)
def _int_del_key(self, trail, request, response):
key = self._db_key(trail)
try:
ret = self.root.store.cut(key)
except CSStoreError:
raise HTTPError(500)
if ret is False:
raise HTTPError(404)
response['code'] = 204
# unit tests
import unittest
from custodia.store.sqlite import SqliteStore
class SecretsTests(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.secrets = Secrets({'auditlog': 'test.audit.log'})
cls.secrets.root.store = SqliteStore({'dburi': 'testdb.sqlite'})
cls.authz = Namespaces({})
@classmethod
    def tearDownClass(cls):
try:
os.unlink('test.audit.log')
os.unlink('testdb.sqlite')
except OSError:
pass
def check_authz(self, req):
if self.authz.handle(req) is False:
raise HTTPError(403)
def DELETE(self, req, rep):
self.check_authz(req)
self.secrets.DELETE(req, rep)
def GET(self, req, rep):
self.check_authz(req)
self.secrets.GET(req, rep)
def POST(self, req, rep):
self.check_authz(req)
self.secrets.POST(req, rep)
def PUT(self, req, rep):
self.check_authz(req)
self.secrets.PUT(req, rep)
def test_0_LISTkey_404(self):
req = {'remote_user': 'test',
'trail': ['test', '']}
rep = {}
with self.assertRaises(HTTPError) as err:
self.GET(req, rep)
self.assertEqual(err.exception.code, 404)
def test_1_PUTKey(self):
req = {'headers': {'Content-Type': 'application/json'},
'remote_user': 'test',
'trail': ['test', 'key1'],
'body': '{"type":"simple","value":"1234"}'.encode('utf-8')}
rep = {}
self.PUT(req, rep)
def test_2_GETKey(self):
req = {'remote_user': 'test',
'trail': ['test', 'key1']}
rep = {}
self.GET(req, rep)
self.assertEqual(json.loads(rep['output']),
{"type": "simple", "value": "1234"})
def test_3_LISTKeys(self):
req = {'remote_user': 'test',
'trail': ['test', '']}
rep = {}
self.GET(req, rep)
self.assertEqual(json.loads(rep['output']),
json.loads('["test/key1"]'))
def test_3_LISTKeys_2(self):
req = {'remote_user': 'test',
'query': {'filter': 'key'},
'trail': ['test', '']}
rep = {}
self.GET(req, rep)
self.assertEqual(json.loads(rep['output']),
json.loads('["test/key1"]'))
def test_4_PUTKey_errors_400_1(self):
req = {'headers': {'Content-Type': 'text/plain'},
'remote_user': 'test',
'trail': ['test', 'key2'],
'body': '{"type":"simple","value":"2345"}'.encode('utf-8')}
rep = {}
with self.assertRaises(HTTPError) as err:
self.PUT(req, rep)
self.assertEqual(err.exception.code, 400)
def test_4_PUTKey_errors_400_2(self):
req = {'headers': {'Content-Type': 'text/plain'},
'remote_user': 'test',
'trail': ['test', 'key2']}
rep = {}
with self.assertRaises(HTTPError) as err:
self.PUT(req, rep)
self.assertEqual(err.exception.code, 400)
def test_4_PUTKey_errors_400_3(self):
req = {'headers': {'Content-Type': 'text/plain'},
'remote_user': 'test',
'trail': ['test', 'key2'],
'body': '{"type":}"simple","value":"2345"}'.encode('utf-8')}
rep = {}
with self.assertRaises(HTTPError) as err:
self.PUT(req, rep)
self.assertEqual(err.exception.code, 400)
def test_4_PUTKey_errors_403(self):
req = {'headers': {'Content-Type': 'application/json; charset=utf-8'},
'remote_user': 'test',
'trail': ['case', 'key2'],
'body': '{"type":"simple","value":"2345"}'.encode('utf-8')}
rep = {}
with self.assertRaises(HTTPError) as err:
self.PUT(req, rep)
self.assertEqual(err.exception.code, 403)
def test_4_PUTKey_errors_404(self):
req = {'headers': {'Content-Type': 'application/json; charset=utf-8'},
'remote_user': 'test',
'trail': ['test', 'more', 'key1'],
'body': '{"type":"simple","value":"1234"}'.encode('utf-8')}
rep = {}
with self.assertRaises(HTTPError) as err:
self.PUT(req, rep)
self.assertEqual(err.exception.code, 404)
def test_4_PUTKey_errors_405(self):
req = {'headers': {'Content-Type': 'application/json; charset=utf-8'},
'remote_user': 'test',
'trail': ['test', 'key2', ''],
'body': '{"type":"simple","value":"2345"}'.encode('utf-8')}
rep = {}
with self.assertRaises(HTTPError) as err:
self.PUT(req, rep)
self.assertEqual(err.exception.code, 405)
def test_4_PUTKey_errors_409(self):
req = {'headers': {'Content-Type': 'application/json; charset=utf-8'},
'remote_user': 'test',
'trail': ['test', 'key3'],
'body': '{"type":"simple","value":"2345"}'.encode('utf-8')}
rep = {}
self.PUT(req, rep)
with self.assertRaises(HTTPError) as err:
self.PUT(req, rep)
self.assertEqual(err.exception.code, 409)
def test_5_GETKey_errors_403(self):
req = {'remote_user': 'case',
'trail': ['test', 'key1']}
rep = {}
with self.assertRaises(HTTPError) as err:
self.GET(req, rep)
self.assertEqual(err.exception.code, 403)
def test_5_GETkey_errors_404(self):
req = {'remote_user': 'test',
'trail': ['test', 'key0']}
rep = {}
with self.assertRaises(HTTPError) as err:
self.GET(req, rep)
self.assertEqual(err.exception.code, 404)
def test_5_GETkey_errors_406(self):
req = {'remote_user': 'test',
'query': {'type': 'complex'},
'trail': ['test', 'key1']}
rep = {}
with self.assertRaises(HTTPError) as err:
self.GET(req, rep)
self.assertEqual(err.exception.code, 406)
def test_6_LISTkeys_errors_404_1(self):
req = {'remote_user': 'test',
'trail': ['test', 'case', '']}
rep = {}
with self.assertRaises(HTTPError) as err:
self.GET(req, rep)
self.assertEqual(err.exception.code, 404)
def test_6_LISTkeys_errors_404_2(self):
req = {'remote_user': 'test',
'query': {'filter': 'foo'},
'trail': ['test', '']}
rep = {}
with self.assertRaises(HTTPError) as err:
self.GET(req, rep)
self.assertEqual(err.exception.code, 404)
def test_7_DELETEKey(self):
req = {'remote_user': 'test',
'trail': ['test', 'key1']}
rep = {}
self.DELETE(req, rep)
def test_7_DELETEKey_errors_403(self):
req = {'remote_user': 'case',
'trail': ['test', 'key1']}
rep = {}
with self.assertRaises(HTTPError) as err:
self.DELETE(req, rep)
self.assertEqual(err.exception.code, 403)
def test_7_DELETEKey_errors_404(self):
req = {'remote_user': 'test',
'trail': ['test', 'nokey']}
rep = {}
with self.assertRaises(HTTPError) as err:
self.DELETE(req, rep)
self.assertEqual(err.exception.code, 404)
def test_7_DELETEKey_errors_405(self):
req = {'remote_user': 'test'}
rep = {}
with self.assertRaises(HTTPError) as err:
self.DELETE(req, rep)
self.assertEqual(err.exception.code, 405)
def test_8_CREATEcont(self):
req = {'remote_user': 'test',
'trail': ['test', 'container', '']}
rep = {}
self.POST(req, rep)
self.assertEqual(rep['code'], 201)
    def test_8_CREATEcont_errors_403(self):
req = {'remote_user': 'case',
'trail': ['test', 'container', '']}
rep = {}
with self.assertRaises(HTTPError) as err:
self.POST(req, rep)
self.assertEqual(err.exception.code, 403)
    def test_8_CREATEcont_errors_404(self):
req = {'remote_user': 'test',
'trail': ['test', 'mid', 'container', '']}
rep = {}
with self.assertRaises(HTTPError) as err:
self.POST(req, rep)
self.assertEqual(err.exception.code, 404)
    def test_8_CREATEcont_errors_405(self):
req = {'remote_user': 'test',
'trail': ['test', 'container']}
rep = {}
with self.assertRaises(HTTPError) as err:
self.POST(req, rep)
self.assertEqual(err.exception.code, 405)
    def test_8_CREATEcont_errors_409(self):
req = {'remote_user': 'test',
'trail': ['test', 'exists', '']}
rep = {}
self.POST(req, rep)
with self.assertRaises(HTTPError) as err:
self.POST(req, rep)
self.assertEqual(err.exception.code, 409)
def test_8_DESTROYcont(self):
req = {'remote_user': 'test',
'trail': ['test', 'container', '']}
rep = {}
self.DELETE(req, rep)
self.assertEqual(rep['code'], 204)
    def test_8_DESTROYcont_errors_403(self):
req = {'remote_user': 'case',
'trail': ['test', 'container', '']}
rep = {}
with self.assertRaises(HTTPError) as err:
self.DELETE(req, rep)
self.assertEqual(err.exception.code, 403)
    def test_8_DESTROYcont_errors_404(self):
req = {'remote_user': 'test',
'trail': ['test', 'mid', 'container', '']}
rep = {}
with self.assertRaises(HTTPError) as err:
self.DELETE(req, rep)
self.assertEqual(err.exception.code, 404)
    def test_8_DESTROYcont_errors_409(self):
self.test_1_PUTKey()
req = {'remote_user': 'test',
'trail': ['test', '']}
rep = {}
with self.assertRaises(HTTPError) as err:
self.DELETE(req, rep)
self.assertEqual(err.exception.code, 409)
| cgwalters/custodia | custodia/secrets.py | Python | gpl-3.0 | 19,749 |
#! /usr/bin/env python
# ==========================================================================
#
# Copyright NumFOCUS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ===========================================================================
import unittest
import os
import SimpleITK as sitk
class ExternalViewerTest(unittest.TestCase):
"""Test external viewer launch"""
IMG = None
@classmethod
def setUpClass(cls):
ExternalViewerTest.IMG = sitk.GaussianSource()
this_dir = os.path.dirname(os.path.abspath(__file__))
cmd = 'python ' + this_dir + '/dummy_viewer.py'
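        # Point SITK_SHOW_COMMAND at the bundled dummy viewer so Show() never launches a real application.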
os.environ['SITK_SHOW_COMMAND'] = cmd
def test_show(self):
print("\n\nFirst Show Test")
print("Trying command: ", os.environ['SITK_SHOW_COMMAND'])
fail = False
try:
sitk.Show(ExternalViewerTest.IMG, debugOn=True)
except BaseException:
fail = True
if fail:
self.fail("Show failed for command " +
os.environ['SITK_SHOW_COMMAND'])
def test_show2(self):
"""Show2 test should work even though we set SITK_SHOW_COMMAND to
        something else, since that var is only read at the beginning. This new
        setting is ignored."""
print("\n\nSecond Show Test")
os.environ['SITK_SHOW_COMMAND'] = 'none'
fail = False
try:
sitk.Show(ExternalViewerTest.IMG, debugOn=True)
except BaseException:
fail = True
if fail:
self.fail("Show failed for command " +
os.environ['SITK_SHOW_COMMAND'])
def test_image_viewer(self):
print("\n\nBasic Image Viewer Test")
try:
viewer = sitk.ImageViewer()
viewer.SetTitle("Basic Image Viewer Test")
viewer.Execute(ExternalViewerTest.IMG)
print("\nImageViewer parameters")
print(" Application: ", viewer.GetApplication())
print(" Command: ", viewer.GetCommand())
print(" Extension: ", viewer.GetFileExtension())
print("\nGlobal ImageViewer parameters")
print(" Search path: ",
sitk.ImageViewer.GetGlobalDefaultSearchPath())
print(" Default executable names: ",
sitk.ImageViewer.GetGlobalDefaultExecutableNames())
print(" Process delay: ", sitk.ImageViewer.GetProcessDelay())
print(" Debug flag: ", sitk.ImageViewer.GetGlobalDefaultDebug())
except BaseException:
self.fail("Basic Image Viewer Test FAILED")
def test_bad_image_viewer(self):
print("\n\nBad Image Viewer Test")
try:
viewer = sitk.ImageViewer()
viewer.SetCommand('none')
viewer.SetTitle("BAD Image Viewer Test")
viewer.Execute(ExternalViewerTest.IMG)
except BaseException:
print("Exception triggered, as expected")
if __name__ == '__main__':
unittest.main()
| richardbeare/SimpleITK | Testing/Unit/Python/sitkExternalViewerTest.py | Python | apache-2.0 | 3,560 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Implements rotations, including spherical rotations as defined in WCS Paper II
[1]_
`RotateNative2Celestial` and `RotateCelestial2Native` follow the convention in
WCS Paper II to rotate to/from a native sphere and the celestial sphere.
The implementation uses `EulerAngleRotation`. The model parameters are
three angles: the longitude (``lon``) and latitude (``lat``) of the fiducial point
in the celestial system (``CRVAL`` keywords in FITS), and the longitude of the celestial
pole in the native system (``lon_pole``). The Euler angles are ``lon+90``, ``90-lat``
and ``-(lon_pole-90)``.
References
----------
.. [1] Calabretta, M.R., Greisen, E.W., 2002, A&A, 395, 1077 (Paper II)
"""
import math
import numpy as np
from .core import Model
from .parameters import Parameter
from astropy.coordinates.matrix_utilities import rotation_matrix, matrix_product
from astropy import units as u
from .utils import _to_radian, _to_orig_unit
__all__ = ['RotateCelestial2Native', 'RotateNative2Celestial', 'Rotation2D',
'EulerAngleRotation', 'RotationSequence3D', 'SphericalRotationSequence']
def _create_matrix(angles, axes_order):
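    """Build the combined rotation matrix for a sequence of (angle, axis) pairs; angles are expected in radians."""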
matrices = []
for angle, axis in zip(angles, axes_order):
if isinstance(angle, u.Quantity):
angle = angle.value
angle = angle.item()
matrices.append(rotation_matrix(angle, axis, unit=u.rad))
result = matrix_product(*matrices[::-1])
return result
def spherical2cartesian(alpha, delta):
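    """Convert spherical angles ``alpha``, ``delta`` (in deg) to Cartesian coordinates on the unit sphere."""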
alpha = np.deg2rad(alpha)
delta = np.deg2rad(delta)
x = np.cos(alpha) * np.cos(delta)
y = np.cos(delta) * np.sin(alpha)
z = np.sin(delta)
return np.array([x, y, z])
def cartesian2spherical(x, y, z):
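    """Convert Cartesian coordinates to spherical angles ``alpha``, ``delta`` in deg."""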
h = np.hypot(x, y)
alpha = np.rad2deg(np.arctan2(y, x))
delta = np.rad2deg(np.arctan2(z, h))
return alpha, delta
class RotationSequence3D(Model):
"""
Perform a series of rotations about different axis in 3D space.
Positive angles represent a counter-clockwise rotation.
Parameters
----------
angles : array_like
Angles of rotation in deg in the order of axes_order.
axes_order : str
A sequence of 'x', 'y', 'z' corresponding to axis of rotation.
Examples
--------
>>> model = RotationSequence3D([1.1, 2.1, 3.1, 4.1], axes_order='xyzx')
"""
standard_broadcasting = False
_separable = False
n_inputs = 3
n_outputs = 3
angles = Parameter(default=[], getter=_to_orig_unit, setter=_to_radian)
def __init__(self, angles, axes_order, name=None):
self.axes = ['x', 'y', 'z']
unrecognized = set(axes_order).difference(self.axes)
if unrecognized:
raise ValueError("Unrecognized axis label {0}; "
"should be one of {1} ".format(unrecognized,
self.axes))
self.axes_order = axes_order
if len(angles) != len(axes_order):
raise ValueError("The number of angles {0} should match the number \
of axes {1}.".format(len(angles),
len(axes_order)))
super().__init__(angles, name=name)
self._inputs = ('x', 'y', 'z')
self._outputs = ('x', 'y', 'z')
@property
def inverse(self):
"""Inverse rotation."""
angles = self.angles.value[::-1] * -1
return self.__class__(angles, axes_order=self.axes_order[::-1])
def evaluate(self, x, y, z, angles):
"""
Apply the rotation to a set of 3D Cartesian coordinates.
"""
        if not (x.shape == y.shape == z.shape):
raise ValueError("Expected input arrays to have the same shape")
# Note: If the original shape was () (an array scalar) convert to a
# 1-element 1-D array on output for consistency with most other models
orig_shape = x.shape or (1,)
inarr = np.array([x.flatten(), y.flatten(), z.flatten()])
result = np.dot(_create_matrix(angles[0], self.axes_order), inarr)
x, y, z = result[0], result[1], result[2]
x.shape = y.shape = z.shape = orig_shape
return x, y, z
class SphericalRotationSequence(RotationSequence3D):
"""
Perform a sequence of rotations about arbitrary number of axes
in spherical coordinates.
Parameters
----------
angles : list
A sequence of angles (in deg).
axes_order : str
A sequence of characters ('x', 'y', or 'z') corresponding to the
axis of rotation and matching the order in ``angles``.
"""
def __init__(self, angles, axes_order, name=None, **kwargs):
self._n_inputs = 2
self._n_outputs = 2
super().__init__(angles, axes_order=axes_order, name=name, **kwargs)
self._inputs = ("lon", "lat")
self._outputs = ("lon", "lat")
@property
def n_inputs(self):
return self._n_inputs
@property
def n_outputs(self):
return self._n_outputs
def evaluate(self, lon, lat, angles):
x, y, z = spherical2cartesian(lon, lat)
x1, y1, z1 = super().evaluate(x, y, z, angles)
lon, lat = cartesian2spherical(x1, y1, z1)
return lon, lat
class _EulerRotation:
"""
Base class which does the actual computation.
"""
_separable = False
def evaluate(self, alpha, delta, phi, theta, psi, axes_order):
shape = None
if isinstance(alpha, np.ndarray) and alpha.ndim == 2:
alpha = alpha.flatten()
delta = delta.flatten()
shape = alpha.shape
inp = spherical2cartesian(alpha, delta)
matrix = _create_matrix([phi, theta, psi], axes_order)
result = np.dot(matrix, inp)
a, b = cartesian2spherical(*result)
if shape is not None:
a.shape = shape
b.shape = shape
return a, b
_input_units_strict = True
_input_units_allow_dimensionless = True
@property
def input_units(self):
""" Input units. """
return {'alpha': u.deg, 'delta': u.deg}
@property
def return_units(self):
""" Output units. """
return {'alpha': u.deg, 'delta': u.deg}
class EulerAngleRotation(_EulerRotation, Model):
"""
Implements Euler angle intrinsic rotations.
Rotates one coordinate system into another (fixed) coordinate system.
All coordinate systems are right-handed. The sign of the angles is
    determined by the right-hand rule.
Parameters
----------
phi, theta, psi : float or `~astropy.units.Quantity`
"proper" Euler angles in deg.
If floats, they should be in deg.
axes_order : str
A 3 character string, a combination of 'x', 'y' and 'z',
where each character denotes an axis in 3D space.
"""
n_inputs = 2
n_outputs = 2
phi = Parameter(default=0, getter=_to_orig_unit, setter=_to_radian)
theta = Parameter(default=0, getter=_to_orig_unit, setter=_to_radian)
psi = Parameter(default=0, getter=_to_orig_unit, setter=_to_radian)
def __init__(self, phi, theta, psi, axes_order, **kwargs):
self.axes = ['x', 'y', 'z']
if len(axes_order) != 3:
raise TypeError(
"Expected axes_order to be a character sequence of length 3,"
"got {}".format(axes_order))
unrecognized = set(axes_order).difference(self.axes)
if unrecognized:
raise ValueError("Unrecognized axis label {}; "
"should be one of {} ".format(unrecognized, self.axes))
self.axes_order = axes_order
qs = [isinstance(par, u.Quantity) for par in [phi, theta, psi]]
if any(qs) and not all(qs):
raise TypeError("All parameters should be of the same type - float or Quantity.")
super().__init__(phi=phi, theta=theta, psi=psi, **kwargs)
self._inputs = ('alpha', 'delta')
self._outputs = ('alpha', 'delta')
    @property
    def inverse(self):
return self.__class__(phi=-self.psi,
theta=-self.theta,
psi=-self.phi,
axes_order=self.axes_order[::-1])
def evaluate(self, alpha, delta, phi, theta, psi):
a, b = super().evaluate(alpha, delta, phi, theta, psi, self.axes_order)
return a, b
class _SkyRotation(_EulerRotation, Model):
"""
Base class for RotateNative2Celestial and RotateCelestial2Native.
"""
lon = Parameter(default=0, getter=_to_orig_unit, setter=_to_radian)
lat = Parameter(default=0, getter=_to_orig_unit, setter=_to_radian)
lon_pole = Parameter(default=0, getter=_to_orig_unit, setter=_to_radian)
def __init__(self, lon, lat, lon_pole, **kwargs):
qs = [isinstance(par, u.Quantity) for par in [lon, lat, lon_pole]]
if any(qs) and not all(qs):
raise TypeError("All parameters should be of the same type - float or Quantity.")
super().__init__(lon, lat, lon_pole, **kwargs)
self.axes_order = 'zxz'
def _evaluate(self, phi, theta, lon, lat, lon_pole):
alpha, delta = super().evaluate(phi, theta, lon, lat, lon_pole,
self.axes_order)
mask = alpha < 0
if isinstance(mask, np.ndarray):
alpha[mask] += 360
        elif mask:
            alpha += 360
return alpha, delta
class RotateNative2Celestial(_SkyRotation):
"""
Transform from Native to Celestial Spherical Coordinates.
Parameters
----------
    lon : float or `~astropy.units.Quantity`
        Celestial longitude of the fiducial point.
    lat : float or `~astropy.units.Quantity`
        Celestial latitude of the fiducial point.
    lon_pole : float or `~astropy.units.Quantity`
        Longitude of the celestial pole in the native system.
Notes
-----
If ``lon``, ``lat`` and ``lon_pole`` are numerical values they
should be in units of deg. Inputs are angles on the native sphere.
Outputs are angles on the celestial sphere.
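    Examples
    --------
    A small usage sketch; the parameter values below are illustrative only:
    >>> from astropy.modeling.rotations import RotateNative2Celestial
    >>> n2c = RotateNative2Celestial(lon=5.6, lat=-72.05, lon_pole=180)
    >>> alpha_C, delta_C = n2c(0, 90)  # native angles in deg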
"""
n_inputs = 2
n_outputs = 2
@property
def input_units(self):
""" Input units. """
return {'phi_N': u.deg, 'theta_N': u.deg}
@property
def return_units(self):
""" Output units. """
return {'alpha_C': u.deg, 'delta_C': u.deg}
def __init__(self, lon, lat, lon_pole, **kwargs):
super().__init__(lon, lat, lon_pole, **kwargs)
self.inputs = ('phi_N', 'theta_N')
self.outputs = ('alpha_C', 'delta_C')
def evaluate(self, phi_N, theta_N, lon, lat, lon_pole):
"""
Parameters
----------
phi_N, theta_N : float (deg) or `~astropy.units.Quantity`
Angles in the Native coordinate system.
lon, lat, lon_pole : float (in deg) or `~astropy.units.Quantity`
Parameter values when the model was initialized.
Returns
-------
alpha_C, delta_C : float (deg) or `~astropy.units.Quantity`
Angles on the Celestial sphere.
"""
# The values are in radians since they have already been through the setter.
if isinstance(lon, u.Quantity):
lon = lon.value
lat = lat.value
lon_pole = lon_pole.value
# Convert to Euler angles
phi = lon_pole - np.pi / 2
theta = - (np.pi / 2 - lat)
psi = -(np.pi / 2 + lon)
alpha_C, delta_C = super()._evaluate(phi_N, theta_N, phi, theta, psi)
return alpha_C, delta_C
@property
def inverse(self):
# convert to angles on the celestial sphere
return RotateCelestial2Native(self.lon, self.lat, self.lon_pole)
class RotateCelestial2Native(_SkyRotation):
"""
Transform from Celestial to Native Spherical Coordinates.
Parameters
----------
    lon : float or `~astropy.units.Quantity`
        Celestial longitude of the fiducial point.
    lat : float or `~astropy.units.Quantity`
        Celestial latitude of the fiducial point.
    lon_pole : float or `~astropy.units.Quantity`
        Longitude of the celestial pole in the native system.
Notes
-----
If ``lon``, ``lat`` and ``lon_pole`` are numerical values they should be
in units of deg. Inputs are angles on the celestial sphere.
Outputs are angles on the native sphere.
"""
n_inputs = 2
n_outputs = 2
@property
def input_units(self):
""" Input units. """
return {'alpha_C': u.deg, 'delta_C': u.deg}
@property
def return_units(self):
""" Output units. """
return {'phi_N': u.deg, 'theta_N': u.deg}
def __init__(self, lon, lat, lon_pole, **kwargs):
super().__init__(lon, lat, lon_pole, **kwargs)
# Inputs are angles on the celestial sphere
self.inputs = ('alpha_C', 'delta_C')
# Outputs are angles on the native sphere
self.outputs = ('phi_N', 'theta_N')
def evaluate(self, alpha_C, delta_C, lon, lat, lon_pole):
"""
Parameters
----------
alpha_C, delta_C : float (deg) or `~astropy.units.Quantity`
Angles in the Celestial coordinate frame.
lon, lat, lon_pole : float (deg) or `~astropy.units.Quantity`
Parameter values when the model was initialized.
Returns
-------
phi_N, theta_N : float (deg) or `~astropy.units.Quantity`
Angles on the Native sphere.
"""
if isinstance(lon, u.Quantity):
lon = lon.value
lat = lat.value
lon_pole = lon_pole.value
# Convert to Euler angles
phi = (np.pi / 2 + lon)
theta = (np.pi / 2 - lat)
psi = -(lon_pole - np.pi / 2)
phi_N, theta_N = super()._evaluate(alpha_C, delta_C, phi, theta, psi)
return phi_N, theta_N
@property
def inverse(self):
return RotateNative2Celestial(self.lon, self.lat, self.lon_pole)
class Rotation2D(Model):
"""
Perform a 2D rotation given an angle.
Positive angles represent a counter-clockwise rotation and vice-versa.
Parameters
----------
angle : float or `~astropy.units.Quantity`
Angle of rotation (if float it should be in deg).
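    Examples
    --------
    A minimal sketch; a positive angle rotates counter-clockwise, so (1, 0)
    should land near (0, 1):
    >>> from astropy.modeling.rotations import Rotation2D
    >>> rot = Rotation2D(angle=90)
    >>> x, y = rot(1, 0)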
"""
n_inputs = 2
n_outputs = 2
_separable = False
angle = Parameter(default=0.0, getter=_to_orig_unit, setter=_to_radian)
def __init__(self, angle=angle, **kwargs):
super().__init__(angle=angle, **kwargs)
self._inputs = ("x", "y")
self._outputs = ("x", "y")
@property
def inverse(self):
"""Inverse rotation."""
return self.__class__(angle=-self.angle)
@classmethod
def evaluate(cls, x, y, angle):
"""
Rotate (x, y) about ``angle``.
Parameters
----------
x, y : array_like
Input quantities
angle : float (deg) or `~astropy.units.Quantity`
Angle of rotations.
"""
if x.shape != y.shape:
raise ValueError("Expected input arrays to have the same shape")
# If one argument has units, enforce they both have units and they are compatible.
x_unit = getattr(x, 'unit', None)
y_unit = getattr(y, 'unit', None)
has_units = x_unit is not None and y_unit is not None
if x_unit != y_unit:
if has_units and y_unit.is_equivalent(x_unit):
y = y.to(x_unit)
y_unit = x_unit
else:
raise u.UnitsError("x and y must have compatible units")
# Note: If the original shape was () (an array scalar) convert to a
# 1-element 1-D array on output for consistency with most other models
orig_shape = x.shape or (1,)
inarr = np.array([x.flatten(), y.flatten()])
if isinstance(angle, u.Quantity):
angle = angle.to_value(u.rad)
result = np.dot(cls._compute_matrix(angle), inarr)
x, y = result[0], result[1]
x.shape = y.shape = orig_shape
if has_units:
return u.Quantity(x, unit=x_unit), u.Quantity(y, unit=y_unit)
else:
return x, y
@staticmethod
def _compute_matrix(angle):
return np.array([[math.cos(angle), -math.sin(angle)],
[math.sin(angle), math.cos(angle)]],
dtype=np.float64)
| stargaser/astropy | astropy/modeling/rotations.py | Python | bsd-3-clause | 16,503 |
# -*- coding: utf-8 -*-
# Copyright (C) 2014-2017 Andrey Antukh <[email protected]>
# Copyright (C) 2014-2017 Jesús Espino <[email protected]>
# Copyright (C) 2014-2017 David Barragán <[email protected]>
# Copyright (C) 2014-2017 Alejandro Alonso <[email protected]>
# Copyright (C) 2014-2017 Anler Hernández <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.urlresolvers import reverse
from taiga.base.utils import json
from tests import factories as f
from tests.utils import disconnect_signals, reconnect_signals
import pytest
pytestmark = pytest.mark.django_db
def setup_module(module):
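    # Signal handlers are disconnected so factory-created fixtures do not trigger side effects.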
disconnect_signals()
def teardown_module(module):
reconnect_signals()
def test_auth_create(client):
url = reverse('auth-list')
user = f.UserFactory.create()
login_data = json.dumps({
"type": "normal",
"username": user.username,
"password": user.username,
})
result = client.post(url, login_data, content_type="application/json")
assert result.status_code == 200
def test_auth_action_register(client, settings):
settings.PUBLIC_REGISTER_ENABLED = True
url = reverse('auth-register')
register_data = json.dumps({
"type": "public",
"username": "test",
"password": "test",
"full_name": "test",
"email": "[email protected]",
})
result = client.post(url, register_data, content_type="application/json")
assert result.status_code == 201
| dayatz/taiga-back | tests/integration/resources_permissions/test_auth_resources.py | Python | agpl-3.0 | 2,107 |
#!/usr/bin/python
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: ec2_vpc_vpn
short_description: Create, modify, and delete EC2 VPN connections.
description:
- This module creates, modifies, and deletes VPN connections. Idempotence is achieved by using the filters
option or specifying the VPN connection identifier.
version_added: "2.4"
extends_documentation_fragment:
- ec2
- aws
requirements: ['boto3', 'botocore']
author: "Sloane Hertel (@s-hertel)"
options:
state:
description:
- The desired state of the VPN connection.
choices: ['present', 'absent']
default: present
required: no
type: str
customer_gateway_id:
description:
- The ID of the customer gateway.
type: str
connection_type:
description:
- The type of VPN connection.
- At this time only 'ipsec.1' is supported.
default: ipsec.1
type: str
vpn_gateway_id:
description:
- The ID of the virtual private gateway.
type: str
vpn_connection_id:
description:
- The ID of the VPN connection. Required to modify or delete a connection if the filters option does not provide a unique match.
type: str
tags:
description:
- Tags to attach to the VPN connection.
type: dict
purge_tags:
description:
      - Whether or not to delete VPN connection tags that are associated with the connection but not specified in the task.
type: bool
default: false
static_only:
description:
- Indicates whether the VPN connection uses static routes only. Static routes must be used for devices that don't support BGP.
default: False
type: bool
required: no
tunnel_options:
description:
- An optional list object containing no more than two dict members, each of which may contain 'TunnelInsideCidr'
and/or 'PreSharedKey' keys with appropriate string values. AWS defaults will apply in absence of either of
the aforementioned keys.
required: no
version_added: "2.5"
type: list
elements: dict
suboptions:
TunnelInsideCidr:
type: str
description: The range of inside IP addresses for the tunnel.
PreSharedKey:
type: str
description: The pre-shared key (PSK) to establish initial authentication between the virtual private gateway and customer gateway.
filters:
description:
- An alternative to using vpn_connection_id. If multiple matches are found, vpn_connection_id is required.
If one of the following suboptions is a list of items to filter by, only one item needs to match to find the VPN
that correlates. e.g. if the filter 'cidr' is ['194.168.2.0/24', '192.168.2.0/24'] and the VPN route only has the
destination cidr block of '192.168.2.0/24' it will be found with this filter (assuming there are not multiple
VPNs that are matched). Another example, if the filter 'vpn' is equal to ['vpn-ccf7e7ad', 'vpn-cb0ae2a2'] and one
        of the VPNs has the state deleted (exists but is unmodifiable) and the other exists and is not deleted,
it will be found via this filter. See examples.
suboptions:
cgw-config:
description:
- The customer gateway configuration of the VPN as a string (in the format of the return value) or a list of those strings.
static-routes-only:
description:
- The type of routing; true or false.
cidr:
description:
- The destination cidr of the VPN's route as a string or a list of those strings.
bgp:
description:
- The BGP ASN number associated with a BGP device. Only works if the connection is attached.
This filtering option is currently not working.
vpn:
description:
- The VPN connection id as a string or a list of those strings.
vgw:
description:
- The virtual private gateway as a string or a list of those strings.
tag-keys:
description:
- The key of a tag as a string or a list of those strings.
tag-values:
description:
- The value of a tag as a string or a list of those strings.
tags:
description:
- A dict of key value pairs.
cgw:
description:
- The customer gateway id as a string or a list of those strings.
type: dict
routes:
description:
- Routes to add to the connection.
type: list
elements: str
purge_routes:
description:
      - Whether or not to delete VPN connection routes that are not specified in the task.
type: bool
wait_timeout:
description:
- How long before wait gives up, in seconds.
default: 600
type: int
required: false
version_added: "2.8"
delay:
description:
      - The time to wait, in seconds, before checking the operation again.
required: false
type: int
default: 15
version_added: "2.8"
"""
EXAMPLES = """
# Note: None of these examples set aws_access_key, aws_secret_key, or region.
# It is assumed that their matching environment variables are set.
- name: create a VPN connection
ec2_vpc_vpn:
state: present
vpn_gateway_id: vgw-XXXXXXXX
customer_gateway_id: cgw-XXXXXXXX
- name: modify VPN connection tags
ec2_vpc_vpn:
state: present
vpn_connection_id: vpn-XXXXXXXX
tags:
Name: ansible-tag-1
Other: ansible-tag-2
- name: delete a connection
ec2_vpc_vpn:
vpn_connection_id: vpn-XXXXXXXX
state: absent
- name: modify VPN tags (identifying VPN by filters)
ec2_vpc_vpn:
state: present
filters:
cidr: 194.168.1.0/24
tag-keys:
- Ansible
- Other
tags:
New: Tag
purge_tags: true
static_only: true
- name: set up VPN with tunnel options utilizing 'TunnelInsideCidr' only
ec2_vpc_vpn:
state: present
filters:
vpn: vpn-XXXXXXXX
static_only: true
tunnel_options:
-
TunnelInsideCidr: '169.254.100.1/30'
-
TunnelInsideCidr: '169.254.100.5/30'
- name: add routes and remove any preexisting ones
ec2_vpc_vpn:
state: present
filters:
vpn: vpn-XXXXXXXX
routes:
- 195.168.2.0/24
- 196.168.2.0/24
purge_routes: true
- name: remove all routes
ec2_vpc_vpn:
state: present
vpn_connection_id: vpn-XXXXXXXX
routes: []
purge_routes: true
- name: delete a VPN identified by filters
ec2_vpc_vpn:
state: absent
filters:
tags:
Ansible: Tag
"""
RETURN = """
changed:
description: If the VPN connection has changed.
type: bool
returned: always
sample:
changed: true
customer_gateway_configuration:
description: The configuration of the VPN connection.
returned: I(state=present)
type: str
customer_gateway_id:
description: The customer gateway connected via the connection.
type: str
returned: I(state=present)
sample:
customer_gateway_id: cgw-1220c87b
vpn_gateway_id:
description: The virtual private gateway connected via the connection.
type: str
returned: I(state=present)
sample:
vpn_gateway_id: vgw-cb0ae2a2
options:
description: The VPN connection options (currently only containing static_routes_only).
type: complex
returned: I(state=present)
contains:
static_routes_only:
description: If the VPN connection only allows static routes.
returned: I(state=present)
type: str
sample:
static_routes_only: true
routes:
description: The routes of the VPN connection.
type: list
returned: I(state=present)
sample:
routes: [{
'destination_cidr_block': '192.168.1.0/24',
'state': 'available'
}]
state:
description: The status of the VPN connection.
type: str
returned: I(state=present)
sample:
state: available
tags:
description: The tags associated with the connection.
type: dict
returned: I(state=present)
sample:
tags:
name: ansible-test
other: tag
type:
description: The type of VPN connection (currently only ipsec.1 is available).
type: str
returned: I(state=present)
sample:
type: "ipsec.1"
vgw_telemetry:
type: list
returned: I(state=present)
description: The telemetry for the VPN tunnel.
sample:
vgw_telemetry: [{
'outside_ip_address': 'string',
'status': 'up',
'last_status_change': datetime(2015, 1, 1),
'status_message': 'string',
'accepted_route_count': 123
}]
vpn_connection_id:
description: The identifier for the VPN connection.
type: str
returned: I(state=present)
sample:
vpn_connection_id: vpn-781e0e19
"""
from ansible.module_utils.aws.core import AnsibleAWSModule
from ansible.module_utils._text import to_text
from ansible.module_utils.ec2 import (
camel_dict_to_snake_dict,
boto3_tag_list_to_ansible_dict,
compare_aws_tags,
ansible_dict_to_boto3_tag_list,
)
try:
from botocore.exceptions import BotoCoreError, ClientError, WaiterError
except ImportError:
pass # Handled by AnsibleAWSModule
class VPNConnectionException(Exception):
def __init__(self, msg, exception=None):
self.msg = msg
self.exception = exception
def find_connection(connection, module_params, vpn_connection_id=None):
''' Looks for a unique VPN connection. Uses find_connection_response() to return the connection found, None,
or raise an error if there were multiple viable connections. '''
filters = module_params.get('filters')
# vpn_connection_id may be provided via module option; takes precedence over any filter values
if not vpn_connection_id and module_params.get('vpn_connection_id'):
vpn_connection_id = module_params.get('vpn_connection_id')
if not isinstance(vpn_connection_id, list) and vpn_connection_id:
vpn_connection_id = [to_text(vpn_connection_id)]
elif isinstance(vpn_connection_id, list):
vpn_connection_id = [to_text(connection) for connection in vpn_connection_id]
formatted_filter = []
# if vpn_connection_id is provided it will take precedence over any filters since it is a unique identifier
if not vpn_connection_id:
formatted_filter = create_filter(module_params, provided_filters=filters)
# see if there is a unique matching connection
try:
if vpn_connection_id:
existing_conn = connection.describe_vpn_connections(VpnConnectionIds=vpn_connection_id,
Filters=formatted_filter)
else:
existing_conn = connection.describe_vpn_connections(Filters=formatted_filter)
except (BotoCoreError, ClientError) as e:
raise VPNConnectionException(msg="Failed while describing VPN connection.",
exception=e)
return find_connection_response(connections=existing_conn)
def add_routes(connection, vpn_connection_id, routes_to_add):
for route in routes_to_add:
try:
connection.create_vpn_connection_route(VpnConnectionId=vpn_connection_id,
DestinationCidrBlock=route)
except (BotoCoreError, ClientError) as e:
raise VPNConnectionException(msg="Failed while adding route {0} to the VPN connection {1}.".format(route, vpn_connection_id),
exception=e)
def remove_routes(connection, vpn_connection_id, routes_to_remove):
for route in routes_to_remove:
try:
connection.delete_vpn_connection_route(VpnConnectionId=vpn_connection_id,
DestinationCidrBlock=route)
except (BotoCoreError, ClientError) as e:
raise VPNConnectionException(msg="Failed to remove route {0} from the VPN connection {1}.".format(route, vpn_connection_id),
exception=e)
def create_filter(module_params, provided_filters):
""" Creates a filter using the user-specified parameters and unmodifiable options that may have been specified in the task """
boto3ify_filter = {'cgw-config': 'customer-gateway-configuration',
'static-routes-only': 'option.static-routes-only',
'cidr': 'route.destination-cidr-block',
'bgp': 'bgp-asn',
'vpn': 'vpn-connection-id',
'vgw': 'vpn-gateway-id',
'tag-keys': 'tag-key',
'tag-values': 'tag-value',
'tags': 'tag',
'cgw': 'customer-gateway-id'}
# unmodifiable options and their filter name counterpart
param_to_filter = {"customer_gateway_id": "customer-gateway-id",
"vpn_gateway_id": "vpn-gateway-id",
"vpn_connection_id": "vpn-connection-id"}
flat_filter_dict = {}
formatted_filter = []
for raw_param in dict(provided_filters):
# fix filter names to be recognized by boto3
if raw_param in boto3ify_filter:
param = boto3ify_filter[raw_param]
provided_filters[param] = provided_filters.pop(raw_param)
        elif raw_param in list(boto3ify_filter.values()):
param = raw_param
else:
raise VPNConnectionException(msg="{0} is not a valid filter.".format(raw_param))
# reformat filters with special formats
if param == 'tag':
for key in provided_filters[param]:
formatted_key = 'tag:' + key
if isinstance(provided_filters[param][key], list):
flat_filter_dict[formatted_key] = str(provided_filters[param][key])
else:
flat_filter_dict[formatted_key] = [str(provided_filters[param][key])]
elif param == 'option.static-routes-only':
flat_filter_dict[param] = [str(provided_filters[param]).lower()]
else:
if isinstance(provided_filters[param], list):
flat_filter_dict[param] = provided_filters[param]
else:
flat_filter_dict[param] = [str(provided_filters[param])]
# if customer_gateway, vpn_gateway, or vpn_connection was specified in the task but not the filter, add it
for param in param_to_filter:
if param_to_filter[param] not in flat_filter_dict and module_params.get(param):
flat_filter_dict[param_to_filter[param]] = [module_params.get(param)]
# change the flat dict into something boto3 will understand
formatted_filter = [{'Name': key, 'Values': value} for key, value in flat_filter_dict.items()]
return formatted_filter
def find_connection_response(connections=None):
""" Determine if there is a viable unique match in the connections described. Returns the unique VPN connection if one is found,
returns None if the connection does not exist, raise an error if multiple matches are found. """
# Found no connections
if not connections or 'VpnConnections' not in connections:
return None
# Too many results
elif connections and len(connections['VpnConnections']) > 1:
viable = []
for each in connections['VpnConnections']:
# deleted connections are not modifiable
if each['State'] not in ("deleted", "deleting"):
viable.append(each)
if len(viable) == 1:
# Found one viable result; return unique match
return viable[0]
elif len(viable) == 0:
# Found a result but it was deleted already; since there was only one viable result create a new one
return None
else:
raise VPNConnectionException(msg="More than one matching VPN connection was found. "
"To modify or delete a VPN please specify vpn_connection_id or add filters.")
# Found unique match
elif connections and len(connections['VpnConnections']) == 1:
# deleted connections are not modifiable
if connections['VpnConnections'][0]['State'] not in ("deleted", "deleting"):
return connections['VpnConnections'][0]
def create_connection(connection, customer_gateway_id, static_only, vpn_gateway_id, connection_type, max_attempts, delay, tunnel_options=None):
""" Creates a VPN connection """
options = {'StaticRoutesOnly': static_only}
if tunnel_options and len(tunnel_options) <= 2:
t_opt = []
for m in tunnel_options:
# See Boto3 docs regarding 'create_vpn_connection'
# tunnel options for allowed 'TunnelOptions' keys.
if not isinstance(m, dict):
raise TypeError("non-dict list member")
t_opt.append(m)
if t_opt:
options['TunnelOptions'] = t_opt
if not (customer_gateway_id and vpn_gateway_id):
raise VPNConnectionException(msg="No matching connection was found. To create a new connection you must provide "
"both vpn_gateway_id and customer_gateway_id.")
try:
vpn = connection.create_vpn_connection(Type=connection_type,
CustomerGatewayId=customer_gateway_id,
VpnGatewayId=vpn_gateway_id,
Options=options)
connection.get_waiter('vpn_connection_available').wait(
VpnConnectionIds=[vpn['VpnConnection']['VpnConnectionId']],
WaiterConfig={'Delay': delay, 'MaxAttempts': max_attempts}
)
except WaiterError as e:
raise VPNConnectionException(msg="Failed to wait for VPN connection {0} to be available".format(vpn['VpnConnection']['VpnConnectionId']),
exception=e)
except (BotoCoreError, ClientError) as e:
raise VPNConnectionException(msg="Failed to create VPN connection",
exception=e)
return vpn['VpnConnection']
def delete_connection(connection, vpn_connection_id, delay, max_attempts):
""" Deletes a VPN connection """
try:
connection.delete_vpn_connection(VpnConnectionId=vpn_connection_id)
connection.get_waiter('vpn_connection_deleted').wait(
VpnConnectionIds=[vpn_connection_id],
WaiterConfig={'Delay': delay, 'MaxAttempts': max_attempts}
)
except WaiterError as e:
raise VPNConnectionException(msg="Failed to wait for VPN connection {0} to be removed".format(vpn_connection_id),
exception=e)
except (BotoCoreError, ClientError) as e:
raise VPNConnectionException(msg="Failed to delete the VPN connection: {0}".format(vpn_connection_id),
exception=e)
def add_tags(connection, vpn_connection_id, add):
try:
connection.create_tags(Resources=[vpn_connection_id],
Tags=add)
except (BotoCoreError, ClientError) as e:
raise VPNConnectionException(msg="Failed to add the tags: {0}.".format(add),
exception=e)
def remove_tags(connection, vpn_connection_id, remove):
# format tags since they are a list in the format ['tag1', 'tag2', 'tag3']
key_dict_list = [{'Key': tag} for tag in remove]
try:
connection.delete_tags(Resources=[vpn_connection_id],
Tags=key_dict_list)
except (BotoCoreError, ClientError) as e:
raise VPNConnectionException(msg="Failed to remove the tags: {0}.".format(remove),
exception=e)
def check_for_update(connection, module_params, vpn_connection_id):
""" Determines if there are any tags or routes that need to be updated. Ensures non-modifiable attributes aren't expected to change. """
tags = module_params.get('tags')
routes = module_params.get('routes')
purge_tags = module_params.get('purge_tags')
purge_routes = module_params.get('purge_routes')
vpn_connection = find_connection(connection, module_params, vpn_connection_id=vpn_connection_id)
current_attrs = camel_dict_to_snake_dict(vpn_connection)
# Initialize changes dict
changes = {'tags_to_add': [],
'tags_to_remove': [],
'routes_to_add': [],
'routes_to_remove': []}
# Get changes to tags
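    # compare_aws_tags returns the tags to set (as an Ansible dict) and the list of
    # tag keys to remove, taking purge_tags into account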
current_tags = boto3_tag_list_to_ansible_dict(current_attrs.get('tags', []), u'key', u'value')
tags_to_add, changes['tags_to_remove'] = compare_aws_tags(current_tags, tags, purge_tags)
changes['tags_to_add'] = ansible_dict_to_boto3_tag_list(tags_to_add)
# Get changes to routes
if 'Routes' in vpn_connection:
current_routes = [route['DestinationCidrBlock'] for route in vpn_connection['Routes']]
if purge_routes:
changes['routes_to_remove'] = [old_route for old_route in current_routes if old_route not in routes]
changes['routes_to_add'] = [new_route for new_route in routes if new_route not in current_routes]
    # Check whether any non-modifiable attributes would be changed
for attribute in current_attrs:
if attribute in ("tags", "routes", "state"):
continue
elif attribute == 'options':
will_be = module_params.get('static_only', None)
is_now = bool(current_attrs[attribute]['static_routes_only'])
attribute = 'static_only'
elif attribute == 'type':
will_be = module_params.get("connection_type", None)
is_now = current_attrs[attribute]
else:
is_now = current_attrs[attribute]
will_be = module_params.get(attribute, None)
if will_be is not None and to_text(will_be) != to_text(is_now):
raise VPNConnectionException(msg="You cannot modify {0}, the current value of which is {1}. Modifiable VPN "
"connection attributes are tags and routes. The value you tried to change it to "
"is {2}.".format(attribute, is_now, will_be))
return changes


def make_changes(connection, vpn_connection_id, changes):
""" changes is a dict with the keys 'tags_to_add', 'tags_to_remove', 'routes_to_add', 'routes_to_remove',
the values of which are lists (generated by check_for_update()).
"""
changed = False
if changes['tags_to_add']:
changed = True
add_tags(connection, vpn_connection_id, changes['tags_to_add'])
if changes['tags_to_remove']:
changed = True
remove_tags(connection, vpn_connection_id, changes['tags_to_remove'])
if changes['routes_to_add']:
changed = True
add_routes(connection, vpn_connection_id, changes['routes_to_add'])
if changes['routes_to_remove']:
changed = True
remove_routes(connection, vpn_connection_id, changes['routes_to_remove'])
return changed


def get_check_mode_results(connection, module_params, vpn_connection_id=None, current_state=None):
""" Returns the changes that would be made to a VPN Connection """
state = module_params.get('state')
if state == 'absent':
if vpn_connection_id:
return True, {}
else:
return False, {}
changed = False
results = {'customer_gateway_configuration': '',
'customer_gateway_id': module_params.get('customer_gateway_id'),
'vpn_gateway_id': module_params.get('vpn_gateway_id'),
'options': {'static_routes_only': module_params.get('static_only')},
'routes': [module_params.get('routes')]}
# get combined current tags and tags to set
present_tags = module_params.get('tags')
if current_state and 'Tags' in current_state:
current_tags = boto3_tag_list_to_ansible_dict(current_state['Tags'])
if module_params.get('purge_tags'):
if current_tags != present_tags:
changed = True
elif current_tags != present_tags:
if not set(present_tags.keys()) < set(current_tags.keys()):
changed = True
# add preexisting tags that new tags didn't overwrite
present_tags.update((tag, current_tags[tag]) for tag in current_tags if tag not in present_tags)
elif current_tags.keys() == present_tags.keys() and set(present_tags.values()) != set(current_tags.values()):
changed = True
elif module_params.get('tags'):
changed = True
if present_tags:
results['tags'] = present_tags
# get combined current routes and routes to add
present_routes = module_params.get('routes')
if current_state and 'Routes' in current_state:
current_routes = [route['DestinationCidrBlock'] for route in current_state['Routes']]
if module_params.get('purge_routes'):
if set(current_routes) != set(present_routes):
changed = True
elif set(present_routes) != set(current_routes):
if not set(present_routes) < set(current_routes):
changed = True
present_routes.extend([route for route in current_routes if route not in present_routes])
elif module_params.get('routes'):
changed = True
results['routes'] = [{"destination_cidr_block": cidr, "state": "available"} for cidr in present_routes]
# return the vpn_connection_id if it's known
if vpn_connection_id:
results['vpn_connection_id'] = vpn_connection_id
else:
changed = True
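        # no existing connection, so one would be created; report a placeholder id in check-mode output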
results['vpn_connection_id'] = 'vpn-XXXXXXXX'
return changed, results


def ensure_present(connection, module_params, check_mode=False):
""" Creates and adds tags to a VPN connection. If the connection already exists update tags. """
vpn_connection = find_connection(connection, module_params)
changed = False
delay = module_params.get('delay')
max_attempts = module_params.get('wait_timeout') // delay
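    # the EC2 waiters poll every `delay` seconds, so wait_timeout // delay is the attempt budget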
# No match but vpn_connection_id was specified.
if not vpn_connection and module_params.get('vpn_connection_id'):
raise VPNConnectionException(msg="There is no VPN connection available or pending with that id. Did you delete it?")
# Unique match was found. Check if attributes provided differ.
elif vpn_connection:
vpn_connection_id = vpn_connection['VpnConnectionId']
# check_for_update returns a dict with the keys tags_to_add, tags_to_remove, routes_to_add, routes_to_remove
changes = check_for_update(connection, module_params, vpn_connection_id)
if check_mode:
return get_check_mode_results(connection, module_params, vpn_connection_id, current_state=vpn_connection)
changed = make_changes(connection, vpn_connection_id, changes)
# No match was found. Create and tag a connection and add routes.
else:
changed = True
if check_mode:
return get_check_mode_results(connection, module_params)
vpn_connection = create_connection(connection,
customer_gateway_id=module_params.get('customer_gateway_id'),
static_only=module_params.get('static_only'),
vpn_gateway_id=module_params.get('vpn_gateway_id'),
connection_type=module_params.get('connection_type'),
tunnel_options=module_params.get('tunnel_options'),
max_attempts=max_attempts,
delay=delay)
changes = check_for_update(connection, module_params, vpn_connection['VpnConnectionId'])
make_changes(connection, vpn_connection['VpnConnectionId'], changes)
# get latest version if a change has been made and make tags output nice before returning it
if vpn_connection:
vpn_connection = find_connection(connection, module_params, vpn_connection['VpnConnectionId'])
if 'Tags' in vpn_connection:
vpn_connection['Tags'] = boto3_tag_list_to_ansible_dict(vpn_connection['Tags'])
return changed, vpn_connection


def ensure_absent(connection, module_params, check_mode=False):
""" Deletes a VPN connection if it exists. """
vpn_connection = find_connection(connection, module_params)
if check_mode:
return get_check_mode_results(connection, module_params, vpn_connection['VpnConnectionId'] if vpn_connection else None)
delay = module_params.get('delay')
max_attempts = module_params.get('wait_timeout') // delay
if vpn_connection:
delete_connection(connection, vpn_connection['VpnConnectionId'], delay=delay, max_attempts=max_attempts)
changed = True
else:
changed = False
return changed, {}


def main():
argument_spec = dict(
state=dict(type='str', default='present', choices=['present', 'absent']),
filters=dict(type='dict', default={}),
vpn_gateway_id=dict(type='str'),
tags=dict(default={}, type='dict'),
connection_type=dict(default='ipsec.1', type='str'),
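        # tunnel_options may contain pre-shared keys, hence no_log=True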
tunnel_options=dict(no_log=True, type='list', default=[]),
static_only=dict(default=False, type='bool'),
customer_gateway_id=dict(type='str'),
vpn_connection_id=dict(type='str'),
purge_tags=dict(type='bool', default=False),
routes=dict(type='list', default=[]),
purge_routes=dict(type='bool', default=False),
wait_timeout=dict(type='int', default=600),
delay=dict(type='int', default=15),
)
module = AnsibleAWSModule(argument_spec=argument_spec,
supports_check_mode=True)
connection = module.client('ec2')
state = module.params.get('state')
parameters = dict(module.params)
try:
if state == 'present':
changed, response = ensure_present(connection, parameters, module.check_mode)
elif state == 'absent':
changed, response = ensure_absent(connection, parameters, module.check_mode)
except VPNConnectionException as e:
if e.exception:
module.fail_json_aws(e.exception, msg=e.msg)
else:
module.fail_json(msg=e.msg)
module.exit_json(changed=changed, **camel_dict_to_snake_dict(response))


if __name__ == '__main__':
main()
| roadmapper/ansible | lib/ansible/modules/cloud/amazon/ec2_vpc_vpn.py | Python | gpl-3.0 | 30,979 |
from __future__ import absolute_import, print_function

import six

from rest_framework import serializers

from sentry.models import User
from sentry.utils.auth import find_users


class UserField(serializers.Field):
def to_representation(self, value):
return value.username

    def to_internal_value(self, data):
if not data:
return None
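        # numeric input is treated as a user id first; otherwise fall back to the
        # username/email lookup via find_users below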
if isinstance(data, six.integer_types) or data.isdigit():
try:
return User.objects.get(id=data)
except User.DoesNotExist:
pass
try:
return find_users(data)[0]
except IndexError:
raise serializers.ValidationError("Unable to find user")
| mvaled/sentry | src/sentry/api/fields/user.py | Python | bsd-3-clause | 714 |
try:
import keyring
except ImportError:
    raise NotImplementedError()

from plyer.facades import Keystore


class LinuxKeystore(Keystore):
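    # Thin wrapper that stores and retrieves secrets through the `keyring` package,
    # which picks an available system backend (typically the freedesktop Secret Service on Linux).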
def _set_key(self, servicename, key, value, **kwargs):
keyring.set_password(servicename, key, value)

    def _get_key(self, servicename, key, **kwargs):
return keyring.get_password(servicename, key)


def instance():
return LinuxKeystore()
| KeyWeeUsr/plyer | plyer/platforms/linux/keystore.py | Python | mit | 423 |