| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| stringlengths 3-1.05M | stringlengths 5-104 | stringlengths 4-251 | stringclasses (1 value) | stringclasses (15 values) | int64 3-1.05M |
# -*- coding: utf-8 -*-
"""
flask.sessions
~~~~~~~~~~~~~~
Implements cookie based sessions based on itsdangerous.
:copyright: (c) 2015 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import uuid
import hashlib
from base64 import b64encode, b64decode
from datetime import datetime
from werkzeug.http import http_date, parse_date
from werkzeug.datastructures import CallbackDict
from . import Markup, json
from ._compat import iteritems, text_type
from .helpers import total_seconds
from itsdangerous import URLSafeTimedSerializer, BadSignature
class SessionMixin(object):
"""Expands a basic dictionary with an accessors that are expected
by Flask extensions and users for the session.
"""
def _get_permanent(self):
return self.get('_permanent', False)
def _set_permanent(self, value):
self['_permanent'] = bool(value)
#: this reflects the ``'_permanent'`` key in the dict.
permanent = property(_get_permanent, _set_permanent)
del _get_permanent, _set_permanent
#: some session backends can tell you if a session is new, but that is
#: not necessarily guaranteed. Use with caution. The default mixin
#: implementation just hardcodes ``False`` in.
new = False
#: for some backends this will always be ``True``, but some backends will
#: default this to ``False`` and detect changes in the dictionary for as
#: long as changes do not happen on mutable structures in the session.
#: The default mixin implementation just hardcodes ``True`` in.
modified = True
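# A minimal usage sketch (illustrative only, not part of the module): the
# mixin is meant to be combined with a dict-like base class, e.g.
#
#     class SimpleSession(dict, SessionMixin):
#         pass
#
#     s = SimpleSession()
#     s.permanent = True          # stored under the '_permanent' key
#     assert s['_permanent'] is True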
def _tag(value):
if isinstance(value, tuple):
return {' t': [_tag(x) for x in value]}
elif isinstance(value, uuid.UUID):
return {' u': value.hex}
elif isinstance(value, bytes):
return {' b': b64encode(value).decode('ascii')}
elif callable(getattr(value, '__html__', None)):
return {' m': text_type(value.__html__())}
elif isinstance(value, list):
return [_tag(x) for x in value]
elif isinstance(value, datetime):
return {' d': http_date(value)}
elif isinstance(value, dict):
return dict((k, _tag(v)) for k, v in iteritems(value))
elif isinstance(value, str):
try:
return text_type(value)
except UnicodeError:
from flask.debughelpers import UnexpectedUnicodeError
raise UnexpectedUnicodeError(u'A byte string with '
u'non-ASCII data was passed to the session system '
u'which can only store unicode strings. Consider '
u'base64 encoding your string (String was %r)' % value)
return value
class TaggedJSONSerializer(object):
"""A customized JSON serializer that supports a few extra types that
we take for granted when serializing (tuples, markup objects, datetime).
"""
def dumps(self, value):
return json.dumps(_tag(value), separators=(',', ':'))
def loads(self, value):
def object_hook(obj):
if len(obj) != 1:
return obj
the_key, the_value = next(iteritems(obj))
if the_key == ' t':
return tuple(the_value)
elif the_key == ' u':
return uuid.UUID(the_value)
elif the_key == ' b':
return b64decode(the_value)
elif the_key == ' m':
return Markup(the_value)
elif the_key == ' d':
return parse_date(the_value)
return obj
return json.loads(value, object_hook=object_hook)
session_json_serializer = TaggedJSONSerializer()
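# A quick illustrative round trip (not part of the module): the tagged
# serializer preserves types that plain JSON would lose, e.g.
#
#     s = TaggedJSONSerializer()
#     s.dumps((1, 2))                  # '{" t":[1,2]}'
#     s.loads(s.dumps((1, 2)))         # (1, 2) -- a tuple again
#     s.loads(s.dumps(uuid.uuid4()))   # a uuid.UUID instance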
class SecureCookieSession(CallbackDict, SessionMixin):
"""Base class for sessions based on signed cookies."""
def __init__(self, initial=None):
def on_update(self):
self.modified = True
CallbackDict.__init__(self, initial, on_update)
self.modified = False
class NullSession(SecureCookieSession):
"""Class used to generate nicer error messages if sessions are not
available. Will still allow read-only access to the empty session
but fail on setting.
"""
def _fail(self, *args, **kwargs):
raise RuntimeError('The session is unavailable because no secret '
'key was set. Set the secret_key on the '
'application to something unique and secret.')
__setitem__ = __delitem__ = clear = pop = popitem = \
update = setdefault = _fail
del _fail
class SessionInterface(object):
"""The basic interface you have to implement in order to replace the
default session interface which uses werkzeug's securecookie
implementation. The only methods you have to implement are
:meth:`open_session` and :meth:`save_session`, the others have
useful defaults which you don't need to change.
The session object returned by the :meth:`open_session` method has to
provide a dictionary like interface plus the properties and methods
from the :class:`SessionMixin`. We recommend just subclassing a dict
and adding that mixin::
class Session(dict, SessionMixin):
pass
If :meth:`open_session` returns ``None`` Flask will call into
:meth:`make_null_session` to create a session that acts as replacement
if the session support cannot work because some requirement is not
fulfilled. The default :class:`NullSession` class that is created
will complain that the secret key was not set.
To replace the session interface on an application all you have to do
is to assign :attr:`flask.Flask.session_interface`::
app = Flask(__name__)
app.session_interface = MySessionInterface()
.. versionadded:: 0.8
"""
#: :meth:`make_null_session` will look here for the class that should
#: be created when a null session is requested. Likewise the
#: :meth:`is_null_session` method will perform a typecheck against
#: this type.
null_session_class = NullSession
#: A flag that indicates if the session interface is pickle based.
#: This can be used by flask extensions to make a decision in regards
#: to how to deal with the session object.
#:
#: .. versionadded:: 0.10
pickle_based = False
def make_null_session(self, app):
"""Creates a null session which acts as a replacement object if the
real session support could not be loaded due to a configuration
error. This mainly aids the user experience because the null session
still supports lookups without complaining, while modifications are
answered with a helpful error message about what failed.
This creates an instance of :attr:`null_session_class` by default.
"""
return self.null_session_class()
def is_null_session(self, obj):
"""Checks if a given object is a null session. Null sessions are
not asked to be saved.
This checks if the object is an instance of :attr:`null_session_class`
by default.
"""
return isinstance(obj, self.null_session_class)
def get_cookie_domain(self, app):
"""Helpful helper method that returns the cookie domain that should
be used for the session cookie if session cookies are used.
"""
if app.config['SESSION_COOKIE_DOMAIN'] is not None:
return app.config['SESSION_COOKIE_DOMAIN']
if app.config['SERVER_NAME'] is not None:
# chop off the port which is usually not supported by browsers
rv = '.' + app.config['SERVER_NAME'].rsplit(':', 1)[0]
# Google Chrome does not like cookies set to .localhost, so
# we just go with no domain then. Flask documents anyway that
# cross-domain cookies need a fully qualified domain name.
if rv == '.localhost':
rv = None
# If we infer the cookie domain from the server name we need
# to check if we are in a subpath. In that case we can't
# set a cross domain cookie.
if rv is not None:
path = self.get_cookie_path(app)
if path != '/':
rv = rv.lstrip('.')
return rv
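# Illustrative outcomes of the logic above:
#   SESSION_COOKIE_DOMAIN = "flask.example"        -> "flask.example"
#   SERVER_NAME = "example.com:5000"               -> ".example.com"
#   SERVER_NAME = "localhost"                      -> None
#   SERVER_NAME = "example.com", cookie path != "/" -> "example.com"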
def get_cookie_path(self, app):
"""Returns the path for which the cookie should be valid. The
default implementation uses the value from the ``SESSION_COOKIE_PATH``
config var if it's set, and falls back to ``APPLICATION_ROOT`` or
uses ``/`` if it's ``None``.
"""
return app.config['SESSION_COOKIE_PATH'] or \
app.config['APPLICATION_ROOT'] or '/'
def get_cookie_httponly(self, app):
"""Returns True if the session cookie should be httponly. This
currently just returns the value of the ``SESSION_COOKIE_HTTPONLY``
config var.
"""
return app.config['SESSION_COOKIE_HTTPONLY']
def get_cookie_secure(self, app):
"""Returns True if the cookie should be secure. This currently
just returns the value of the ``SESSION_COOKIE_SECURE`` setting.
"""
return app.config['SESSION_COOKIE_SECURE']
def get_expiration_time(self, app, session):
"""A helper method that returns an expiration date for the session
or ``None`` if the session is linked to the browser session. The
default implementation returns now + the permanent session
lifetime configured on the application.
"""
if session.permanent:
return datetime.utcnow() + app.permanent_session_lifetime
def should_set_cookie(self, app, session):
"""Indicates whether a cookie should be set now or not. This is
used by session backends to figure out if they should emit a
set-cookie header or not. The default behavior is controlled by
the ``SESSION_REFRESH_EACH_REQUEST`` config variable. If
it's set to ``False`` then a cookie is only set if the session is
modified; if set to ``True`` it's always set if the session is
permanent.
This check is usually skipped if sessions get deleted.
.. versionadded:: 1.0
"""
if session.modified:
return True
save_each = app.config['SESSION_REFRESH_EACH_REQUEST']
return save_each and session.permanent
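# In short: modified sessions always get a cookie; unmodified ones only
# when SESSION_REFRESH_EACH_REQUEST is on and the session is permanent.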
def open_session(self, app, request):
"""This method has to be implemented and must either return ``None``
in case the loading failed because of a configuration error or an
instance of a session object which implements a dictionary like
interface + the methods and attributes on :class:`SessionMixin`.
"""
raise NotImplementedError()
def save_session(self, app, session, response):
"""This is called for actual sessions returned by :meth:`open_session`
at the end of the request. This is still called during a request
context so if you absolutely need access to the request you can do
that.
"""
raise NotImplementedError()
class SecureCookieSessionInterface(SessionInterface):
"""The default session interface that stores sessions in signed cookies
through the :mod:`itsdangerous` module.
"""
#: the salt that should be applied on top of the secret key for the
#: signing of cookie based sessions.
salt = 'cookie-session'
#: the hash function to use for the signature. The default is sha1
digest_method = staticmethod(hashlib.sha1)
#: the name of the itsdangerous supported key derivation. The default
#: is hmac.
key_derivation = 'hmac'
#: A python serializer for the payload. The default is a compact
#: JSON derived serializer with support for some extra Python types
#: such as datetime objects or tuples.
serializer = session_json_serializer
session_class = SecureCookieSession
def get_signing_serializer(self, app):
if not app.secret_key:
return None
signer_kwargs = dict(
key_derivation=self.key_derivation,
digest_method=self.digest_method
)
return URLSafeTimedSerializer(app.secret_key, salt=self.salt,
serializer=self.serializer,
signer_kwargs=signer_kwargs)
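# The resulting cookie value has the itsdangerous URL-safe form
# "<payload>.<timestamp>.<signature>": the tagged-JSON session dict
# (zlib-compressed when that is smaller), a signing timestamp, and an
# HMAC-SHA1 signature derived from app.secret_key and the salt above.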
def open_session(self, app, request):
s = self.get_signing_serializer(app)
if s is None:
return None
val = request.cookies.get(app.session_cookie_name)
if not val:
return self.session_class()
max_age = total_seconds(app.permanent_session_lifetime)
try:
data = s.loads(val, max_age=max_age)
return self.session_class(data)
except BadSignature:
return self.session_class()
def save_session(self, app, session, response):
domain = self.get_cookie_domain(app)
path = self.get_cookie_path(app)
# Delete case. If there is no session we bail early.
# If the session was modified to be empty we remove the
# whole cookie.
if not session:
if session.modified:
response.delete_cookie(app.session_cookie_name,
domain=domain, path=path)
return
# Modification case. There are upsides and downsides to
# emitting a set-cookie header each request. The behavior
# is controlled by the :meth:`should_set_cookie` method
# which performs a quick check to figure out if the cookie
# should be set or not. This is controlled by the
# SESSION_REFRESH_EACH_REQUEST config flag as well as
# the permanent flag on the session itself.
if not self.should_set_cookie(app, session):
return
httponly = self.get_cookie_httponly(app)
secure = self.get_cookie_secure(app)
expires = self.get_expiration_time(app, session)
val = self.get_signing_serializer(app).dumps(dict(session))
response.set_cookie(app.session_cookie_name, val,
expires=expires, httponly=httponly,
domain=domain, path=path, secure=secure)
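# A minimal server-side variant, as an illustrative sketch only (not part
# of this module): session data lives in a plain in-process dict keyed by
# a random id, and only the id travels in the cookie. A real backend
# would use a shared store such as redis or memcached.
class InMemorySessionInterface(SessionInterface):
    def __init__(self):
        self.store = {}

    def open_session(self, app, request):
        sid = request.cookies.get(app.session_cookie_name) or uuid.uuid4().hex
        session = SecureCookieSession(self.store.get(sid))
        session.sid = sid
        return session

    def save_session(self, app, session, response):
        domain = self.get_cookie_domain(app)
        path = self.get_cookie_path(app)
        if not session:
            self.store.pop(session.sid, None)
            if session.modified:
                response.delete_cookie(app.session_cookie_name,
                                       domain=domain, path=path)
            return
        self.store[session.sid] = dict(session)
        response.set_cookie(app.session_cookie_name, session.sid,
                            expires=self.get_expiration_time(app, session),
                            httponly=self.get_cookie_httponly(app),
                            secure=self.get_cookie_secure(app),
                            domain=domain, path=path)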
| lord63-forks/flask | flask/sessions.py | Python | bsd-3-clause | 14,332 |
from nested_serializers import NestedModelSerializer
from rest_framework import serializers
from example.app.models import Channel, Video
class ChannelSerializer(serializers.ModelSerializer):
class Meta(object):
model = Channel
class VideoSerializer(NestedModelSerializer):
class Meta(object):
model = Video
depth = 1
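# Illustrative output sketch (field names here are hypothetical; assumes
# Video has a foreign key to Channel). With depth = 1 the related object
# is rendered inline rather than as a bare primary key:
#
#     VideoSerializer(video).data
#     # -> {'id': 1, 'title': '...', 'channel': {'id': 2, 'name': '...'}}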
| theonion/djesrf | example/app/serializers.py | Python | mit | 355 |
""" __slots__的值表示只允许对象绑定的属性 """
from types import MethodType
class Student(object):
# Declare the attribute/method names instances may bind; the restriction is not inherited by subclasses
__slots__ = ('name', 'age', 'tts')
# The declaration above does not stop the class itself from defining attributes
city = 'CQ'
# Subclasses are unrestricted; if a subclass defines __slots__ too, its instances may bind the union of parent and child slots
class GraduateStudent(Student):
pass
class ChinaStudent(Student):
__slots__ = 'score'
def tts(self):
print("lalala")
st = Student()
st.name = 'Colin'
st.tts = MethodType(tts, st)
st.tts()
# Binding an attribute outside the allowed set raises an error
# AttributeError: 'Student' object has no attribute 'score'
# st.score = 100
print(st.name, st.city)
# Within the allowed set, but never assigned:
# AttributeError: age
# print(st.age)
# No restriction on attributes
gst = GraduateStudent()
gst.score = 9999
print(gst.score)
cst = ChinaStudent()
cst.name = 'Lucd'
cst.age = 26
cst.score = 100
print(cst.name, cst.age, cst.score)
# Attributes outside the union of parent and child __slots__ cannot be bound
# AttributeError: 'ChinaStudent' object has no attribute 'weight'
# cst.weight = 46
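# Illustrative follow-up (not in the original file): instances restricted
# by __slots__ carry no per-instance __dict__, which is where the memory
# saving comes from; the unrestricted subclass gets a __dict__ back.
print(hasattr(st, '__dict__'))   # False
print(hasattr(gst, '__dict__'))  # True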
| lucd1990/self-learn-python | oop_senior/slots_test.py | Python | unlicense | 1,173 |
"""Tests for certbot.error_handler."""
import contextlib
import os
import signal
import sys
import unittest
import mock
def get_signals(signums):
"""Get the handlers for an iterable of signums."""
return dict((s, signal.getsignal(s)) for s in signums)
def set_signals(sig_handler_dict):
"""Set the signal (keys) with the handler (values) from the input dict."""
for s, h in sig_handler_dict.items():
signal.signal(s, h)
@contextlib.contextmanager
def signal_receiver(signums):
    """Context manager to catch signals"""
    signals = []
    prev_handlers = get_signals(signums)
    set_signals(dict((s, lambda s, _: signals.append(s)) for s in signums))
    try:
        yield signals
    finally:
        set_signals(prev_handlers)
def send_signal(signum):
"""Send the given signal"""
os.kill(os.getpid(), signum)
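# Illustrative use of the helpers above (a sketch, not an actual test):
#
#     with signal_receiver([signal.SIGTERM]) as caught:
#         send_signal(signal.SIGTERM)
#     assert caught == [signal.SIGTERM]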
class ErrorHandlerTest(unittest.TestCase):
"""Tests for certbot.error_handler."""
def setUp(self):
from certbot import error_handler
self.init_func = mock.MagicMock()
self.init_args = set((42,))
self.init_kwargs = {'foo': 'bar'}
self.handler = error_handler.ErrorHandler(self.init_func,
*self.init_args,
**self.init_kwargs)
# pylint: disable=protected-access
self.signals = error_handler._SIGNALS
def test_context_manager(self):
exception_raised = False
try:
with self.handler:
raise ValueError
except ValueError:
exception_raised = True
self.assertTrue(exception_raised)
self.init_func.assert_called_once_with(*self.init_args,
**self.init_kwargs)
def test_context_manager_with_signal(self):
init_signals = get_signals(self.signals)
with signal_receiver(self.signals) as signals_received:
with self.handler:
should_be_42 = 42
send_signal(self.signals[0])
should_be_42 *= 10
# check execution stopped when the signal was sent
self.assertEqual(42, should_be_42)
# assert signals were caught
self.assertEqual([self.signals[0]], signals_received)
# assert the error handling function was just called once
self.init_func.assert_called_once_with(*self.init_args,
**self.init_kwargs)
for signum in self.signals:
self.assertEqual(init_signals[signum], signal.getsignal(signum))
def test_bad_recovery(self):
bad_func = mock.MagicMock(side_effect=[ValueError])
self.handler.register(bad_func)
try:
with self.handler:
raise ValueError
except ValueError:
pass
self.init_func.assert_called_once_with(*self.init_args,
**self.init_kwargs)
bad_func.assert_called_once_with()
def test_bad_recovery_with_signal(self):
sig1 = self.signals[0]
sig2 = self.signals[-1]
bad_func = mock.MagicMock(side_effect=lambda: send_signal(sig1))
self.handler.register(bad_func)
with signal_receiver(self.signals) as signals_received:
with self.handler:
send_signal(sig2)
self.assertEqual([sig2, sig1], signals_received)
self.init_func.assert_called_once_with(*self.init_args,
**self.init_kwargs)
bad_func.assert_called_once_with()
def test_sysexit_ignored(self):
try:
with self.handler:
sys.exit(0)
except SystemExit:
pass
self.assertFalse(self.init_func.called)
if __name__ == "__main__":
unittest.main() # pragma: no cover
| nohona/cron-crm | usr/local/certbot/certbot/tests/error_handler_test.py | Python | gpl-3.0 | 3,897 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Author : <github.com/tintinweb>
import unittest
import json
from evmlab.tools.statetests import rndval
import evmlab.tools.statetests.templates
class EthFillerObjectifiedTest(unittest.TestCase):
def setUp(self):
rndval.RandomSeed.set_state(None) # generate new seed
self.template = evmlab.tools.statetests.templates.new(evmlab.tools.statetests.templates.object_based.TEMPLATE_RandomStateTest)
def test_pprint(self):
import pprint
pprint_str = pprint.pformat(self.template, indent=4)
self.assertIn(list(self.template.keys())[0], pprint_str)
print(pprint_str)
def test_json_dumps_with_default(self):
print(json.dumps(self.template, default=str))
def test_json_dumps_with_encoder_class(self):
import evmlab.tools.statetests.randomtest
print(json.dumps(self.template, cls=evmlab.tools.statetests.randomtest.RandomTestsJsonEncoder))
| holiman/evmlab | tests/tools/statetests/test_objectified.py | Python | gpl-3.0 | 974 |
from __future__ import unicode_literals
from django.conf import settings
import six.moves.urllib.parse as urlparse
from horizon.utils.memoized import memoized # noqa
import requests
import json
@memoized
def get_token(request):
return request.user.token.id
# -----------------------------------------------------------------------------
#
# Global Controllers
#
def add_controller(request, data, in_memory_file):
token = get_token(request)
headers = {}
url = settings.IOSTACK_CONTROLLER_URL + "/controllers/data"
headers["X-Auth-Token"] = str(token)
# Content-Type header will be set to multipart/form-data by requests because a file is uploaded
files = {'file': (in_memory_file.name, in_memory_file.read())}
data_to_send = {'metadata': json.dumps(data)}
r = requests.post(url, data_to_send, files=files, headers=headers)
return r
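# Illustrative call from a Django view (the metadata keys and field names
# here are hypothetical; the API expects a metadata dict plus the file):
#
#     r = add_controller(request,
#                        {'name': 'my_controller'},
#                        request.FILES['controller_file'])
#     if r.status_code == 201:
#         ...  # created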
def get_all_controllers(request):
token = get_token(request)
headers = {}
url = settings.IOSTACK_CONTROLLER_URL + "/controllers/"
headers["X-Auth-Token"] = str(token)
headers['Content-Type'] = "application/json"
r = requests.get(url, headers=headers)
return r
def update_controller_data(request, controller_id, in_memory_file):
token = get_token(request)
headers = {}
url = settings.IOSTACK_CONTROLLER_URL + "/controllers/" + str(controller_id) + "/data"
headers["X-Auth-Token"] = str(token)
files = {'file': (in_memory_file.name, in_memory_file.read())}
r = requests.put(url, files=files, headers=headers)
return r
def update_controller_metadata(request, controller_id, data):
token = get_token(request)
headers = {}
url = settings.IOSTACK_CONTROLLER_URL + "/controllers/" + str(controller_id)
headers["X-Auth-Token"] = str(token)
headers['Content-Type'] = "application/json"
r = requests.put(url, json.dumps(data), headers=headers)
return r
def get_controller(request, controller_id):
token = get_token(request)
headers = {}
url = settings.IOSTACK_CONTROLLER_URL + "/controllers/" + str(controller_id)
headers["X-Auth-Token"] = str(token)
headers['Content-Type'] = "application/json"
r = requests.get(url, headers=headers)
return r
def delete_controller(request, controller_id):
token = get_token(request)
headers = {}
url = settings.IOSTACK_CONTROLLER_URL + "/controllers/" + str(controller_id)
headers["X-Auth-Token"] = str(token)
headers['Content-Type'] = "application/json"
r = requests.delete(url, headers=headers)
return r
def download_controller(request, controller_id):
token = get_token(request)
headers = {}
url = settings.IOSTACK_CONTROLLER_URL + "/controllers/" + str(controller_id) + '/data'
headers["X-Auth-Token"] = str(token)
r = requests.get(url, headers=headers)
return r
# -----------------------------------------------------------------------------
#
# Instances
#
def get_all_instances(request):
token = get_token(request)
headers = {}
url = settings.IOSTACK_CONTROLLER_URL + "/controllers/instances/"
headers["X-Auth-Token"] = str(token)
headers['Content-Type'] = "application/json"
r = requests.get(url, headers=headers)
return r
def get_instance(request, instance_id):
token = get_token(request)
headers = {}
url = settings.IOSTACK_CONTROLLER_URL + "/controllers/instance/" + str(instance_id)
headers["X-Auth-Token"] = str(token)
headers['Content-Type'] = "application/json"
r = requests.get(url, headers=headers)
return r
def add_instance(request, data):
token = get_token(request)
headers = {}
url = settings.IOSTACK_CONTROLLER_URL + "/controllers/instance/"
headers["X-Auth-Token"] = str(token)
headers['Content-Type'] = "application/json"
r = requests.post(url, json.dumps(data), headers=headers)
return r
def delete_instance(request, instance_id):
token = get_token(request)
headers = {}
url = settings.IOSTACK_CONTROLLER_URL + "/controllers/instance/" + str(instance_id)
headers["X-Auth-Token"] = str(token)
headers['Content-Type'] = "application/json"
r = requests.delete(url, headers=headers)
return r
def update_instance(request, instance_id, data):
token = get_token(request)
headers = {}
url = settings.IOSTACK_CONTROLLER_URL + "/controllers/instance/" + str(instance_id)
headers["X-Auth-Token"] = str(token)
headers['Content-Type'] = "application/json"
r = requests.put(url, json.dumps(data), headers=headers)
return r
| Crystal-SDS/dashboard | crystal_dashboard/api/controllers.py | Python | gpl-3.0 | 4,593 |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2014-2015 Université Catholique de Louvain.
#
# This file is part of INGInious.
#
# INGInious is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# INGInious is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with INGInious. If not, see <http://www.gnu.org/licenses/>.
import web
from frontend.accessible_time import AccessibleTime
from frontend.base import renderer
from frontend.pages.course_admin.utils import get_course_and_check_rights
import frontend.user as User
class CourseSettings(object):
""" Couse settings """
def GET(self, courseid):
""" GET request """
course = get_course_and_check_rights(courseid)
return self.page(course)
def POST(self, courseid):
""" POST request """
course = get_course_and_check_rights(courseid)
errors = []
try:
data = web.input()
course_content = course.get_course_descriptor_content(courseid)
course_content['name'] = data['name']
if course_content['name'] == "":
errors.append('Invalid name')
course_content['admins'] = data['admins'].split(',')
if User.get_username() not in course_content['admins']:
errors.append('You cannot remove yourself from the administrators of this course')
course_content['admins'] = data['admins'].split(',')
if data["accessible"] == "custom":
course_content['accessible'] = "{}/{}".format(data["accessible_start"], data["accessible_end"])
elif data["accessible"] == "true":
course_content['accessible'] = True
else:
course_content['accessible'] = False
try:
AccessibleTime(course_content['accessible'])
except:
errors.append('Invalid accessibility dates')
if data["registration"] == "custom":
course_content['registration'] = "{}/{}".format(data["registration_start"], data["registration_end"])
elif data["registration"] == "true":
course_content['registration'] = True
else:
course_content['registration'] = False
try:
AccessibleTime(course_content['registration'])
except:
errors.append('Invalid registration dates')
course_content['registration_password'] = data['registration_password']
if course_content['registration_password'] == "":
course_content['registration_password'] = None
course_content['registration_ac'] = data['registration_ac']
if course_content['registration_ac'] not in ["None", "username", "realname", "email"]:
errors.append('Invalid ACL value')
if course_content['registration_ac'] == "None":
course_content['registration_ac'] = None
course_content['registration_ac_list'] = data['registration_ac_list'].split("\n")
except:
errors.append('User returned an invalid form.')
if len(errors) == 0:
course.update_course_descriptor_content(courseid, course_content)
errors = None
course = get_course_and_check_rights(courseid) # don't forget to reload the modified course
return self.page(course, errors, errors is None)
def page(self, course, errors=None, saved=False):
""" Get all data and display the page """
return renderer.course_admin.settings(course, errors, saved)
| GuillaumeDerval/INGInious | frontend/pages/course_admin/settings.py | Python | agpl-3.0 | 4,059 |
# -*- coding: utf-8 -*-
# Test the support for SSL and sockets
import sys
import unittest
from test import test_support as support
from test.script_helper import assert_python_ok
import asyncore
import socket
import select
import time
import datetime
import gc
import os
import errno
import pprint
import shutil
import urllib2
import traceback
import weakref
import platform
import functools
from contextlib import closing
ssl = support.import_module("ssl")
PROTOCOLS = sorted(ssl._PROTOCOL_NAMES)
HOST = support.HOST
IS_LIBRESSL = ssl.OPENSSL_VERSION.startswith('LibreSSL')
IS_OPENSSL_1_1 = not IS_LIBRESSL and ssl.OPENSSL_VERSION_INFO >= (1, 1, 0)
def data_file(*name):
return os.path.join(os.path.dirname(__file__), *name)
# The custom key and certificate files used in test_ssl are generated
# using Lib/test/make_ssl_certs.py.
# Other certificates are simply fetched from the Internet servers they
# are meant to authenticate.
CERTFILE = data_file("keycert.pem")
BYTES_CERTFILE = CERTFILE.encode(sys.getfilesystemencoding())
ONLYCERT = data_file("ssl_cert.pem")
ONLYKEY = data_file("ssl_key.pem")
BYTES_ONLYCERT = ONLYCERT.encode(sys.getfilesystemencoding())
BYTES_ONLYKEY = ONLYKEY.encode(sys.getfilesystemencoding())
CERTFILE_PROTECTED = data_file("keycert.passwd.pem")
ONLYKEY_PROTECTED = data_file("ssl_key.passwd.pem")
KEY_PASSWORD = "somepass"
CAPATH = data_file("capath")
BYTES_CAPATH = CAPATH.encode(sys.getfilesystemencoding())
CAFILE_NEURONIO = data_file("capath", "4e1295a3.0")
CAFILE_CACERT = data_file("capath", "5ed36f99.0")
# empty CRL
CRLFILE = data_file("revocation.crl")
# Two keys and certs signed by the same CA (for SNI tests)
SIGNED_CERTFILE = data_file("keycert3.pem")
SIGNED_CERTFILE2 = data_file("keycert4.pem")
SIGNING_CA = data_file("pycacert.pem")
# cert with all kinds of subject alt names
ALLSANFILE = data_file("allsans.pem")
REMOTE_HOST = "self-signed.pythontest.net"
REMOTE_ROOT_CERT = data_file("selfsigned_pythontestdotnet.pem")
EMPTYCERT = data_file("nullcert.pem")
BADCERT = data_file("badcert.pem")
NONEXISTINGCERT = data_file("XXXnonexisting.pem")
BADKEY = data_file("badkey.pem")
NOKIACERT = data_file("nokia.pem")
NULLBYTECERT = data_file("nullbytecert.pem")
DHFILE = data_file("dh1024.pem")
BYTES_DHFILE = DHFILE.encode(sys.getfilesystemencoding())
def handle_error(prefix):
exc_format = ' '.join(traceback.format_exception(*sys.exc_info()))
if support.verbose:
sys.stdout.write(prefix + exc_format)
class BasicTests(unittest.TestCase):
def test_sslwrap_simple(self):
# A crude test for the legacy API
try:
ssl.sslwrap_simple(socket.socket(socket.AF_INET))
except IOError, e:
if e.errno == 32: # broken pipe when ssl_sock.do_handshake(), this test doesn't care about that
pass
else:
raise
try:
ssl.sslwrap_simple(socket.socket(socket.AF_INET)._sock)
except IOError, e:
if e.errno == 32: # broken pipe when ssl_sock.do_handshake(), this test doesn't care about that
pass
else:
raise
def can_clear_options():
# 0.9.8m or higher
return ssl._OPENSSL_API_VERSION >= (0, 9, 8, 13, 15)
def no_sslv2_implies_sslv3_hello():
# 0.9.7h or higher
return ssl.OPENSSL_VERSION_INFO >= (0, 9, 7, 8, 15)
def have_verify_flags():
# 0.9.8 or higher
return ssl.OPENSSL_VERSION_INFO >= (0, 9, 8, 0, 15)
def utc_offset(): #NOTE: ignore issues like #1647654
# local time = utc time + utc offset
if time.daylight and time.localtime().tm_isdst > 0:
return -time.altzone # seconds
return -time.timezone
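# e.g. in a fixed UTC+2 zone (no DST), time.timezone == -7200, so
# utc_offset() returns 7200; in US Eastern winter time it returns -18000.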
def asn1time(cert_time):
# Some versions of OpenSSL ignore seconds, see #18207
# 0.9.8.i
if ssl._OPENSSL_API_VERSION == (0, 9, 8, 9, 15):
fmt = "%b %d %H:%M:%S %Y GMT"
dt = datetime.datetime.strptime(cert_time, fmt)
dt = dt.replace(second=0)
cert_time = dt.strftime(fmt)
# %d adds leading zero but ASN1_TIME_print() uses leading space
if cert_time[4] == "0":
cert_time = cert_time[:4] + " " + cert_time[5:]
return cert_time
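# e.g. under OpenSSL 0.9.8i, asn1time("Jan  5 09:34:43 2018 GMT")
# returns "Jan  5 09:34:00 2018 GMT": seconds are zeroed and strftime's
# leading zero is swapped back to ASN1_TIME_print()'s leading space.
# On all other versions the string is returned unchanged.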
# Issue #9415: Ubuntu hijacks their OpenSSL and forcefully disables SSLv2
def skip_if_broken_ubuntu_ssl(func):
if hasattr(ssl, 'PROTOCOL_SSLv2'):
@functools.wraps(func)
def f(*args, **kwargs):
try:
ssl.SSLContext(ssl.PROTOCOL_SSLv2)
except ssl.SSLError:
if (ssl.OPENSSL_VERSION_INFO == (0, 9, 8, 15, 15) and
platform.linux_distribution() == ('debian', 'squeeze/sid', '')):
raise unittest.SkipTest("Patched Ubuntu OpenSSL breaks behaviour")
return func(*args, **kwargs)
return f
else:
return func
needs_sni = unittest.skipUnless(ssl.HAS_SNI, "SNI support needed for this test")
class BasicSocketTests(unittest.TestCase):
def test_constants(self):
ssl.CERT_NONE
ssl.CERT_OPTIONAL
ssl.CERT_REQUIRED
ssl.OP_CIPHER_SERVER_PREFERENCE
ssl.OP_SINGLE_DH_USE
if ssl.HAS_ECDH:
ssl.OP_SINGLE_ECDH_USE
if ssl.OPENSSL_VERSION_INFO >= (1, 0):
ssl.OP_NO_COMPRESSION
self.assertIn(ssl.HAS_SNI, {True, False})
self.assertIn(ssl.HAS_ECDH, {True, False})
ssl.OP_NO_SSLv2
ssl.OP_NO_SSLv3
ssl.OP_NO_TLSv1
ssl.OP_NO_TLSv1_3
if ssl.OPENSSL_VERSION_INFO >= (1, 0, 1):
ssl.OP_NO_TLSv1_1
ssl.OP_NO_TLSv1_2
def test_random(self):
v = ssl.RAND_status()
if support.verbose:
sys.stdout.write("\n RAND_status is %d (%s)\n"
% (v, (v and "sufficient randomness") or
"insufficient randomness"))
if hasattr(ssl, 'RAND_egd'):
self.assertRaises(TypeError, ssl.RAND_egd, 1)
self.assertRaises(TypeError, ssl.RAND_egd, 'foo', 1)
ssl.RAND_add("this is a random string", 75.0)
def test_parse_cert(self):
# note that this uses an 'unofficial' function in _ssl.c,
# provided solely for this test, to exercise the certificate
# parsing code
p = ssl._ssl._test_decode_cert(CERTFILE)
if support.verbose:
sys.stdout.write("\n" + pprint.pformat(p) + "\n")
self.assertEqual(p['issuer'],
((('countryName', 'XY'),),
(('localityName', 'Castle Anthrax'),),
(('organizationName', 'Python Software Foundation'),),
(('commonName', 'localhost'),))
)
# Note the next three asserts will fail if the keys are regenerated
self.assertEqual(p['notAfter'], asn1time('Oct 5 23:01:56 2020 GMT'))
self.assertEqual(p['notBefore'], asn1time('Oct 8 23:01:56 2010 GMT'))
self.assertEqual(p['serialNumber'], 'D7C7381919AFC24E')
self.assertEqual(p['subject'],
((('countryName', 'XY'),),
(('localityName', 'Castle Anthrax'),),
(('organizationName', 'Python Software Foundation'),),
(('commonName', 'localhost'),))
)
self.assertEqual(p['subjectAltName'], (('DNS', 'localhost'),))
# Issue #13034: the subjectAltName in some certificates
# (notably projects.developer.nokia.com:443) wasn't parsed
p = ssl._ssl._test_decode_cert(NOKIACERT)
if support.verbose:
sys.stdout.write("\n" + pprint.pformat(p) + "\n")
self.assertEqual(p['subjectAltName'],
(('DNS', 'projects.developer.nokia.com'),
('DNS', 'projects.forum.nokia.com'))
)
# extra OCSP and AIA fields
self.assertEqual(p['OCSP'], ('http://ocsp.verisign.com',))
self.assertEqual(p['caIssuers'],
('http://SVRIntl-G3-aia.verisign.com/SVRIntlG3.cer',))
self.assertEqual(p['crlDistributionPoints'],
('http://SVRIntl-G3-crl.verisign.com/SVRIntlG3.crl',))
def test_parse_cert_CVE_2013_4238(self):
p = ssl._ssl._test_decode_cert(NULLBYTECERT)
if support.verbose:
sys.stdout.write("\n" + pprint.pformat(p) + "\n")
subject = ((('countryName', 'US'),),
(('stateOrProvinceName', 'Oregon'),),
(('localityName', 'Beaverton'),),
(('organizationName', 'Python Software Foundation'),),
(('organizationalUnitName', 'Python Core Development'),),
(('commonName', 'null.python.org\x00example.org'),),
(('emailAddress', '[email protected]'),))
self.assertEqual(p['subject'], subject)
self.assertEqual(p['issuer'], subject)
if ssl._OPENSSL_API_VERSION >= (0, 9, 8):
san = (('DNS', 'altnull.python.org\x00example.com'),
('email', '[email protected]\[email protected]'),
('URI', 'http://null.python.org\x00http://example.org'),
('IP Address', '192.0.2.1'),
('IP Address', '2001:DB8:0:0:0:0:0:1\n'))
else:
# OpenSSL 0.9.7 doesn't support IPv6 addresses in subjectAltName
san = (('DNS', 'altnull.python.org\x00example.com'),
('email', '[email protected]\[email protected]'),
('URI', 'http://null.python.org\x00http://example.org'),
('IP Address', '192.0.2.1'),
('IP Address', '<invalid>'))
self.assertEqual(p['subjectAltName'], san)
def test_parse_all_sans(self):
p = ssl._ssl._test_decode_cert(ALLSANFILE)
self.assertEqual(p['subjectAltName'],
(
('DNS', 'allsans'),
('othername', '<unsupported>'),
('othername', '<unsupported>'),
('email', '[email protected]'),
('DNS', 'www.example.org'),
('DirName',
((('countryName', 'XY'),),
(('localityName', 'Castle Anthrax'),),
(('organizationName', 'Python Software Foundation'),),
(('commonName', 'dirname example'),))),
('URI', 'https://www.python.org/'),
('IP Address', '127.0.0.1'),
('IP Address', '0:0:0:0:0:0:0:1\n'),
('Registered ID', '1.2.3.4.5')
)
)
def test_DER_to_PEM(self):
with open(CAFILE_CACERT, 'r') as f:
pem = f.read()
d1 = ssl.PEM_cert_to_DER_cert(pem)
p2 = ssl.DER_cert_to_PEM_cert(d1)
d2 = ssl.PEM_cert_to_DER_cert(p2)
self.assertEqual(d1, d2)
if not p2.startswith(ssl.PEM_HEADER + '\n'):
self.fail("DER-to-PEM didn't include correct header:\n%r\n" % p2)
if not p2.endswith('\n' + ssl.PEM_FOOTER + '\n'):
self.fail("DER-to-PEM didn't include correct footer:\n%r\n" % p2)
def test_openssl_version(self):
n = ssl.OPENSSL_VERSION_NUMBER
t = ssl.OPENSSL_VERSION_INFO
s = ssl.OPENSSL_VERSION
self.assertIsInstance(n, (int, long))
self.assertIsInstance(t, tuple)
self.assertIsInstance(s, str)
# Some sanity checks follow
# >= 0.9
self.assertGreaterEqual(n, 0x900000)
# < 3.0
self.assertLess(n, 0x30000000)
major, minor, fix, patch, status = t
self.assertGreaterEqual(major, 0)
self.assertLess(major, 3)
self.assertGreaterEqual(minor, 0)
self.assertLess(minor, 256)
self.assertGreaterEqual(fix, 0)
self.assertLess(fix, 256)
self.assertGreaterEqual(patch, 0)
self.assertLessEqual(patch, 63)
self.assertGreaterEqual(status, 0)
self.assertLessEqual(status, 15)
# Version string as returned by {Open,Libre}SSL, the format might change
if IS_LIBRESSL:
self.assertTrue(s.startswith("LibreSSL {:d}".format(major)),
(s, t, hex(n)))
else:
self.assertTrue(s.startswith("OpenSSL {:d}.{:d}.{:d}".format(major, minor, fix)),
(s, t))
@support.cpython_only
def test_refcycle(self):
# Issue #7943: an SSL object doesn't create reference cycles with
# itself.
s = socket.socket(socket.AF_INET)
ss = ssl.wrap_socket(s)
wr = weakref.ref(ss)
del ss
self.assertEqual(wr(), None)
def test_wrapped_unconnected(self):
# Methods on an unconnected SSLSocket propagate the original
# socket.error raised by the underlying socket object.
s = socket.socket(socket.AF_INET)
with closing(ssl.wrap_socket(s)) as ss:
self.assertRaises(socket.error, ss.recv, 1)
self.assertRaises(socket.error, ss.recv_into, bytearray(b'x'))
self.assertRaises(socket.error, ss.recvfrom, 1)
self.assertRaises(socket.error, ss.recvfrom_into, bytearray(b'x'), 1)
self.assertRaises(socket.error, ss.send, b'x')
self.assertRaises(socket.error, ss.sendto, b'x', ('0.0.0.0', 0))
def test_timeout(self):
# Issue #8524: when creating an SSL socket, the timeout of the
# original socket should be retained.
for timeout in (None, 0.0, 5.0):
s = socket.socket(socket.AF_INET)
s.settimeout(timeout)
with closing(ssl.wrap_socket(s)) as ss:
self.assertEqual(timeout, ss.gettimeout())
def test_errors(self):
sock = socket.socket()
self.assertRaisesRegexp(ValueError,
"certfile must be specified",
ssl.wrap_socket, sock, keyfile=CERTFILE)
self.assertRaisesRegexp(ValueError,
"certfile must be specified for server-side operations",
ssl.wrap_socket, sock, server_side=True)
self.assertRaisesRegexp(ValueError,
"certfile must be specified for server-side operations",
ssl.wrap_socket, sock, server_side=True, certfile="")
with closing(ssl.wrap_socket(sock, server_side=True, certfile=CERTFILE)) as s:
self.assertRaisesRegexp(ValueError, "can't connect in server-side mode",
s.connect, (HOST, 8080))
with self.assertRaises(IOError) as cm:
with closing(socket.socket()) as sock:
ssl.wrap_socket(sock, certfile=NONEXISTINGCERT)
self.assertEqual(cm.exception.errno, errno.ENOENT)
with self.assertRaises(IOError) as cm:
with closing(socket.socket()) as sock:
ssl.wrap_socket(sock,
certfile=CERTFILE, keyfile=NONEXISTINGCERT)
self.assertEqual(cm.exception.errno, errno.ENOENT)
with self.assertRaises(IOError) as cm:
with closing(socket.socket()) as sock:
ssl.wrap_socket(sock,
certfile=NONEXISTINGCERT, keyfile=NONEXISTINGCERT)
self.assertEqual(cm.exception.errno, errno.ENOENT)
def bad_cert_test(self, certfile):
"""Check that trying to use the given client certificate fails"""
certfile = os.path.join(os.path.dirname(__file__) or os.curdir,
certfile)
sock = socket.socket()
self.addCleanup(sock.close)
with self.assertRaises(ssl.SSLError):
ssl.wrap_socket(sock,
certfile=certfile,
ssl_version=ssl.PROTOCOL_TLSv1)
def test_empty_cert(self):
"""Wrapping with an empty cert file"""
self.bad_cert_test("nullcert.pem")
def test_malformed_cert(self):
"""Wrapping with a badly formatted certificate (syntax error)"""
self.bad_cert_test("badcert.pem")
def test_malformed_key(self):
"""Wrapping with a badly formatted key (syntax error)"""
self.bad_cert_test("badkey.pem")
def test_match_hostname(self):
def ok(cert, hostname):
ssl.match_hostname(cert, hostname)
def fail(cert, hostname):
self.assertRaises(ssl.CertificateError,
ssl.match_hostname, cert, hostname)
cert = {'subject': ((('commonName', 'example.com'),),)}
ok(cert, 'example.com')
ok(cert, 'ExAmple.cOm')
fail(cert, 'www.example.com')
fail(cert, '.example.com')
fail(cert, 'example.org')
fail(cert, 'exampleXcom')
cert = {'subject': ((('commonName', '*.a.com'),),)}
ok(cert, 'foo.a.com')
fail(cert, 'bar.foo.a.com')
fail(cert, 'a.com')
fail(cert, 'Xa.com')
fail(cert, '.a.com')
# only match one left-most wildcard
cert = {'subject': ((('commonName', 'f*.com'),),)}
ok(cert, 'foo.com')
ok(cert, 'f.com')
fail(cert, 'bar.com')
fail(cert, 'foo.a.com')
fail(cert, 'bar.foo.com')
# NULL bytes are bad, CVE-2013-4073
cert = {'subject': ((('commonName',
'null.python.org\x00example.org'),),)}
ok(cert, 'null.python.org\x00example.org') # or raise an error?
fail(cert, 'example.org')
fail(cert, 'null.python.org')
# error cases with wildcards
cert = {'subject': ((('commonName', '*.*.a.com'),),)}
fail(cert, 'bar.foo.a.com')
fail(cert, 'a.com')
fail(cert, 'Xa.com')
fail(cert, '.a.com')
cert = {'subject': ((('commonName', 'a.*.com'),),)}
fail(cert, 'a.foo.com')
fail(cert, 'a..com')
fail(cert, 'a.com')
# wildcard doesn't match IDNA prefix 'xn--'
idna = u'püthon.python.org'.encode("idna").decode("ascii")
cert = {'subject': ((('commonName', idna),),)}
ok(cert, idna)
cert = {'subject': ((('commonName', 'x*.python.org'),),)}
fail(cert, idna)
cert = {'subject': ((('commonName', 'xn--p*.python.org'),),)}
fail(cert, idna)
# wildcard in first fragment and IDNA A-labels in subsequent fragments
# are supported.
idna = u'www*.pythön.org'.encode("idna").decode("ascii")
cert = {'subject': ((('commonName', idna),),)}
ok(cert, u'www.pythön.org'.encode("idna").decode("ascii"))
ok(cert, u'www1.pythön.org'.encode("idna").decode("ascii"))
fail(cert, u'ftp.pythön.org'.encode("idna").decode("ascii"))
fail(cert, u'pythön.org'.encode("idna").decode("ascii"))
# Slightly fake real-world example
cert = {'notAfter': 'Jun 26 21:41:46 2011 GMT',
'subject': ((('commonName', 'linuxfrz.org'),),),
'subjectAltName': (('DNS', 'linuxfr.org'),
('DNS', 'linuxfr.com'),
('othername', '<unsupported>'))}
ok(cert, 'linuxfr.org')
ok(cert, 'linuxfr.com')
# Not a "DNS" entry
fail(cert, '<unsupported>')
# When there is a subjectAltName, commonName isn't used
fail(cert, 'linuxfrz.org')
# A pristine real-world example
cert = {'notAfter': 'Dec 18 23:59:59 2011 GMT',
'subject': ((('countryName', 'US'),),
(('stateOrProvinceName', 'California'),),
(('localityName', 'Mountain View'),),
(('organizationName', 'Google Inc'),),
(('commonName', 'mail.google.com'),))}
ok(cert, 'mail.google.com')
fail(cert, 'gmail.com')
# Only commonName is considered
fail(cert, 'California')
# Neither commonName nor subjectAltName
cert = {'notAfter': 'Dec 18 23:59:59 2011 GMT',
'subject': ((('countryName', 'US'),),
(('stateOrProvinceName', 'California'),),
(('localityName', 'Mountain View'),),
(('organizationName', 'Google Inc'),))}
fail(cert, 'mail.google.com')
# No DNS entry in subjectAltName but a commonName
cert = {'notAfter': 'Dec 18 23:59:59 2099 GMT',
'subject': ((('countryName', 'US'),),
(('stateOrProvinceName', 'California'),),
(('localityName', 'Mountain View'),),
(('commonName', 'mail.google.com'),)),
'subjectAltName': (('othername', 'blabla'), )}
ok(cert, 'mail.google.com')
# No DNS entry subjectAltName and no commonName
cert = {'notAfter': 'Dec 18 23:59:59 2099 GMT',
'subject': ((('countryName', 'US'),),
(('stateOrProvinceName', 'California'),),
(('localityName', 'Mountain View'),),
(('organizationName', 'Google Inc'),)),
'subjectAltName': (('othername', 'blabla'),)}
fail(cert, 'google.com')
# Empty cert / no cert
self.assertRaises(ValueError, ssl.match_hostname, None, 'example.com')
self.assertRaises(ValueError, ssl.match_hostname, {}, 'example.com')
# Issue #17980: avoid denials of service by refusing more than one
# wildcard per fragment.
cert = {'subject': ((('commonName', 'a*b.com'),),)}
ok(cert, 'axxb.com')
cert = {'subject': ((('commonName', 'a*b.co*'),),)}
fail(cert, 'axxb.com')
cert = {'subject': ((('commonName', 'a*b*.com'),),)}
with self.assertRaises(ssl.CertificateError) as cm:
ssl.match_hostname(cert, 'axxbxxc.com')
self.assertIn("too many wildcards", str(cm.exception))
def test_server_side(self):
# server_hostname doesn't work for server sockets
ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
with closing(socket.socket()) as sock:
self.assertRaises(ValueError, ctx.wrap_socket, sock, True,
server_hostname="some.hostname")
def test_unknown_channel_binding(self):
# should raise ValueError for unknown type
s = socket.socket(socket.AF_INET)
with closing(ssl.wrap_socket(s)) as ss:
with self.assertRaises(ValueError):
ss.get_channel_binding("unknown-type")
@unittest.skipUnless("tls-unique" in ssl.CHANNEL_BINDING_TYPES,
"'tls-unique' channel binding not available")
def test_tls_unique_channel_binding(self):
# unconnected should return None for known type
s = socket.socket(socket.AF_INET)
with closing(ssl.wrap_socket(s)) as ss:
self.assertIsNone(ss.get_channel_binding("tls-unique"))
# the same for server-side
s = socket.socket(socket.AF_INET)
with closing(ssl.wrap_socket(s, server_side=True, certfile=CERTFILE)) as ss:
self.assertIsNone(ss.get_channel_binding("tls-unique"))
def test_get_default_verify_paths(self):
paths = ssl.get_default_verify_paths()
self.assertEqual(len(paths), 6)
self.assertIsInstance(paths, ssl.DefaultVerifyPaths)
with support.EnvironmentVarGuard() as env:
env["SSL_CERT_DIR"] = CAPATH
env["SSL_CERT_FILE"] = CERTFILE
paths = ssl.get_default_verify_paths()
self.assertEqual(paths.cafile, CERTFILE)
self.assertEqual(paths.capath, CAPATH)
@unittest.skipUnless(sys.platform == "win32", "Windows specific")
def test_enum_certificates(self):
self.assertTrue(ssl.enum_certificates("CA"))
self.assertTrue(ssl.enum_certificates("ROOT"))
self.assertRaises(TypeError, ssl.enum_certificates)
self.assertRaises(WindowsError, ssl.enum_certificates, "")
trust_oids = set()
for storename in ("CA", "ROOT"):
store = ssl.enum_certificates(storename)
self.assertIsInstance(store, list)
for element in store:
self.assertIsInstance(element, tuple)
self.assertEqual(len(element), 3)
cert, enc, trust = element
self.assertIsInstance(cert, bytes)
self.assertIn(enc, {"x509_asn", "pkcs_7_asn"})
self.assertIsInstance(trust, (set, bool))
if isinstance(trust, set):
trust_oids.update(trust)
serverAuth = "1.3.6.1.5.5.7.3.1"
self.assertIn(serverAuth, trust_oids)
@unittest.skipUnless(sys.platform == "win32", "Windows specific")
def test_enum_crls(self):
self.assertTrue(ssl.enum_crls("CA"))
self.assertRaises(TypeError, ssl.enum_crls)
self.assertRaises(WindowsError, ssl.enum_crls, "")
crls = ssl.enum_crls("CA")
self.assertIsInstance(crls, list)
for element in crls:
self.assertIsInstance(element, tuple)
self.assertEqual(len(element), 2)
self.assertIsInstance(element[0], bytes)
self.assertIn(element[1], {"x509_asn", "pkcs_7_asn"})
def test_asn1object(self):
expected = (129, 'serverAuth', 'TLS Web Server Authentication',
'1.3.6.1.5.5.7.3.1')
val = ssl._ASN1Object('1.3.6.1.5.5.7.3.1')
self.assertEqual(val, expected)
self.assertEqual(val.nid, 129)
self.assertEqual(val.shortname, 'serverAuth')
self.assertEqual(val.longname, 'TLS Web Server Authentication')
self.assertEqual(val.oid, '1.3.6.1.5.5.7.3.1')
self.assertIsInstance(val, ssl._ASN1Object)
self.assertRaises(ValueError, ssl._ASN1Object, 'serverAuth')
val = ssl._ASN1Object.fromnid(129)
self.assertEqual(val, expected)
self.assertIsInstance(val, ssl._ASN1Object)
self.assertRaises(ValueError, ssl._ASN1Object.fromnid, -1)
with self.assertRaisesRegexp(ValueError, "unknown NID 100000"):
ssl._ASN1Object.fromnid(100000)
for i in range(1000):
try:
obj = ssl._ASN1Object.fromnid(i)
except ValueError:
pass
else:
self.assertIsInstance(obj.nid, int)
self.assertIsInstance(obj.shortname, str)
self.assertIsInstance(obj.longname, str)
self.assertIsInstance(obj.oid, (str, type(None)))
val = ssl._ASN1Object.fromname('TLS Web Server Authentication')
self.assertEqual(val, expected)
self.assertIsInstance(val, ssl._ASN1Object)
self.assertEqual(ssl._ASN1Object.fromname('serverAuth'), expected)
self.assertEqual(ssl._ASN1Object.fromname('1.3.6.1.5.5.7.3.1'),
expected)
with self.assertRaisesRegexp(ValueError, "unknown object 'serverauth'"):
ssl._ASN1Object.fromname('serverauth')
def test_purpose_enum(self):
val = ssl._ASN1Object('1.3.6.1.5.5.7.3.1')
self.assertIsInstance(ssl.Purpose.SERVER_AUTH, ssl._ASN1Object)
self.assertEqual(ssl.Purpose.SERVER_AUTH, val)
self.assertEqual(ssl.Purpose.SERVER_AUTH.nid, 129)
self.assertEqual(ssl.Purpose.SERVER_AUTH.shortname, 'serverAuth')
self.assertEqual(ssl.Purpose.SERVER_AUTH.oid,
'1.3.6.1.5.5.7.3.1')
val = ssl._ASN1Object('1.3.6.1.5.5.7.3.2')
self.assertIsInstance(ssl.Purpose.CLIENT_AUTH, ssl._ASN1Object)
self.assertEqual(ssl.Purpose.CLIENT_AUTH, val)
self.assertEqual(ssl.Purpose.CLIENT_AUTH.nid, 130)
self.assertEqual(ssl.Purpose.CLIENT_AUTH.shortname, 'clientAuth')
self.assertEqual(ssl.Purpose.CLIENT_AUTH.oid,
'1.3.6.1.5.5.7.3.2')
def test_unsupported_dtls(self):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.addCleanup(s.close)
with self.assertRaises(NotImplementedError) as cx:
ssl.wrap_socket(s, cert_reqs=ssl.CERT_NONE)
self.assertEqual(str(cx.exception), "only stream sockets are supported")
ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
with self.assertRaises(NotImplementedError) as cx:
ctx.wrap_socket(s)
self.assertEqual(str(cx.exception), "only stream sockets are supported")
def cert_time_ok(self, timestring, timestamp):
self.assertEqual(ssl.cert_time_to_seconds(timestring), timestamp)
def cert_time_fail(self, timestring):
with self.assertRaises(ValueError):
ssl.cert_time_to_seconds(timestring)
@unittest.skipUnless(utc_offset(),
'local time needs to be different from UTC')
def test_cert_time_to_seconds_timezone(self):
# Issue #19940: ssl.cert_time_to_seconds() returns wrong
# results if local timezone is not UTC
self.cert_time_ok("May 9 00:00:00 2007 GMT", 1178668800.0)
self.cert_time_ok("Jan 5 09:34:43 2018 GMT", 1515144883.0)
def test_cert_time_to_seconds(self):
timestring = "Jan 5 09:34:43 2018 GMT"
ts = 1515144883.0
self.cert_time_ok(timestring, ts)
# accept keyword parameter, assert its name
self.assertEqual(ssl.cert_time_to_seconds(cert_time=timestring), ts)
# accept both %e and %d (space or zero generated by strftime)
self.cert_time_ok("Jan 05 09:34:43 2018 GMT", ts)
# case-insensitive
self.cert_time_ok("JaN 5 09:34:43 2018 GmT", ts)
self.cert_time_fail("Jan 5 09:34 2018 GMT") # no seconds
self.cert_time_fail("Jan 5 09:34:43 2018") # no GMT
self.cert_time_fail("Jan 5 09:34:43 2018 UTC") # not GMT timezone
self.cert_time_fail("Jan 35 09:34:43 2018 GMT") # invalid day
self.cert_time_fail("Jon 5 09:34:43 2018 GMT") # invalid month
self.cert_time_fail("Jan 5 24:00:00 2018 GMT") # invalid hour
self.cert_time_fail("Jan 5 09:60:43 2018 GMT") # invalid minute
newyear_ts = 1230768000.0
# leap seconds
self.cert_time_ok("Dec 31 23:59:60 2008 GMT", newyear_ts)
# same timestamp
self.cert_time_ok("Jan 1 00:00:00 2009 GMT", newyear_ts)
self.cert_time_ok("Jan 5 09:34:59 2018 GMT", 1515144899)
# allow 60th second (even if it is not a leap second)
self.cert_time_ok("Jan 5 09:34:60 2018 GMT", 1515144900)
# allow 2nd leap second for compatibility with time.strptime()
self.cert_time_ok("Jan 5 09:34:61 2018 GMT", 1515144901)
self.cert_time_fail("Jan 5 09:34:62 2018 GMT") # invalid seconds
# no special treatment for the special value:
# 99991231235959Z (rfc 5280)
self.cert_time_ok("Dec 31 23:59:59 9999 GMT", 253402300799.0)
@support.run_with_locale('LC_ALL', '')
def test_cert_time_to_seconds_locale(self):
# `cert_time_to_seconds()` should be locale independent
def local_february_name():
return time.strftime('%b', (1, 2, 3, 4, 5, 6, 0, 0, 0))
if local_february_name().lower() == 'feb':
self.skipTest("locale-specific month name needs to be "
"different from C locale")
# locale-independent
self.cert_time_ok("Feb 9 00:00:00 2007 GMT", 1170979200.0)
self.cert_time_fail(local_february_name() + " 9 00:00:00 2007 GMT")
class ContextTests(unittest.TestCase):
@skip_if_broken_ubuntu_ssl
def test_constructor(self):
for protocol in PROTOCOLS:
ssl.SSLContext(protocol)
self.assertRaises(TypeError, ssl.SSLContext)
self.assertRaises(ValueError, ssl.SSLContext, -1)
self.assertRaises(ValueError, ssl.SSLContext, 42)
@skip_if_broken_ubuntu_ssl
def test_protocol(self):
for proto in PROTOCOLS:
ctx = ssl.SSLContext(proto)
self.assertEqual(ctx.protocol, proto)
def test_ciphers(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.set_ciphers("ALL")
ctx.set_ciphers("DEFAULT")
with self.assertRaisesRegexp(ssl.SSLError, "No cipher can be selected"):
ctx.set_ciphers("^$:,;?*'dorothyx")
@skip_if_broken_ubuntu_ssl
def test_options(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
# OP_ALL | OP_NO_SSLv2 | OP_NO_SSLv3 is the default value
default = (ssl.OP_ALL | ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3)
if not IS_LIBRESSL and ssl.OPENSSL_VERSION_INFO >= (1, 1, 0):
default |= ssl.OP_NO_COMPRESSION
self.assertEqual(default, ctx.options)
ctx.options |= ssl.OP_NO_TLSv1
self.assertEqual(default | ssl.OP_NO_TLSv1, ctx.options)
if can_clear_options():
ctx.options = (ctx.options & ~ssl.OP_NO_TLSv1)
self.assertEqual(default, ctx.options)
ctx.options = 0
# Ubuntu has OP_NO_SSLv3 forced on by default
self.assertEqual(0, ctx.options & ~ssl.OP_NO_SSLv3)
else:
with self.assertRaises(ValueError):
ctx.options = 0
def test_verify_mode(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
# Default value
self.assertEqual(ctx.verify_mode, ssl.CERT_NONE)
ctx.verify_mode = ssl.CERT_OPTIONAL
self.assertEqual(ctx.verify_mode, ssl.CERT_OPTIONAL)
ctx.verify_mode = ssl.CERT_REQUIRED
self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED)
ctx.verify_mode = ssl.CERT_NONE
self.assertEqual(ctx.verify_mode, ssl.CERT_NONE)
with self.assertRaises(TypeError):
ctx.verify_mode = None
with self.assertRaises(ValueError):
ctx.verify_mode = 42
@unittest.skipUnless(have_verify_flags(),
"verify_flags need OpenSSL > 0.9.8")
def test_verify_flags(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
# default value
tf = getattr(ssl, "VERIFY_X509_TRUSTED_FIRST", 0)
self.assertEqual(ctx.verify_flags, ssl.VERIFY_DEFAULT | tf)
ctx.verify_flags = ssl.VERIFY_CRL_CHECK_LEAF
self.assertEqual(ctx.verify_flags, ssl.VERIFY_CRL_CHECK_LEAF)
ctx.verify_flags = ssl.VERIFY_CRL_CHECK_CHAIN
self.assertEqual(ctx.verify_flags, ssl.VERIFY_CRL_CHECK_CHAIN)
ctx.verify_flags = ssl.VERIFY_DEFAULT
self.assertEqual(ctx.verify_flags, ssl.VERIFY_DEFAULT)
# supports any value
ctx.verify_flags = ssl.VERIFY_CRL_CHECK_LEAF | ssl.VERIFY_X509_STRICT
self.assertEqual(ctx.verify_flags,
ssl.VERIFY_CRL_CHECK_LEAF | ssl.VERIFY_X509_STRICT)
with self.assertRaises(TypeError):
ctx.verify_flags = None
def test_load_cert_chain(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
# Combined key and cert in a single file
ctx.load_cert_chain(CERTFILE, keyfile=None)
ctx.load_cert_chain(CERTFILE, keyfile=CERTFILE)
self.assertRaises(TypeError, ctx.load_cert_chain, keyfile=CERTFILE)
with self.assertRaises(IOError) as cm:
ctx.load_cert_chain(NONEXISTINGCERT)
self.assertEqual(cm.exception.errno, errno.ENOENT)
with self.assertRaisesRegexp(ssl.SSLError, "PEM lib"):
ctx.load_cert_chain(BADCERT)
with self.assertRaisesRegexp(ssl.SSLError, "PEM lib"):
ctx.load_cert_chain(EMPTYCERT)
# Separate key and cert
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.load_cert_chain(ONLYCERT, ONLYKEY)
ctx.load_cert_chain(certfile=ONLYCERT, keyfile=ONLYKEY)
ctx.load_cert_chain(certfile=BYTES_ONLYCERT, keyfile=BYTES_ONLYKEY)
with self.assertRaisesRegexp(ssl.SSLError, "PEM lib"):
ctx.load_cert_chain(ONLYCERT)
with self.assertRaisesRegexp(ssl.SSLError, "PEM lib"):
ctx.load_cert_chain(ONLYKEY)
with self.assertRaisesRegexp(ssl.SSLError, "PEM lib"):
ctx.load_cert_chain(certfile=ONLYKEY, keyfile=ONLYCERT)
# Mismatching key and cert
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
with self.assertRaisesRegexp(ssl.SSLError, "key values mismatch"):
ctx.load_cert_chain(CAFILE_CACERT, ONLYKEY)
# Password protected key and cert
ctx.load_cert_chain(CERTFILE_PROTECTED, password=KEY_PASSWORD)
ctx.load_cert_chain(CERTFILE_PROTECTED, password=KEY_PASSWORD.encode())
ctx.load_cert_chain(CERTFILE_PROTECTED,
password=bytearray(KEY_PASSWORD.encode()))
ctx.load_cert_chain(ONLYCERT, ONLYKEY_PROTECTED, KEY_PASSWORD)
ctx.load_cert_chain(ONLYCERT, ONLYKEY_PROTECTED, KEY_PASSWORD.encode())
ctx.load_cert_chain(ONLYCERT, ONLYKEY_PROTECTED,
bytearray(KEY_PASSWORD.encode()))
with self.assertRaisesRegexp(TypeError, "should be a string"):
ctx.load_cert_chain(CERTFILE_PROTECTED, password=True)
with self.assertRaises(ssl.SSLError):
ctx.load_cert_chain(CERTFILE_PROTECTED, password="badpass")
with self.assertRaisesRegexp(ValueError, "cannot be longer"):
# openssl has a fixed limit on the password buffer.
# PEM_BUFSIZE is generally set to 1kb.
# Return a string larger than this.
ctx.load_cert_chain(CERTFILE_PROTECTED, password=b'a' * 102400)
# Password callback
def getpass_unicode():
return KEY_PASSWORD
def getpass_bytes():
return KEY_PASSWORD.encode()
def getpass_bytearray():
return bytearray(KEY_PASSWORD.encode())
def getpass_badpass():
return "badpass"
def getpass_huge():
return b'a' * (1024 * 1024)
def getpass_bad_type():
return 9
def getpass_exception():
raise Exception('getpass error')
class GetPassCallable:
def __call__(self):
return KEY_PASSWORD
def getpass(self):
return KEY_PASSWORD
ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_unicode)
ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_bytes)
ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_bytearray)
ctx.load_cert_chain(CERTFILE_PROTECTED, password=GetPassCallable())
ctx.load_cert_chain(CERTFILE_PROTECTED,
password=GetPassCallable().getpass)
with self.assertRaises(ssl.SSLError):
ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_badpass)
with self.assertRaisesRegexp(ValueError, "cannot be longer"):
ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_huge)
with self.assertRaisesRegexp(TypeError, "must return a string"):
ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_bad_type)
with self.assertRaisesRegexp(Exception, "getpass error"):
ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_exception)
# Make sure the password function isn't called if it isn't needed
ctx.load_cert_chain(CERTFILE, password=getpass_exception)
def test_load_verify_locations(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.load_verify_locations(CERTFILE)
ctx.load_verify_locations(cafile=CERTFILE, capath=None)
ctx.load_verify_locations(BYTES_CERTFILE)
ctx.load_verify_locations(cafile=BYTES_CERTFILE, capath=None)
ctx.load_verify_locations(cafile=BYTES_CERTFILE.decode('utf-8'))
self.assertRaises(TypeError, ctx.load_verify_locations)
self.assertRaises(TypeError, ctx.load_verify_locations, None, None, None)
with self.assertRaises(IOError) as cm:
ctx.load_verify_locations(NONEXISTINGCERT)
self.assertEqual(cm.exception.errno, errno.ENOENT)
with self.assertRaises(IOError):
ctx.load_verify_locations(u'')
with self.assertRaisesRegexp(ssl.SSLError, "PEM lib"):
ctx.load_verify_locations(BADCERT)
ctx.load_verify_locations(CERTFILE, CAPATH)
ctx.load_verify_locations(CERTFILE, capath=BYTES_CAPATH)
# Issue #10989: crash if the second argument type is invalid
self.assertRaises(TypeError, ctx.load_verify_locations, None, True)
def test_load_verify_cadata(self):
# test cadata
with open(CAFILE_CACERT) as f:
cacert_pem = f.read().decode("ascii")
cacert_der = ssl.PEM_cert_to_DER_cert(cacert_pem)
with open(CAFILE_NEURONIO) as f:
neuronio_pem = f.read().decode("ascii")
neuronio_der = ssl.PEM_cert_to_DER_cert(neuronio_pem)
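        # cadata accepts certificates either as PEM text (unicode) or as
        # DER-encoded bytes; both forms are exercised below.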
# test PEM
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
self.assertEqual(ctx.cert_store_stats()["x509_ca"], 0)
ctx.load_verify_locations(cadata=cacert_pem)
self.assertEqual(ctx.cert_store_stats()["x509_ca"], 1)
ctx.load_verify_locations(cadata=neuronio_pem)
self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2)
# cert already in hash table
ctx.load_verify_locations(cadata=neuronio_pem)
self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2)
# combined
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
combined = "\n".join((cacert_pem, neuronio_pem))
ctx.load_verify_locations(cadata=combined)
self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2)
# with junk around the certs
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
combined = ["head", cacert_pem, "other", neuronio_pem, "again",
neuronio_pem, "tail"]
ctx.load_verify_locations(cadata="\n".join(combined))
self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2)
# test DER
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.load_verify_locations(cadata=cacert_der)
ctx.load_verify_locations(cadata=neuronio_der)
self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2)
# cert already in hash table
ctx.load_verify_locations(cadata=cacert_der)
self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2)
# combined
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
combined = b"".join((cacert_der, neuronio_der))
ctx.load_verify_locations(cadata=combined)
self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2)
# error cases
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
self.assertRaises(TypeError, ctx.load_verify_locations, cadata=object)
with self.assertRaisesRegexp(ssl.SSLError, "no start line"):
ctx.load_verify_locations(cadata=u"broken")
with self.assertRaisesRegexp(ssl.SSLError, "not enough data"):
ctx.load_verify_locations(cadata=b"broken")
def test_load_dh_params(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.load_dh_params(DHFILE)
if os.name != 'nt':
ctx.load_dh_params(BYTES_DHFILE)
self.assertRaises(TypeError, ctx.load_dh_params)
self.assertRaises(TypeError, ctx.load_dh_params, None)
with self.assertRaises(IOError) as cm:
ctx.load_dh_params(NONEXISTINGCERT)
self.assertEqual(cm.exception.errno, errno.ENOENT)
with self.assertRaises(ssl.SSLError) as cm:
ctx.load_dh_params(CERTFILE)
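        # load_dh_params() must also accept non-ASCII (unicode) file names.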
with support.temp_dir() as d:
fname = os.path.join(d, u'dhpäräm.pem')
shutil.copy(DHFILE, fname)
ctx.load_dh_params(fname)
@skip_if_broken_ubuntu_ssl
def test_session_stats(self):
for proto in PROTOCOLS:
ctx = ssl.SSLContext(proto)
self.assertEqual(ctx.session_stats(), {
'number': 0,
'connect': 0,
'connect_good': 0,
'connect_renegotiate': 0,
'accept': 0,
'accept_good': 0,
'accept_renegotiate': 0,
'hits': 0,
'misses': 0,
'timeouts': 0,
'cache_full': 0,
})
def test_set_default_verify_paths(self):
# There's not much we can do to test that it acts as expected,
# so just check it doesn't crash or raise an exception.
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.set_default_verify_paths()
@unittest.skipUnless(ssl.HAS_ECDH, "ECDH disabled on this OpenSSL build")
def test_set_ecdh_curve(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.set_ecdh_curve("prime256v1")
ctx.set_ecdh_curve(b"prime256v1")
self.assertRaises(TypeError, ctx.set_ecdh_curve)
self.assertRaises(TypeError, ctx.set_ecdh_curve, None)
self.assertRaises(ValueError, ctx.set_ecdh_curve, "foo")
self.assertRaises(ValueError, ctx.set_ecdh_curve, b"foo")
@needs_sni
def test_sni_callback(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
# set_servername_callback expects a callable, or None
self.assertRaises(TypeError, ctx.set_servername_callback)
self.assertRaises(TypeError, ctx.set_servername_callback, 4)
self.assertRaises(TypeError, ctx.set_servername_callback, "")
self.assertRaises(TypeError, ctx.set_servername_callback, ctx)
def dummycallback(sock, servername, ctx):
pass
ctx.set_servername_callback(None)
ctx.set_servername_callback(dummycallback)
@needs_sni
def test_sni_callback_refcycle(self):
# Reference cycles through the servername callback are detected
# and cleared.
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
def dummycallback(sock, servername, ctx, cycle=ctx):
pass
ctx.set_servername_callback(dummycallback)
wr = weakref.ref(ctx)
del ctx, dummycallback
gc.collect()
self.assertIs(wr(), None)
def test_cert_store_stats(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
self.assertEqual(ctx.cert_store_stats(),
{'x509_ca': 0, 'crl': 0, 'x509': 0})
ctx.load_cert_chain(CERTFILE)
self.assertEqual(ctx.cert_store_stats(),
{'x509_ca': 0, 'crl': 0, 'x509': 0})
ctx.load_verify_locations(CERTFILE)
self.assertEqual(ctx.cert_store_stats(),
{'x509_ca': 0, 'crl': 0, 'x509': 1})
ctx.load_verify_locations(CAFILE_CACERT)
self.assertEqual(ctx.cert_store_stats(),
{'x509_ca': 1, 'crl': 0, 'x509': 2})
def test_get_ca_certs(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
self.assertEqual(ctx.get_ca_certs(), [])
# CERTFILE is not flagged as X509v3 Basic Constraints: CA:TRUE
ctx.load_verify_locations(CERTFILE)
self.assertEqual(ctx.get_ca_certs(), [])
# but CAFILE_CACERT is a CA cert
ctx.load_verify_locations(CAFILE_CACERT)
self.assertEqual(ctx.get_ca_certs(),
[{'issuer': ((('organizationName', 'Root CA'),),
(('organizationalUnitName', 'http://www.cacert.org'),),
(('commonName', 'CA Cert Signing Authority'),),
(('emailAddress', '[email protected]'),)),
'notAfter': asn1time('Mar 29 12:29:49 2033 GMT'),
'notBefore': asn1time('Mar 30 12:29:49 2003 GMT'),
'serialNumber': '00',
'crlDistributionPoints': ('https://www.cacert.org/revoke.crl',),
'subject': ((('organizationName', 'Root CA'),),
(('organizationalUnitName', 'http://www.cacert.org'),),
(('commonName', 'CA Cert Signing Authority'),),
(('emailAddress', '[email protected]'),)),
'version': 3}])
with open(CAFILE_CACERT) as f:
pem = f.read()
der = ssl.PEM_cert_to_DER_cert(pem)
self.assertEqual(ctx.get_ca_certs(True), [der])
def test_load_default_certs(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.load_default_certs()
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.load_default_certs(ssl.Purpose.SERVER_AUTH)
ctx.load_default_certs()
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.load_default_certs(ssl.Purpose.CLIENT_AUTH)
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
self.assertRaises(TypeError, ctx.load_default_certs, None)
self.assertRaises(TypeError, ctx.load_default_certs, 'SERVER_AUTH')
@unittest.skipIf(sys.platform == "win32", "not-Windows specific")
@unittest.skipIf(IS_LIBRESSL, "LibreSSL doesn't support env vars")
def test_load_default_certs_env(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
with support.EnvironmentVarGuard() as env:
env["SSL_CERT_DIR"] = CAPATH
env["SSL_CERT_FILE"] = CERTFILE
ctx.load_default_certs()
self.assertEqual(ctx.cert_store_stats(), {"crl": 0, "x509": 1, "x509_ca": 0})
@unittest.skipUnless(sys.platform == "win32", "Windows specific")
def test_load_default_certs_env_windows(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.load_default_certs()
stats = ctx.cert_store_stats()
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
with support.EnvironmentVarGuard() as env:
env["SSL_CERT_DIR"] = CAPATH
env["SSL_CERT_FILE"] = CERTFILE
ctx.load_default_certs()
stats["x509"] += 1
self.assertEqual(ctx.cert_store_stats(), stats)
def test_create_default_context(self):
ctx = ssl.create_default_context()
self.assertEqual(ctx.protocol, ssl.PROTOCOL_SSLv23)
self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED)
self.assertTrue(ctx.check_hostname)
self.assertEqual(ctx.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2)
self.assertEqual(
ctx.options & getattr(ssl, "OP_NO_COMPRESSION", 0),
getattr(ssl, "OP_NO_COMPRESSION", 0),
)
with open(SIGNING_CA) as f:
cadata = f.read().decode("ascii")
ctx = ssl.create_default_context(cafile=SIGNING_CA, capath=CAPATH,
cadata=cadata)
self.assertEqual(ctx.protocol, ssl.PROTOCOL_SSLv23)
self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED)
self.assertEqual(ctx.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2)
self.assertEqual(
ctx.options & getattr(ssl, "OP_NO_COMPRESSION", 0),
getattr(ssl, "OP_NO_COMPRESSION", 0),
)
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
self.assertEqual(ctx.protocol, ssl.PROTOCOL_SSLv23)
self.assertEqual(ctx.verify_mode, ssl.CERT_NONE)
self.assertEqual(ctx.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2)
self.assertEqual(
ctx.options & getattr(ssl, "OP_NO_COMPRESSION", 0),
getattr(ssl, "OP_NO_COMPRESSION", 0),
)
self.assertEqual(
ctx.options & getattr(ssl, "OP_SINGLE_DH_USE", 0),
getattr(ssl, "OP_SINGLE_DH_USE", 0),
)
self.assertEqual(
ctx.options & getattr(ssl, "OP_SINGLE_ECDH_USE", 0),
getattr(ssl, "OP_SINGLE_ECDH_USE", 0),
)
def test__create_stdlib_context(self):
ctx = ssl._create_stdlib_context()
self.assertEqual(ctx.protocol, ssl.PROTOCOL_SSLv23)
self.assertEqual(ctx.verify_mode, ssl.CERT_NONE)
self.assertFalse(ctx.check_hostname)
self.assertEqual(ctx.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2)
ctx = ssl._create_stdlib_context(ssl.PROTOCOL_TLSv1)
self.assertEqual(ctx.protocol, ssl.PROTOCOL_TLSv1)
self.assertEqual(ctx.verify_mode, ssl.CERT_NONE)
self.assertEqual(ctx.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2)
ctx = ssl._create_stdlib_context(ssl.PROTOCOL_TLSv1,
cert_reqs=ssl.CERT_REQUIRED,
check_hostname=True)
self.assertEqual(ctx.protocol, ssl.PROTOCOL_TLSv1)
self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED)
self.assertTrue(ctx.check_hostname)
self.assertEqual(ctx.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2)
ctx = ssl._create_stdlib_context(purpose=ssl.Purpose.CLIENT_AUTH)
self.assertEqual(ctx.protocol, ssl.PROTOCOL_SSLv23)
self.assertEqual(ctx.verify_mode, ssl.CERT_NONE)
self.assertEqual(ctx.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2)
def test__https_verify_certificates(self):
        # Unit test to check the context factory mapping
# The factories themselves are tested above
# This test will fail by design if run under PYTHONHTTPSVERIFY=0
# (as will various test_httplib tests)
# Uses a fresh SSL module to avoid affecting the real one
local_ssl = support.import_fresh_module("ssl")
# Certificate verification is enabled by default
self.assertIs(local_ssl._create_default_https_context,
local_ssl.create_default_context)
# Turn default verification off
local_ssl._https_verify_certificates(enable=False)
self.assertIs(local_ssl._create_default_https_context,
local_ssl._create_unverified_context)
# And back on
local_ssl._https_verify_certificates(enable=True)
self.assertIs(local_ssl._create_default_https_context,
local_ssl.create_default_context)
# The default behaviour is to enable
local_ssl._https_verify_certificates(enable=False)
local_ssl._https_verify_certificates()
self.assertIs(local_ssl._create_default_https_context,
local_ssl.create_default_context)
def test__https_verify_envvar(self):
# Unit test to check the PYTHONHTTPSVERIFY handling
# Need to use a subprocess so it can still be run under -E
https_is_verified = """import ssl, sys; \
status = "Error: _create_default_https_context does not verify certs" \
if ssl._create_default_https_context is \
ssl._create_unverified_context \
else None; \
sys.exit(status)"""
https_is_not_verified = """import ssl, sys; \
status = "Error: _create_default_https_context verifies certs" \
if ssl._create_default_https_context is \
ssl.create_default_context \
else None; \
sys.exit(status)"""
extra_env = {}
# Omitting it leaves verification on
assert_python_ok("-c", https_is_verified, **extra_env)
# Setting it to zero turns verification off
extra_env[ssl._https_verify_envvar] = "0"
assert_python_ok("-c", https_is_not_verified, **extra_env)
# Any other value should also leave it on
for setting in ("", "1", "enabled", "foo"):
extra_env[ssl._https_verify_envvar] = setting
assert_python_ok("-c", https_is_verified, **extra_env)
def test_check_hostname(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
self.assertFalse(ctx.check_hostname)
# Requires CERT_REQUIRED or CERT_OPTIONAL
with self.assertRaises(ValueError):
ctx.check_hostname = True
ctx.verify_mode = ssl.CERT_REQUIRED
self.assertFalse(ctx.check_hostname)
ctx.check_hostname = True
self.assertTrue(ctx.check_hostname)
ctx.verify_mode = ssl.CERT_OPTIONAL
ctx.check_hostname = True
self.assertTrue(ctx.check_hostname)
# Cannot set CERT_NONE with check_hostname enabled
with self.assertRaises(ValueError):
ctx.verify_mode = ssl.CERT_NONE
ctx.check_hostname = False
self.assertFalse(ctx.check_hostname)
class SSLErrorTests(unittest.TestCase):
def test_str(self):
        # The str() of an SSLError doesn't include the errno
e = ssl.SSLError(1, "foo")
self.assertEqual(str(e), "foo")
self.assertEqual(e.errno, 1)
# Same for a subclass
e = ssl.SSLZeroReturnError(1, "foo")
self.assertEqual(str(e), "foo")
self.assertEqual(e.errno, 1)
def test_lib_reason(self):
# Test the library and reason attributes
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
with self.assertRaises(ssl.SSLError) as cm:
ctx.load_dh_params(CERTFILE)
self.assertEqual(cm.exception.library, 'PEM')
self.assertEqual(cm.exception.reason, 'NO_START_LINE')
s = str(cm.exception)
self.assertTrue(s.startswith("[PEM: NO_START_LINE] no start line"), s)
def test_subclass(self):
# Check that the appropriate SSLError subclass is raised
# (this only tests one of them)
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
with closing(socket.socket()) as s:
s.bind(("127.0.0.1", 0))
s.listen(5)
c = socket.socket()
c.connect(s.getsockname())
c.setblocking(False)
with closing(ctx.wrap_socket(c, False, do_handshake_on_connect=False)) as c:
with self.assertRaises(ssl.SSLWantReadError) as cm:
c.do_handshake()
s = str(cm.exception)
self.assertTrue(s.startswith("The operation did not complete (read)"), s)
# For compatibility
self.assertEqual(cm.exception.errno, ssl.SSL_ERROR_WANT_READ)
class NetworkedTests(unittest.TestCase):
def test_connect(self):
with support.transient_internet(REMOTE_HOST):
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_NONE)
try:
s.connect((REMOTE_HOST, 443))
self.assertEqual({}, s.getpeercert())
finally:
s.close()
# this should fail because we have no verification certs
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_REQUIRED)
self.assertRaisesRegexp(ssl.SSLError, "certificate verify failed",
s.connect, (REMOTE_HOST, 443))
s.close()
# this should succeed because we specify the root cert
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_REQUIRED,
ca_certs=REMOTE_ROOT_CERT)
try:
s.connect((REMOTE_HOST, 443))
self.assertTrue(s.getpeercert())
finally:
s.close()
def test_connect_ex(self):
# Issue #11326: check connect_ex() implementation
with support.transient_internet(REMOTE_HOST):
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_REQUIRED,
ca_certs=REMOTE_ROOT_CERT)
try:
self.assertEqual(0, s.connect_ex((REMOTE_HOST, 443)))
self.assertTrue(s.getpeercert())
finally:
s.close()
def test_non_blocking_connect_ex(self):
# Issue #11326: non-blocking connect_ex() should allow handshake
# to proceed after the socket gets ready.
with support.transient_internet(REMOTE_HOST):
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_REQUIRED,
ca_certs=REMOTE_ROOT_CERT,
do_handshake_on_connect=False)
try:
s.setblocking(False)
rc = s.connect_ex((REMOTE_HOST, 443))
# EWOULDBLOCK under Windows, EINPROGRESS elsewhere
self.assertIn(rc, (0, errno.EINPROGRESS, errno.EWOULDBLOCK))
# Wait for connect to finish
select.select([], [s], [], 5.0)
# Non-blocking handshake
while True:
try:
s.do_handshake()
break
except ssl.SSLWantReadError:
select.select([s], [], [], 5.0)
except ssl.SSLWantWriteError:
select.select([], [s], [], 5.0)
# SSL established
self.assertTrue(s.getpeercert())
finally:
s.close()
def test_timeout_connect_ex(self):
# Issue #12065: on a timeout, connect_ex() should return the original
# errno (mimicking the behaviour of non-SSL sockets).
with support.transient_internet(REMOTE_HOST):
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_REQUIRED,
ca_certs=REMOTE_ROOT_CERT,
do_handshake_on_connect=False)
try:
s.settimeout(0.0000001)
rc = s.connect_ex((REMOTE_HOST, 443))
if rc == 0:
self.skipTest("REMOTE_HOST responded too quickly")
self.assertIn(rc, (errno.EAGAIN, errno.EWOULDBLOCK))
finally:
s.close()
def test_connect_ex_error(self):
with support.transient_internet(REMOTE_HOST):
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_REQUIRED,
ca_certs=REMOTE_ROOT_CERT)
try:
rc = s.connect_ex((REMOTE_HOST, 444))
# Issue #19919: Windows machines or VMs hosted on Windows
# machines sometimes return EWOULDBLOCK.
errors = (
errno.ECONNREFUSED, errno.EHOSTUNREACH, errno.ETIMEDOUT,
errno.EWOULDBLOCK,
)
self.assertIn(rc, errors)
finally:
s.close()
def test_connect_with_context(self):
with support.transient_internet(REMOTE_HOST):
# Same as test_connect, but with a separately created context
ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
s = ctx.wrap_socket(socket.socket(socket.AF_INET))
s.connect((REMOTE_HOST, 443))
try:
self.assertEqual({}, s.getpeercert())
finally:
s.close()
# Same with a server hostname
s = ctx.wrap_socket(socket.socket(socket.AF_INET),
server_hostname=REMOTE_HOST)
s.connect((REMOTE_HOST, 443))
s.close()
# This should fail because we have no verification certs
ctx.verify_mode = ssl.CERT_REQUIRED
s = ctx.wrap_socket(socket.socket(socket.AF_INET))
self.assertRaisesRegexp(ssl.SSLError, "certificate verify failed",
s.connect, (REMOTE_HOST, 443))
s.close()
# This should succeed because we specify the root cert
ctx.load_verify_locations(REMOTE_ROOT_CERT)
s = ctx.wrap_socket(socket.socket(socket.AF_INET))
s.connect((REMOTE_HOST, 443))
try:
cert = s.getpeercert()
self.assertTrue(cert)
finally:
s.close()
def test_connect_capath(self):
# Verify server certificates using the `capath` argument
# NOTE: the subject hashing algorithm has been changed between
# OpenSSL 0.9.8n and 1.0.0, as a result the capath directory must
# contain both versions of each certificate (same content, different
# filename) for this test to be portable across OpenSSL releases.
with support.transient_internet(REMOTE_HOST):
ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
ctx.verify_mode = ssl.CERT_REQUIRED
ctx.load_verify_locations(capath=CAPATH)
s = ctx.wrap_socket(socket.socket(socket.AF_INET))
s.connect((REMOTE_HOST, 443))
try:
cert = s.getpeercert()
self.assertTrue(cert)
finally:
s.close()
# Same with a bytes `capath` argument
ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
ctx.verify_mode = ssl.CERT_REQUIRED
ctx.load_verify_locations(capath=BYTES_CAPATH)
s = ctx.wrap_socket(socket.socket(socket.AF_INET))
s.connect((REMOTE_HOST, 443))
try:
cert = s.getpeercert()
self.assertTrue(cert)
finally:
s.close()
def test_connect_cadata(self):
with open(REMOTE_ROOT_CERT) as f:
pem = f.read().decode('ascii')
der = ssl.PEM_cert_to_DER_cert(pem)
with support.transient_internet(REMOTE_HOST):
ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
ctx.verify_mode = ssl.CERT_REQUIRED
ctx.load_verify_locations(cadata=pem)
with closing(ctx.wrap_socket(socket.socket(socket.AF_INET))) as s:
s.connect((REMOTE_HOST, 443))
cert = s.getpeercert()
self.assertTrue(cert)
# same with DER
ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
ctx.verify_mode = ssl.CERT_REQUIRED
ctx.load_verify_locations(cadata=der)
with closing(ctx.wrap_socket(socket.socket(socket.AF_INET))) as s:
s.connect((REMOTE_HOST, 443))
cert = s.getpeercert()
self.assertTrue(cert)
@unittest.skipIf(os.name == "nt", "Can't use a socket as a file under Windows")
def test_makefile_close(self):
# Issue #5238: creating a file-like object with makefile() shouldn't
# delay closing the underlying "real socket" (here tested with its
# file descriptor, hence skipping the test under Windows).
with support.transient_internet(REMOTE_HOST):
ss = ssl.wrap_socket(socket.socket(socket.AF_INET))
ss.connect((REMOTE_HOST, 443))
fd = ss.fileno()
f = ss.makefile()
f.close()
# The fd is still open
os.read(fd, 0)
# Closing the SSL socket should close the fd too
ss.close()
gc.collect()
with self.assertRaises(OSError) as e:
os.read(fd, 0)
self.assertEqual(e.exception.errno, errno.EBADF)
def test_non_blocking_handshake(self):
with support.transient_internet(REMOTE_HOST):
s = socket.socket(socket.AF_INET)
s.connect((REMOTE_HOST, 443))
s.setblocking(False)
s = ssl.wrap_socket(s,
cert_reqs=ssl.CERT_NONE,
do_handshake_on_connect=False)
count = 0
while True:
try:
count += 1
s.do_handshake()
break
except ssl.SSLWantReadError:
select.select([s], [], [])
except ssl.SSLWantWriteError:
select.select([], [s], [])
s.close()
if support.verbose:
sys.stdout.write("\nNeeded %d calls to do_handshake() to establish session.\n" % count)
def test_get_server_certificate(self):
def _test_get_server_certificate(host, port, cert=None):
with support.transient_internet(host):
pem = ssl.get_server_certificate((host, port))
if not pem:
self.fail("No server certificate on %s:%s!" % (host, port))
try:
pem = ssl.get_server_certificate((host, port),
ca_certs=CERTFILE)
except ssl.SSLError as x:
                    # should fail
if support.verbose:
sys.stdout.write("%s\n" % x)
else:
self.fail("Got server certificate %s for %s:%s!" % (pem, host, port))
pem = ssl.get_server_certificate((host, port),
ca_certs=cert)
if not pem:
self.fail("No server certificate on %s:%s!" % (host, port))
if support.verbose:
                sys.stdout.write("\nVerified certificate for %s:%s is\n%s\n" % (host, port, pem))
_test_get_server_certificate(REMOTE_HOST, 443, REMOTE_ROOT_CERT)
if support.IPV6_ENABLED:
_test_get_server_certificate('ipv6.google.com', 443)
def test_ciphers(self):
remote = (REMOTE_HOST, 443)
with support.transient_internet(remote[0]):
with closing(ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_NONE, ciphers="ALL")) as s:
s.connect(remote)
with closing(ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_NONE, ciphers="DEFAULT")) as s:
s.connect(remote)
# Error checking can happen at instantiation or when connecting
with self.assertRaisesRegexp(ssl.SSLError, "No cipher can be selected"):
with closing(socket.socket(socket.AF_INET)) as sock:
s = ssl.wrap_socket(sock,
cert_reqs=ssl.CERT_NONE, ciphers="^$:,;?*'dorothyx")
s.connect(remote)
def test_get_ca_certs_capath(self):
# capath certs are loaded on request
with support.transient_internet(REMOTE_HOST):
ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
ctx.verify_mode = ssl.CERT_REQUIRED
ctx.load_verify_locations(capath=CAPATH)
self.assertEqual(ctx.get_ca_certs(), [])
s = ctx.wrap_socket(socket.socket(socket.AF_INET))
s.connect((REMOTE_HOST, 443))
try:
cert = s.getpeercert()
self.assertTrue(cert)
finally:
s.close()
self.assertEqual(len(ctx.get_ca_certs()), 1)
@needs_sni
def test_context_setget(self):
# Check that the context of a connected socket can be replaced.
with support.transient_internet(REMOTE_HOST):
ctx1 = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx2 = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
s = socket.socket(socket.AF_INET)
with closing(ctx1.wrap_socket(s)) as ss:
ss.connect((REMOTE_HOST, 443))
self.assertIs(ss.context, ctx1)
self.assertIs(ss._sslobj.context, ctx1)
ss.context = ctx2
self.assertIs(ss.context, ctx2)
self.assertIs(ss._sslobj.context, ctx2)
try:
import threading
except ImportError:
_have_threads = False
else:
_have_threads = True
from test.ssl_servers import make_https_server
class ThreadedEchoServer(threading.Thread):
class ConnectionHandler(threading.Thread):
"""A mildly complicated class, because we want it to work both
with and without the SSL wrapper around the socket connection, so
that we can test the STARTTLS functionality."""
def __init__(self, server, connsock, addr):
self.server = server
self.running = False
self.sock = connsock
self.addr = addr
self.sock.setblocking(1)
self.sslconn = None
threading.Thread.__init__(self)
self.daemon = True
def wrap_conn(self):
try:
self.sslconn = self.server.context.wrap_socket(
self.sock, server_side=True)
self.server.selected_npn_protocols.append(self.sslconn.selected_npn_protocol())
self.server.selected_alpn_protocols.append(self.sslconn.selected_alpn_protocol())
except socket.error as e:
                # We treat ECONNRESET as though it were an SSLError -
                # OpenSSL on Ubuntu abruptly closes the connection when
                # asked to use an unsupported protocol.
#
# XXX Various errors can have happened here, for example
# a mismatching protocol version, an invalid certificate,
# or a low-level bug. This should be made more discriminating.
if not isinstance(e, ssl.SSLError) and e.errno != errno.ECONNRESET:
raise
self.server.conn_errors.append(e)
if self.server.chatty:
handle_error("\n server: bad connection attempt from " + repr(self.addr) + ":\n")
self.running = False
self.server.stop()
self.close()
return False
else:
if self.server.context.verify_mode == ssl.CERT_REQUIRED:
cert = self.sslconn.getpeercert()
if support.verbose and self.server.chatty:
sys.stdout.write(" client cert is " + pprint.pformat(cert) + "\n")
cert_binary = self.sslconn.getpeercert(True)
if support.verbose and self.server.chatty:
sys.stdout.write(" cert binary is " + str(len(cert_binary)) + " bytes\n")
cipher = self.sslconn.cipher()
if support.verbose and self.server.chatty:
sys.stdout.write(" server: connection cipher is now " + str(cipher) + "\n")
sys.stdout.write(" server: selected protocol is now "
+ str(self.sslconn.selected_npn_protocol()) + "\n")
return True
def read(self):
if self.sslconn:
return self.sslconn.read()
else:
return self.sock.recv(1024)
def write(self, bytes):
if self.sslconn:
return self.sslconn.write(bytes)
else:
return self.sock.send(bytes)
def close(self):
if self.sslconn:
self.sslconn.close()
else:
self.sock.close()
def run(self):
self.running = True
if not self.server.starttls_server:
if not self.wrap_conn():
return
while self.running:
try:
msg = self.read()
stripped = msg.strip()
if not stripped:
# eof, so quit this handler
self.running = False
self.close()
elif stripped == b'over':
if support.verbose and self.server.connectionchatty:
sys.stdout.write(" server: client closed connection\n")
self.close()
return
elif (self.server.starttls_server and
stripped == b'STARTTLS'):
if support.verbose and self.server.connectionchatty:
sys.stdout.write(" server: read STARTTLS from client, sending OK...\n")
self.write(b"OK\n")
if not self.wrap_conn():
return
elif (self.server.starttls_server and self.sslconn
and stripped == b'ENDTLS'):
if support.verbose and self.server.connectionchatty:
sys.stdout.write(" server: read ENDTLS from client, sending OK...\n")
self.write(b"OK\n")
self.sock = self.sslconn.unwrap()
self.sslconn = None
if support.verbose and self.server.connectionchatty:
sys.stdout.write(" server: connection is now unencrypted...\n")
elif stripped == b'CB tls-unique':
if support.verbose and self.server.connectionchatty:
sys.stdout.write(" server: read CB tls-unique from client, sending our CB data...\n")
data = self.sslconn.get_channel_binding("tls-unique")
self.write(repr(data).encode("us-ascii") + b"\n")
else:
if (support.verbose and
self.server.connectionchatty):
ctype = (self.sslconn and "encrypted") or "unencrypted"
sys.stdout.write(" server: read %r (%s), sending back %r (%s)...\n"
% (msg, ctype, msg.lower(), ctype))
self.write(msg.lower())
except ssl.SSLError:
if self.server.chatty:
handle_error("Test server failure:\n")
self.close()
self.running = False
# normally, we'd just stop here, but for the test
# harness, we want to stop the server
self.server.stop()
def __init__(self, certificate=None, ssl_version=None,
certreqs=None, cacerts=None,
chatty=True, connectionchatty=False, starttls_server=False,
npn_protocols=None, alpn_protocols=None,
ciphers=None, context=None):
if context:
self.context = context
else:
self.context = ssl.SSLContext(ssl_version
if ssl_version is not None
else ssl.PROTOCOL_TLS)
self.context.verify_mode = (certreqs if certreqs is not None
else ssl.CERT_NONE)
if cacerts:
self.context.load_verify_locations(cacerts)
if certificate:
self.context.load_cert_chain(certificate)
if npn_protocols:
self.context.set_npn_protocols(npn_protocols)
if alpn_protocols:
self.context.set_alpn_protocols(alpn_protocols)
if ciphers:
self.context.set_ciphers(ciphers)
self.chatty = chatty
self.connectionchatty = connectionchatty
self.starttls_server = starttls_server
self.sock = socket.socket()
self.port = support.bind_port(self.sock)
self.flag = None
self.active = False
self.selected_npn_protocols = []
self.selected_alpn_protocols = []
self.conn_errors = []
threading.Thread.__init__(self)
self.daemon = True
def __enter__(self):
self.start(threading.Event())
self.flag.wait()
return self
def __exit__(self, *args):
self.stop()
self.join()
def start(self, flag=None):
self.flag = flag
threading.Thread.start(self)
def run(self):
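        # A short accept() timeout keeps the loop polling self.active, so
        # stop() takes effect promptly.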
self.sock.settimeout(0.05)
self.sock.listen(5)
self.active = True
if self.flag:
# signal an event
self.flag.set()
while self.active:
try:
newconn, connaddr = self.sock.accept()
if support.verbose and self.chatty:
sys.stdout.write(' server: new connection from '
+ repr(connaddr) + '\n')
handler = self.ConnectionHandler(self, newconn, connaddr)
handler.start()
handler.join()
except socket.timeout:
pass
except KeyboardInterrupt:
self.stop()
self.sock.close()
def stop(self):
self.active = False
class AsyncoreEchoServer(threading.Thread):
class EchoServer(asyncore.dispatcher):
class ConnectionHandler(asyncore.dispatcher_with_send):
def __init__(self, conn, certfile):
self.socket = ssl.wrap_socket(conn, server_side=True,
certfile=certfile,
do_handshake_on_connect=False)
asyncore.dispatcher_with_send.__init__(self, self.socket)
self._ssl_accepting = True
self._do_ssl_handshake()
def readable(self):
if isinstance(self.socket, ssl.SSLSocket):
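                    # Data already decrypted and buffered inside OpenSSL is
                    # invisible to select(), so drain it here first.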
while self.socket.pending() > 0:
self.handle_read_event()
return True
def _do_ssl_handshake(self):
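                # Called repeatedly until the handshake completes; returns
                # early on WANT_READ/WANT_WRITE so asyncore can wait for
                # socket readiness.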
try:
self.socket.do_handshake()
except (ssl.SSLWantReadError, ssl.SSLWantWriteError):
return
except ssl.SSLEOFError:
return self.handle_close()
except ssl.SSLError:
raise
                except socket.error as err:
if err.args[0] == errno.ECONNABORTED:
return self.handle_close()
else:
self._ssl_accepting = False
def handle_read(self):
if self._ssl_accepting:
self._do_ssl_handshake()
else:
data = self.recv(1024)
if support.verbose:
sys.stdout.write(" server: read %s from client\n" % repr(data))
if not data:
self.close()
else:
self.send(data.lower())
def handle_close(self):
self.close()
if support.verbose:
sys.stdout.write(" server: closed connection %s\n" % self.socket)
def handle_error(self):
raise
def __init__(self, certfile):
self.certfile = certfile
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.port = support.bind_port(sock, '')
asyncore.dispatcher.__init__(self, sock)
self.listen(5)
def handle_accept(self):
sock_obj, addr = self.accept()
if support.verbose:
                    sys.stdout.write(" server: new connection from %s:%s\n" % addr)
self.ConnectionHandler(sock_obj, self.certfile)
def handle_error(self):
raise
def __init__(self, certfile):
self.flag = None
self.active = False
self.server = self.EchoServer(certfile)
self.port = self.server.port
threading.Thread.__init__(self)
self.daemon = True
def __str__(self):
return "<%s %s>" % (self.__class__.__name__, self.server)
def __enter__(self):
self.start(threading.Event())
self.flag.wait()
return self
def __exit__(self, *args):
if support.verbose:
sys.stdout.write(" cleanup: stopping server.\n")
self.stop()
if support.verbose:
sys.stdout.write(" cleanup: joining server thread.\n")
self.join()
if support.verbose:
sys.stdout.write(" cleanup: successfully joined.\n")
# make sure that ConnectionHandler is removed from socket_map
asyncore.close_all(ignore_all=True)
def start(self, flag=None):
self.flag = flag
threading.Thread.start(self)
def run(self):
self.active = True
if self.flag:
self.flag.set()
while self.active:
try:
asyncore.loop(1)
except:
pass
def stop(self):
self.active = False
self.server.close()
def server_params_test(client_context, server_context, indata=b"FOO\n",
chatty=True, connectionchatty=False, sni_name=None):
"""
Launch a server, connect a client to it and try various reads
and writes.
"""
stats = {}
server = ThreadedEchoServer(context=server_context,
chatty=chatty,
connectionchatty=False)
with server:
with closing(client_context.wrap_socket(socket.socket(),
server_hostname=sni_name)) as s:
s.connect((HOST, server.port))
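            # write() must accept any object exposing the buffer interface.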
for arg in [indata, bytearray(indata), memoryview(indata)]:
if connectionchatty:
if support.verbose:
sys.stdout.write(
" client: sending %r...\n" % indata)
s.write(arg)
outdata = s.read()
if connectionchatty:
if support.verbose:
sys.stdout.write(" client: read %r\n" % outdata)
if outdata != indata.lower():
raise AssertionError(
"bad data <<%r>> (%d) received; expected <<%r>> (%d)\n"
% (outdata[:20], len(outdata),
indata[:20].lower(), len(indata)))
s.write(b"over\n")
if connectionchatty:
if support.verbose:
sys.stdout.write(" client: closing connection.\n")
stats.update({
'compression': s.compression(),
'cipher': s.cipher(),
'peercert': s.getpeercert(),
'client_alpn_protocol': s.selected_alpn_protocol(),
'client_npn_protocol': s.selected_npn_protocol(),
'version': s.version(),
})
s.close()
stats['server_alpn_protocols'] = server.selected_alpn_protocols
stats['server_npn_protocols'] = server.selected_npn_protocols
return stats
def try_protocol_combo(server_protocol, client_protocol, expect_success,
certsreqs=None, server_options=0, client_options=0):
"""
Try to SSL-connect using *client_protocol* to *server_protocol*.
    If *expect_success* is true, assert that the connection succeeds;
    if it is false, assert that the connection fails.
Also, if *expect_success* is a string, assert that it is the protocol
version actually used by the connection.
"""
if certsreqs is None:
certsreqs = ssl.CERT_NONE
certtype = {
ssl.CERT_NONE: "CERT_NONE",
ssl.CERT_OPTIONAL: "CERT_OPTIONAL",
ssl.CERT_REQUIRED: "CERT_REQUIRED",
}[certsreqs]
if support.verbose:
formatstr = (expect_success and " %s->%s %s\n") or " {%s->%s} %s\n"
sys.stdout.write(formatstr %
(ssl.get_protocol_name(client_protocol),
ssl.get_protocol_name(server_protocol),
certtype))
client_context = ssl.SSLContext(client_protocol)
client_context.options |= client_options
server_context = ssl.SSLContext(server_protocol)
server_context.options |= server_options
# NOTE: we must enable "ALL" ciphers on the client, otherwise an
# SSLv23 client will send an SSLv3 hello (rather than SSLv2)
# starting from OpenSSL 1.0.0 (see issue #8322).
if client_context.protocol == ssl.PROTOCOL_SSLv23:
client_context.set_ciphers("ALL")
for ctx in (client_context, server_context):
ctx.verify_mode = certsreqs
ctx.load_cert_chain(CERTFILE)
ctx.load_verify_locations(CERTFILE)
try:
stats = server_params_test(client_context, server_context,
chatty=False, connectionchatty=False)
# Protocol mismatch can result in either an SSLError, or a
# "Connection reset by peer" error.
except ssl.SSLError:
if expect_success:
raise
except socket.error as e:
if expect_success or e.errno != errno.ECONNRESET:
raise
else:
if not expect_success:
raise AssertionError(
"Client protocol %s succeeded with server protocol %s!"
% (ssl.get_protocol_name(client_protocol),
ssl.get_protocol_name(server_protocol)))
elif (expect_success is not True
and expect_success != stats['version']):
raise AssertionError("version mismatch: expected %r, got %r"
% (expect_success, stats['version']))
class ThreadedTests(unittest.TestCase):
@skip_if_broken_ubuntu_ssl
def test_echo(self):
"""Basic test of an SSL client connecting to a server"""
if support.verbose:
sys.stdout.write("\n")
for protocol in PROTOCOLS:
context = ssl.SSLContext(protocol)
context.load_cert_chain(CERTFILE)
server_params_test(context, context,
chatty=True, connectionchatty=True)
def test_getpeercert(self):
if support.verbose:
sys.stdout.write("\n")
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations(CERTFILE)
context.load_cert_chain(CERTFILE)
server = ThreadedEchoServer(context=context, chatty=False)
with server:
s = context.wrap_socket(socket.socket(),
do_handshake_on_connect=False)
s.connect((HOST, server.port))
            # getpeercert() raises ValueError while the handshake isn't
            # done.
with self.assertRaises(ValueError):
s.getpeercert()
s.do_handshake()
cert = s.getpeercert()
self.assertTrue(cert, "Can't get peer certificate.")
cipher = s.cipher()
if support.verbose:
sys.stdout.write(pprint.pformat(cert) + '\n')
sys.stdout.write("Connection cipher is " + str(cipher) + '.\n')
if 'subject' not in cert:
self.fail("No subject field in certificate: %s." %
pprint.pformat(cert))
if ((('organizationName', 'Python Software Foundation'),)
not in cert['subject']):
self.fail(
"Missing or invalid 'organizationName' field in certificate subject; "
"should be 'Python Software Foundation'.")
self.assertIn('notBefore', cert)
self.assertIn('notAfter', cert)
before = ssl.cert_time_to_seconds(cert['notBefore'])
after = ssl.cert_time_to_seconds(cert['notAfter'])
self.assertLess(before, after)
s.close()
    @unittest.skipUnless(have_verify_flags(),
                         "verify_flags needs OpenSSL > 0.9.8")
def test_crl_check(self):
if support.verbose:
sys.stdout.write("\n")
server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
server_context.load_cert_chain(SIGNED_CERTFILE)
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations(SIGNING_CA)
tf = getattr(ssl, "VERIFY_X509_TRUSTED_FIRST", 0)
self.assertEqual(context.verify_flags, ssl.VERIFY_DEFAULT | tf)
# VERIFY_DEFAULT should pass
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with closing(context.wrap_socket(socket.socket())) as s:
s.connect((HOST, server.port))
cert = s.getpeercert()
self.assertTrue(cert, "Can't get peer certificate.")
# VERIFY_CRL_CHECK_LEAF without a loaded CRL file fails
context.verify_flags |= ssl.VERIFY_CRL_CHECK_LEAF
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with closing(context.wrap_socket(socket.socket())) as s:
with self.assertRaisesRegexp(ssl.SSLError,
"certificate verify failed"):
s.connect((HOST, server.port))
# now load a CRL file. The CRL file is signed by the CA.
context.load_verify_locations(CRLFILE)
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with closing(context.wrap_socket(socket.socket())) as s:
s.connect((HOST, server.port))
cert = s.getpeercert()
self.assertTrue(cert, "Can't get peer certificate.")
def test_check_hostname(self):
if support.verbose:
sys.stdout.write("\n")
server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
server_context.load_cert_chain(SIGNED_CERTFILE)
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.verify_mode = ssl.CERT_REQUIRED
context.check_hostname = True
context.load_verify_locations(SIGNING_CA)
# correct hostname should verify
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with closing(context.wrap_socket(socket.socket(),
server_hostname="localhost")) as s:
s.connect((HOST, server.port))
cert = s.getpeercert()
self.assertTrue(cert, "Can't get peer certificate.")
# incorrect hostname should raise an exception
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with closing(context.wrap_socket(socket.socket(),
server_hostname="invalid")) as s:
with self.assertRaisesRegexp(ssl.CertificateError,
"hostname 'invalid' doesn't match u?'localhost'"):
s.connect((HOST, server.port))
# missing server_hostname arg should cause an exception, too
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with closing(socket.socket()) as s:
with self.assertRaisesRegexp(ValueError,
"check_hostname requires server_hostname"):
context.wrap_socket(s)
    def test_wrong_cert(self):
        """Connecting when the server rejects the client's certificate.

        Launch a server with CERT_REQUIRED, and check that trying to
connect to it with a wrong client certificate fails.
"""
certfile = os.path.join(os.path.dirname(__file__) or os.curdir,
"wrongcert.pem")
server = ThreadedEchoServer(CERTFILE,
certreqs=ssl.CERT_REQUIRED,
cacerts=CERTFILE, chatty=False,
connectionchatty=False)
with server, \
closing(socket.socket()) as sock, \
closing(ssl.wrap_socket(sock,
certfile=certfile,
ssl_version=ssl.PROTOCOL_TLSv1)) as s:
try:
# Expect either an SSL error about the server rejecting
# the connection, or a low-level connection reset (which
# sometimes happens on Windows)
s.connect((HOST, server.port))
except ssl.SSLError as e:
if support.verbose:
sys.stdout.write("\nSSLError is %r\n" % e)
except socket.error as e:
if e.errno != errno.ECONNRESET:
raise
if support.verbose:
sys.stdout.write("\nsocket.error is %r\n" % e)
else:
self.fail("Use of invalid cert should have failed!")
def test_rude_shutdown(self):
"""A brutal shutdown of an SSL server should raise an OSError
in the client when attempting handshake.
"""
listener_ready = threading.Event()
listener_gone = threading.Event()
s = socket.socket()
port = support.bind_port(s, HOST)
# `listener` runs in a thread. It sits in an accept() until
# the main thread connects. Then it rudely closes the socket,
# and sets Event `listener_gone` to let the main thread know
# the socket is gone.
def listener():
s.listen(5)
listener_ready.set()
newsock, addr = s.accept()
newsock.close()
s.close()
listener_gone.set()
def connector():
listener_ready.wait()
with closing(socket.socket()) as c:
c.connect((HOST, port))
listener_gone.wait()
try:
ssl_sock = ssl.wrap_socket(c)
except socket.error:
pass
else:
self.fail('connecting to closed SSL socket should have failed')
t = threading.Thread(target=listener)
t.start()
try:
connector()
finally:
t.join()
@skip_if_broken_ubuntu_ssl
@unittest.skipUnless(hasattr(ssl, 'PROTOCOL_SSLv2'),
"OpenSSL is compiled without SSLv2 support")
def test_protocol_sslv2(self):
"""Connecting to an SSLv2 server with various client options"""
if support.verbose:
sys.stdout.write("\n")
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv2, True)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv2, True, ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv2, True, ssl.CERT_REQUIRED)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, False)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv3, False)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_TLSv1, False)
# SSLv23 client with specific SSL options
if no_sslv2_implies_sslv3_hello():
# No SSLv2 => client will use an SSLv3 hello on recent OpenSSLs
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, False,
client_options=ssl.OP_NO_SSLv2)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, False,
client_options=ssl.OP_NO_SSLv3)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, False,
client_options=ssl.OP_NO_TLSv1)
@skip_if_broken_ubuntu_ssl
def test_protocol_sslv23(self):
"""Connecting to an SSLv23 server with various client options"""
if support.verbose:
sys.stdout.write("\n")
if hasattr(ssl, 'PROTOCOL_SSLv2'):
try:
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv2, True)
except socket.error as x:
# this fails on some older versions of OpenSSL (0.9.7l, for instance)
if support.verbose:
sys.stdout.write(
" SSL2 client to SSL23 server test unexpectedly failed:\n %s\n"
% str(x))
if hasattr(ssl, 'PROTOCOL_SSLv3'):
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, False)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1')
if hasattr(ssl, 'PROTOCOL_SSLv3'):
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, False, ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True, ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_OPTIONAL)
if hasattr(ssl, 'PROTOCOL_SSLv3'):
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, False, ssl.CERT_REQUIRED)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True, ssl.CERT_REQUIRED)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_REQUIRED)
# Server with specific SSL options
if hasattr(ssl, 'PROTOCOL_SSLv3'):
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, False,
server_options=ssl.OP_NO_SSLv3)
# Will choose TLSv1
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True,
server_options=ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, False,
server_options=ssl.OP_NO_TLSv1)
@skip_if_broken_ubuntu_ssl
@unittest.skipUnless(hasattr(ssl, 'PROTOCOL_SSLv3'),
"OpenSSL is compiled without SSLv3 support")
def test_protocol_sslv3(self):
"""Connecting to an SSLv3 server with various client options"""
if support.verbose:
sys.stdout.write("\n")
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, 'SSLv3')
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, 'SSLv3', ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, 'SSLv3', ssl.CERT_REQUIRED)
if hasattr(ssl, 'PROTOCOL_SSLv2'):
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv2, False)
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv23, False,
client_options=ssl.OP_NO_SSLv3)
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_TLSv1, False)
if no_sslv2_implies_sslv3_hello():
# No SSLv2 => client will use an SSLv3 hello on recent OpenSSLs
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv23,
False, client_options=ssl.OP_NO_SSLv2)
@skip_if_broken_ubuntu_ssl
def test_protocol_tlsv1(self):
"""Connecting to a TLSv1 server with various client options"""
if support.verbose:
sys.stdout.write("\n")
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1')
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_REQUIRED)
if hasattr(ssl, 'PROTOCOL_SSLv2'):
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv2, False)
if hasattr(ssl, 'PROTOCOL_SSLv3'):
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv3, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv23, False,
client_options=ssl.OP_NO_TLSv1)
@skip_if_broken_ubuntu_ssl
@unittest.skipUnless(hasattr(ssl, "PROTOCOL_TLSv1_1"),
"TLS version 1.1 not supported.")
def test_protocol_tlsv1_1(self):
"""Connecting to a TLSv1.1 server with various client options.
Testing against older TLS versions."""
if support.verbose:
sys.stdout.write("\n")
try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1_1, 'TLSv1.1')
if hasattr(ssl, 'PROTOCOL_SSLv2'):
try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_SSLv2, False)
if hasattr(ssl, 'PROTOCOL_SSLv3'):
try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_SSLv3, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_SSLv23, False,
client_options=ssl.OP_NO_TLSv1_1)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1_1, 'TLSv1.1')
try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1_1, False)
@skip_if_broken_ubuntu_ssl
@unittest.skipUnless(hasattr(ssl, "PROTOCOL_TLSv1_2"),
"TLS version 1.2 not supported.")
def test_protocol_tlsv1_2(self):
"""Connecting to a TLSv1.2 server with various client options.
Testing against older TLS versions."""
if support.verbose:
sys.stdout.write("\n")
try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_TLSv1_2, 'TLSv1.2',
server_options=ssl.OP_NO_SSLv3|ssl.OP_NO_SSLv2,
client_options=ssl.OP_NO_SSLv3|ssl.OP_NO_SSLv2,)
if hasattr(ssl, 'PROTOCOL_SSLv2'):
try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_SSLv2, False)
if hasattr(ssl, 'PROTOCOL_SSLv3'):
try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_SSLv3, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_SSLv23, False,
client_options=ssl.OP_NO_TLSv1_2)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1_2, 'TLSv1.2')
try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_TLSv1, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1_2, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_TLSv1_1, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1_2, False)
def test_starttls(self):
"""Switching from clear text to encrypted and back again."""
msgs = (b"msg 1", b"MSG 2", b"STARTTLS", b"MSG 3", b"msg 4", b"ENDTLS", b"msg 5", b"msg 6")
server = ThreadedEchoServer(CERTFILE,
ssl_version=ssl.PROTOCOL_TLSv1,
starttls_server=True,
chatty=True,
connectionchatty=True)
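        # 'wrapped' tracks whether the client side is currently TLS-wrapped.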
wrapped = False
with server:
s = socket.socket()
s.setblocking(1)
s.connect((HOST, server.port))
if support.verbose:
sys.stdout.write("\n")
for indata in msgs:
if support.verbose:
sys.stdout.write(
" client: sending %r...\n" % indata)
if wrapped:
conn.write(indata)
outdata = conn.read()
else:
s.send(indata)
outdata = s.recv(1024)
msg = outdata.strip().lower()
if indata == b"STARTTLS" and msg.startswith(b"ok"):
# STARTTLS ok, switch to secure mode
if support.verbose:
sys.stdout.write(
" client: read %r from server, starting TLS...\n"
% msg)
conn = ssl.wrap_socket(s, ssl_version=ssl.PROTOCOL_TLSv1)
wrapped = True
elif indata == b"ENDTLS" and msg.startswith(b"ok"):
# ENDTLS ok, switch back to clear text
if support.verbose:
sys.stdout.write(
" client: read %r from server, ending TLS...\n"
% msg)
s = conn.unwrap()
wrapped = False
else:
if support.verbose:
sys.stdout.write(
" client: read %r from server\n" % msg)
if support.verbose:
sys.stdout.write(" client: closing connection.\n")
if wrapped:
conn.write(b"over\n")
else:
s.send(b"over\n")
if wrapped:
conn.close()
else:
s.close()
def test_socketserver(self):
"""Using a SocketServer to create and manage SSL connections."""
server = make_https_server(self, certfile=CERTFILE)
# try to connect
if support.verbose:
sys.stdout.write('\n')
with open(CERTFILE, 'rb') as f:
d1 = f.read()
d2 = ''
# now fetch the same data from the HTTPS server
url = 'https://localhost:%d/%s' % (
server.port, os.path.split(CERTFILE)[1])
context = ssl.create_default_context(cafile=CERTFILE)
f = urllib2.urlopen(url, context=context)
try:
dlen = f.info().getheader("content-length")
if dlen and (int(dlen) > 0):
d2 = f.read(int(dlen))
if support.verbose:
sys.stdout.write(
" client: read %d bytes from remote server '%s'\n"
% (len(d2), server))
finally:
f.close()
self.assertEqual(d1, d2)
def test_asyncore_server(self):
"""Check the example asyncore integration."""
if support.verbose:
sys.stdout.write("\n")
indata = b"FOO\n"
server = AsyncoreEchoServer(CERTFILE)
with server:
s = ssl.wrap_socket(socket.socket())
s.connect(('127.0.0.1', server.port))
if support.verbose:
sys.stdout.write(
" client: sending %r...\n" % indata)
s.write(indata)
outdata = s.read()
if support.verbose:
sys.stdout.write(" client: read %r\n" % outdata)
if outdata != indata.lower():
self.fail(
"bad data <<%r>> (%d) received; expected <<%r>> (%d)\n"
% (outdata[:20], len(outdata),
indata[:20].lower(), len(indata)))
s.write(b"over\n")
if support.verbose:
sys.stdout.write(" client: closing connection.\n")
s.close()
if support.verbose:
sys.stdout.write(" client: connection closed.\n")
def test_recv_send(self):
"""Test recv(), send() and friends."""
if support.verbose:
sys.stdout.write("\n")
server = ThreadedEchoServer(CERTFILE,
certreqs=ssl.CERT_NONE,
ssl_version=ssl.PROTOCOL_TLSv1,
cacerts=CERTFILE,
chatty=True,
connectionchatty=False)
with server:
s = ssl.wrap_socket(socket.socket(),
server_side=False,
certfile=CERTFILE,
ca_certs=CERTFILE,
cert_reqs=ssl.CERT_NONE,
ssl_version=ssl.PROTOCOL_TLSv1)
s.connect((HOST, server.port))
# helper methods for standardising recv* method signatures
def _recv_into():
b = bytearray(b"\0"*100)
count = s.recv_into(b)
return b[:count]
def _recvfrom_into():
b = bytearray(b"\0"*100)
count, addr = s.recvfrom_into(b)
return b[:count]
# (name, method, whether to expect success, *args)
send_methods = [
('send', s.send, True, []),
('sendto', s.sendto, False, ["some.address"]),
('sendall', s.sendall, True, []),
]
recv_methods = [
('recv', s.recv, True, []),
('recvfrom', s.recvfrom, False, ["some.address"]),
('recv_into', _recv_into, True, []),
('recvfrom_into', _recvfrom_into, False, []),
]
data_prefix = u"PREFIX_"
for meth_name, send_meth, expect_success, args in send_methods:
indata = (data_prefix + meth_name).encode('ascii')
try:
send_meth(indata, *args)
outdata = s.read()
if outdata != indata.lower():
                        self.fail(
                            "While sending with <<{name:s}>> bad data "
                            "<<{outdata!r}>> ({nout:d}) received; "
                            "expected <<{indata!r}>> ({nin:d})\n".format(
name=meth_name, outdata=outdata[:20],
nout=len(outdata),
indata=indata[:20], nin=len(indata)
)
)
except ValueError as e:
if expect_success:
self.fail(
"Failed to send with method <<{name:s}>>; "
"expected to succeed.\n".format(name=meth_name)
)
if not str(e).startswith(meth_name):
                        self.fail(
                            "Method <<{name:s}>> failed with unexpected "
                            "exception message: {exp!s}\n".format(
name=meth_name, exp=e
)
)
for meth_name, recv_meth, expect_success, args in recv_methods:
indata = (data_prefix + meth_name).encode('ascii')
try:
s.send(indata)
outdata = recv_meth(*args)
if outdata != indata.lower():
                        self.fail(
                            "While receiving with <<{name:s}>> bad data "
                            "<<{outdata!r}>> ({nout:d}) received; "
                            "expected <<{indata!r}>> ({nin:d})\n".format(
name=meth_name, outdata=outdata[:20],
nout=len(outdata),
indata=indata[:20], nin=len(indata)
)
)
except ValueError as e:
if expect_success:
self.fail(
"Failed to receive with method <<{name:s}>>; "
"expected to succeed.\n".format(name=meth_name)
)
if not str(e).startswith(meth_name):
                        self.fail(
                            "Method <<{name:s}>> failed with unexpected "
                            "exception message: {exp!s}\n".format(
name=meth_name, exp=e
)
)
# consume data
s.read()
# read(-1, buffer) is supported, even though read(-1) is not
data = b"data"
s.send(data)
buffer = bytearray(len(data))
self.assertEqual(s.read(-1, buffer), len(data))
self.assertEqual(buffer, data)
s.write(b"over\n")
self.assertRaises(ValueError, s.recv, -1)
self.assertRaises(ValueError, s.read, -1)
s.close()
def test_recv_zero(self):
server = ThreadedEchoServer(CERTFILE)
server.__enter__()
self.addCleanup(server.__exit__, None, None)
s = socket.create_connection((HOST, server.port))
self.addCleanup(s.close)
s = ssl.wrap_socket(s, suppress_ragged_eofs=False)
self.addCleanup(s.close)
# recv/read(0) should return no data
s.send(b"data")
self.assertEqual(s.recv(0), b"")
self.assertEqual(s.read(0), b"")
self.assertEqual(s.read(), b"data")
# Should not block if the other end sends no data
s.setblocking(False)
self.assertEqual(s.recv(0), b"")
self.assertEqual(s.recv_into(bytearray()), 0)
def test_handshake_timeout(self):
# Issue #5103: SSL handshake must respect the socket timeout
server = socket.socket(socket.AF_INET)
host = "127.0.0.1"
port = support.bind_port(server)
started = threading.Event()
finish = False
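        # Flipped in the finally clause below to stop the server thread.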
def serve():
server.listen(5)
started.set()
conns = []
while not finish:
r, w, e = select.select([server], [], [], 0.1)
if server in r:
# Let the socket hang around rather than having
# it closed by garbage collection.
conns.append(server.accept()[0])
for sock in conns:
sock.close()
t = threading.Thread(target=serve)
t.start()
started.wait()
try:
try:
c = socket.socket(socket.AF_INET)
c.settimeout(0.2)
c.connect((host, port))
# Will attempt handshake and time out
self.assertRaisesRegexp(ssl.SSLError, "timed out",
ssl.wrap_socket, c)
finally:
c.close()
try:
c = socket.socket(socket.AF_INET)
c = ssl.wrap_socket(c)
c.settimeout(0.2)
# Will attempt handshake and time out
self.assertRaisesRegexp(ssl.SSLError, "timed out",
c.connect, (host, port))
finally:
c.close()
finally:
finish = True
t.join()
server.close()
def test_server_accept(self):
# Issue #16357: accept() on a SSLSocket created through
# SSLContext.wrap_socket().
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations(CERTFILE)
context.load_cert_chain(CERTFILE)
server = socket.socket(socket.AF_INET)
host = "127.0.0.1"
port = support.bind_port(server)
server = context.wrap_socket(server, server_side=True)
evt = threading.Event()
remote = [None]
peer = [None]
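        # One-element lists let the server thread hand results back to the
        # main thread.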
def serve():
server.listen(5)
# Block on the accept and wait on the connection to close.
evt.set()
remote[0], peer[0] = server.accept()
remote[0].recv(1)
t = threading.Thread(target=serve)
t.start()
        # Client waits until the server is set up, then connects.
evt.wait()
client = context.wrap_socket(socket.socket())
client.connect((host, port))
client_addr = client.getsockname()
client.close()
t.join()
remote[0].close()
server.close()
# Sanity checks.
self.assertIsInstance(remote[0], ssl.SSLSocket)
self.assertEqual(peer[0], client_addr)
def test_getpeercert_enotconn(self):
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
with closing(context.wrap_socket(socket.socket())) as sock:
with self.assertRaises(socket.error) as cm:
sock.getpeercert()
self.assertEqual(cm.exception.errno, errno.ENOTCONN)
def test_do_handshake_enotconn(self):
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
with closing(context.wrap_socket(socket.socket())) as sock:
with self.assertRaises(socket.error) as cm:
sock.do_handshake()
self.assertEqual(cm.exception.errno, errno.ENOTCONN)
def test_default_ciphers(self):
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
try:
# Force a set of weak ciphers on our client context
context.set_ciphers("DES")
except ssl.SSLError:
self.skipTest("no DES cipher available")
with ThreadedEchoServer(CERTFILE,
ssl_version=ssl.PROTOCOL_SSLv23,
chatty=False) as server:
with closing(context.wrap_socket(socket.socket())) as s:
with self.assertRaises(ssl.SSLError):
s.connect((HOST, server.port))
self.assertIn("no shared cipher", str(server.conn_errors[0]))
def test_version_basic(self):
"""
Basic tests for SSLSocket.version().
More tests are done in the test_protocol_*() methods.
"""
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
with ThreadedEchoServer(CERTFILE,
ssl_version=ssl.PROTOCOL_TLSv1,
chatty=False) as server:
with closing(context.wrap_socket(socket.socket())) as s:
self.assertIs(s.version(), None)
s.connect((HOST, server.port))
self.assertEqual(s.version(), 'TLSv1')
self.assertIs(s.version(), None)
@unittest.skipUnless(ssl.HAS_TLSv1_3,
"test requires TLSv1.3 enabled OpenSSL")
def test_tls1_3(self):
context = ssl.SSLContext(ssl.PROTOCOL_TLS)
context.load_cert_chain(CERTFILE)
# disable all but TLS 1.3
context.options |= (
ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1 | ssl.OP_NO_TLSv1_2
)
with ThreadedEchoServer(context=context) as server:
with context.wrap_socket(socket.socket()) as s:
s.connect((HOST, server.port))
self.assertIn(s.cipher()[0], [
'TLS13-AES-256-GCM-SHA384',
'TLS13-CHACHA20-POLY1305-SHA256',
'TLS13-AES-128-GCM-SHA256',
])
@unittest.skipUnless(ssl.HAS_ECDH, "test requires ECDH-enabled OpenSSL")
def test_default_ecdh_curve(self):
# Issue #21015: elliptic curve-based Diffie Hellman key exchange
# should be enabled by default on SSL contexts.
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.load_cert_chain(CERTFILE)
# Prior to OpenSSL 1.0.0, ECDH ciphers have to be enabled
# explicitly using the 'ECCdraft' cipher alias. Otherwise,
# our default cipher list should prefer ECDH-based ciphers
# automatically.
if ssl.OPENSSL_VERSION_INFO < (1, 0, 0):
context.set_ciphers("ECCdraft:ECDH")
with ThreadedEchoServer(context=context) as server:
with closing(context.wrap_socket(socket.socket())) as s:
s.connect((HOST, server.port))
self.assertIn("ECDH", s.cipher()[0])
@unittest.skipUnless("tls-unique" in ssl.CHANNEL_BINDING_TYPES,
"'tls-unique' channel binding not available")
def test_tls_unique_channel_binding(self):
"""Test tls-unique channel binding."""
if support.verbose:
sys.stdout.write("\n")
server = ThreadedEchoServer(CERTFILE,
certreqs=ssl.CERT_NONE,
ssl_version=ssl.PROTOCOL_TLSv1,
cacerts=CERTFILE,
chatty=True,
connectionchatty=False)
with server:
s = ssl.wrap_socket(socket.socket(),
server_side=False,
certfile=CERTFILE,
ca_certs=CERTFILE,
cert_reqs=ssl.CERT_NONE,
ssl_version=ssl.PROTOCOL_TLSv1)
s.connect((HOST, server.port))
# get the data
cb_data = s.get_channel_binding("tls-unique")
if support.verbose:
sys.stdout.write(" got channel binding data: {0!r}\n"
.format(cb_data))
# check if it is sane
self.assertIsNotNone(cb_data)
self.assertEqual(len(cb_data), 12) # True for TLSv1
            # and compare with the peer's version
s.write(b"CB tls-unique\n")
peer_data_repr = s.read().strip()
self.assertEqual(peer_data_repr,
repr(cb_data).encode("us-ascii"))
s.close()
# now, again
s = ssl.wrap_socket(socket.socket(),
server_side=False,
certfile=CERTFILE,
ca_certs=CERTFILE,
cert_reqs=ssl.CERT_NONE,
ssl_version=ssl.PROTOCOL_TLSv1)
s.connect((HOST, server.port))
new_cb_data = s.get_channel_binding("tls-unique")
if support.verbose:
sys.stdout.write(" got another channel binding data: {0!r}\n"
.format(new_cb_data))
            # check that it is really unique
self.assertNotEqual(cb_data, new_cb_data)
self.assertIsNotNone(cb_data)
self.assertEqual(len(cb_data), 12) # True for TLSv1
s.write(b"CB tls-unique\n")
peer_data_repr = s.read().strip()
self.assertEqual(peer_data_repr,
repr(new_cb_data).encode("us-ascii"))
s.close()
def test_compression(self):
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.load_cert_chain(CERTFILE)
stats = server_params_test(context, context,
chatty=True, connectionchatty=True)
if support.verbose:
sys.stdout.write(" got compression: {!r}\n".format(stats['compression']))
self.assertIn(stats['compression'], { None, 'ZLIB', 'RLE' })
@unittest.skipUnless(hasattr(ssl, 'OP_NO_COMPRESSION'),
"ssl.OP_NO_COMPRESSION needed for this test")
def test_compression_disabled(self):
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.load_cert_chain(CERTFILE)
context.options |= ssl.OP_NO_COMPRESSION
stats = server_params_test(context, context,
chatty=True, connectionchatty=True)
self.assertIs(stats['compression'], None)
def test_dh_params(self):
# Check we can get a connection with ephemeral Diffie-Hellman
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.load_cert_chain(CERTFILE)
context.load_dh_params(DHFILE)
context.set_ciphers("kEDH")
stats = server_params_test(context, context,
chatty=True, connectionchatty=True)
cipher = stats["cipher"][0]
parts = cipher.split("-")
if "ADH" not in parts and "EDH" not in parts and "DHE" not in parts:
self.fail("Non-DH cipher: " + cipher[0])
def test_selected_alpn_protocol(self):
# selected_alpn_protocol() is None unless ALPN is used.
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.load_cert_chain(CERTFILE)
stats = server_params_test(context, context,
chatty=True, connectionchatty=True)
self.assertIs(stats['client_alpn_protocol'], None)
@unittest.skipUnless(ssl.HAS_ALPN, "ALPN support required")
def test_selected_alpn_protocol_if_server_uses_alpn(self):
# selected_alpn_protocol() is None unless ALPN is used by the client.
client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
client_context.load_verify_locations(CERTFILE)
server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
server_context.load_cert_chain(CERTFILE)
server_context.set_alpn_protocols(['foo', 'bar'])
stats = server_params_test(client_context, server_context,
chatty=True, connectionchatty=True)
self.assertIs(stats['client_alpn_protocol'], None)
@unittest.skipUnless(ssl.HAS_ALPN, "ALPN support needed for this test")
def test_alpn_protocols(self):
server_protocols = ['foo', 'bar', 'milkshake']
protocol_tests = [
(['foo', 'bar'], 'foo'),
(['bar', 'foo'], 'foo'),
(['milkshake'], 'milkshake'),
(['http/3.0', 'http/4.0'], None)
]
for client_protocols, expected in protocol_tests:
server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
server_context.load_cert_chain(CERTFILE)
server_context.set_alpn_protocols(server_protocols)
client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
client_context.load_cert_chain(CERTFILE)
client_context.set_alpn_protocols(client_protocols)
try:
stats = server_params_test(client_context,
server_context,
chatty=True,
connectionchatty=True)
except ssl.SSLError as e:
stats = e
if (expected is None and IS_OPENSSL_1_1
and ssl.OPENSSL_VERSION_INFO < (1, 1, 0, 6)):
# OpenSSL 1.1.0 to 1.1.0e raises handshake error
self.assertIsInstance(stats, ssl.SSLError)
else:
msg = "failed trying %s (s) and %s (c).\n" \
"was expecting %s, but got %%s from the %%s" \
% (str(server_protocols), str(client_protocols),
str(expected))
client_result = stats['client_alpn_protocol']
self.assertEqual(client_result, expected,
msg % (client_result, "client"))
server_result = stats['server_alpn_protocols'][-1] \
if len(stats['server_alpn_protocols']) else 'nothing'
self.assertEqual(server_result, expected,
msg % (server_result, "server"))
def test_selected_npn_protocol(self):
# selected_npn_protocol() is None unless NPN is used
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.load_cert_chain(CERTFILE)
stats = server_params_test(context, context,
chatty=True, connectionchatty=True)
self.assertIs(stats['client_npn_protocol'], None)
@unittest.skipUnless(ssl.HAS_NPN, "NPN support needed for this test")
def test_npn_protocols(self):
server_protocols = ['http/1.1', 'spdy/2']
protocol_tests = [
(['http/1.1', 'spdy/2'], 'http/1.1'),
(['spdy/2', 'http/1.1'], 'http/1.1'),
(['spdy/2', 'test'], 'spdy/2'),
(['abc', 'def'], 'abc')
]
for client_protocols, expected in protocol_tests:
server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
server_context.load_cert_chain(CERTFILE)
server_context.set_npn_protocols(server_protocols)
client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
client_context.load_cert_chain(CERTFILE)
client_context.set_npn_protocols(client_protocols)
stats = server_params_test(client_context, server_context,
chatty=True, connectionchatty=True)
msg = "failed trying %s (s) and %s (c).\n" \
"was expecting %s, but got %%s from the %%s" \
% (str(server_protocols), str(client_protocols),
str(expected))
client_result = stats['client_npn_protocol']
self.assertEqual(client_result, expected, msg % (client_result, "client"))
server_result = stats['server_npn_protocols'][-1] \
if len(stats['server_npn_protocols']) else 'nothing'
self.assertEqual(server_result, expected, msg % (server_result, "server"))
def sni_contexts(self):
server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
server_context.load_cert_chain(SIGNED_CERTFILE)
other_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
other_context.load_cert_chain(SIGNED_CERTFILE2)
client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
client_context.verify_mode = ssl.CERT_REQUIRED
client_context.load_verify_locations(SIGNING_CA)
return server_context, other_context, client_context
def check_common_name(self, stats, name):
cert = stats['peercert']
self.assertIn((('commonName', name),), cert['subject'])
@needs_sni
def test_sni_callback(self):
calls = []
server_context, other_context, client_context = self.sni_contexts()
def servername_cb(ssl_sock, server_name, initial_context):
calls.append((server_name, initial_context))
if server_name is not None:
ssl_sock.context = other_context
server_context.set_servername_callback(servername_cb)
stats = server_params_test(client_context, server_context,
chatty=True,
sni_name='supermessage')
# The hostname was fetched properly, and the certificate was
# changed for the connection.
self.assertEqual(calls, [("supermessage", server_context)])
        # The other context's certificate (SIGNED_CERTFILE2) was selected
self.check_common_name(stats, 'fakehostname')
calls = []
# The callback is called with server_name=None
stats = server_params_test(client_context, server_context,
chatty=True,
sni_name=None)
self.assertEqual(calls, [(None, server_context)])
self.check_common_name(stats, 'localhost')
# Check disabling the callback
calls = []
server_context.set_servername_callback(None)
stats = server_params_test(client_context, server_context,
chatty=True,
sni_name='notfunny')
# Certificate didn't change
self.check_common_name(stats, 'localhost')
self.assertEqual(calls, [])
@needs_sni
def test_sni_callback_alert(self):
# Returning a TLS alert is reflected to the connecting client
server_context, other_context, client_context = self.sni_contexts()
def cb_returning_alert(ssl_sock, server_name, initial_context):
return ssl.ALERT_DESCRIPTION_ACCESS_DENIED
server_context.set_servername_callback(cb_returning_alert)
with self.assertRaises(ssl.SSLError) as cm:
stats = server_params_test(client_context, server_context,
chatty=False,
sni_name='supermessage')
self.assertEqual(cm.exception.reason, 'TLSV1_ALERT_ACCESS_DENIED')
@needs_sni
def test_sni_callback_raising(self):
# Raising fails the connection with a TLS handshake failure alert.
server_context, other_context, client_context = self.sni_contexts()
def cb_raising(ssl_sock, server_name, initial_context):
1.0/0.0
server_context.set_servername_callback(cb_raising)
with self.assertRaises(ssl.SSLError) as cm, \
support.captured_stderr() as stderr:
stats = server_params_test(client_context, server_context,
chatty=False,
sni_name='supermessage')
self.assertEqual(cm.exception.reason, 'SSLV3_ALERT_HANDSHAKE_FAILURE')
self.assertIn("ZeroDivisionError", stderr.getvalue())
@needs_sni
def test_sni_callback_wrong_return_type(self):
# Returning the wrong return type terminates the TLS connection
# with an internal error alert.
server_context, other_context, client_context = self.sni_contexts()
def cb_wrong_return_type(ssl_sock, server_name, initial_context):
return "foo"
server_context.set_servername_callback(cb_wrong_return_type)
with self.assertRaises(ssl.SSLError) as cm, \
support.captured_stderr() as stderr:
stats = server_params_test(client_context, server_context,
chatty=False,
sni_name='supermessage')
self.assertEqual(cm.exception.reason, 'TLSV1_ALERT_INTERNAL_ERROR')
self.assertIn("TypeError", stderr.getvalue())
def test_read_write_after_close_raises_valuerror(self):
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations(CERTFILE)
context.load_cert_chain(CERTFILE)
server = ThreadedEchoServer(context=context, chatty=False)
with server:
s = context.wrap_socket(socket.socket())
s.connect((HOST, server.port))
s.close()
self.assertRaises(ValueError, s.read, 1024)
self.assertRaises(ValueError, s.write, b'hello')
def test_main(verbose=False):
if support.verbose:
plats = {
'Linux': platform.linux_distribution,
'Mac': platform.mac_ver,
'Windows': platform.win32_ver,
}
for name, func in plats.items():
plat = func()
if plat and plat[0]:
plat = '%s %r' % (name, plat)
break
else:
plat = repr(platform.platform())
print("test_ssl: testing with %r %r" %
(ssl.OPENSSL_VERSION, ssl.OPENSSL_VERSION_INFO))
print(" under %s" % plat)
print(" HAS_SNI = %r" % ssl.HAS_SNI)
print(" OP_ALL = 0x%8x" % ssl.OP_ALL)
try:
print(" OP_NO_TLSv1_1 = 0x%8x" % ssl.OP_NO_TLSv1_1)
except AttributeError:
pass
for filename in [
CERTFILE, REMOTE_ROOT_CERT, BYTES_CERTFILE,
ONLYCERT, ONLYKEY, BYTES_ONLYCERT, BYTES_ONLYKEY,
SIGNED_CERTFILE, SIGNED_CERTFILE2, SIGNING_CA,
BADCERT, BADKEY, EMPTYCERT]:
if not os.path.exists(filename):
raise support.TestFailed("Can't read certificate file %r" % filename)
tests = [ContextTests, BasicTests, BasicSocketTests, SSLErrorTests]
if support.is_resource_enabled('network'):
tests.append(NetworkedTests)
if _have_threads:
thread_info = support.threading_setup()
if thread_info:
tests.append(ThreadedTests)
try:
support.run_unittest(*tests)
finally:
if _have_threads:
support.threading_cleanup(*thread_info)
if __name__ == "__main__":
test_main()
| HiSPARC/station-software | user/python/Lib/test/test_ssl.py | Python | gpl-3.0 | 144,422 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Interpreter version: python 2.7
#
# !!! DO NOT EDIT THIS FILE - THIS IS GENERATED FILE !!!
# Imports =====================================================================
from collections import namedtuple
# Functions and classes =======================================================
# !!! DO NOT EDIT THIS FILE - THIS IS GENERATED FILE !!!
# !!! DO NOT EDIT THIS FILE - THIS IS GENERATED FILE !!!
_PUB_FIELDS = [
"title",
"author",
"pub_year",
"isbn",
"urnnbn",
"uuid",
"aleph_id",
"producent_id",
"is_public",
"filename",
"is_periodical",
"path",
"b64_data",
"url",
"file_pointer",
]
class Publication(namedtuple('Publication', _PUB_FIELDS)):
'''
    Communication structure used to send data to the `storage` subsystem over AMQP.
Attributes:
title (str): Title of the publication.
author (str): Name of the author.
pub_year (str): Year when the publication was released.
isbn (str): ISBN for the publication.
urnnbn (str): URN:NBN for the publication.
uuid (str): UUID string to pair the publication with edeposit.
aleph_id (str): ID used in aleph.
producent_id (str): ID used for producent.
is_public (bool): Is the file public?
filename (str): Original filename.
is_periodical (bool): Is the publication periodical?
path (str): Path in the tree (used for periodicals).
b64_data (str): Base64 encoded data ebook file.
url (str): URL in case that publication is public.
file_pointer (str): Pointer to the file on the file server.
'''
def __new__(self, *args, **kwargs):
for field, arg in zip(_PUB_FIELDS, args):
kwargs[field] = arg
for key in _PUB_FIELDS:
if key not in kwargs:
kwargs[key] = None
return super(Publication, self).__new__(self, **kwargs)
def __init__(self, *args, **kwargs):
for field, arg in zip(_PUB_FIELDS, args):
kwargs[field] = arg
for key, val in kwargs.iteritems():
if key not in _PUB_FIELDS:
raise ValueError("Unknown parameter '%s'!" % key)
self.__dict__[key] = val
# !!! DO NOT EDIT THIS FILE - THIS IS GENERATED FILE !!!
# !!! DO NOT EDIT THIS FILE - THIS IS GENERATED FILE !!!
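# Illustrative usage (annotation added for documentation; not part of the
# generated file). The field values below are made up; fields that are not
# passed default to None via __new__ above.
if __name__ == '__main__':
    pub = Publication(title="Example title", author="A. Author",
                      pub_year="2015", isbn="978-80-000000-0-0")
    assert pub.author == "A. Author"
    assert pub.uuid is None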
| edeposit/edeposit.amqp.storage | src/edeposit/amqp/storage/structures/comm/publication.py | Python | mit | 2,401 |
"""Http json-rpc client transport implementation."""
import requests
import logging
# configure logger
_logger = logging.getLogger(__name__)
_logger.setLevel(logging.ERROR)
class HttpClient(object):
"""json-rpc http client transport."""
def __init__(self, url):
"""
Create HttpClient object.
usage:
HttpClient('http://hostname:port')
HttpClient('http://127.0.0.1:8080')
args:
url -- server url
"""
self.url = url
def send_rpc_message(self, message):
"""
Send a json-rpc request to a server.
args:
message -- json-rpc request string
returns:
json-rpc response string (None if an error occurred)
"""
try:
response = requests.post(self.url, data=message)
return response.text
except requests.exceptions.RequestException as e:
# If something goes wrong, we'll just log the exception
# and move on so we don't totally break the client.
# _logger.exception('http requests error')
_logger.error('http requests error: %s', e)
return None
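# Minimal usage sketch (added for illustration; the server URL and request
# below are made up -- any JSON-RPC endpoint would do):
if __name__ == '__main__':
    client = HttpClient('http://127.0.0.1:8080')
    request = '{"jsonrpc": "2.0", "method": "ping", "id": 1}'
    # Prints the raw response body, or None if the POST failed.
    print(client.send_rpc_message(request))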
| tvannoy/jsonrpc_pyclient | jsonrpc_pyclient/connectors/httpclient.py | Python | mit | 1,247 |
import os
import unittest
import waftester
import devkitarm
import Build
import Utils
def DKAConf(conf):
conf.check_tool('devkitarm')
def DKABuild(bld):
pass
def DKAWithCompiler(conf):
conf.check_tool('devkitarm')
conf.check_tool('compiler_cxx')
conf.check_tool('compiler_cc')
def DKAWithLibnds(conf):
DKAWithCompiler(conf)
conf.check_libnds()
class TestDevkitArm(waftester.WafTestCase):
def testDevkitArmUnset(self):
dka = os.environ.pop('DEVKITARM')
dkp = os.environ.pop('DEVKITPRO')
try:
self.suite.write_wscript(configure=DKAConf, build=DKABuild)
# this shouldn't raise an error
self.suite.run_waf()
# and CC, etc, should not be set
self.assertFalse('CC' in Build.bld.env)
self.assertFalse('CXX' in Build.bld.env)
self.assertFalse('CPP' in Build.bld.env)
self.assertFalse('AR' in Build.bld.env)
self.assertFalse('RANLIB' in Build.bld.env)
self.assertFalse('OBJCOPY' in Build.bld.env)
finally:
os.environ['DEVKITARM'] = dka
os.environ['DEVKITPRO'] = dkp
def testDevkitArmCompiler(self):
self.suite.write_wscript(options=waftester.CompilerOptions, configure=DKAWithCompiler, build=DKABuild)
self.suite.run_waf()
# ensure the variables are what we expect...
def prog(name):
return [os.path.join(os.environ['DEVKITARM'],
'bin', 'arm-eabi-'+name)]
def env(val):
return Utils.to_list(Build.bld.env[val])
self.assertEqual(prog('ar'), env('AR'))
self.assertEqual(prog('cpp'), env('CPP'))
self.assertEqual(prog('gcc'), env('CC'))
self.assertEqual(prog('g++'), env('CXX'))
self.assertEqual(prog('objcopy'), env('OBJCOPY'))
self.assertEqual(prog('ranlib'), env('RANLIB'))
self.assertEqual(prog('g++'), env('LINK_CXX'))
def testCompilerFlags(self):
self.suite.write_wscript(options=waftester.CompilerOptions, configure=DKAWithCompiler, build=DKABuild)
self.suite.run_waf()
self.assertTrue('CXXFLAGS' in Build.bld.env)
self.assertTrue('CCFLAGS' in Build.bld.env)
self.assertTrue('CCDEFINES' in Build.bld.env)
self.assertTrue('CXXDEFINES' in Build.bld.env)
self.assertTrue('CPPPATH' in Build.bld.env)
self.assertTrue('LIBPATH' in Build.bld.env)
self.assertTrue('LINKFLAGS' in Build.bld.env)
self.assertEqual(list, type(Build.bld.env['CPPPATH']))
self.assertEqual(list, type(Build.bld.env['LIBPATH']))
self.assertEqual(list, type(Build.bld.env['LINKFLAGS']))
self.assertEqual(5, len(Build.bld.env['LINKFLAGS']))
def testCheckLibnds(self):
self.suite.write_wscript(options=waftester.CompilerOptions, configure=DKAWithLibnds, build=DKABuild)
self.suite.run_waf()
self.assertEquals(['fat', 'nds9'], Build.bld.env['LIB_NDS'])
if __name__ == '__main__':
unittest.main()
| google-code-export/quirkysoft | waf_tools/test_devkitarm.py | Python | gpl-3.0 | 3,052 |
from flask import Flask
from flask_restful import reqparse, abort, Api, Resource
app = Flask(__name__)
api = Api(app)
QUESTIONS = {
'1': {'question': 'How do we do XY?', 'answers': {'1': 'We do it like that...', '2': 'lalala ...'}},
'2': {'question': 'Where to find XY?', 'answers': {'3': 'It is found here and there'}},
    '3': {'question': 'At what time is XY?', 'answers': {'4': 'About XY'}},
}
def abort_if_question_doesnt_exist(question_id):
if question_id not in QUESTIONS:
abort(404, message="Question {} doesn't exist".format(question_id))
parser = reqparse.RequestParser()
parser.add_argument('question', type=str)
# TODO move to resources as own file
# Question
# shows a single question item and lets you delete it
class Question(Resource):
def get(self, question_id):
abort_if_question_doesnt_exist(question_id)
return QUESTIONS[question_id]
def delete(self, question_id):
abort_if_question_doesnt_exist(question_id)
del QUESTIONS[question_id]
return '', 204
    def put(self, question_id):
        args = parser.parse_args()
        question = {'question': args['question']}
        QUESTIONS[question_id] = question
        return question, 201
# TODO move to resources as own file
# QuestionList
# shows a list of all questions, and lets you POST to add new questions
class QuestionList(Resource):
def get(self):
return QUESTIONS
def post(self):
args = parser.parse_args()
question_id = 'question%d' % (len(QUESTIONS) + 1)
QUESTIONS[question_id] = {'question': args['question']}
return QUESTIONS[question_id], 201
##
## Actually setup the Api resource routing here
##
api.add_resource(QuestionList, '/questions')
api.add_resource(Question, '/questions/<string:question_id>')
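# Example interaction with the routes above (illustrative; assumes the
# development server started below is listening on http://127.0.0.1:5000):
#
#   curl http://127.0.0.1:5000/questions
#   curl http://127.0.0.1:5000/questions/1
#   curl -X POST -d "question=Is qali running?" http://127.0.0.1:5000/questions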
if __name__ == '__main__':
app.run(debug=True) | thorbenegberts/qali | qali/app.py | Python | apache-2.0 | 1,874 |
from sympy import (S, sympify, trigsimp, expand, sqrt, Add, zeros,
ImmutableMatrix as Matrix)
from sympy.core.compatibility import u, unicode
from sympy.utilities.misc import filldedent
__all__ = ['Vector']
class Vector(object):
"""The class used to define vectors.
    It, along with ReferenceFrame, is one of the building blocks for
    describing a classical mechanics system in PyDy and sympy.physics.vector.
Attributes
==========
simp : Boolean
Let certain methods use trigsimp on their outputs
"""
simp = False
def __init__(self, inlist):
"""This is the constructor for the Vector class. You shouldn't be
calling this, it should only be used by other functions. You should be
treating Vectors like you would with if you were doing the math by
hand, and getting the first 3 from the standard basis vectors from a
ReferenceFrame.
The only exception is to create a zero vector:
zv = Vector(0)
"""
self.args = []
if inlist == 0:
inlist = []
while len(inlist) != 0:
added = 0
for i, v in enumerate(self.args):
if inlist[0][1] == self.args[i][1]:
self.args[i] = (self.args[i][0] + inlist[0][0],
inlist[0][1])
inlist.remove(inlist[0])
added = 1
break
if added != 1:
self.args.append(inlist[0])
inlist.remove(inlist[0])
i = 0
# This code is to remove empty frames from the list
while i < len(self.args):
if self.args[i][0] == Matrix([0, 0, 0]):
self.args.remove(self.args[i])
i -= 1
i += 1
def __hash__(self):
return hash(tuple(self.args))
def __add__(self, other):
"""The add operator for Vector. """
other = _check_vector(other)
return Vector(self.args + other.args)
def __and__(self, other):
"""Dot product of two vectors.
Returns a scalar, the dot product of the two Vectors
Parameters
==========
other : Vector
The Vector which we are dotting with
Examples
========
>>> from sympy.physics.vector import ReferenceFrame, dot
>>> from sympy import symbols
>>> q1 = symbols('q1')
>>> N = ReferenceFrame('N')
>>> dot(N.x, N.x)
1
>>> dot(N.x, N.y)
0
>>> A = N.orientnew('A', 'Axis', [q1, N.x])
>>> dot(N.y, A.y)
cos(q1)
"""
from sympy.physics.vector.dyadic import Dyadic
if isinstance(other, Dyadic):
return NotImplemented
other = _check_vector(other)
out = S(0)
for i, v1 in enumerate(self.args):
for j, v2 in enumerate(other.args):
out += ((v2[0].T)
* (v2[1].dcm(v1[1]))
* (v1[0]))[0]
if Vector.simp:
return trigsimp(sympify(out), recursive=True)
else:
return sympify(out)
def __div__(self, other):
"""This uses mul and inputs self and 1 divided by other. """
return self.__mul__(sympify(1) / other)
__truediv__ = __div__
def __eq__(self, other):
"""Tests for equality.
        It is very important to note that this is only as good as the SymPy
equality test; False does not always mean they are not equivalent
Vectors.
If other is 0, and self is empty, returns True.
If other is 0 and self is not empty, returns False.
If none of the above, only accepts other as a Vector.
"""
if other == 0:
other = Vector(0)
other = _check_vector(other)
if (self.args == []) and (other.args == []):
return True
elif (self.args == []) or (other.args == []):
return False
frame = self.args[0][1]
for v in frame:
if expand((self - other) & v) != 0:
return False
return True
def __mul__(self, other):
"""Multiplies the Vector by a sympifyable expression.
Parameters
==========
other : Sympifyable
The scalar to multiply this Vector with
Examples
========
>>> from sympy.physics.vector import ReferenceFrame
>>> from sympy import Symbol
>>> N = ReferenceFrame('N')
>>> b = Symbol('b')
>>> V = 10 * b * N.x
>>> print(V)
10*b*N.x
"""
newlist = [v for v in self.args]
for i, v in enumerate(newlist):
newlist[i] = (sympify(other) * newlist[i][0], newlist[i][1])
return Vector(newlist)
def __ne__(self, other):
return not self.__eq__(other)
def __neg__(self):
return self * -1
def __or__(self, other):
"""Outer product between two Vectors.
A rank increasing operation, which returns a Dyadic from two Vectors
Parameters
==========
other : Vector
The Vector to take the outer product with
Examples
========
>>> from sympy.physics.vector import ReferenceFrame, outer
>>> N = ReferenceFrame('N')
>>> outer(N.x, N.x)
(N.x|N.x)
"""
from sympy.physics.vector.dyadic import Dyadic
other = _check_vector(other)
ol = Dyadic(0)
for i, v in enumerate(self.args):
for i2, v2 in enumerate(other.args):
                # Expanded term by term: iterating over the components of
                # the same frame in a nested fashion misbehaves, so each of
                # the nine basis pairings is written out explicitly.
ol += Dyadic([(v[0][0] * v2[0][0], v[1].x, v2[1].x)])
ol += Dyadic([(v[0][0] * v2[0][1], v[1].x, v2[1].y)])
ol += Dyadic([(v[0][0] * v2[0][2], v[1].x, v2[1].z)])
ol += Dyadic([(v[0][1] * v2[0][0], v[1].y, v2[1].x)])
ol += Dyadic([(v[0][1] * v2[0][1], v[1].y, v2[1].y)])
ol += Dyadic([(v[0][1] * v2[0][2], v[1].y, v2[1].z)])
ol += Dyadic([(v[0][2] * v2[0][0], v[1].z, v2[1].x)])
ol += Dyadic([(v[0][2] * v2[0][1], v[1].z, v2[1].y)])
ol += Dyadic([(v[0][2] * v2[0][2], v[1].z, v2[1].z)])
return ol
def _latex(self, printer=None):
"""Latex Printing method. """
from sympy.physics.vector.printing import VectorLatexPrinter
ar = self.args # just to shorten things
if len(ar) == 0:
return str(0)
ol = [] # output list, to be concatenated to a string
for i, v in enumerate(ar):
for j in 0, 1, 2:
# if the coef of the basis vector is 1, we skip the 1
if ar[i][0][j] == 1:
ol.append(' + ' + ar[i][1].latex_vecs[j])
# if the coef of the basis vector is -1, we skip the 1
elif ar[i][0][j] == -1:
ol.append(' - ' + ar[i][1].latex_vecs[j])
elif ar[i][0][j] != 0:
                    # If the coefficient of the basis vector is not 1 or -1,
                    # print it, wrapping sums in parentheses for readability.
arg_str = VectorLatexPrinter().doprint(ar[i][0][j])
if isinstance(ar[i][0][j], Add):
arg_str = "(%s)" % arg_str
if arg_str[0] == '-':
arg_str = arg_str[1:]
str_start = ' - '
else:
str_start = ' + '
ol.append(str_start + arg_str + ar[i][1].latex_vecs[j])
outstr = ''.join(ol)
if outstr.startswith(' + '):
outstr = outstr[3:]
elif outstr.startswith(' '):
outstr = outstr[1:]
return outstr
def _pretty(self, printer=None):
"""Pretty Printing method. """
from sympy.physics.vector.printing import VectorPrettyPrinter
e = self
class Fake(object):
baseline = 0
def render(self, *args, **kwargs):
ar = e.args # just to shorten things
if len(ar) == 0:
return unicode(0)
settings = printer._settings if printer else {}
vp = printer if printer else VectorPrettyPrinter(settings)
ol = [] # output list, to be concatenated to a string
for i, v in enumerate(ar):
for j in 0, 1, 2:
# if the coef of the basis vector is 1, we skip the 1
if ar[i][0][j] == 1:
ol.append(u(" + ") + ar[i][1].pretty_vecs[j])
# if the coef of the basis vector is -1, we skip the 1
elif ar[i][0][j] == -1:
ol.append(u(" - ") + ar[i][1].pretty_vecs[j])
elif ar[i][0][j] != 0:
# If the basis vector coeff is not 1 or -1,
# we might wrap it in parentheses, for readability.
if isinstance(ar[i][0][j], Add):
arg_str = vp._print(
ar[i][0][j]).parens()[0]
else:
arg_str = (vp.doprint(
ar[i][0][j]))
if arg_str[0] == u("-"):
arg_str = arg_str[1:]
str_start = u(" - ")
else:
str_start = u(" + ")
ol.append(str_start + arg_str + ' ' +
ar[i][1].pretty_vecs[j])
outstr = u("").join(ol)
if outstr.startswith(u(" + ")):
outstr = outstr[3:]
elif outstr.startswith(" "):
outstr = outstr[1:]
return outstr
return Fake()
def __ror__(self, other):
"""Outer product between two Vectors.
A rank increasing operation, which returns a Dyadic from two Vectors
Parameters
==========
other : Vector
The Vector to take the outer product with
Examples
========
>>> from sympy.physics.vector import ReferenceFrame, outer
>>> N = ReferenceFrame('N')
>>> outer(N.x, N.x)
(N.x|N.x)
"""
from sympy.physics.vector.dyadic import Dyadic
other = _check_vector(other)
ol = Dyadic(0)
for i, v in enumerate(other.args):
for i2, v2 in enumerate(self.args):
                # Expanded term by term: iterating over the components of
                # the same frame in a nested fashion misbehaves, so each of
                # the nine basis pairings is written out explicitly.
ol += Dyadic([(v[0][0] * v2[0][0], v[1].x, v2[1].x)])
ol += Dyadic([(v[0][0] * v2[0][1], v[1].x, v2[1].y)])
ol += Dyadic([(v[0][0] * v2[0][2], v[1].x, v2[1].z)])
ol += Dyadic([(v[0][1] * v2[0][0], v[1].y, v2[1].x)])
ol += Dyadic([(v[0][1] * v2[0][1], v[1].y, v2[1].y)])
ol += Dyadic([(v[0][1] * v2[0][2], v[1].y, v2[1].z)])
ol += Dyadic([(v[0][2] * v2[0][0], v[1].z, v2[1].x)])
ol += Dyadic([(v[0][2] * v2[0][1], v[1].z, v2[1].y)])
ol += Dyadic([(v[0][2] * v2[0][2], v[1].z, v2[1].z)])
return ol
def __rsub__(self, other):
return (-1 * self) + other
def __str__(self, printer=None):
"""Printing method. """
from sympy.physics.vector.printing import VectorStrPrinter
ar = self.args # just to shorten things
if len(ar) == 0:
return str(0)
ol = [] # output list, to be concatenated to a string
for i, v in enumerate(ar):
for j in 0, 1, 2:
# if the coef of the basis vector is 1, we skip the 1
if ar[i][0][j] == 1:
ol.append(' + ' + ar[i][1].str_vecs[j])
# if the coef of the basis vector is -1, we skip the 1
elif ar[i][0][j] == -1:
ol.append(' - ' + ar[i][1].str_vecs[j])
elif ar[i][0][j] != 0:
                    # If the coefficient of the basis vector is not 1 or -1,
                    # print it, wrapping sums in parentheses for readability.
arg_str = VectorStrPrinter().doprint(ar[i][0][j])
if isinstance(ar[i][0][j], Add):
arg_str = "(%s)" % arg_str
if arg_str[0] == '-':
arg_str = arg_str[1:]
str_start = ' - '
else:
str_start = ' + '
ol.append(str_start + arg_str + '*' + ar[i][1].str_vecs[j])
outstr = ''.join(ol)
if outstr.startswith(' + '):
outstr = outstr[3:]
elif outstr.startswith(' '):
outstr = outstr[1:]
return outstr
def __sub__(self, other):
"""The subraction operator. """
return self.__add__(other * -1)
def __xor__(self, other):
"""The cross product operator for two Vectors.
Returns a Vector, expressed in the same ReferenceFrames as self.
Parameters
==========
other : Vector
The Vector which we are crossing with
Examples
========
>>> from sympy.physics.vector import ReferenceFrame, Vector
>>> from sympy import symbols
>>> q1 = symbols('q1')
>>> N = ReferenceFrame('N')
>>> N.x ^ N.y
N.z
>>> A = N.orientnew('A', 'Axis', [q1, N.x])
>>> A.x ^ N.y
N.z
>>> N.y ^ A.x
- sin(q1)*A.y - cos(q1)*A.z
"""
from sympy.physics.vector.dyadic import Dyadic
if isinstance(other, Dyadic):
return NotImplemented
other = _check_vector(other)
if other.args == []:
return Vector(0)
def _det(mat):
"""This is needed as a little method for to find the determinant
of a list in python; needs to work for a 3x3 list.
SymPy's Matrix won't take in Vector, so need a custom function.
You shouldn't be calling this.
"""
return (mat[0][0] * (mat[1][1] * mat[2][2] - mat[1][2] * mat[2][1])
+ mat[0][1] * (mat[1][2] * mat[2][0] - mat[1][0] *
mat[2][2]) + mat[0][2] * (mat[1][0] * mat[2][1] -
mat[1][1] * mat[2][0]))
outvec = Vector(0)
ar = other.args # For brevity
for i, v in enumerate(ar):
tempx = v[1].x
tempy = v[1].y
tempz = v[1].z
tempm = ([[tempx, tempy, tempz], [self & tempx, self & tempy,
self & tempz], [Vector([ar[i]]) & tempx,
Vector([ar[i]]) & tempy, Vector([ar[i]]) & tempz]])
outvec += _det(tempm)
return outvec
_sympystr = __str__
_sympyrepr = _sympystr
__repr__ = __str__
__radd__ = __add__
__rand__ = __and__
__rmul__ = __mul__
def separate(self):
"""
The constituents of this vector in different reference frames,
as per its definition.
Returns a dict mapping each ReferenceFrame to the corresponding
constituent Vector.
Examples
========
>>> from sympy.physics.vector import ReferenceFrame
>>> R1 = ReferenceFrame('R1')
>>> R2 = ReferenceFrame('R2')
>>> v = R1.x + R2.x
>>> v.separate() == {R1: R1.x, R2: R2.x}
True
"""
components = {}
for x in self.args:
components[x[1]] = Vector([x])
return components
def dot(self, other):
return self & other
dot.__doc__ = __and__.__doc__
def cross(self, other):
return self ^ other
cross.__doc__ = __xor__.__doc__
def outer(self, other):
return self | other
outer.__doc__ = __or__.__doc__
def diff(self, wrt, otherframe):
"""Takes the partial derivative, with respect to a value, in a frame.
Returns a Vector.
Parameters
==========
wrt : Symbol
What the partial derivative is taken with respect to.
otherframe : ReferenceFrame
The ReferenceFrame that the partial derivative is taken in.
Examples
========
>>> from sympy.physics.vector import ReferenceFrame, Vector, dynamicsymbols
>>> from sympy import Symbol
>>> Vector.simp = True
>>> t = Symbol('t')
>>> q1 = dynamicsymbols('q1')
>>> N = ReferenceFrame('N')
>>> A = N.orientnew('A', 'Axis', [q1, N.y])
>>> A.x.diff(t, N)
- q1'*A.z
"""
from sympy.physics.vector.frame import _check_frame
wrt = sympify(wrt)
_check_frame(otherframe)
outvec = Vector(0)
for i, v in enumerate(self.args):
if v[1] == otherframe:
outvec += Vector([(v[0].diff(wrt), otherframe)])
else:
if otherframe.dcm(v[1]).diff(wrt) == zeros(3, 3):
d = v[0].diff(wrt)
outvec += Vector([(d, v[1])])
else:
d = (Vector([v]).express(otherframe)).args[0][0].diff(wrt)
outvec += Vector([(d, otherframe)]).express(v[1])
return outvec
def express(self, otherframe, variables=False):
"""
Returns a Vector equivalent to this one, expressed in otherframe.
Uses the global express method.
Parameters
==========
otherframe : ReferenceFrame
The frame for this Vector to be described in
variables : boolean
If True, the coordinate symbols(if present) in this Vector
are re-expressed in terms otherframe
Examples
========
>>> from sympy.physics.vector import ReferenceFrame, Vector, dynamicsymbols
>>> q1 = dynamicsymbols('q1')
>>> N = ReferenceFrame('N')
>>> A = N.orientnew('A', 'Axis', [q1, N.y])
>>> A.x.express(N)
cos(q1)*N.x - sin(q1)*N.z
"""
from sympy.physics.vector import express
return express(self, otherframe, variables=variables)
def to_matrix(self, reference_frame):
"""Returns the matrix form of the vector with respect to the given
frame.
        Parameters
        ==========
        reference_frame : ReferenceFrame
            The reference frame that the rows of the matrix correspond to.
        Returns
        =======
        matrix : ImmutableMatrix, shape(3,1)
            The matrix that gives the 1D vector.
        Examples
        ========
>>> from sympy import symbols
>>> from sympy.physics.vector import ReferenceFrame
>>> from sympy.physics.mechanics.functions import inertia
>>> a, b, c = symbols('a, b, c')
>>> N = ReferenceFrame('N')
>>> vector = a * N.x + b * N.y + c * N.z
>>> vector.to_matrix(N)
Matrix([
[a],
[b],
[c]])
>>> beta = symbols('beta')
>>> A = N.orientnew('A', 'Axis', (beta, N.x))
>>> vector.to_matrix(A)
Matrix([
[ a],
[ b*cos(beta) + c*sin(beta)],
[-b*sin(beta) + c*cos(beta)]])
"""
return Matrix([self.dot(unit_vec) for unit_vec in
reference_frame]).reshape(3, 1)
def doit(self, **hints):
"""Calls .doit() on each term in the Vector"""
ov = Vector(0)
for i, v in enumerate(self.args):
ov += Vector([(v[0].applyfunc(lambda x: x.doit(**hints)), v[1])])
return ov
def dt(self, otherframe):
"""
Returns a Vector which is the time derivative of
the self Vector, taken in frame otherframe.
Calls the global time_derivative method
Parameters
==========
otherframe : ReferenceFrame
The frame to calculate the time derivative in
"""
from sympy.physics.vector import time_derivative
return time_derivative(self, otherframe)
def simplify(self):
"""Returns a simplified Vector."""
outvec = Vector(0)
for i in self.args:
outvec += Vector([(i[0].simplify(), i[1])])
return outvec
def subs(self, *args, **kwargs):
"""Substituion on the Vector.
Examples
========
>>> from sympy.physics.vector import ReferenceFrame
>>> from sympy import Symbol
>>> N = ReferenceFrame('N')
>>> s = Symbol('s')
>>> a = N.x * s
>>> a.subs({s: 2})
2*N.x
"""
ov = Vector(0)
for i, v in enumerate(self.args):
ov += Vector([(v[0].subs(*args, **kwargs), v[1])])
return ov
def magnitude(self):
"""Returns the magnitude (Euclidean norm) of self."""
return sqrt(self & self)
def normalize(self):
"""Returns a Vector of magnitude 1, codirectional with self."""
return Vector(self.args + []) / self.magnitude()
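    # Illustrative doctest-style check of the two methods above (annotation
    # added; not in the original source):
    #
    #   >>> from sympy.physics.vector import ReferenceFrame
    #   >>> N = ReferenceFrame('N')
    #   >>> (3*N.x + 4*N.y).magnitude()
    #   5
    #   >>> (3*N.x + 4*N.y).normalize()
    #   3/5*N.x + 4/5*N.y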
class VectorTypeError(TypeError):
def __init__(self, other, want):
msg = filldedent("Expected an instance of %s, but received object "
"'%s' of %s." % (type(want), other, type(other)))
super(VectorTypeError, self).__init__(msg)
def _check_vector(other):
if not isinstance(other, Vector):
raise TypeError('A Vector must be supplied')
return other
| beni55/sympy | sympy/physics/vector/vector.py | Python | bsd-3-clause | 22,112 |
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 28 16:52:18 2016 by emin
"""
import os
import sys
import theano
import theano.tensor as T
import numpy as np
from lasagne.layers import InputLayer, ReshapeLayer, DenseLayer
from generators import CoordinateTransformationTaskFFWD
import lasagne.layers
import lasagne.nonlinearities
import lasagne.updates
import lasagne.objectives
import lasagne.init
import scipy.io as sio
os.chdir(os.path.dirname(sys.argv[0]))
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
job_idx = int(os.getenv('PBS_ARRAYID'))
np.random.seed(job_idx)
# Sweep a 14x14 log-spaced grid of layer sizes; the PBS array index picks
# one (n_in, n_hid) pair out of the 196 combinations.
nnn = np.ceil(np.logspace(.5, 2.5, 14))
nhu_vec, nin_vec = np.meshgrid(nnn, nnn)
nhu_vec = nhu_vec.flatten()
nin_vec = nin_vec.flatten()
n_in = int(nin_vec[job_idx-1])
n_hid = int(nhu_vec[job_idx-1])
def model(input_var, batch_size=1, n_in=100, n_out=1, n_hid=200):
# Input Layer
l_in = InputLayer((batch_size, n_in), input_var=input_var)
# Hidden layer
l_in_hid = DenseLayer(l_in, n_hid, nonlinearity=lasagne.nonlinearities.rectify)
# Output Layer
l_shp = ReshapeLayer(l_in_hid, (-1, n_hid))
l_dense = DenseLayer(l_shp, num_units=n_out, nonlinearity=lasagne.nonlinearities.linear)
    # Reshape back to the (batch_size, n_out) output shape.
    l_out = ReshapeLayer(l_dense, (batch_size, n_out))
return l_out, l_in_hid
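# Shape sketch for the factory above (annotation added): with batch size B,
# l_out yields (B, n_out) linear read-outs of the n_hid rectified hidden
# units, and l_in_hid is returned alongside so the hidden activations can be
# inspected separately (see rec_layer_fn below).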
if __name__ == '__main__':
# Define the input and expected output variable
input_var, target_var = T.fmatrices('input', 'target')
# The generator to sample examples from
tr_cond = 'all_gains'
test_cond = 'all_gains'
generator = CoordinateTransformationTaskFFWD(max_iter=50001, batch_size=100, n_in=n_in, n_out=1, sigma_sq=100.0, tr_cond=tr_cond)
# The model
l_out, l_rec = model(input_var, batch_size=generator.batch_size, n_in=2*generator.n_in, n_out=generator.n_out, n_hid=n_hid)
# The generated output variable and the loss function
# all_layers = lasagne.layers.get_all_layers(l_out)
# l2_penalty = lasagne.regularization.regularize_layer_params(all_layers, lasagne.regularization.l2) * 1e-6
pred_var = lasagne.layers.get_output(l_out)
loss = T.mean(lasagne.objectives.squared_error(pred_var, target_var)) # + l2_penalty
# Create the update expressions
params = lasagne.layers.get_all_params(l_out, trainable=True)
updates = lasagne.updates.adam(loss, params, learning_rate=0.001)
# Compile the function for a training step, as well as the prediction function and
# a utility function to get the inner details of the RNN
train_fn = theano.function([input_var, target_var], loss, updates=updates, allow_input_downcast=True)
pred_fn = theano.function([input_var], pred_var, allow_input_downcast=True)
rec_layer_fn = theano.function([input_var], lasagne.layers.get_output(l_rec, get_details=True), allow_input_downcast=True)
# If want to continue training an old model, uncomment below
# npzfile_lout = np.load('lout_trained_model.npz')
# npzfile_lrec = np.load('lrec_trained_model.npz')
# lasagne.layers.set_all_param_values(l_out,[npzfile_lout['arr_0'],npzfile_lout['arr_1'],npzfile_lout['arr_2'],npzfile_lout['arr_3'],npzfile_lout['arr_4'],npzfile_lout['arr_5'],npzfile_lout['arr_6']])
# lasagne.layers.set_all_param_values(l_rec,[npzfile_lout['arr_0'],npzfile_lout['arr_1'],npzfile_lout['arr_2'],npzfile_lout['arr_3'],npzfile_lout['arr_4']])
# TRAINING
success = 0.0
s_vec, opt_s_vec, ex_pred_vec, frac_rmse_vec = [], [], [], []
for i, (example_input, example_output, g1, g2, s, opt_s) in generator:
score = train_fn(example_input, example_output)
example_prediction = pred_fn(example_input)
s_vec.append(s)
opt_s_vec.append(opt_s)
ex_pred_vec.append(example_prediction)
if i % 500 == 0:
rmse_opt = np.sqrt(np.nanmean((np.asarray(s_vec) - np.asarray(opt_s_vec))**2))
rmse_net = np.sqrt(np.nanmean((np.asarray(s_vec) - np.asarray(ex_pred_vec))**2))
frac_rmse = (rmse_net - rmse_opt) / rmse_opt
frac_rmse_vec.append(frac_rmse)
print 'Batch #%d; Frac. RMSE: %.6f; Opt. RMSE: %.6f; Net. RMSE: %.6f' % (i, frac_rmse, rmse_opt, rmse_net)
if frac_rmse < 0.1:
success = 1.0
                break
s_vec = []
opt_s_vec = []
ex_pred_vec = []
# SAVE RESULTS
sio.savemat('ct_nin%i_nhu%i_jobidx%i.mat'%(n_in,n_hid,job_idx), {'frac_rmse':frac_rmse, 'frac_rmse_vec':np.asarray(frac_rmse_vec), 'success':success } ) | eminorhan/inevitable-probability | nin_nhu/ninnhu_coordinate_transformation_expt.py | Python | gpl-3.0 | 4,919 |
class Solution:
def numberOfPatterns(self, m, n):
"""
:type m: int
:type n: int
:rtype: int
"""
        def valid(pre_key, key, used):
            # A move is valid unless the straight line from pre_key to key
            # passes over a key that has not been used yet. Keys 1-9 lie on
            # a 3x3 pad; x is the row and y is the column.
            if not pre_key: return True
            px, py = (pre_key-1) // 3, (pre_key-1) % 3
            x, y = (key-1) // 3, (key-1) % 3
            if py == y and abs(px-x) == 2:
                # same column, two rows apart: the middle-row key 4+y sits
                # in between
                return 4+y in used
            elif abs(py-y) == 2:
                if abs(px-x) == 2:
                    # opposite corners: the center key 5 sits in between
                    return 5 in used
                elif px == x:
                    # same row, two columns apart: the key between them
                    return min(pre_key, key)+1 in used
            return True
def backtracking(res, used, length, used_set):
if len(used) == length:
res.append(used[:])
return
for i in range(1, 10):
if i not in used_set and (not used or valid(used[-1], i, used)):
used_set.add(i)
backtracking(res, used+[i], length, used_set)
used_set.remove(i)
ans = 0
moves = []
backtracking(moves, [], m, set())
ans += len(moves)
cnt = [0] * 10
for move in moves:
cnt[move[-1]] += 1
print(cnt)
for i in range(m+1, n+1):
pre = []
for move in moves:
backtracking(pre, move, i, set(move))
moves = pre
ans += len(moves)
cnt = [0] * 10
for move in moves:
cnt[move[-1]] += 1
print(cnt)
return ans
Solution().numberOfPatterns(1,3) | YiqunPeng/Leetcode-pyq | solutions/351AndroidUnlockPatterns.py | Python | gpl-3.0 | 1,648 |
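# Note (added): for m=1, n=3 the call above returns 385 -- the 9 + 56 + 320
# valid patterns of lengths 1, 2 and 3 -- while printing the per-ending-key
# tallies used for debugging.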
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Dogodek'
db.create_table('infosys_dogodek', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('poucuje', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['infosys.Poucuje'])),
('ime', self.gf('django.db.models.fields.CharField')(max_length=100)),
('datum', self.gf('django.db.models.fields.DateField')()),
))
db.send_create_signal('infosys', ['Dogodek'])
# Changing field 'Ocena.datum_pridobitve'
db.alter_column('infosys_ocena', 'datum_pridobitve', self.gf('django.db.models.fields.DateField')(null=True))
def backwards(self, orm):
# Deleting model 'Dogodek'
db.delete_table('infosys_dogodek')
# Changing field 'Ocena.datum_pridobitve'
db.alter_column('infosys_ocena', 'datum_pridobitve', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'infosys.dijak': {
'Meta': {'ordering': "('uporabnik__last_name', 'uporabnik__first_name')", 'object_name': 'Dijak'},
'datum_rojstva': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'emso': ('django.db.models.fields.CharField', [], {'max_length': '13', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mati': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'mati_dijaka'", 'null': 'True', 'to': "orm['infosys.Stars']"}),
'mobitel': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'oce': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'oce_dijaka'", 'null': 'True', 'to': "orm['infosys.Stars']"}),
'stalno_prebivalisce': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'stalno_prebivalisce_dijaka'", 'unique': 'True', 'null': 'True', 'to': "orm['infosys.Naslov']"}),
'uporabnik': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'v_dijaskem_domu': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'zacasno_prebivalisce': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'zacasno_prebivalisce_dijaka'", 'unique': 'True', 'null': 'True', 'to': "orm['infosys.Naslov']"})
},
'infosys.dogodek': {
'Meta': {'object_name': 'Dogodek'},
'datum': ('django.db.models.fields.DateField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ime': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'poucuje': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['infosys.Poucuje']"})
},
'infosys.naslov': {
'Meta': {'object_name': 'Naslov'},
'hisna_stevilka': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kraj': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'posta': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'ulica': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'infosys.ocena': {
'Meta': {'ordering': "('datum_vnosa',)", 'object_name': 'Ocena'},
'datum_pridobitve': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'datum_spremembe': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'datum_vnosa': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'dijak': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['infosys.Dijak']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ocena': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'ocena_stevilka': ('django.db.models.fields.IntegerField', [], {}),
'ocenjevalno_obdobje': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['infosys.OcenjevalnoObdobje']"}),
'opomba': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'poucuje': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['infosys.Poucuje']"}),
'zakljucena': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'infosys.ocenjevalnoobdobje': {
'Meta': {'ordering': "('zacetek',)", 'object_name': 'OcenjevalnoObdobje'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ime': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'konec': ('django.db.models.fields.DateField', [], {}),
'solsko_leto': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['infosys.SolskoLeto']"}),
'zacetek': ('django.db.models.fields.DateField', [], {})
},
'infosys.poucuje': {
'Meta': {'object_name': 'Poucuje'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'predmet': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['infosys.Predmet']"}),
'profesor': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['infosys.Profesor']"}),
'razred': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['infosys.Razred']"})
},
'infosys.predmet': {
'Meta': {'ordering': "('ime',)", 'object_name': 'Predmet'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ime': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'predmet': ('django.db.models.fields.CharField', [], {'max_length': '5'})
},
'infosys.profesor': {
'Meta': {'ordering': "('uporabnik__last_name', 'uporabnik__first_name')", 'object_name': 'Profesor'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'uporabnik': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'infosys.razred': {
'Meta': {'ordering': "('ime',)", 'object_name': 'Razred'},
'dijaki': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['infosys.Dijak']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ime': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'predmeti': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'predmet_razredi'", 'blank': 'True', 'through': "orm['infosys.Poucuje']", 'to': "orm['infosys.Predmet']"}),
'profesorji': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'profesor_razredi'", 'blank': 'True', 'through': "orm['infosys.Poucuje']", 'to': "orm['infosys.Profesor']"}),
'razrednik': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['infosys.Profesor']"}),
'smer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['infosys.Smer']"}),
'solsko_leto': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['infosys.SolskoLeto']"})
},
'infosys.smer': {
'Meta': {'ordering': "('smer',)", 'object_name': 'Smer'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'smer': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'infosys.solskoleto': {
'Meta': {'ordering': "('zacetno_leto',)", 'object_name': 'SolskoLeto'},
'aktivno': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'koncno_leto': ('django.db.models.fields.PositiveIntegerField', [], {}),
'zacetno_leto': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'infosys.stars': {
'Meta': {'ordering': "('uporabnik__last_name', 'uporabnik__first_name')", 'object_name': 'Stars'},
'domaci_telefon': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mobitel': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'prebivalisce': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['infosys.Naslov']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'sluzbeni_telefon': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'uporabnik': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
}
}
complete_apps = ['infosys']
| bancek/egradebook | src/apps/infosys/migrations/0007_auto__add_dogodek__chg_field_ocena_datum_pridobitve.py | Python | gpl-3.0 | 12,753 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for ListGameServerClusters
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-game-servers
# [START gameservices_v1beta_generated_GameServerClustersService_ListGameServerClusters_async]
from google.cloud import gaming_v1beta
async def sample_list_game_server_clusters():
# Create a client
client = gaming_v1beta.GameServerClustersServiceAsyncClient()
# Initialize request argument(s)
request = gaming_v1beta.ListGameServerClustersRequest(
parent="parent_value",
)
# Make the request
    page_result = await client.list_game_server_clusters(request=request)
# Handle the response
async for response in page_result:
print(response)
# [END gameservices_v1beta_generated_GameServerClustersService_ListGameServerClusters_async]
| googleapis/python-game-servers | samples/generated_samples/gameservices_v1beta_generated_game_server_clusters_service_list_game_server_clusters_async.py | Python | apache-2.0 | 1,629 |
class Hack:
    # Minimal placeholder object: each method just logs that it is not
    # implemented.
    def push_selection(self, a):
        print("push_selection not implemented")
def get_jump_history(a):
    print("get_jump_history not implemented")
    return Hack()
| farhaanbukhsh/lime | packages/Default/history_list.py | Python | bsd-2-clause | 198 |
# PyAutoGUI: Cross-platform GUI automation for human beings.
# BSD license
# Al Sweigart [email protected] (Send me feedback & suggestions!)
"""
IMPORTANT NOTE!
To use this module on Mac OS X, you need the PyObjC module installed.
For Python 3, run:
sudo pip3 install pyobjc-core
sudo pip3 install pyobjc
For Python 2, run:
sudo pip install pyobjc-core
sudo pip install pyobjc
(There's some bug with their installer, so install pyobjc-core first or else
the install takes forever.)
To use this module on Linux, you need Xlib module installed.
For Python 3, run:
sudo pip3 install python3-Xlib
For Python 2, run:
sudo pip install Xlib
To use this module on Windows, you do not need anything else.
You will need PIL/Pillow to use the screenshot features.
"""
from __future__ import absolute_import, division, print_function
__version__ = '0.9.33'
import collections
import sys
import time
import pyscreeze  # provides screenshot(); used by displayMousePosition() below (requires Pillow)
KEY_NAMES = ['\t', '\n', '\r', ' ', '!', '"', '#', '$', '%', '&', "'", '(',
')', '*', '+', ',', '-', '.', '/', '0', '1', '2', '3', '4', '5', '6', '7',
'8', '9', ':', ';', '<', '=', '>', '?', '@', '[', '\\', ']', '^', '_', '`',
'a', 'b', 'c', 'd', 'e','f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o',
'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '{', '|', '}', '~',
'accept', 'add', 'alt', 'altleft', 'altright', 'apps', 'backspace',
'browserback', 'browserfavorites', 'browserforward', 'browserhome',
'browserrefresh', 'browsersearch', 'browserstop', 'capslock', 'clear',
'convert', 'ctrl', 'ctrlleft', 'ctrlright', 'decimal', 'del', 'delete',
'divide', 'down', 'end', 'enter', 'esc', 'escape', 'execute', 'f1', 'f10',
'f11', 'f12', 'f13', 'f14', 'f15', 'f16', 'f17', 'f18', 'f19', 'f2', 'f20',
'f21', 'f22', 'f23', 'f24', 'f3', 'f4', 'f5', 'f6', 'f7', 'f8', 'f9',
'final', 'fn', 'hanguel', 'hangul', 'hanja', 'help', 'home', 'insert', 'junja',
'kana', 'kanji', 'launchapp1', 'launchapp2', 'launchmail',
'launchmediaselect', 'left', 'modechange', 'multiply', 'nexttrack',
'nonconvert', 'num0', 'num1', 'num2', 'num3', 'num4', 'num5', 'num6',
'num7', 'num8', 'num9', 'numlock', 'pagedown', 'pageup', 'pause', 'pgdn',
'pgup', 'playpause', 'prevtrack', 'print', 'printscreen', 'prntscrn',
'prtsc', 'prtscr', 'return', 'right', 'scrolllock', 'select', 'separator',
'shift', 'shiftleft', 'shiftright', 'sleep', 'stop', 'subtract', 'tab',
'up', 'volumedown', 'volumemute', 'volumeup', 'win', 'winleft', 'winright', 'yen',
'command', 'option', 'optionleft', 'optionright']
KEYBOARD_KEYS = KEY_NAMES # keeping old KEYBOARD_KEYS for backwards compatibility
def isShiftCharacter(character):
"""Returns True if the key character is uppercase or shifted."""
return character.isupper() or character in '~!@#$%^&*()_+{}|:"<>?'
# The platformModule is where we reference the platform-specific functions.
if sys.platform.startswith('java'):
#from . import _pyautogui_java as platformModule
raise NotImplementedError('Jython is not yet supported by PyAutoGUI.')
elif sys.platform == 'darwin':
from . import _pyautogui_osx as platformModule
elif sys.platform == 'win32':
from . import _pyautogui_win as platformModule
else:
from . import _pyautogui_x11 as platformModule
# TODO: Having module-wide user-writable global variables is bad. It makes
# restructuring the code very difficult. For instance, what if we decide to
# move the mouse-related functions to a separate file (a submodule)? How that
# file will access this module vars? It will probably lead to a circular
# import.
# In seconds. Any duration less than this is rounded to 0.0 to instantly move
# the mouse.
MINIMUM_DURATION = 0.1
# If sleep_amount is too short, time.sleep() will be a no-op and the mouse
# cursor moves there instantly.
# TODO: This value should vary with the platform. http://stackoverflow.com/q/1133857
MINIMUM_SLEEP = 0.05
PAUSE = 0.1 # The number of seconds to pause after EVERY public function call. Useful for debugging.
FAILSAFE = True
# General Functions
# =================
def getPointOnLine(x1, y1, x2, y2, n):
"""Returns the (x, y) tuple of the point that has progressed a proportion
n along the line defined by the two x, y coordinates.
Copied from pytweening module.
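    Example:
        >>> getPointOnLine(0, 0, 10, 10, 0.5)
        (5.0, 5.0)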
"""
x = ((x2 - x1) * n) + x1
y = ((y2 - y1) * n) + y1
return (x, y)
def linear(n):
"""Trivial linear tweening function.
Copied from pytweening module.
"""
if not 0.0 <= n <= 1.0:
raise ValueError('Argument must be between 0.0 and 1.0.')
return n
def _autoPause(pause, _pause):
if _pause:
if pause is not None:
time.sleep(pause)
elif PAUSE != 0:
time.sleep(PAUSE)
def _unpackXY(x, y):
"""If x is a sequence and y is None, returns x[0], y[0]. Else, returns x, y.
On functions that receive a pair of x,y coordinates, they can be passed as
separate arguments, or as a single two-element sequence.
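    Example:
        >>> _unpackXY((10, 20), None)
        (10, 20)
        >>> _unpackXY(10, 20)
        (10, 20)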
"""
if isinstance(x, collections.Sequence):
if len(x) == 2:
if y is None:
x, y = x
else:
raise ValueError('When passing a sequence at the x argument, the y argument must not be passed (received {0}).'.format(repr(y)))
else:
raise ValueError('The supplied sequence must have exactly 2 elements ({0} were received).'.format(len(x)))
else:
pass
return x, y
def position(x=None, y=None):
"""Returns the current xy coordinates of the mouse cursor as a two-integer
tuple.
Args:
x (int, None, optional) - If not None, this argument overrides the x in
the return value.
y (int, None, optional) - If not None, this argument overrides the y in
the return value.
Returns:
(x, y) tuple of the current xy coordinates of the mouse cursor.
"""
posx, posy = platformModule._position()
posx = int(posx)
posy = int(posy)
if x is not None:
posx = int(x)
if y is not None:
posy = int(y)
return posx, posy
def size():
"""Returns the width and height of the screen as a two-integer tuple.
Returns:
(width, height) tuple of the screen size, in pixels.
"""
return platformModule._size()
def onScreen(x, y=None):
"""Returns whether the given xy coordinates are on the screen or not.
Args:
Either the arguments are two separate values, first arg for x and second
for y, or there is a single argument of a sequence with two values, the
first x and the second y.
Example: onScreen(x, y) or onScreen([x, y])
Returns:
bool: True if the xy coordinates are on the screen at its current
resolution, otherwise False.
"""
x, y = _unpackXY(x, y)
x = int(x)
y = int(y)
width, height = platformModule._size()
return 0 <= x < width and 0 <= y < height
# Mouse Functions
# ===============
def mouseDown(x=None, y=None, button='left', duration=0.0, tween=linear, pause=None, _pause=True):
"""Performs pressing a mouse button down (but not up).
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
x (int, float, None, tuple, optional): The x position on the screen where the
mouse down happens. None by default. If tuple, this is used for x and y.
y (int, float, None, optional): The y position on the screen where the
mouse down happens. None by default.
button (str, int, optional): The mouse button pressed down. Must be one of
'left', 'middle', 'right' (or 1, 2, or 3) respectively. 'left' by
default.
Returns:
None
Raises:
ValueError: If button is not one of 'left', 'middle', 'right', 1, 2, or 3
"""
if button not in ('left', 'middle', 'right', 1, 2, 3):
raise ValueError("button argument must be one of ('left', 'middle', 'right', 1, 2, 3), not %s" % button)
_failSafeCheck()
x, y = _unpackXY(x, y)
_mouseMoveDrag('move', x, y, 0, 0, duration=0, tween=None)
x, y = platformModule._position() # TODO - this isn't right. We need to check the params.
if button == 1 or str(button).lower() == 'left':
platformModule._mouseDown(x, y, 'left')
elif button == 2 or str(button).lower() == 'middle':
platformModule._mouseDown(x, y, 'middle')
elif button == 3 or str(button).lower() == 'right':
platformModule._mouseDown(x, y, 'right')
_autoPause(pause, _pause)
def mouseUp(x=None, y=None, button='left', duration=0.0, tween=linear, pause=None, _pause=True):
"""Performs releasing a mouse button up (but not down beforehand).
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
x (int, float, None, tuple, optional): The x position on the screen where the
mouse up happens. None by default. If tuple, this is used for x and y.
y (int, float, None, optional): The y position on the screen where the
mouse up happens. None by default.
button (str, int, optional): The mouse button released. Must be one of
'left', 'middle', 'right' (or 1, 2, or 3) respectively. 'left' by
default.
Returns:
None
Raises:
ValueError: If button is not one of 'left', 'middle', 'right', 1, 2, or 3
"""
if button not in ('left', 'middle', 'right', 1, 2, 3):
raise ValueError("button argument must be one of ('left', 'middle', 'right', 1, 2, 3), not %s" % button)
_failSafeCheck()
x, y = _unpackXY(x, y)
_mouseMoveDrag('move', x, y, 0, 0, duration=0, tween=None)
x, y = platformModule._position()
if button == 1 or str(button).lower() == 'left':
platformModule._mouseUp(x, y, 'left')
elif button == 2 or str(button).lower() == 'middle':
platformModule._mouseUp(x, y, 'middle')
elif button == 3 or str(button).lower() == 'right':
platformModule._mouseUp(x, y, 'right')
_autoPause(pause, _pause)
def click(x=None, y=None, clicks=1, interval=0.0, button='left', duration=0.0, tween=linear, pause=None, _pause=True):
"""Performs pressing a mouse button down and then immediately releasing it.
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
x (int, float, None, tuple, optional): The x position on the screen where
the click happens. None by default. If tuple, this is used for x and y.
y (int, float, None, optional): The y position on the screen where the
click happens. None by default.
clicks (int, optional): The number of clicks to perform. 1 by default.
For example, passing 2 would do a doubleclick.
interval (float, optional): The number of seconds in between each click,
if the number of clicks is greater than 1. 0.0 by default, for no
pause in between clicks.
button (str, int, optional): The mouse button clicked. Must be one of
'left', 'middle', 'right' (or 1, 2, or 3) respectively. 'left' by
default.
Returns:
None
Raises:
ValueError: If button is not one of 'left', 'middle', 'right', 1, 2, 3
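    Example (illustrative; moves the real mouse when run):
      click()                           # left-click at the current position
      click(200, 220, clicks=2, interval=0.25, button='right')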
"""
if button not in ('left', 'middle', 'right', 1, 2, 3):
raise ValueError("button argument must be one of ('left', 'middle', 'right', 1, 2, 3)")
_failSafeCheck()
x, y = _unpackXY(x, y)
_mouseMoveDrag('move', x, y, 0, 0, duration=0, tween=None)
x, y = platformModule._position()
for i in range(clicks):
_failSafeCheck()
if button == 1 or str(button).lower() == 'left':
platformModule._click(x, y, 'left')
elif button == 2 or str(button).lower() == 'middle':
platformModule._click(x, y, 'middle')
elif button == 3 or str(button).lower() == 'right':
platformModule._click(x, y, 'right')
else:
# These mouse buttons for hor. and vert. scrolling only apply to x11:
platformModule._click(x, y, button)
time.sleep(interval)
_autoPause(pause, _pause)
def rightClick(x=None, y=None, duration=0.0, tween=linear, pause=None, _pause=True):
"""Performs a right mouse button click.
This is a wrapper function for click('right', x, y).
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
x (int, float, None, tuple, optional): The x position on the screen where the
click happens. None by default. If tuple, this is used for x and y.
y (int, float, None, optional): The y position on the screen where the
click happens. None by default.
Returns:
None
"""
_failSafeCheck()
click(x, y, 1, 0.0, 'right', _pause=False)
_autoPause(pause, _pause)
def middleClick(x=None, y=None, duration=0.0, tween=linear, pause=None, _pause=True):
"""Performs a middle mouse button click.
    This is a wrapper function for click('middle', x, y).
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
x (int, float, None, tuple, optional): The x position on the screen where the
click happens. None by default. If tuple, this is used for x and y.
y (int, float, None, optional): The y position on the screen where the
click happens. None by default.
Returns:
None
"""
_failSafeCheck()
click(x, y, 1, 0.0, 'middle', _pause=False)
_autoPause(pause, _pause)
def doubleClick(x=None, y=None, interval=0.0, button='left', duration=0.0, tween=linear, pause=None, _pause=True):
"""Performs a double click.
This is a wrapper function for click('left', x, y, 2, interval).
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
x (int, float, None, tuple, optional): The x position on the screen where the
click happens. None by default. If tuple, this is used for x and y.
y (int, float, None, optional): The y position on the screen where the
click happens. None by default.
interval (float, optional): The number of seconds in between each click,
if the number of clicks is greater than 1. 0.0 by default, for no
pause in between clicks.
button (str, int, optional): The mouse button clicked. Must be one of
'left', 'middle', 'right' (or 1, 2, or 3) respectively. 'left' by
default.
Returns:
None
Raises:
      ValueError: If button is not one of 'left', 'middle', 'right', 1, 2, or 3
"""
_failSafeCheck()
click(x, y, 2, interval, button, _pause=False)
_autoPause(pause, _pause)
def tripleClick(x=None, y=None, interval=0.0, button='left', duration=0.0, tween=linear, pause=None, _pause=True):
"""Performs a triple click..
This is a wrapper function for click('left', x, y, 3, interval).
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
x (int, float, None, tuple, optional): The x position on the screen where the
click happens. None by default. If tuple, this is used for x and y.
y (int, float, None, optional): The y position on the screen where the
click happens. None by default.
interval (float, optional): The number of seconds in between each click,
if the number of clicks is greater than 1. 0.0 by default, for no
pause in between clicks.
button (str, int, optional): The mouse button clicked. Must be one of
'left', 'middle', 'right' (or 1, 2, or 3) respectively. 'left' by
default.
Returns:
None
Raises:
      ValueError: If button is not one of 'left', 'middle', 'right', 1, 2, or 3
"""
_failSafeCheck()
click(x, y, 3, interval, button, _pause=False)
_autoPause(pause, _pause)
def scroll(clicks, x=None, y=None, pause=None, _pause=True):
"""Performs a scroll of the mouse scroll wheel.
Whether this is a vertical or horizontal scroll depends on the underlying
operating system.
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
clicks (int, float): The amount of scrolling to perform.
x (int, float, None, tuple, optional): The x position on the screen where the
click happens. None by default. If tuple, this is used for x and y.
y (int, float, None, optional): The y position on the screen where the
click happens. None by default.
Returns:
None
"""
_failSafeCheck()
if type(x) in (tuple, list):
x, y = x[0], x[1]
x, y = position(x, y)
platformModule._scroll(clicks, x, y)
_autoPause(pause, _pause)
def hscroll(clicks, x=None, y=None, pause=None, _pause=True):
"""Performs an explicitly horizontal scroll of the mouse scroll wheel,
if this is supported by the operating system. (Currently just Linux.)
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
clicks (int, float): The amount of scrolling to perform.
x (int, float, None, tuple, optional): The x position on the screen where the
click happens. None by default. If tuple, this is used for x and y.
y (int, float, None, optional): The y position on the screen where the
click happens. None by default.
Returns:
None
"""
_failSafeCheck()
if type(x) in (tuple, list):
x, y = x[0], x[1]
x, y = position(x, y)
platformModule._hscroll(clicks, x, y)
_autoPause(pause, _pause)
def vscroll(clicks, x=None, y=None, pause=None, _pause=True):
"""Performs an explicitly vertical scroll of the mouse scroll wheel,
if this is supported by the operating system. (Currently just Linux.)
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
clicks (int, float): The amount of scrolling to perform.
x (int, float, None, tuple, optional): The x position on the screen where the
click happens. None by default. If tuple, this is used for x and y.
y (int, float, None, optional): The y position on the screen where the
click happens. None by default.
Returns:
None
"""
_failSafeCheck()
if type(x) in (tuple, list):
x, y = x[0], x[1]
x, y = position(x, y)
platformModule._vscroll(clicks, x, y)
_autoPause(pause, _pause)
def moveTo(x=None, y=None, duration=0.0, tween=linear, pause=None, _pause=True):
"""Moves the mouse cursor to a point on the screen.
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
x (int, float, None, tuple, optional): The x position on the screen where the
click happens. None by default. If tuple, this is used for x and y.
y (int, float, None, optional): The y position on the screen where the
click happens. None by default.
duration (float, optional): The amount of time it takes to move the mouse
cursor to the xy coordinates. If 0, then the mouse cursor is moved
instantaneously. 0.0 by default.
tween (func, optional): The tweening function used if the duration is not
0. A linear tween is used by default. See the tweens.py file for
details.
Returns:
None
"""
x, y = _unpackXY(x, y)
_failSafeCheck()
_mouseMoveDrag('move', x, y, 0, 0, duration, tween)
_autoPause(pause, _pause)
def moveRel(xOffset=None, yOffset=None, duration=0.0, tween=linear, pause=None, _pause=True):
"""Moves the mouse cursor to a point on the screen, relative to its current
position.
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
x (int, float, None, tuple, optional): How far left (for negative values) or
right (for positive values) to move the cursor. 0 by default. If tuple, this is used for x and y.
y (int, float, None, optional): How far up (for negative values) or
down (for positive values) to move the cursor. 0 by default.
duration (float, optional): The amount of time it takes to move the mouse
cursor to the new xy coordinates. If 0, then the mouse cursor is moved
instantaneously. 0.0 by default.
tween (func, optional): The tweening function used if the duration is not
0. A linear tween is used by default. See the tweens.py file for
details.
Returns:
None
"""
xOffset, yOffset = _unpackXY(xOffset, yOffset)
_failSafeCheck()
_mouseMoveDrag('move', None, None, xOffset, yOffset, duration, tween)
_autoPause(pause, _pause)
def dragTo(x=None, y=None, duration=0.0, tween=linear, button='left', pause=None, _pause=True):
"""Performs a mouse drag (mouse movement while a button is held down) to a
point on the screen.
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
x (int, float, None, tuple, optional): How far left (for negative values) or
right (for positive values) to move the cursor. 0 by default. If tuple, this is used for x and y.
y (int, float, None, optional): How far up (for negative values) or
down (for positive values) to move the cursor. 0 by default.
duration (float, optional): The amount of time it takes to move the mouse
cursor to the new xy coordinates. If 0, then the mouse cursor is moved
instantaneously. 0.0 by default.
tween (func, optional): The tweening function used if the duration is not
0. A linear tween is used by default. See the tweens.py file for
details.
button (str, int, optional): The mouse button clicked. Must be one of
'left', 'middle', 'right' (or 1, 2, or 3) respectively. 'left' by
default.
Returns:
None
"""
_failSafeCheck()
if type(x) in (tuple, list):
x, y = x[0], x[1]
mouseDown(button=button, _pause=False)
_mouseMoveDrag('drag', x, y, 0, 0, duration, tween, button)
mouseUp(button=button, _pause=False)
_autoPause(pause, _pause)
def dragRel(xOffset=0, yOffset=0, duration=0.0, tween=linear, button='left', pause=None, _pause=True):
"""Performs a mouse drag (mouse movement while a button is held down) to a
point on the screen, relative to its current position.
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
x (int, float, None, tuple, optional): How far left (for negative values) or
right (for positive values) to move the cursor. 0 by default. If tuple, this is used for xOffset and yOffset.
y (int, float, None, optional): How far up (for negative values) or
down (for positive values) to move the cursor. 0 by default.
duration (float, optional): The amount of time it takes to move the mouse
cursor to the new xy coordinates. If 0, then the mouse cursor is moved
instantaneously. 0.0 by default.
tween (func, optional): The tweening function used if the duration is not
0. A linear tween is used by default. See the tweens.py file for
details.
button (str, int, optional): The mouse button clicked. Must be one of
'left', 'middle', 'right' (or 1, 2, or 3) respectively. 'left' by
default.
Returns:
None
"""
if xOffset is None:
xOffset = 0
if yOffset is None:
yOffset = 0
if type(xOffset) in (tuple, list):
xOffset, yOffset = xOffset[0], xOffset[1]
if xOffset == 0 and yOffset == 0:
return # no-op case
_failSafeCheck()
mousex, mousey = platformModule._position()
mouseDown(button=button, _pause=False)
_mouseMoveDrag('drag', mousex, mousey, xOffset, yOffset, duration, tween, button)
mouseUp(button=button, _pause=False)
_autoPause(pause, _pause)
def _mouseMoveDrag(moveOrDrag, x, y, xOffset, yOffset, duration, tween, button=None):
"""Handles the actual move or drag event, since different platforms
implement them differently.
On Windows & Linux, a drag is a normal mouse move while a mouse button is
held down. On OS X, a distinct "drag" event must be used instead.
The code for moving and dragging the mouse is similar, so this function
handles both. Users should call the moveTo() or dragTo() functions instead
of calling _mouseMoveDrag().
Args:
moveOrDrag (str): Either 'move' or 'drag', for the type of action this is.
x (int, float, None, optional): How far left (for negative values) or
right (for positive values) to move the cursor. 0 by default.
y (int, float, None, optional): How far up (for negative values) or
down (for positive values) to move the cursor. 0 by default.
xOffset (int, float, None, optional): How far left (for negative values) or
right (for positive values) to move the cursor. 0 by default.
yOffset (int, float, None, optional): How far up (for negative values) or
down (for positive values) to move the cursor. 0 by default.
duration (float, optional): The amount of time it takes to move the mouse
cursor to the new xy coordinates. If 0, then the mouse cursor is moved
instantaneously. 0.0 by default.
tween (func, optional): The tweening function used if the duration is not
0. A linear tween is used by default. See the tweens.py file for
details.
button (str, int, optional): The mouse button clicked. Must be one of
'left', 'middle', 'right' (or 1, 2, or 3) respectively. 'left' by
default.
Returns:
None
"""
# The move and drag code is similar, but OS X requires a special drag event instead of just a move event when dragging.
# See https://stackoverflow.com/a/2696107/1893164
assert moveOrDrag in ('move', 'drag'), "moveOrDrag must be in ('move', 'drag'), not %s" % (moveOrDrag)
if sys.platform != 'darwin':
moveOrDrag = 'move' # Only OS X needs the drag event specifically.
xOffset = int(xOffset) if xOffset is not None else 0
yOffset = int(yOffset) if yOffset is not None else 0
if x is None and y is None and xOffset == 0 and yOffset == 0:
return # Special case for no mouse movement at all.
startx, starty = position()
x = int(x) if x is not None else startx
y = int(y) if y is not None else starty
# x, y, xOffset, yOffset are now int.
x += xOffset
y += yOffset
width, height = size()
# Make sure x and y are within the screen bounds.
x = max(0, min(x, width - 1))
y = max(0, min(y, height - 1))
# If the duration is small enough, just move the cursor there instantly.
steps = [(x, y)]
if duration > MINIMUM_DURATION:
# Non-instant moving/dragging involves tweening:
num_steps = max(width, height)
sleep_amount = duration / num_steps
if sleep_amount < MINIMUM_SLEEP:
num_steps = int(duration / MINIMUM_SLEEP)
sleep_amount = duration / num_steps
steps = [
getPointOnLine(startx, starty, x, y, tween(n / num_steps))
for n in range(num_steps)
]
# Making sure the last position is the actual destination.
steps.append((x, y))
for tweenX, tweenY in steps:
if len(steps) > 1:
# A single step does not require tweening.
time.sleep(sleep_amount)
_failSafeCheck()
tweenX = int(round(tweenX))
tweenY = int(round(tweenY))
if moveOrDrag == 'move':
platformModule._moveTo(tweenX, tweenY)
elif moveOrDrag == 'drag':
platformModule._dragTo(tweenX, tweenY, button)
else:
raise NotImplementedError('Unknown value of moveOrDrag: {0}'.format(moveOrDrag))
_failSafeCheck()
# Keyboard Functions
# ==================
def isValidKey(key):
"""Returns a Boolean value if the given key is a valid value to pass to
PyAutoGUI's keyboard-related functions for the current platform.
This function is here because passing an invalid value to the PyAutoGUI
keyboard functions currently is a no-op that does not raise an exception.
Some keys are only valid on some platforms. For example, while 'esc' is
valid for the Escape key on all platforms, 'browserback' is only used on
Windows operating systems.
Args:
key (str): The key value.
Returns:
bool: True if key is a valid value, False if not.
"""
    return platformModule.keyboardMapping.get(key, None) is not None
def keyDown(key, pause=None, _pause=True):
"""Performs a keyboard key press without the release. This will put that
key in a held down state.
    NOTE: For some reason, this does not seem to cause key repeats as would
    happen if a keyboard key were held down in a text field.
Args:
key (str): The key to be pressed down. The valid names are listed in
KEYBOARD_KEYS.
Returns:
None
"""
if len(key) > 1:
key = key.lower()
_failSafeCheck()
platformModule._keyDown(key)
_autoPause(pause, _pause)
def keyUp(key, pause=None, _pause=True):
"""Performs a keyboard key release (without the press down beforehand).
Args:
key (str): The key to be released up. The valid names are listed in
KEYBOARD_KEYS.
Returns:
None
"""
if len(key) > 1:
key = key.lower()
_failSafeCheck()
platformModule._keyUp(key)
_autoPause(pause, _pause)
def press(keys, presses=1, interval=0.0, pause=None, _pause=True):
"""Performs a keyboard key press down, followed by a release.
Args:
      keys (str, list): The key(s) to be pressed. The valid names are listed in
        KEYBOARD_KEYS. Can also be a list of such strings.
      presses (integer, optional): The number of times to repeat the press.
        1 by default, for just one press.
      interval (float, optional): How many seconds between each press.
        0.0 by default, for no pause between presses.
      pause (float, optional): How many seconds to pause after the whole
        function finishes. None by default, for no pause at the end.
Returns:
None
"""
    if type(keys) == str:
        if len(keys) > 1:
            keys = keys.lower()
        keys = [keys] # put string in a list
    else:
        lowerKeys = []
        for s in keys:
            if len(s) > 1:
                lowerKeys.append(s.lower())
            else:
                lowerKeys.append(s)
        keys = lowerKeys # use the lowercased names; the loop below reads `keys`
interval = float(interval)
for i in range(presses):
for k in keys:
_failSafeCheck()
platformModule._keyDown(k)
platformModule._keyUp(k)
time.sleep(interval)
_autoPause(pause, _pause)
def typewrite(message, interval=0.0, pause=None, _pause=True):
"""Performs a keyboard key press down, followed by a release, for each of
the characters in message.
The message argument can also be list of strings, in which case any valid
keyboard name can be used.
Since this performs a sequence of keyboard presses and does not hold down
keys, it cannot be used to perform keyboard shortcuts. Use the hotkey()
function for that.
Args:
message (str, list): If a string, then the characters to be pressed. If a
list, then the key names of the keys to press in order. The valid names
are listed in KEYBOARD_KEYS.
interval (float, optional): The number of seconds in between each press.
0.0 by default, for no pause in between presses.
Returns:
None
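    Example (illustrative; types into the focused window when run):
      typewrite('Hello world!', interval=0.05)
      typewrite(['a', 'b', 'left', 'left', 'X', 'Y'])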
"""
interval = float(interval)
_failSafeCheck()
for c in message:
if len(c) > 1:
c = c.lower()
press(c, _pause=False)
time.sleep(interval)
_failSafeCheck()
_autoPause(pause, _pause)
def hotkey(*args, **kwargs):
"""Performs key down presses on the arguments passed in order, then performs
key releases in reverse order.
The effect is that calling hotkey('ctrl', 'shift', 'c') would perform a
"Ctrl-Shift-C" hotkey/keyboard shortcut press.
Args:
key(s) (str): The series of keys to press, in order. This can also be a
list of key strings to press.
interval (float, optional): The number of seconds in between each press.
0.0 by default, for no pause in between presses.
Returns:
None
"""
interval = float(kwargs.get('interval', 0.0))
_failSafeCheck()
for c in args:
if len(c) > 1:
c = c.lower()
platformModule._keyDown(c)
time.sleep(interval)
for c in reversed(args):
if len(c) > 1:
c = c.lower()
platformModule._keyUp(c)
time.sleep(interval)
_autoPause(kwargs.get('pause', None), kwargs.get('_pause', True))
class FailSafeException(Exception):
pass
def _failSafeCheck():
if FAILSAFE and position() == (0, 0):
raise FailSafeException('PyAutoGUI fail-safe triggered from mouse moving to upper-left corner. To disable this fail-safe, set pyautogui.FAILSAFE to False.')
def displayMousePosition(xOffset=0, yOffset=0):
"""This function is meant to be run from the command line. It will
automatically display the location and RGB of the mouse cursor."""
print('Press Ctrl-C to quit.')
if xOffset != 0 or yOffset != 0:
print('xOffset: %s yOffset: %s' % (xOffset, yOffset))
resolution = size()
try:
while True:
# Get and print the mouse coordinates.
x, y = position()
positionStr = 'X: ' + str(x - xOffset).rjust(4) + ' Y: ' + str(y - yOffset).rjust(4)
if (x - xOffset) < 0 or (y - yOffset) < 0 or (x - xOffset) >= resolution[0] or (y - yOffset) >= resolution[1]:
pixelColor = ('NaN', 'NaN', 'NaN')
else:
pixelColor = pyscreeze.screenshot().getpixel((x, y))
positionStr += ' RGB: (' + str(pixelColor[0]).rjust(3)
positionStr += ', ' + str(pixelColor[1]).rjust(3)
positionStr += ', ' + str(pixelColor[2]).rjust(3) + ')'
sys.stdout.write(positionStr)
sys.stdout.write('\b' * len(positionStr))
sys.stdout.flush()
except KeyboardInterrupt:
sys.stdout.write('\n')
sys.stdout.flush()
| osspeak/osspeak | osspeak/pyautogui/__init__.py | Python | mit | 36,647 |
from datetime import datetime, timedelta
from freezegun import freeze_time
from mock import MagicMock
import pytest
from pytz import utc
from scanomatic.data.scanjobstore import ScanJobStore
from scanomatic.models.scanjob import ScanJob
from scanomatic.scanning.terminate_scanjob import (
TerminateScanJobError, UnknownScanjobError, terminate_scanjob
)
def make_scanjob(
start_time=datetime(
1985, 10, 26, 1, 20, tzinfo=utc
),
termination_time=None,
duration=timedelta(minutes=20),
):
return ScanJob(
duration=duration,
identifier='scjb000',
interval=timedelta(minutes=5),
name='Test Scan Job',
scanner_id='scnr000',
start_time=start_time,
termination_time=termination_time,
)
class TestTerminateScanjob:
def test_unknown_scanjob(self):
store = MagicMock(ScanJobStore)
store.get_scanjob_by_id.side_effect = LookupError
with pytest.raises(UnknownScanjobError):
terminate_scanjob(store, 'unknown', 'The Message')
def test_not_started(self):
store = MagicMock(ScanJobStore)
store.get_scanjob_by_id.return_value = make_scanjob(start_time=None)
with pytest.raises(TerminateScanJobError):
terminate_scanjob(store, 'scjb000', 'The Message')
def test_already_terminated(self):
store = MagicMock(ScanJobStore)
store.get_scanjob_by_id.return_value = make_scanjob(
start_time=datetime(
1985, 10, 26, 1, 20, tzinfo=utc
),
termination_time=datetime(
1985, 10, 26, 1, 21, tzinfo=utc
)
)
with pytest.raises(TerminateScanJobError):
terminate_scanjob(store, 'scjb000', 'The Message')
def test_already_ended(self):
store = MagicMock(ScanJobStore)
store.get_scanjob_by_id.return_value = make_scanjob(
start_time=datetime(
1985, 10, 26, 1, 20, tzinfo=utc
),
termination_time=None,
)
with pytest.raises(TerminateScanJobError):
terminate_scanjob(store, 'scjb000', 'The Message')
def test_running_scanjob(self):
store = MagicMock(ScanJobStore)
store.get_scanjob_by_id.return_value = make_scanjob(
start_time=datetime(
1985, 10, 26, 1, 20, tzinfo=utc
),
duration=timedelta(minutes=20),
)
now = datetime(1985, 10, 26, 1, 21, tzinfo=utc)
with freeze_time(now):
terminate_scanjob(store, 'scjb000', 'The Message')
store.terminate_scanjob.assert_called_with(
'scjb000', now, 'The Message'
)
| Scan-o-Matic/scanomatic | tests/unit/scanning/test_terminate_scanjob.py | Python | gpl-3.0 | 2,729 |
"""Foolscap"""
from ._version import get_versions
__version__ = str(get_versions()['version'])
del get_versions
| warner/foolscap | src/foolscap/__init__.py | Python | mit | 113 |
#
# Copyright (C) 2012-2014 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import codecs
from collections import deque
import contextlib
import csv
from glob import iglob as std_iglob
import io
import json
import logging
import os
import py_compile
import re
import shutil
import socket
import ssl
import subprocess
import sys
import tarfile
import tempfile
try:
import threading
except ImportError:
import dummy_threading as threading
import time
from . import DistlibException
from .compat import (string_types, text_type, shutil, raw_input, StringIO,
cache_from_source, urlopen, httplib, xmlrpclib, splittype,
HTTPHandler, HTTPSHandler as BaseHTTPSHandler,
BaseConfigurator, valid_ident, Container, configparser,
URLError, match_hostname, CertificateError, ZipFile)
logger = logging.getLogger(__name__)
#
# Requirement parsing code for name + optional constraints + optional extras
#
# e.g. 'foo [bar, baz] (>= 1.2, < 2.0)' -- extras, if given, precede constraints
#
# The regex can seem a bit hairy, so we build it up out of smaller pieces
# which are manageable.
#
COMMA = r'\s*,\s*'
COMMA_RE = re.compile(COMMA)
IDENT = r'(\w|[.-])+'
EXTRA_IDENT = r'(\*|:(\*|\w+):|' + IDENT + ')'
VERSPEC = IDENT + r'\*?'
RELOP = '([<>=!~]=)|[<>]'
#
# The first relop is optional - if absent, will be taken as '~='
#
BARE_CONSTRAINTS = ('(' + RELOP + r')?\s*(' + VERSPEC + ')(' + COMMA + '(' +
RELOP + r')\s*(' + VERSPEC + '))*')
DIRECT_REF = '(from\s+(?P<diref>.*))'
#
# Either the bare constraints or the bare constraints in parentheses
#
CONSTRAINTS = (r'\(\s*(?P<c1>' + BARE_CONSTRAINTS + '|' + DIRECT_REF +
r')\s*\)|(?P<c2>' + BARE_CONSTRAINTS + '\s*)')
EXTRA_LIST = EXTRA_IDENT + '(' + COMMA + EXTRA_IDENT + ')*'
EXTRAS = r'\[\s*(?P<ex>' + EXTRA_LIST + r')?\s*\]'
REQUIREMENT = ('(?P<dn>' + IDENT + r')\s*(' + EXTRAS + r'\s*)?(\s*' +
CONSTRAINTS + ')?$')
REQUIREMENT_RE = re.compile(REQUIREMENT)
#
# Used to scan through the constraints
#
RELOP_IDENT = '(?P<op>' + RELOP + r')\s*(?P<vn>' + VERSPEC + ')'
RELOP_IDENT_RE = re.compile(RELOP_IDENT)
def parse_requirement(s):
def get_constraint(m):
d = m.groupdict()
return d['op'], d['vn']
result = None
m = REQUIREMENT_RE.match(s)
if m:
d = m.groupdict()
name = d['dn']
cons = d['c1'] or d['c2']
if not d['diref']:
url = None
else:
# direct reference
cons = None
url = d['diref'].strip()
if not cons:
cons = None
constr = ''
rs = d['dn']
else:
if cons[0] not in '<>!=':
cons = '~=' + cons
iterator = RELOP_IDENT_RE.finditer(cons)
cons = [get_constraint(m) for m in iterator]
rs = '%s (%s)' % (name, ', '.join(['%s %s' % con for con in cons]))
if not d['ex']:
extras = None
else:
extras = COMMA_RE.split(d['ex'])
result = Container(name=name, constraints=cons, extras=extras,
requirement=rs, source=s, url=url)
return result
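# Example (illustrative): per the grammar above, extras come before any
# constraints, so parse_requirement('foo [bar, baz] (>= 1.2, < 2.0)') yields a
# Container with name='foo', extras=['bar', 'baz'],
# constraints=[('>=', '1.2'), ('<', '2.0')] and requirement='foo (>= 1.2, < 2.0)'.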
def get_resources_dests(resources_root, rules):
"""Find destinations for resources files"""
def get_rel_path(base, path):
# normalizes and returns a lstripped-/-separated path
base = base.replace(os.path.sep, '/')
path = path.replace(os.path.sep, '/')
assert path.startswith(base)
return path[len(base):].lstrip('/')
destinations = {}
for base, suffix, dest in rules:
prefix = os.path.join(resources_root, base)
for abs_base in iglob(prefix):
abs_glob = os.path.join(abs_base, suffix)
for abs_path in iglob(abs_glob):
resource_file = get_rel_path(resources_root, abs_path)
if dest is None: # remove the entry if it was here
destinations.pop(resource_file, None)
else:
rel_path = get_rel_path(abs_base, abs_path)
rel_dest = dest.replace(os.path.sep, '/').rstrip('/')
destinations[resource_file] = rel_dest + '/' + rel_path
return destinations
def in_venv():
if hasattr(sys, 'real_prefix'):
# virtualenv venvs
result = True
else:
# PEP 405 venvs
result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix)
return result
def get_executable():
# The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as
# changes to the stub launcher mean that sys.executable always points
# to the stub on OS X
# if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__'
# in os.environ):
# result = os.environ['__PYVENV_LAUNCHER__']
# else:
# result = sys.executable
# return result
return sys.executable
def proceed(prompt, allowed_chars, error_prompt=None, default=None):
p = prompt
while True:
s = raw_input(p)
p = prompt
if not s and default:
s = default
if s:
c = s[0].lower()
if c in allowed_chars:
break
if error_prompt:
p = '%c: %s\n%s' % (c, error_prompt, prompt)
return c
def extract_by_key(d, keys):
if isinstance(keys, string_types):
keys = keys.split()
result = {}
for key in keys:
if key in d:
result[key] = d[key]
return result
def read_exports(stream):
if sys.version_info[0] >= 3:
# needs to be a text stream
stream = codecs.getreader('utf-8')(stream)
# Try to load as JSON, falling back on legacy format
data = stream.read()
stream = StringIO(data)
try:
data = json.load(stream)
result = data['extensions']['python.exports']['exports']
for group, entries in result.items():
for k, v in entries.items():
s = '%s = %s' % (k, v)
entry = get_export_entry(s)
assert entry is not None
entries[k] = entry
return result
except Exception:
stream.seek(0, 0)
cp = configparser.ConfigParser()
if hasattr(cp, 'read_file'):
cp.read_file(stream)
else:
cp.readfp(stream)
result = {}
for key in cp.sections():
result[key] = entries = {}
for name, value in cp.items(key):
s = '%s = %s' % (name, value)
entry = get_export_entry(s)
assert entry is not None
#entry.dist = self
entries[name] = entry
return result
def write_exports(exports, stream):
if sys.version_info[0] >= 3:
# needs to be a text stream
stream = codecs.getwriter('utf-8')(stream)
cp = configparser.ConfigParser()
for k, v in exports.items():
# TODO check k, v for valid values
cp.add_section(k)
for entry in v.values():
if entry.suffix is None:
s = entry.prefix
else:
s = '%s:%s' % (entry.prefix, entry.suffix)
if entry.flags:
s = '%s [%s]' % (s, ', '.join(entry.flags))
cp.set(k, entry.name, s)
cp.write(stream)
@contextlib.contextmanager
def tempdir():
td = tempfile.mkdtemp()
try:
yield td
finally:
shutil.rmtree(td)
@contextlib.contextmanager
def chdir(d):
cwd = os.getcwd()
try:
os.chdir(d)
yield
finally:
os.chdir(cwd)
@contextlib.contextmanager
def socket_timeout(seconds=15):
cto = socket.getdefaulttimeout()
try:
socket.setdefaulttimeout(seconds)
yield
finally:
socket.setdefaulttimeout(cto)
class cached_property(object):
def __init__(self, func):
self.func = func
#for attr in ('__name__', '__module__', '__doc__'):
# setattr(self, attr, getattr(func, attr, None))
def __get__(self, obj, cls=None):
if obj is None:
return self
value = self.func(obj)
object.__setattr__(obj, self.func.__name__, value)
#obj.__dict__[self.func.__name__] = value = self.func(obj)
return value
def convert_path(pathname):
"""Return 'pathname' as a name that will work on the native filesystem.
The path is split on '/' and put back together again using the current
directory separator. Needed because filenames in the setup script are
always supplied in Unix style, and have to be converted to the local
convention before we can actually use them in the filesystem. Raises
ValueError on non-Unix-ish systems if 'pathname' either starts or
ends with a slash.
"""
if os.sep == '/':
return pathname
if not pathname:
return pathname
if pathname[0] == '/':
raise ValueError("path '%s' cannot be absolute" % pathname)
if pathname[-1] == '/':
raise ValueError("path '%s' cannot end with '/'" % pathname)
paths = pathname.split('/')
while os.curdir in paths:
paths.remove(os.curdir)
if not paths:
return os.curdir
return os.path.join(*paths)
class FileOperator(object):
def __init__(self, dry_run=False):
self.dry_run = dry_run
self.ensured = set()
self._init_record()
def _init_record(self):
self.record = False
self.files_written = set()
self.dirs_created = set()
def record_as_written(self, path):
if self.record:
self.files_written.add(path)
def newer(self, source, target):
"""Tell if the target is newer than the source.
Returns true if 'source' exists and is more recently modified than
'target', or if 'source' exists and 'target' doesn't.
Returns false if both exist and 'target' is the same age or younger
than 'source'. Raise PackagingFileError if 'source' does not exist.
Note that this test is not very accurate: files created in the same
second will have the same "age".
"""
if not os.path.exists(source):
raise DistlibException("file '%r' does not exist" %
os.path.abspath(source))
if not os.path.exists(target):
return True
return os.stat(source).st_mtime > os.stat(target).st_mtime
def copy_file(self, infile, outfile, check=True):
"""Copy a file respecting dry-run and force flags.
"""
self.ensure_dir(os.path.dirname(outfile))
logger.info('Copying %s to %s', infile, outfile)
if not self.dry_run:
msg = None
if check:
if os.path.islink(outfile):
msg = '%s is a symlink' % outfile
elif os.path.exists(outfile) and not os.path.isfile(outfile):
msg = '%s is a non-regular file' % outfile
if msg:
raise ValueError(msg + ' which would be overwritten')
shutil.copyfile(infile, outfile)
self.record_as_written(outfile)
def copy_stream(self, instream, outfile, encoding=None):
assert not os.path.isdir(outfile)
self.ensure_dir(os.path.dirname(outfile))
logger.info('Copying stream %s to %s', instream, outfile)
if not self.dry_run:
if encoding is None:
outstream = open(outfile, 'wb')
else:
outstream = codecs.open(outfile, 'w', encoding=encoding)
try:
shutil.copyfileobj(instream, outstream)
finally:
outstream.close()
self.record_as_written(outfile)
def write_binary_file(self, path, data):
self.ensure_dir(os.path.dirname(path))
if not self.dry_run:
with open(path, 'wb') as f:
f.write(data)
self.record_as_written(path)
def write_text_file(self, path, data, encoding):
self.ensure_dir(os.path.dirname(path))
if not self.dry_run:
with open(path, 'wb') as f:
f.write(data.encode(encoding))
self.record_as_written(path)
def set_mode(self, bits, mask, files):
if os.name == 'posix':
# Set the executable bits (owner, group, and world) on
# all the files specified.
for f in files:
if self.dry_run:
logger.info("changing mode of %s", f)
else:
mode = (os.stat(f).st_mode | bits) & mask
logger.info("changing mode of %s to %o", f, mode)
os.chmod(f, mode)
set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f)
def ensure_dir(self, path):
path = os.path.abspath(path)
if path not in self.ensured and not os.path.exists(path):
self.ensured.add(path)
d, f = os.path.split(path)
self.ensure_dir(d)
logger.info('Creating %s' % path)
if not self.dry_run:
os.mkdir(path)
if self.record:
self.dirs_created.add(path)
def byte_compile(self, path, optimize=False, force=False, prefix=None):
dpath = cache_from_source(path, not optimize)
logger.info('Byte-compiling %s to %s', path, dpath)
if not self.dry_run:
if force or self.newer(path, dpath):
if not prefix:
diagpath = None
else:
assert path.startswith(prefix)
diagpath = path[len(prefix):]
py_compile.compile(path, dpath, diagpath, True) # raise error
self.record_as_written(dpath)
return dpath
def ensure_removed(self, path):
if os.path.exists(path):
if os.path.isdir(path) and not os.path.islink(path):
logger.debug('Removing directory tree at %s', path)
if not self.dry_run:
shutil.rmtree(path)
if self.record:
if path in self.dirs_created:
self.dirs_created.remove(path)
else:
if os.path.islink(path):
s = 'link'
else:
s = 'file'
logger.debug('Removing %s %s', s, path)
if not self.dry_run:
os.remove(path)
if self.record:
if path in self.files_written:
self.files_written.remove(path)
def is_writable(self, path):
result = False
while not result:
if os.path.exists(path):
result = os.access(path, os.W_OK)
break
parent = os.path.dirname(path)
if parent == path:
break
path = parent
return result
def commit(self):
"""
Commit recorded changes, turn off recording, return
changes.
"""
assert self.record
result = self.files_written, self.dirs_created
self._init_record()
return result
def rollback(self):
if not self.dry_run:
for f in list(self.files_written):
if os.path.exists(f):
os.remove(f)
# dirs should all be empty now, except perhaps for
# __pycache__ subdirs
# reverse so that subdirs appear before their parents
dirs = sorted(self.dirs_created, reverse=True)
for d in dirs:
flist = os.listdir(d)
if flist:
assert flist == ['__pycache__']
sd = os.path.join(d, flist[0])
os.rmdir(sd)
os.rmdir(d) # should fail if non-empty
self._init_record()
def resolve(module_name, dotted_path):
if module_name in sys.modules:
mod = sys.modules[module_name]
else:
mod = __import__(module_name)
if dotted_path is None:
result = mod
else:
parts = dotted_path.split('.')
result = getattr(mod, parts.pop(0))
for p in parts:
result = getattr(result, p)
return result
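# Example (illustrative): resolve('os.path', 'join') returns the os.path.join
# function, and resolve('os.path', None) returns the os.path module itself.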
class ExportEntry(object):
def __init__(self, name, prefix, suffix, flags):
self.name = name
self.prefix = prefix
self.suffix = suffix
self.flags = flags
@cached_property
def value(self):
return resolve(self.prefix, self.suffix)
def __repr__(self):
return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix,
self.suffix, self.flags)
def __eq__(self, other):
if not isinstance(other, ExportEntry):
result = False
else:
result = (self.name == other.name and
self.prefix == other.prefix and
self.suffix == other.suffix and
self.flags == other.flags)
return result
__hash__ = object.__hash__
ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.])+)
\s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
\s*(\[\s*(?P<flags>\w+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
''', re.VERBOSE)
def get_export_entry(specification):
m = ENTRY_RE.search(specification)
if not m:
result = None
if '[' in specification or ']' in specification:
raise DistlibException('Invalid specification '
'%r' % specification)
else:
d = m.groupdict()
name = d['name']
path = d['callable']
colons = path.count(':')
if colons == 0:
prefix, suffix = path, None
else:
if colons != 1:
raise DistlibException('Invalid specification '
'%r' % specification)
prefix, suffix = path.split(':')
flags = d['flags']
if flags is None:
if '[' in specification or ']' in specification:
raise DistlibException('Invalid specification '
'%r' % specification)
flags = []
else:
flags = [f.strip() for f in flags.split(',')]
result = ExportEntry(name, prefix, suffix, flags)
return result
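# Example (illustrative): get_export_entry('foo = foo.module:main [flag1, flag2]')
# returns an ExportEntry with name='foo', prefix='foo.module', suffix='main'
# and flags=['flag1', 'flag2'].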
def get_cache_base(suffix=None):
"""
Return the default base location for distlib caches. If the directory does
not exist, it is created. Use the suffix provided for the base directory,
and default to '.distlib' if it isn't provided.
On Windows, if LOCALAPPDATA is defined in the environment, then it is
assumed to be a directory, and will be the parent directory of the result.
On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home
directory - using os.expanduser('~') - will be the parent directory of
the result.
The result is just the directory '.distlib' in the parent directory as
determined above, or with the name specified with ``suffix``.
"""
if suffix is None:
suffix = '.distlib'
if os.name == 'nt' and 'LOCALAPPDATA' in os.environ:
result = os.path.expandvars('$localappdata')
else:
# Assume posix, or old Windows
result = os.path.expanduser('~')
# we use 'isdir' instead of 'exists', because we want to
# fail if there's a file with that name
if os.path.isdir(result):
usable = os.access(result, os.W_OK)
if not usable:
logger.warning('Directory exists but is not writable: %s', result)
else:
try:
os.makedirs(result)
usable = True
except OSError:
logger.warning('Unable to create %s', result, exc_info=True)
usable = False
if not usable:
result = tempfile.mkdtemp()
logger.warning('Default location unusable, using %s', result)
return os.path.join(result, suffix)
def path_to_cache_dir(path):
"""
Convert an absolute path to a directory name for use in a cache.
The algorithm used is:
#. On Windows, any ``':'`` in the drive is replaced with ``'---'``.
#. Any occurrence of ``os.sep`` is replaced with ``'--'``.
#. ``'.cache'`` is appended.
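    For example, on POSIX, '/home/user/.distlib' maps to
    '--home--user--.distlib.cache'.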
"""
d, p = os.path.splitdrive(os.path.abspath(path))
if d:
d = d.replace(':', '---')
p = p.replace(os.sep, '--')
return d + p + '.cache'
def ensure_slash(s):
if not s.endswith('/'):
return s + '/'
return s
def parse_credentials(netloc):
username = password = None
if '@' in netloc:
prefix, netloc = netloc.split('@', 1)
if ':' not in prefix:
username = prefix
else:
username, password = prefix.split(':', 1)
return username, password, netloc
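# Example (illustrative): parse_credentials('user:[email protected]')
# returns ('user', 's3cret', 'pypi.example.com'); with no '@' present, the
# username and password come back as None.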
def get_process_umask():
result = os.umask(0o22)
os.umask(result)
return result
def is_string_sequence(seq):
result = True
i = None
for i, s in enumerate(seq):
if not isinstance(s, string_types):
result = False
break
assert i is not None
return result
PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-'
'([a-z0-9_.+-]+)', re.I)
PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)')
def split_filename(filename, project_name=None):
"""
Extract name, version, python version from a filename (no extension)
Return name, version, pyver or None
"""
result = None
pyver = None
m = PYTHON_VERSION.search(filename)
if m:
pyver = m.group(1)
filename = filename[:m.start()]
if project_name and len(filename) > len(project_name) + 1:
m = re.match(re.escape(project_name) + r'\b', filename)
if m:
n = m.end()
result = filename[:n], filename[n + 1:], pyver
if result is None:
m = PROJECT_NAME_AND_VERSION.match(filename)
if m:
result = m.group(1), m.group(3), pyver
return result
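# Example (illustrative): split_filename('python-dateutil-2.4.0-py2.7')
# returns ('python-dateutil', '2.4.0', '2.7').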
# Allow spaces in name because of legacy dists like "Twisted Core"
NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*'
r'\(\s*(?P<ver>[^\s)]+)\)$')
def parse_name_and_version(p):
"""
A utility method used to get name and version from a string.
From e.g. a Provides-Dist value.
:param p: A value in a form 'foo (1.0)'
:return: The name and version as a tuple.
"""
m = NAME_VERSION_RE.match(p)
if not m:
raise DistlibException('Ill-formed name/version string: \'%s\'' % p)
d = m.groupdict()
return d['name'].strip().lower(), d['ver']
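# Example (illustrative): parse_name_and_version('Twisted Core (12.0.1)')
# returns ('twisted core', '12.0.1').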
def get_extras(requested, available):
result = set()
requested = set(requested or [])
available = set(available or [])
if '*' in requested:
requested.remove('*')
result |= available
for r in requested:
if r == '-':
result.add(r)
elif r.startswith('-'):
unwanted = r[1:]
if unwanted not in available:
logger.warning('undeclared extra: %s' % unwanted)
if unwanted in result:
result.remove(unwanted)
else:
if r not in available:
logger.warning('undeclared extra: %s' % r)
result.add(r)
return result
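# Example (illustrative): get_extras(['*', '-bar'], ['foo', 'bar']) returns
# {'foo'}: '*' pulls in every available extra and '-bar' then removes one.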
#
# Extended metadata functionality
#
def _get_external_data(url):
result = {}
try:
# urlopen might fail if it runs into redirections,
# because of Python issue #13696. Fixed in locators
# using a custom redirect handler.
resp = urlopen(url)
headers = resp.info()
if headers.get('Content-Type') != 'application/json':
logger.debug('Unexpected response for JSON request')
else:
reader = codecs.getreader('utf-8')(resp)
#data = reader.read().decode('utf-8')
#result = json.loads(data)
result = json.load(reader)
except Exception as e:
logger.exception('Failed to get external data for %s: %s', url, e)
return result
def get_project_data(name):
url = ('https://www.red-dove.com/pypi/projects/'
'%s/%s/project.json' % (name[0].upper(), name))
result = _get_external_data(url)
return result
def get_package_data(name, version):
url = ('https://www.red-dove.com/pypi/projects/'
'%s/%s/package-%s.json' % (name[0].upper(), name, version))
return _get_external_data(url)
class Cache(object):
"""
A class implementing a cache for resources that need to live in the file system
e.g. shared libraries. This class was moved from resources to here because it
could be used by other modules, e.g. the wheel module.
"""
def __init__(self, base):
"""
Initialise an instance.
:param base: The base directory where the cache should be located.
"""
# we use 'isdir' instead of 'exists', because we want to
# fail if there's a file with that name
if not os.path.isdir(base):
os.makedirs(base)
if (os.stat(base).st_mode & 0o77) != 0:
logger.warning('Directory \'%s\' is not private', base)
self.base = os.path.abspath(os.path.normpath(base))
def prefix_to_dir(self, prefix):
"""
Converts a resource prefix to a directory name in the cache.
"""
return path_to_cache_dir(prefix)
def clear(self):
"""
Clear the cache.
"""
not_removed = []
for fn in os.listdir(self.base):
fn = os.path.join(self.base, fn)
try:
if os.path.islink(fn) or os.path.isfile(fn):
os.remove(fn)
elif os.path.isdir(fn):
shutil.rmtree(fn)
except Exception:
not_removed.append(fn)
return not_removed
class EventMixin(object):
"""
A very simple publish/subscribe system.
"""
def __init__(self):
self._subscribers = {}
def add(self, event, subscriber, append=True):
"""
Add a subscriber for an event.
:param event: The name of an event.
:param subscriber: The subscriber to be added (and called when the
event is published).
:param append: Whether to append or prepend the subscriber to an
existing subscriber list for the event.
"""
subs = self._subscribers
if event not in subs:
subs[event] = deque([subscriber])
else:
sq = subs[event]
if append:
sq.append(subscriber)
else:
sq.appendleft(subscriber)
def remove(self, event, subscriber):
"""
Remove a subscriber for an event.
:param event: The name of an event.
:param subscriber: The subscriber to be removed.
"""
subs = self._subscribers
if event not in subs:
raise ValueError('No subscribers: %r' % event)
subs[event].remove(subscriber)
def get_subscribers(self, event):
"""
Return an iterator for the subscribers for an event.
:param event: The event to return subscribers for.
"""
return iter(self._subscribers.get(event, ()))
def publish(self, event, *args, **kwargs):
"""
        Publish an event and return a list of values returned by its
subscribers.
:param event: The event to publish.
:param args: The positional arguments to pass to the event's
subscribers.
:param kwargs: The keyword arguments to pass to the event's
subscribers.
"""
result = []
for subscriber in self.get_subscribers(event):
try:
value = subscriber(event, *args, **kwargs)
except Exception:
logger.exception('Exception during event publication')
value = None
result.append(value)
logger.debug('publish %s: args = %s, kwargs = %s, result = %s',
event, args, kwargs, result)
return result
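# Illustrative sketch (not part of the original module): a minimal
# publish/subscribe round trip. The event name and subscriber are made up.
def _example_event_mixin():
    bus = EventMixin()
    def on_ping(event, *args, **kwargs):
        return 'pong:%s' % kwargs.get('who')
    bus.add('ping', on_ping)
    # publish() collects each subscriber's return value
    assert bus.publish('ping', who='caller') == ['pong:caller']
    bus.remove('ping', on_ping)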
#
# Simple sequencing
#
class Sequencer(object):
def __init__(self):
self._preds = {}
self._succs = {}
self._nodes = set() # nodes with no preds/succs
def add_node(self, node):
self._nodes.add(node)
def remove_node(self, node, edges=False):
if node in self._nodes:
self._nodes.remove(node)
if edges:
for p in set(self._preds.get(node, ())):
self.remove(p, node)
for s in set(self._succs.get(node, ())):
self.remove(node, s)
# Remove empties
for k, v in list(self._preds.items()):
if not v:
del self._preds[k]
for k, v in list(self._succs.items()):
if not v:
del self._succs[k]
def add(self, pred, succ):
assert pred != succ
self._preds.setdefault(succ, set()).add(pred)
self._succs.setdefault(pred, set()).add(succ)
def remove(self, pred, succ):
assert pred != succ
try:
preds = self._preds[succ]
succs = self._succs[pred]
except KeyError:
raise ValueError('%r not a successor of anything' % succ)
try:
preds.remove(pred)
succs.remove(succ)
except KeyError:
raise ValueError('%r not a successor of %r' % (succ, pred))
def is_step(self, step):
return (step in self._preds or step in self._succs or
step in self._nodes)
def get_steps(self, final):
if not self.is_step(final):
raise ValueError('Unknown: %r' % final)
result = []
todo = []
seen = set()
todo.append(final)
while todo:
step = todo.pop(0)
if step in seen:
# if a step was already seen,
# move it to the end (so it will appear earlier
# when reversed on return) ... but not for the
# final step, as that would be confusing for
# users
if step != final:
result.remove(step)
result.append(step)
else:
seen.add(step)
result.append(step)
preds = self._preds.get(step, ())
todo.extend(preds)
return reversed(result)
@property
def strong_connections(self):
#http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
index_counter = [0]
stack = []
lowlinks = {}
index = {}
result = []
graph = self._succs
def strongconnect(node):
# set the depth index for this node to the smallest unused index
index[node] = index_counter[0]
lowlinks[node] = index_counter[0]
index_counter[0] += 1
stack.append(node)
# Consider successors
try:
successors = graph[node]
except Exception:
successors = []
for successor in successors:
if successor not in lowlinks:
# Successor has not yet been visited
strongconnect(successor)
lowlinks[node] = min(lowlinks[node],lowlinks[successor])
elif successor in stack:
# the successor is in the stack and hence in the current
# strongly connected component (SCC)
lowlinks[node] = min(lowlinks[node],index[successor])
# If `node` is a root node, pop the stack and generate an SCC
if lowlinks[node] == index[node]:
connected_component = []
while True:
successor = stack.pop()
connected_component.append(successor)
if successor == node: break
component = tuple(connected_component)
# storing the result
result.append(component)
for node in graph:
if node not in lowlinks:
strongconnect(node)
return result
@property
def dot(self):
result = ['digraph G {']
for succ in self._preds:
preds = self._preds[succ]
for pred in preds:
result.append(' %s -> %s;' % (pred, succ))
for node in self._nodes:
result.append(' %s;' % node)
result.append('}')
return '\n'.join(result)
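# Illustrative sketch (not part of the original module): ordering build
# steps with Sequencer. The step names are made up.
def _example_sequencer():
    seq = Sequencer()
    seq.add('compile', 'link')    # 'compile' must run before 'link'
    seq.add('link', 'package')
    assert list(seq.get_steps('package')) == ['compile', 'link', 'package']
    # in an acyclic graph every step is its own strongly connected component
    assert sorted(seq.strong_connections) == [
        ('compile',), ('link',), ('package',)]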
#
# Unarchiving functionality for zip, tar, tgz, tbz, whl
#
ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip',
'.tgz', '.tbz', '.whl')
def unarchive(archive_filename, dest_dir, format=None, check=True):
    def check_path(path):
        # guard against path traversal ("zip slip"): every archive member
        # must resolve to a location inside dest_dir
        if not isinstance(path, text_type):
            path = path.decode('utf-8')
        p = os.path.abspath(os.path.join(dest_dir, path))
        if not p.startswith(dest_dir) or p[plen] != os.sep:
            raise ValueError('path outside destination: %r' % p)
dest_dir = os.path.abspath(dest_dir)
plen = len(dest_dir)
archive = None
if format is None:
if archive_filename.endswith(('.zip', '.whl')):
format = 'zip'
elif archive_filename.endswith(('.tar.gz', '.tgz')):
format = 'tgz'
mode = 'r:gz'
elif archive_filename.endswith(('.tar.bz2', '.tbz')):
format = 'tbz'
mode = 'r:bz2'
elif archive_filename.endswith('.tar'):
format = 'tar'
mode = 'r'
else:
raise ValueError('Unknown format for %r' % archive_filename)
try:
if format == 'zip':
archive = ZipFile(archive_filename, 'r')
if check:
names = archive.namelist()
for name in names:
check_path(name)
else:
archive = tarfile.open(archive_filename, mode)
if check:
names = archive.getnames()
for name in names:
check_path(name)
if format != 'zip' and sys.version_info[0] < 3:
# See Python issue 17153. If the dest path contains Unicode,
# tarfile extraction fails on Python 2.x if a member path name
# contains non-ASCII characters - it leads to an implicit
# bytes -> unicode conversion using ASCII to decode.
for tarinfo in archive.getmembers():
if not isinstance(tarinfo.name, text_type):
tarinfo.name = tarinfo.name.decode('utf-8')
archive.extractall(dest_dir)
finally:
if archive:
archive.close()
def zip_dir(directory):
"""zip a directory tree into a BytesIO object"""
result = io.BytesIO()
dlen = len(directory)
with ZipFile(result, "w") as zf:
for root, dirs, files in os.walk(directory):
for name in files:
full = os.path.join(root, name)
rel = root[dlen:]
dest = os.path.join(rel, name)
zf.write(full, dest)
return result
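# Illustrative sketch (not part of the original module): because zip_dir
# returns an in-memory buffer, the archive can be hashed or uploaded
# without touching the file system. The directory is made up.
def _example_zip_dir():
    import tempfile
    d = tempfile.mkdtemp()
    with open(os.path.join(d, 'hello.txt'), 'w') as f:
        f.write('hi')
    data = zip_dir(d).getvalue()
    assert data[:2] == b'PK'  # standard zip file magic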
#
# Simple progress bar
#
UNITS = ('', 'K', 'M', 'G','T','P')
class Progress(object):
unknown = 'UNKNOWN'
def __init__(self, minval=0, maxval=100):
assert maxval is None or maxval >= minval
self.min = self.cur = minval
self.max = maxval
self.started = None
self.elapsed = 0
self.done = False
def update(self, curval):
assert self.min <= curval
assert self.max is None or curval <= self.max
self.cur = curval
now = time.time()
if self.started is None:
self.started = now
else:
self.elapsed = now - self.started
def increment(self, incr):
assert incr >= 0
self.update(self.cur + incr)
def start(self):
self.update(self.min)
return self
    def stop(self):
        if self.max is not None:
            self.update(self.max)
        self.done = True
@property
def maximum(self):
return self.unknown if self.max is None else self.max
@property
def percentage(self):
if self.done:
result = '100 %'
elif self.max is None:
result = ' ?? %'
else:
v = 100.0 * (self.cur - self.min) / (self.max - self.min)
result = '%3d %%' % v
return result
def format_duration(self, duration):
        if ((duration <= 0) and self.max is None) or self.cur == self.min:
result = '??:??:??'
#elif duration < 1:
# result = '--:--:--'
else:
result = time.strftime('%H:%M:%S', time.gmtime(duration))
return result
@property
def ETA(self):
if self.done:
prefix = 'Done'
t = self.elapsed
#import pdb; pdb.set_trace()
else:
prefix = 'ETA '
if self.max is None:
t = -1
elif self.elapsed == 0 or (self.cur == self.min):
t = 0
else:
#import pdb; pdb.set_trace()
t = float(self.max - self.min)
t /= self.cur - self.min
t = (t - 1) * self.elapsed
return '%s: %s' % (prefix, self.format_duration(t))
@property
def speed(self):
if self.elapsed == 0:
result = 0.0
else:
result = (self.cur - self.min) / self.elapsed
for unit in UNITS:
if result < 1000:
break
result /= 1000.0
return '%d %sB/s' % (result, unit)
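# Illustrative sketch (not part of the original module): driving Progress
# by hand, as a download loop might.
def _example_progress():
    p = Progress(maxval=200).start()
    p.increment(50)
    assert p.percentage == ' 25 %'
    p.stop()  # snaps to maxval and marks the progress as done
    assert p.percentage == '100 %'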
#
# Glob functionality
#
RICH_GLOB = re.compile(r'\{([^}]*)\}')
_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]')
_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$')
def iglob(path_glob):
"""Extended globbing function that supports ** and {opt1,opt2,opt3}."""
if _CHECK_RECURSIVE_GLOB.search(path_glob):
msg = """invalid glob %r: recursive glob "**" must be used alone"""
raise ValueError(msg % path_glob)
if _CHECK_MISMATCH_SET.search(path_glob):
msg = """invalid glob %r: mismatching set marker '{' or '}'"""
raise ValueError(msg % path_glob)
return _iglob(path_glob)
def _iglob(path_glob):
rich_path_glob = RICH_GLOB.split(path_glob, 1)
if len(rich_path_glob) > 1:
assert len(rich_path_glob) == 3, rich_path_glob
prefix, set, suffix = rich_path_glob
for item in set.split(','):
for path in _iglob(''.join((prefix, item, suffix))):
yield path
else:
if '**' not in path_glob:
for item in std_iglob(path_glob):
yield item
else:
prefix, radical = path_glob.split('**', 1)
if prefix == '':
prefix = '.'
if radical == '':
radical = '*'
        else:
            # strip a leading path separator; both '/' and '\\' are handled
            radical = radical.lstrip('/')
            radical = radical.lstrip('\\')
for path, dir, files in os.walk(prefix):
path = os.path.normpath(path)
for fn in _iglob(os.path.join(path, radical)):
yield fn
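# Illustrative sketch (not part of the original module): both extensions
# combined. The paths are hypothetical.
def _example_iglob():
    # '{src,tests}' expands like a shell brace set, while '**' (used alone
    # between separators) recurses into subdirectories.
    for path in iglob('./{src,tests}/**/*.py'):
        print(path)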
#
# HTTPSConnection which verifies certificates/matches domains
#
class HTTPSConnection(httplib.HTTPSConnection):
ca_certs = None # set this to the path to the certs file (.pem)
check_domain = True # only used if ca_certs is not None
# noinspection PyPropertyAccess
def connect(self):
sock = socket.create_connection((self.host, self.port), self.timeout)
if getattr(self, '_tunnel_host', False):
self.sock = sock
self._tunnel()
if not hasattr(ssl, 'SSLContext'):
# For 2.x
if self.ca_certs:
cert_reqs = ssl.CERT_REQUIRED
else:
cert_reqs = ssl.CERT_NONE
self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
cert_reqs=cert_reqs,
ssl_version=ssl.PROTOCOL_SSLv23,
ca_certs=self.ca_certs)
else:
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.options |= ssl.OP_NO_SSLv2
if self.cert_file:
context.load_cert_chain(self.cert_file, self.key_file)
kwargs = {}
if self.ca_certs:
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations(cafile=self.ca_certs)
if getattr(ssl, 'HAS_SNI', False):
kwargs['server_hostname'] = self.host
self.sock = context.wrap_socket(sock, **kwargs)
if self.ca_certs and self.check_domain:
try:
match_hostname(self.sock.getpeercert(), self.host)
logger.debug('Host verified: %s', self.host)
except CertificateError:
self.sock.shutdown(socket.SHUT_RDWR)
self.sock.close()
raise
class HTTPSHandler(BaseHTTPSHandler):
def __init__(self, ca_certs, check_domain=True):
BaseHTTPSHandler.__init__(self)
self.ca_certs = ca_certs
self.check_domain = check_domain
def _conn_maker(self, *args, **kwargs):
"""
This is called to create a connection instance. Normally you'd
pass a connection class to do_open, but it doesn't actually check for
a class, and just expects a callable. As long as we behave just as a
constructor would have, we should be OK. If it ever changes so that
we *must* pass a class, we'll create an UnsafeHTTPSConnection class
which just sets check_domain to False in the class definition, and
choose which one to pass to do_open.
"""
result = HTTPSConnection(*args, **kwargs)
if self.ca_certs:
result.ca_certs = self.ca_certs
result.check_domain = self.check_domain
return result
def https_open(self, req):
try:
return self.do_open(self._conn_maker, req)
except URLError as e:
if 'certificate verify failed' in str(e.reason):
raise CertificateError('Unable to verify server certificate '
'for %s' % req.host)
else:
raise
#
# To prevent mixing HTTP traffic with HTTPS (examples: A Man-In-The-
# Middle proxy using HTTP listens on port 443, or an index mistakenly serves
# HTML containing a http://xyz link when it should be https://xyz),
# you can use the following handler class, which does not allow HTTP traffic.
#
# It works by inheriting from HTTPHandler - so build_opener won't add a
# handler for HTTP itself.
#
class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler):
def http_open(self, req):
raise URLError('Unexpected HTTP request on what should be a secure '
'connection: %s' % req)
#
# XML-RPC with timeouts
#
_ver_info = sys.version_info[:2]
if _ver_info == (2, 6):
class HTTP(httplib.HTTP):
def __init__(self, host='', port=None, **kwargs):
if port == 0: # 0 means use port 0, not the default port
port = None
self._setup(self._connection_class(host, port, **kwargs))
class HTTPS(httplib.HTTPS):
def __init__(self, host='', port=None, **kwargs):
if port == 0: # 0 means use port 0, not the default port
port = None
self._setup(self._connection_class(host, port, **kwargs))
class Transport(xmlrpclib.Transport):
def __init__(self, timeout, use_datetime=0):
self.timeout = timeout
xmlrpclib.Transport.__init__(self, use_datetime)
def make_connection(self, host):
h, eh, x509 = self.get_host_info(host)
if _ver_info == (2, 6):
result = HTTP(h, timeout=self.timeout)
else:
if not self._connection or host != self._connection[0]:
self._extra_headers = eh
self._connection = host, httplib.HTTPConnection(h)
result = self._connection[1]
return result
class SafeTransport(xmlrpclib.SafeTransport):
def __init__(self, timeout, use_datetime=0):
self.timeout = timeout
xmlrpclib.SafeTransport.__init__(self, use_datetime)
def make_connection(self, host):
h, eh, kwargs = self.get_host_info(host)
if not kwargs:
kwargs = {}
kwargs['timeout'] = self.timeout
if _ver_info == (2, 6):
result = HTTPS(host, None, **kwargs)
else:
if not self._connection or host != self._connection[0]:
self._extra_headers = eh
self._connection = host, httplib.HTTPSConnection(h, None,
**kwargs)
result = self._connection[1]
return result
class ServerProxy(xmlrpclib.ServerProxy):
def __init__(self, uri, **kwargs):
self.timeout = timeout = kwargs.pop('timeout', None)
# The above classes only come into play if a timeout
# is specified
if timeout is not None:
scheme, _ = splittype(uri)
use_datetime = kwargs.get('use_datetime', 0)
if scheme == 'https':
tcls = SafeTransport
else:
tcls = Transport
kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime)
self.transport = t
xmlrpclib.ServerProxy.__init__(self, uri, **kwargs)
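# Illustrative sketch (not part of the original module): passing a timeout
# routes calls through the Transport/SafeTransport classes above. The
# endpoint URL is made up; constructing the proxy does not connect.
def _example_server_proxy():
    proxy = ServerProxy('https://pypi.example.org/pypi', timeout=5.0)
    return proxy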
#
# CSV functionality. This is provided because on 2.x, the csv module can't
# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files.
#
def _csv_open(fn, mode, **kwargs):
if sys.version_info[0] < 3:
mode += 'b'
else:
kwargs['newline'] = ''
return open(fn, mode, **kwargs)
class CSVBase(object):
defaults = {
'delimiter': str(','), # The strs are used because we need native
'quotechar': str('"'), # str in the csv API (2.x won't take
'lineterminator': str('\n') # Unicode)
}
def __enter__(self):
return self
def __exit__(self, *exc_info):
self.stream.close()
class CSVReader(CSVBase):
def __init__(self, **kwargs):
if 'stream' in kwargs:
stream = kwargs['stream']
if sys.version_info[0] >= 3:
# needs to be a text stream
stream = codecs.getreader('utf-8')(stream)
self.stream = stream
else:
self.stream = _csv_open(kwargs['path'], 'r')
self.reader = csv.reader(self.stream, **self.defaults)
def __iter__(self):
return self
def next(self):
result = next(self.reader)
if sys.version_info[0] < 3:
for i, item in enumerate(result):
if not isinstance(item, text_type):
result[i] = item.decode('utf-8')
return result
__next__ = next
class CSVWriter(CSVBase):
def __init__(self, fn, **kwargs):
self.stream = _csv_open(fn, 'w')
self.writer = csv.writer(self.stream, **self.defaults)
def writerow(self, row):
if sys.version_info[0] < 3:
r = []
for item in row:
if isinstance(item, text_type):
item = item.encode('utf-8')
r.append(item)
row = r
self.writer.writerow(row)
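# Illustrative sketch (not part of the original module): a RECORD-style
# round trip, which stays unicode-safe on both 2.x and 3.x. The file name
# and row contents are made up.
def _example_csv_roundtrip():
    import tempfile
    fn = os.path.join(tempfile.mkdtemp(), 'RECORD')
    with CSVWriter(fn) as writer:
        writer.writerow(('pkg/__init__.py', 'sha256=abc123', '123'))
    with CSVReader(path=fn) as reader:
        rows = list(reader)
    assert rows == [['pkg/__init__.py', 'sha256=abc123', '123']]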
#
# Configurator functionality
#
class Configurator(BaseConfigurator):
value_converters = dict(BaseConfigurator.value_converters)
value_converters['inc'] = 'inc_convert'
def __init__(self, config, base=None):
super(Configurator, self).__init__(config)
self.base = base or os.getcwd()
def configure_custom(self, config):
def convert(o):
if isinstance(o, (list, tuple)):
result = type(o)([convert(i) for i in o])
elif isinstance(o, dict):
if '()' in o:
result = self.configure_custom(o)
else:
result = {}
for k in o:
result[k] = convert(o[k])
else:
result = self.convert(o)
return result
c = config.pop('()')
if not callable(c):
c = self.resolve(c)
props = config.pop('.', None)
# Check for valid identifiers
args = config.pop('[]', ())
if args:
args = tuple([convert(o) for o in args])
items = [(k, convert(config[k])) for k in config if valid_ident(k)]
kwargs = dict(items)
result = c(*args, **kwargs)
if props:
for n, v in props.items():
setattr(result, n, convert(v))
return result
def __getitem__(self, key):
result = self.config[key]
if isinstance(result, dict) and '()' in result:
self.config[key] = result = self.configure_custom(result)
return result
def inc_convert(self, value):
"""Default converter for the inc:// protocol."""
if not os.path.isabs(value):
value = os.path.join(self.base, value)
with codecs.open(value, 'r', encoding='utf-8') as f:
result = json.load(f)
return result
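# Illustrative sketch (not part of the original module): configure_custom
# builds an object from a dict, where '()' names the callable and '[]'
# holds positional arguments. The configuration below is made up.
def _example_configurator():
    cfg = Configurator({
        'main': {
            '()': 'collections.OrderedDict',
            '[]': [[('a', 1), ('b', 2)]],
        }
    })
    od = cfg['main']  # resolved and constructed on first access
    assert list(od.items()) == [('a', 1), ('b', 2)]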
#
# Mixin for running subprocesses and capturing their output
#
class SubprocessMixin(object):
def __init__(self, verbose=False, progress=None):
self.verbose = verbose
self.progress = progress
def reader(self, stream, context):
"""
Read lines from a subprocess' output stream and either pass to a progress
callable (if specified) or write progress information to sys.stderr.
"""
progress = self.progress
verbose = self.verbose
while True:
s = stream.readline()
if not s:
break
if progress is not None:
progress(s, context)
else:
if not verbose:
sys.stderr.write('.')
else:
sys.stderr.write(s.decode('utf-8'))
sys.stderr.flush()
stream.close()
def run_command(self, cmd, **kwargs):
p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, **kwargs)
t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout'))
t1.start()
t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr'))
t2.start()
p.wait()
t1.join()
t2.join()
if self.progress is not None:
self.progress('done.', 'main')
elif self.verbose:
sys.stderr.write('done.\n')
return p
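# Illustrative sketch (not part of the original module): run_command
# streams the child's stdout/stderr through reader() on two threads, so
# the caller only supplies the command and an optional progress callable.
def _example_subprocess_mixin():
    lines = []
    def progress(line, context):  # context is 'stdout', 'stderr' or 'main'
        lines.append((context, line))
    runner = SubprocessMixin(progress=progress)
    proc = runner.run_command([sys.executable, '-c', 'print("hello")'])
    assert proc.returncode == 0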
| gusai-francelabs/datafari | windows/python/Lib/site-packages/pip/_vendor/distlib/util.py | Python | apache-2.0 | 51,453 |
# -*- coding: utf-8 -*-
###
# (C) Copyright (2012-2017) Hewlett Packard Enterprise Development LP
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
###
from pprint import pprint
from config_loader import try_load_from_file
from hpOneView.exceptions import HPOneViewException
from hpOneView.oneview_client import OneViewClient
config = {
"ip": "<oneview_ip>",
"credentials": {
"userName": "<username>",
"password": "<password>"
}
}
# Try load config from a file (if there is a config file)
config = try_load_from_file(config)
# Optional: set the WWN of an existing unmanaged volume to demonstrate adding a volume by WWN
unmanaged_volume_wwn = ''
oneview_client = OneViewClient(config)
# Defines the storage system and the storage pool which are provided to create the volumes
storage_system = oneview_client.storage_systems.get_all()[0]
storage_pools = oneview_client.storage_pools.get_all()
storage_pool_available = False
for sp in storage_pools:
if sp['storageSystemUri'] == storage_system['uri']:
storage_pool_available = True
storage_pool = sp
if not storage_pool_available:
raise ValueError("ERROR: No storage pools found attached to the storage system")
# Create a volume with a Storage Pool
print("\nCreate a volume with a specified Storage Pool and Snapshot Pool")
options = {
"properties": {
"storagePool": storage_pool['uri'],
"size": 1024 * 1024 * 1024, # 1GB
"isShareable": False,
"snapshotPool": storage_pool['uri'],
"provisioningType": "Thin",
"name": "ONEVIEW_SDK_TEST_VOLUME_TYPE_1"
},
"templateUri": "/rest/storage-volume-templates/6da3016e-7ced-4e0b-8dac-a8b200a66e4f",
"isPermanent": False
}
new_volume = oneview_client.volumes.create(options)
pprint(new_volume)
# Add a volume for management by the appliance using the WWN of the volume
if unmanaged_volume_wwn:
print("\nAdd a volume for management by the appliance using the WWN of the volume")
options_with_wwn = {
"type": "AddStorageVolumeV2",
"name": 'ONEVIEW_SDK_TEST_VOLUME_TYPE_4',
"description": 'Test volume added for management: Storage System + Storage Pool + WWN',
"storageSystemUri": storage_system['uri'],
"wwn": unmanaged_volume_wwn,
"provisioningParameters": {
"shareable": False
}
}
volume_added_with_wwn = oneview_client.volumes.create(options_with_wwn)
pprint(volume_added_with_wwn)
# Get all managed volumes
print("\nGet a list of all managed volumes")
volumes = oneview_client.volumes.get_all()
for volume in volumes:
print("Name: {name}".format(**volume))
# Find a volume by name
volume = oneview_client.volumes.get_by('name', new_volume['name'])[0]
print("\nFound a volume by name: '{name}'.\n uri = '{uri}'".format(**volume))
# Update the name of the volume recently found to 'ONEVIEW_SDK_TEST_VOLUME_TYPE_1_RENAMED'
volume['name'] = 'ONEVIEW_SDK_TEST_VOLUME_TYPE_1_RENAMED'
volume = oneview_client.volumes.update(volume)
print("\nVolume updated successfully.\n uri = '{uri}'\n with attribute 'name' = {name}".format(**volume))
# Find a volume by URI
volume_uri = new_volume['uri']
volume = oneview_client.volumes.get(volume_uri)
print("\nFind a volume by URI")
pprint(volume)
# Create a snapshot
print("\nCreate a snapshot")
snapshot_options = {
"name": "Test Snapshot",
"description": "Description for the snapshot"
}
volume_with_snapshot_pool = oneview_client.volumes.create_snapshot(new_volume['uri'], snapshot_options)
print("Created a snapshot for the volume '{name}'".format(**new_volume))
# Get recently created snapshot resource by name
print("\nGet a snapshot by name")
created_snapshot = oneview_client.volumes.get_snapshot_by(new_volume['uri'], 'name', 'Test Snapshot')[0]
print("Found snapshot at uri '{uri}'\n by name = '{name}'".format(**created_snapshot))
snapshot_uri = created_snapshot['uri']
# Get recently created snapshot resource by uri
print("\nGet a snapshot")
try:
snapshot = oneview_client.volumes.get_snapshot(snapshot_uri, volume_uri)
pprint(snapshot)
except HPOneViewException as e:
print(e.msg)
# Get a paginated list of snapshot resources sorting by name ascending
print("\nGet a list of the first 10 snapshots")
snapshots = oneview_client.volumes.get_snapshots(new_volume['uri'], 0, 10, sort='name:ascending')
for snapshot in snapshots:
print(' {name}'.format(**snapshot))
# Delete the recently created snapshot resource
print("\nDelete the recently created snapshot")
returned = oneview_client.volumes.delete_snapshot(created_snapshot)
print("Snapshot deleted successfully")
# Get the list of all extra managed storage volume paths from the appliance
extra_volumes = oneview_client.volumes.get_extra_managed_storage_volume_paths()
print("\nGet the list of all extra managed storage volume paths from the appliance")
pprint(extra_volumes)
# Remove extra presentations from the specified volume on the storage system
print("\nRemove extra presentations from the specified volume on the storage system")
oneview_client.volumes.repair(volume['uri'])
print(" Done.")
# Get all the attachable volumes which are managed by the appliance
print("\nGet all the attachable volumes which are managed by the appliance")
attachable_volumes = oneview_client.volumes.get_attachable_volumes()
pprint(attachable_volumes)
print("\nGet the attachable volumes which are managed by the appliance with scopes and connections")
scope_uris = ['/rest/scopes/e4a23533-9a72-4375-8cd3-a523016df852', '/rest/scopes/7799327d-6d79-4eb2-b969-a523016df869']
connections = [{'networkUri': '/rest/fc-networks/90bd0f63-3aab-49e2-a45f-a52500b46616',
'proxyName': '20:19:50:EB:1A:0F:0E:B6', 'initiatorName': '10:00:62:01:F8:70:00:0E'},
{'networkUri': '/rest/fc-networks/8acd0f62-1aab-49e2-a45a-d22500b4acdb',
'proxyName': '20:18:40:EB:1A:0F:0E:C7', 'initiatorName': '10:00:72:01:F8:70:00:0F'}]
attachable_volumes = oneview_client.volumes.get_attachable_volumes(scope_uris=scope_uris, connections=connections)
pprint(attachable_volumes)
print("\nDelete the recently created volumes")
if oneview_client.volumes.delete(new_volume):
print("The volume, that was previously created with a Storage Pool, was deleted from OneView and storage system")
if unmanaged_volume_wwn and oneview_client.volumes.delete(volume_added_with_wwn, export_only=True):
print("The volume, that was previously added using the WWN of the volume, was deleted from OneView")
| HewlettPackard/python-hpOneView | examples/volumes.py | Python | mit | 7,563 |
#!/usr/bin/env python
# Mininet Automatic Testing Tool (Prototype)
# Copyright (C) 2013 Jesse J. Cook
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from collections import namedtuple
import argparse
import hashlib
import libvirt
import os
import paramiko
import py_compile
import socket
import sys
import time
Environment = namedtuple('Environment', ['dom_name', 'ss_name'])
class GradingException(Exception):
def __init__(self,value):
self.value = "fail:\n%s" % value
def __str__(self):
return str(self.value)
# TODO: reevaluate try except blocks
def parse_prog_input():
desc = "Mininet Testing Tool (Prototype)"
contract = """program contract:
Requires:
    - The sandbox environment is set up as described in README.md
- The following paths on the host to be provided and read access granted:
the code that is to be tested
the test suite that is to be run against the code
the guest domain from which the tests will be run
- The following paths on the guest to be provided and r/w access granted:
the destination for the code submission
the destination for the test suite
- The guest domain from which the tests will be run:
to be reachable via the network from the host
to have a client listening over the network (for now, ssh on port 22)
        to have sufficient free space on the disk (60%% of allocated suggested)
Guarantees:
- The domain state will be saved in a snapshot
- The following will be installed on the snapshot of the guest:
the code that is to be tested (for now, 1 python file)
the test suite that is to be run (for now, 1 python file)
- The test suite will be run against the code on the guest
- The test results will be presented (for now, printed to stdout)
- A grade will be presented (for now, printed to stdout)
    - The domain's state will be reverted to the saved state
- The snapshot will be deleted
"""
frmt = argparse.RawDescriptionHelpFormatter
parser = argparse.ArgumentParser( description=desc
, epilog=contract
, formatter_class=frmt
)
parser.add_argument( '--submission'
, dest='code'
, default='sample-submission.py'
, help='code submission to be tested'
)
parser.add_argument( '--submission-dest'
, dest='code_dest'
, default='/home/mininet/pyretic/pyretic/examples/fw.py'
, help='code submission destination within guest'
)
parser.add_argument( '--test-suite'
, dest='test'
, default='sample-test-suite.py'
, help='test suite to test the code submission with'
)
parser.add_argument( '--test-suite-dest'
, dest='test_dest'
, default='/tmp/test-suite.py'
, help='test suite destination within guest'
)
parser.add_argument( '--domain-name'
, dest='domain'
, default='mininet'
, help='libvirt domain to test the code submission on'
)
parser.add_argument( '--hostname'
, dest='hostname'
, default='mininet'
, help='hostname for the libvirt test domain'
)
return parser.parse_args()
def ssh_connect( hostname
, port=22
, username="mininet"
, keyfile="~/.ssh/id_rsa"
):
try:
ssh = paramiko.SSHClient()
ssh.load_system_host_keys()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect( hostname = hostname
, port = port
, username = username
, key_filename = os.path.expanduser(keyfile)
)
except Exception, e:
print "Connection to host '%s' failed (%s)" % (hostname, str(e))
sys.exit(1)
return ssh
def is_client_up( hostname
, port=22
):
up = False
ssh = ssh_connect(hostname=hostname, port=port)
(stdin, stdout, stderr) = ssh.exec_command("mn --version")
chan = stdout.channel
if 0 == chan.recv_exit_status():
up = True
chan.close()
ssh.close()
return up
def setup_test_env(hostname, domain):
xml = "<domainsnapshot><domain><name>%s</name></domain></domainsnapshot>"
conn = libvirt.open(None)
if conn == None:
print "Failed to open connection to the hypervisor"
sys.exit(1)
try:
dom = conn.lookupByName(domain)
except:
print "Failed to find domain '%s'" % domain
sys.exit(1)
if libvirt.VIR_DOMAIN_SHUTOFF == dom.state()[0]:
print "\n\tDomain is shutdown; starting '%s'" % domain
try:
dom.create()
except:
print "Failed to start domain (%s)" % domain
sys.exit(1)
state = dom.state()[0]
if libvirt.VIR_DOMAIN_RUNNING != state:
print 'Domain (%s) in unsupported state (%s)' % (domain, state)
sys.exit(1)
if not is_client_up(hostname):
print "Unable to reach client on host '%s'" % hostname
sys.exit(1)
try:
ss = dom.snapshotCreateXML(xml % domain, 0)
except:
print "Failed to create snapshot of domain (%s)" % domain
sys.exit(1)
conn.close()
return Environment(dom_name=domain, ss_name=ss.getName())
def teardown_test_env(env):
conn = libvirt.open(None)
if conn == None:
print "Failed to open connection to the hypervisor"
sys.exit(1)
try:
dom = conn.lookupByName(env.dom_name)
except:
print "Failed to find domain '%s'" % env.dom_name
sys.exit(1)
try:
ss = dom.snapshotLookupByName(env.ss_name)
dom.revertToSnapshot(ss)
ss.delete(0)
except:
print "Failed to cleanup snapshot of domain (%s)" % env.dom_name
sys.exit(1)
conn.close()
def sha1sum(path):
bs=65536
f = open(path, 'rb')
buf = f.read(bs)
h = hashlib.sha1()
while len(buf) > 0:
h.update(buf)
buf = f.read(bs)
f.close()
return h.hexdigest()
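# Illustrative sketch (not part of the original tool): the chunked read in
# sha1sum() matches hashing the file in one go, without loading a large
# submission into memory. The temporary file is made up for the example.
def _example_sha1sum():
    import tempfile
    fd, path = tempfile.mkstemp()
    os.close(fd)
    with open(path, 'wb') as f:
        f.write('x' * 100000)
    assert sha1sum(path) == hashlib.sha1('x' * 100000).hexdigest()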
def push_file(src, tgt, hostname, port=22):
spath = os.path.expanduser(src)
dpath = os.path.expanduser(tgt)
sname = os.path.basename(spath)
chk_path = "/tmp/%s.sha1sum" % sname
f = open(chk_path, 'w')
f.write("%s %s" % (sha1sum(spath), dpath))
f.close()
ssh = ssh_connect(hostname=hostname, port=port)
scp = paramiko.SFTPClient.from_transport(ssh.get_transport())
scp.put(spath, dpath)
scp.put(chk_path, chk_path)
(stdin, stdout, stderr) = ssh.exec_command("sha1sum -c %s" % chk_path)
chan = stdout.channel
if 0 != chan.recv_exit_status():
        raise Exception("Integrity check failed for '%s'" % dpath)
chan.close()
scp.close()
ssh.close()
def test_code(test, hostname, port=22):
ssh = ssh_connect(hostname=hostname, port=port)
(stdin, stdout, stderr) = ssh.exec_command("sudo python -u %s" % test, 0)
chan = stdout.channel
while not chan.exit_status_ready():
time.sleep(.1)
if chan.recv_ready():
sys.stdout.write(chan.recv(1024))
sys.stdout.flush()
rc = chan.recv_exit_status()
chan.close()
ssh.close()
if 0 > rc or rc > 100:
raise Exception("Test suite returned error code (%s)" % rc)
return rc
def main():
grade = 0
args = parse_prog_input()
sys.stdout.write("Standing up test environment...")
sys.stdout.flush()
env = setup_test_env(args.hostname, args.domain)
print "complete"
try:
try:
sys.stdout.write("Checking syntax...")
sys.stdout.flush()
py_compile.compile(args.code, doraise=True)
except Exception, e:
raise GradingException(str(e))
print "success"
py_compile.compile(args.test, doraise=True)
push_file(args.code, args.code_dest, args.hostname)
push_file(args.test, args.test_dest, args.hostname)
print "Running tests:"
print "----------------------------------------------------------"
grade = test_code(args.test_dest, args.hostname)
print "----------------------------------------------------------"
except GradingException, e:
print str(e)
except Exception, e:
print "Error occurred grading submission (%s)" % str(e)
else:
print "Grade: %s%%" % grade
teardown_test_env(env)
if __name__ == "__main__":
main()
| CrashenX/auto-grader | src/auto-grade.py | Python | agpl-3.0 | 9,526 |
import os
import socket
import subprocess
import signal
import sys
import logging
import time
# - execute a process
#print 'executing process 1...'
#executable='sleep'
#param='3'
#subprocess.call([executable, param])
#print 'executing process 2...'
#os.system('sleep 1')
# - capture SIGINT and other signals
#print 'done.'
def init_logger(filename):
logger = logging.getLogger(filename)
hdlr = logging.FileHandler(filename)
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
logger.setLevel(logging.INFO)
return logger
def touch_file(file_name):
    f = open(file_name, 'w')
    f.close()
# - detect COSACS process running
def is_cosacs_running():
    # try to bind the COSACS control port: if the bind succeeds the port
    # is free (COSACS not running); if it fails, something is listening
    addr = ('127.0.0.1', 8286)
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.bind(addr)
        sock.close()
        return 0
    except:
        return 1
def block_while_file_exists(filename):
while 1:
if os.path.exists(filename):
time.sleep(1)
else:
break
def block_while_cosacs_runs():
while 1:
if is_cosacs_running():
time.sleep(1)
else:
break
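# Illustrative sketch (not part of the original module): the bind test
# above doubles as a liveness probe, so a launcher can wait for COSACS
# to release its control port before continuing.
def _example_cosacs_check():
    if is_cosacs_running():
        print 'port 8286 is busy; waiting for COSACS to exit'
        block_while_cosacs_runs()
    else:
        print 'port 8286 is free - COSACS is not running'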
| ow2-compatibleone/accords-platform | paprocci/test/to-cosacs/miscco.py | Python | apache-2.0 | 1,228 |
import time
import treetests
import pdb
import sys
def speedysort(mylist):
helpspeedysort(mylist, 0, len(mylist))
return mylist
def helpspeedysort(mylist, start, end):
    # in-place quicksort of mylist[start:end], using the first element as
    # the pivot
    if end - start <= 1:
        return mylist
    pivot = start
    pivotval = mylist[pivot]
    for i in xrange(start+1, end):
        if mylist[i] < pivotval:
            # rotate the smaller item into the pivot slot and shift the
            # pivot value one position to the right
            mylist[pivot] = mylist[i]
            mylist[i] = mylist[pivot+1]
            mylist[pivot+1] = pivotval
            pivot += 1
    helpspeedysort(mylist, start, pivot)
    helpspeedysort(mylist, pivot+1, end)
    return mylist
if __name__ == "__main__":
    #quicksort degrades to O(n^2) when the list is already
    #in order, because the first element is always the pivot
sys.setrecursionlimit(10100)
a=range(10000)
start = time.time()
speedysort(a)
stop = time.time()
print "slowest: " + str(stop-start)
a=[]
    #same perfect input as for binary search trees
    #kind of a similar algorithm too
for i in treetests.perfectGenerator(0, 10000):
a.append(i)
start=time.time()
speedysort(a)
stop=time.time()
print "fastest: " + str(stop-start) | sniboboof/data-structures | sortquickly.py | Python | mit | 1,111 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from math import ceil
from django.db import migrations, models
def get_partner(org, user):
return user.partners.filter(org=org, is_active=True).first()
def calculate_totals_for_cases(apps, schema_editor):
from casepro.statistics.models import datetime_to_date
Case = apps.get_model("cases", "Case")
CaseAction = apps.get_model("cases", "CaseAction")
Outgoing = apps.get_model("msgs", "Outgoing")
DailySecondTotalCount = apps.get_model("statistics", "DailySecondTotalCount")
qs = Case.objects.all().order_by("id")
for case in qs:
partner = case.assignee
if case.closed_on is not None:
# we only consider the first time a case was closed, not any subsequent reopenings
close_action = case.actions.filter(action="C").earliest("created_on")
org = close_action.case.org
user = close_action.created_by
partner = close_action.case.assignee
case = close_action.case
day = datetime_to_date(close_action.created_on, close_action.case.org)
# count the time to close on an org level
td = close_action.created_on - case.opened_on
seconds_since_open = ceil(td.total_seconds())
DailySecondTotalCount.objects.create(
day=day, item_type="C", scope="org:%d" % org.pk, count=1, seconds=seconds_since_open
)
# count the time since case was last assigned to this partner till it was closed
if user.partners.filter(id=partner.id).exists():
# count the time since this case was (re)assigned to this partner
try:
action = case.actions.filter(action="A", assignee=partner).latest("created_on")
start_date = action.created_on
except CaseAction.DoesNotExist:
start_date = case.opened_on
td = close_action.created_on - start_date
seconds_since_open = ceil(td.total_seconds())
DailySecondTotalCount.objects.create(
day=day, item_type="C", scope="partner:%d" % partner.pk, count=1, seconds=seconds_since_open
)
# check if responded to
if case.outgoing_messages.exists():
        # count the first response at an org level
first_response = case.outgoing_messages.earliest("created_on")
day = datetime_to_date(first_response.created_on, case.org)
td = first_response.created_on - case.opened_on
seconds_since_open = ceil(td.total_seconds())
DailySecondTotalCount.objects.create(
day=day, item_type="A", scope="org:%d" % case.org.pk, count=1, seconds=seconds_since_open
)
try:
first_response = case.outgoing_messages.filter(partner=partner).earliest("created_on")
except Outgoing.DoesNotExist:
continue
day = datetime_to_date(first_response.created_on, case.org)
# count the first response by this partner
author_action = case.actions.filter(action="O").order_by("created_on").first()
reassign_action = case.actions.filter(action="A", assignee=partner).order_by("created_on").first()
if author_action and get_partner(org, author_action.created_by) != partner:
# only count the time since this case was (re)assigned to this partner
# or cases that were assigned during creation by another partner
if reassign_action:
start_date = reassign_action.created_on
else:
start_date = author_action.created_on
td = first_response.created_on - start_date
seconds_since_open = ceil(td.total_seconds())
DailySecondTotalCount.objects.create(
day=day, item_type="A", scope="partner:%d" % partner.pk, count=1, seconds=seconds_since_open
)
def remove_totals_for_cases(apps, schema_editor):
DailySecondTotalCount = apps.get_model("statistics", "DailySecondTotalCount")
db_alias = schema_editor.connection.alias
DailySecondTotalCount.objects.using(db_alias).filter(item_type="A").delete()
DailySecondTotalCount.objects.using(db_alias).filter(item_type="C").delete()
class Migration(migrations.Migration):
dependencies = [("statistics", "0009_dailysecondtotalcount"), ("cases", "0042_auto_20160805_1003")]
operations = [
# migrations.RunPython(calculate_totals_for_cases, remove_totals_for_cases),
# the reverse migration is commented out because it could remove data created after this migration was run,
# so it should only be used when you know it will do what you want it to do
migrations.RunPython(calculate_totals_for_cases)
]
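# Illustrative sketch (not part of the original migration): the counting
# above stores whole seconds, rounding part-seconds up. The datetimes are
# made up.
def _example_seconds_since_open():
    from datetime import datetime, timedelta
    opened = datetime(2016, 8, 5, 10, 0, 0)
    closed = opened + timedelta(minutes=90, microseconds=1)
    assert ceil((closed - opened).total_seconds()) == 5401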
| praekelt/casepro | casepro/statistics/migrations/0010_existing_case_timings_count.py | Python | bsd-3-clause | 4,962 |
"""invoke task file to build CSS"""
from invoke import task, run
import os
from distutils.version import LooseVersion as V
from subprocess import check_output
pjoin = os.path.join
static_dir = 'static'
components_dir = pjoin(static_dir, 'components')
here = os.path.dirname(__file__)
min_less_version = '1.7.5'
max_less_version = None # exclusive if string
def _need_css_update():
"""Does less need to run?"""
static_path = pjoin(here, static_dir)
css_targets = [
pjoin(static_path, 'style', '%s.min.css' % name)
for name in ('style', 'ipython')
]
css_maps = [t + '.map' for t in css_targets]
targets = css_targets + css_maps
if not all(os.path.exists(t) for t in targets):
# some generated files don't exist
return True
earliest_target = sorted(os.stat(t).st_mtime for t in targets)[0]
# check if any .less files are newer than the generated targets
for (dirpath, dirnames, filenames) in os.walk(static_path):
for f in filenames:
if f.endswith('.less'):
path = pjoin(static_path, dirpath, f)
timestamp = os.stat(path).st_mtime
if timestamp > earliest_target:
return True
return False
@task
def css(minify=False, verbose=False, force=False):
"""generate the css from less files"""
# minify implies force because it's not the default behavior
if not force and not minify and not _need_css_update():
print("css up-to-date")
return
for name in ('style', 'ipython'):
source = pjoin('style', "%s.less" % name)
target = pjoin('style', "%s.min.css" % name)
sourcemap = pjoin('style', "%s.min.css.map" % name)
_compile_less(source, target, sourcemap, minify, verbose)
def _compile_less(source, target, sourcemap, minify=True, verbose=False):
"""Compile a less file by source and target relative to static_dir"""
min_flag = '-x' if minify else ''
ver_flag = '--verbose' if verbose else ''
# pin less to version number from above
try:
out = check_output(['lessc', '--version'])
except OSError as err:
raise ValueError("Unable to find lessc. Please install lessc >= %s and < %s "
% (min_less_version, max_less_version))
out = out.decode('utf8', 'replace')
less_version = out.split()[1]
if min_less_version and V(less_version) < V(min_less_version):
        raise ValueError("lessc too old: %s < %s. Use `$ npm install less@1.7.5` to install a specific version of less" % (
less_version, min_less_version))
if max_less_version and V(less_version) >= V(max_less_version):
        raise ValueError("lessc too new: %s >= %s. Use `$ npm install less@1.7.5` to install a specific version of less" % (
less_version, max_less_version))
static_path = pjoin(here, static_dir)
cwd = os.getcwd()
try:
os.chdir(static_dir)
run('lessc {min_flag} {ver_flag} --source-map={sourcemap} --source-map-basepath={static_path} --source-map-rootpath="../" {source} {target}'.format(**locals()),
echo=True,
)
finally:
os.chdir(cwd)
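# Illustrative sketch (not part of the original tasks file): the same
# LooseVersion guard used above, shown in isolation. The version strings
# are made up.
def _example_version_guard():
    assert V('1.7.5') <= V('2.0.0')
    assert V('1.10') > V('1.9')  # numeric, not lexicographic, comparison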
| mattvonrocketstein/smash | smashlib/ipy3x/html/tasks.py | Python | mit | 3,210 |
# Copyright Iris contributors
#
# This file is part of Iris and is released under the LGPL license.
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
"""
Defines a Trajectory class, and a routine to extract a sub-cube along a
trajectory.
"""
import math
import numpy as np
from scipy.spatial import cKDTree
import iris.analysis
import iris.coord_systems
import iris.coords
from iris.analysis import Linear
from iris.analysis._interpolation import snapshot_grid
from iris.util import _meshgrid
class _Segment:
"""A single trajectory line segment: Two points, as described in the
Trajectory class."""
def __init__(self, p0, p1):
# check keys
if sorted(p0.keys()) != sorted(p1.keys()):
raise ValueError("keys do not match")
self.pts = [p0, p1]
# calculate our length
squares = 0
for key in self.pts[0].keys():
delta = self.pts[1][key] - self.pts[0][key]
squares += delta * delta
self.length = math.sqrt(squares)
class Trajectory:
"""A series of given waypoints with pre-calculated sample points."""
def __init__(self, waypoints, sample_count=10):
"""
Defines a trajectory using a sequence of waypoints.
For example::
waypoints = [{'latitude': 45, 'longitude': -60},
{'latitude': 45, 'longitude': 0}]
Trajectory(waypoints)
.. note:: All the waypoint dictionaries must contain the same
coordinate names.
Args:
* waypoints
A sequence of dictionaries, mapping coordinate names to values.
Kwargs:
* sample_count
The number of sample positions to use along the trajectory.
"""
self.waypoints = waypoints
self.sample_count = sample_count
# create line segments from the waypoints
segments = [
_Segment(self.waypoints[i], self.waypoints[i + 1])
for i in range(len(self.waypoints) - 1)
]
# calculate our total length
self.length = sum([seg.length for seg in segments])
# generate our sampled points
#: The trajectory points, as dictionaries of {coord_name: value}.
self.sampled_points = []
sample_step = self.length / (self.sample_count - 1)
# start with the first segment
cur_seg_i = 0
cur_seg = segments[cur_seg_i]
len_accum = cur_seg.length
for p in range(self.sample_count):
# calculate the sample position along our total length
sample_at_len = p * sample_step
# skip forward to the containing segment
while len_accum < sample_at_len and cur_seg_i < len(segments):
cur_seg_i += 1
cur_seg = segments[cur_seg_i]
len_accum += cur_seg.length
# how far through the segment is our sample point?
seg_start_len = len_accum - cur_seg.length
seg_frac = (sample_at_len - seg_start_len) / cur_seg.length
# sample each coordinate in this segment, to create a new
# sampled point
new_sampled_point = {}
for key in cur_seg.pts[0].keys():
seg_coord_delta = cur_seg.pts[1][key] - cur_seg.pts[0][key]
new_sampled_point.update(
{key: cur_seg.pts[0][key] + seg_frac * seg_coord_delta}
)
# add this new sampled point
self.sampled_points.append(new_sampled_point)
def __repr__(self):
return "Trajectory(%s, sample_count=%s)" % (
self.waypoints,
self.sample_count,
)
def _get_interp_points(self):
"""
Translate `self.sampled_points` to the format expected by the
interpolator.
Returns:
`self.sampled points` in the format required by
`:func:`~iris.analysis.trajectory.interpolate`.
"""
points = {
k: [point_dict[k] for point_dict in self.sampled_points]
for k in self.sampled_points[0].keys()
}
return [(k, v) for k, v in points.items()]
def _src_cube_anon_dims(self, cube):
"""
A helper method to locate the index of anonymous dimensions on the
interpolation target, ``cube``.
Returns:
The index of any anonymous dimensions in ``cube``.
"""
named_dims = [cube.coord_dims(c)[0] for c in cube.dim_coords]
return list(set(range(cube.ndim)) - set(named_dims))
def interpolate(self, cube, method=None):
"""
Calls :func:`~iris.analysis.trajectory.interpolate` to interpolate
``cube`` on the defined trajectory.
Assumes that the coordinate names supplied in the waypoints
dictionaries match to coordinate names in `cube`, and that points are
supplied in the same coord_system as in `cube`, where appropriate (i.e.
for horizontal coordinate points).
Args:
* cube
The source Cube to interpolate.
Kwargs:
* method:
The interpolation method to use; "linear" (default) or "nearest".
Only nearest is available when specifying multi-dimensional
coordinates.
"""
sample_points = self._get_interp_points()
interpolated_cube = interpolate(cube, sample_points, method=method)
# Add an "index" coord to name the anonymous dimension produced by
# the interpolation, if present.
if len(interpolated_cube.dim_coords) < interpolated_cube.ndim:
# Add a new coord `index` to describe the new dimension created by
# interpolating.
index_coord = iris.coords.DimCoord(
range(self.sample_count), long_name="index"
)
# Make sure anonymous dims in `cube` do not mistakenly get labelled
# as the new `index` dimension created by interpolating.
src_anon_dims = self._src_cube_anon_dims(cube)
interp_anon_dims = self._src_cube_anon_dims(interpolated_cube)
(anon_dim_index,) = list(
set(interp_anon_dims) - set(src_anon_dims)
)
# Add the new coord to the interpolated cube.
interpolated_cube.add_dim_coord(index_coord, anon_dim_index)
return interpolated_cube
def interpolate(cube, sample_points, method=None):
"""
Extract a sub-cube at the given n-dimensional points.
Args:
* cube
The source Cube.
* sample_points
A sequence of coordinate (name) - values pairs.
Kwargs:
* method
Request "linear" interpolation (default) or "nearest" neighbour.
Only nearest neighbour is available when specifying multi-dimensional
coordinates.
For example::
sample_points = [('latitude', [45, 45, 45]),
('longitude', [-60, -50, -40])]
interpolated_cube = interpolate(cube, sample_points)
"""
if method not in [None, "linear", "nearest"]:
raise ValueError("Unhandled interpolation specified : %s" % method)
# Convert any coordinate names to coords
points = []
for coord, values in sample_points:
if isinstance(coord, str):
coord = cube.coord(coord)
points.append((coord, values))
sample_points = points
# Do all value sequences have the same number of values?
coord, values = sample_points[0]
trajectory_size = len(values)
for coord, values in sample_points[1:]:
if len(values) != trajectory_size:
raise ValueError("Lengths of coordinate values are inconsistent.")
# Which dimensions are we squishing into the last dimension?
squish_my_dims = set()
for coord, values in sample_points:
dims = cube.coord_dims(coord)
for dim in dims:
squish_my_dims.add(dim)
# Derive the new cube's shape by filtering out all the dimensions we're
# about to sample,
# and then adding a new dimension to accommodate all the sample points.
remaining = [
(dim, size)
for dim, size in enumerate(cube.shape)
if dim not in squish_my_dims
]
new_data_shape = [size for dim, size in remaining]
new_data_shape.append(trajectory_size)
# Start with empty data and then fill in the "column" of values for each
# trajectory point.
new_cube = iris.cube.Cube(np.empty(new_data_shape))
new_cube.metadata = cube.metadata
# Derive the mapping from the non-trajectory source dimensions to their
# corresponding destination dimensions.
remaining_dims = [dim for dim, size in remaining]
dimension_remap = {dim: i for i, dim in enumerate(remaining_dims)}
# Record a mapping from old coordinate IDs to new coordinates,
# for subsequent use in creating updated aux_factories.
coord_mapping = {}
# Create all the non-squished coords
for coord in cube.dim_coords:
src_dims = cube.coord_dims(coord)
if squish_my_dims.isdisjoint(src_dims):
dest_dims = [dimension_remap[dim] for dim in src_dims]
new_coord = coord.copy()
new_cube.add_dim_coord(new_coord, dest_dims)
coord_mapping[id(coord)] = new_coord
for coord in cube.aux_coords:
src_dims = cube.coord_dims(coord)
if squish_my_dims.isdisjoint(src_dims):
dest_dims = [dimension_remap[dim] for dim in src_dims]
new_coord = coord.copy()
new_cube.add_aux_coord(new_coord, dest_dims)
coord_mapping[id(coord)] = new_coord
# Create all the squished (non derived) coords, not filled in yet.
trajectory_dim = len(remaining_dims)
for coord in cube.dim_coords + cube.aux_coords:
src_dims = cube.coord_dims(coord)
if not squish_my_dims.isdisjoint(src_dims):
points = np.array([coord.points.flatten()[0]] * trajectory_size)
new_coord = iris.coords.AuxCoord(
points,
standard_name=coord.standard_name,
long_name=coord.long_name,
units=coord.units,
bounds=None,
attributes=coord.attributes,
coord_system=coord.coord_system,
)
new_cube.add_aux_coord(new_coord, trajectory_dim)
coord_mapping[id(coord)] = new_coord
for factory in cube.aux_factories:
new_cube.add_aux_factory(factory.updated(coord_mapping))
# Are the given coords all 1-dimensional? (can we do linear interp?)
for coord, values in sample_points:
if coord.ndim > 1:
if method == "linear":
msg = (
"Cannot currently perform linear interpolation for "
"multi-dimensional coordinates."
)
raise iris.exceptions.CoordinateMultiDimError(msg)
method = "nearest"
break
if method in ["linear", None]:
for i in range(trajectory_size):
point = [(coord, values[i]) for coord, values in sample_points]
column = cube.interpolate(point, Linear())
new_cube.data[..., i] = column.data
# Fill in the empty squashed (non derived) coords.
for column_coord in column.dim_coords + column.aux_coords:
src_dims = cube.coord_dims(column_coord)
if not squish_my_dims.isdisjoint(src_dims):
if len(column_coord.points) != 1:
msg = "Expected to find exactly one point. Found {}."
raise Exception(msg.format(column_coord.points))
new_cube.coord(column_coord.name()).points[
i
] = column_coord.points[0]
elif method == "nearest":
# Use a cache with _nearest_neighbour_indices_ndcoords()
cache = {}
column_indexes = _nearest_neighbour_indices_ndcoords(
cube, sample_points, cache=cache
)
# Construct "fancy" indexes, so we can create the result data array in
# a single numpy indexing operation.
# ALSO: capture the index range in each dimension, so that we can fetch
# only a required (square) sub-region of the source data.
fancy_source_indices = []
region_slices = []
n_index_length = len(column_indexes[0])
dims_reduced = [False] * n_index_length
for i_ind in range(n_index_length):
contents = [column_index[i_ind] for column_index in column_indexes]
each_used = [content != slice(None) for content in contents]
if np.all(each_used):
# This dimension is addressed : use a list of indices.
dims_reduced[i_ind] = True
# Select the region by min+max indices.
start_ind = np.min(contents)
stop_ind = 1 + np.max(contents)
region_slice = slice(start_ind, stop_ind)
# Record point indices with start subtracted from all of them.
fancy_index = list(np.array(contents) - start_ind)
elif not np.any(each_used):
# This dimension is not addressed by the operation.
# Use a ":" as the index.
fancy_index = slice(None)
# No sub-region selection for this dimension.
region_slice = slice(None)
else:
# Should really never happen, if _ndcoords is right.
msg = (
"Internal error in trajectory interpolation : point "
"selection indices should all have the same form."
)
raise ValueError(msg)
fancy_source_indices.append(fancy_index)
region_slices.append(region_slice)
# Fetch the required (square-section) region of the source data.
# NOTE: This is not quite as good as only fetching the individual
# points used, but it avoids creating a sub-cube for each point,
# which is very slow, especially when points are re-used a lot ...
source_area_indices = tuple(region_slices)
source_data = cube[source_area_indices].data
# Transpose source data before indexing it to get the final result.
# Because.. the fancy indexing will replace the indexed (horizontal)
# dimensions with a new single dimension over trajectory points.
# Move those dimensions to the end *first* : this ensures that the new
# dimension also appears at the end, which is where we want it.
# Make a list of dims with the reduced ones last.
dims_reduced = np.array(dims_reduced)
dims_order = np.arange(n_index_length)
dims_order = np.concatenate(
(dims_order[~dims_reduced], dims_order[dims_reduced])
)
# Rearrange the data dimensions and the fancy indices into that order.
source_data = source_data.transpose(dims_order)
fancy_source_indices = [
fancy_source_indices[i_dim] for i_dim in dims_order
]
# Apply the fancy indexing to get all the result data points.
source_data = source_data[tuple(fancy_source_indices)]
# "Fix" problems with missing datapoints producing odd values
# when copied from a masked into an unmasked array.
# TODO: proper masked data handling.
if np.ma.isMaskedArray(source_data):
# This is **not** proper mask handling, because we cannot produce a
# masked result, but it ensures we use a "filled" version of the
# input in this case.
source_data = source_data.filled()
new_cube.data[:] = source_data
# NOTE: we assign to "new_cube.data[:]" and *not* just "new_cube.data",
# because the existing code produces a default dtype from 'np.empty'
# instead of preserving the input dtype.
# TODO: maybe this should be fixed -- i.e. to preserve input dtype ??
# Fill in the empty squashed (non derived) coords.
column_coords = [
coord
for coord in cube.dim_coords + cube.aux_coords
if not squish_my_dims.isdisjoint(cube.coord_dims(coord))
]
new_cube_coords = [
new_cube.coord(column_coord.name())
for column_coord in column_coords
]
all_point_indices = np.array(column_indexes)
single_point_test_cube = cube[column_indexes[0]]
for new_cube_coord, src_coord in zip(new_cube_coords, column_coords):
# Check structure of the indexed coord (at one selected point).
point_coord = single_point_test_cube.coord(src_coord)
if len(point_coord.points) != 1:
msg = (
"Coord {} at one x-y position has the shape {}, "
"instead of being a single point. "
)
raise ValueError(msg.format(src_coord.name(), src_coord.shape))
# Work out which indices apply to the input coord.
# NOTE: we know how to index the source cube to get a cube with a
# single point for each coord, but this is very inefficient.
# So here, we translate cube indexes into *coord* indexes.
src_coord_dims = cube.coord_dims(src_coord)
fancy_coord_index_arrays = [
list(all_point_indices[:, src_dim])
for src_dim in src_coord_dims
]
# Fill the new coord with all the correct points from the old one.
new_cube_coord.points = src_coord.points[
tuple(fancy_coord_index_arrays)
]
# NOTE: the new coords do *not* have bounds.
return new_cube
def _ll_to_cart(lon, lat):
# Based on cartopy.img_transform.ll_to_cart().
x = np.sin(np.deg2rad(90 - lat)) * np.cos(np.deg2rad(lon))
y = np.sin(np.deg2rad(90 - lat)) * np.sin(np.deg2rad(lon))
z = np.cos(np.deg2rad(90 - lat))
return (x, y, z)
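# Illustrative check (not part of the original module): _ll_to_cart maps
# degree lon/lat onto the unit sphere, so every result has unit length.
def _example_ll_to_cart():
    x, y, z = _ll_to_cart(0.0, 90.0)  # north pole -> (0, 0, 1)
    assert np.allclose((x, y, z), (0.0, 0.0, 1.0))
    x, y, z = _ll_to_cart(90.0, 0.0)  # equator at 90E -> (0, 1, 0)
    assert np.allclose((x, y, z), (0.0, 1.0, 0.0))
    assert np.isclose(x * x + y * y + z * z, 1.0)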
def _cartesian_sample_points(sample_points, sample_point_coord_names):
"""
Replace geographic lat/lon with cartesian xyz.
Generates coords suitable for nearest point calculations with
`scipy.spatial.cKDTree`.
Args:
* sample_points[coord][datum]:
list of sample_positions for each datum, formatted for fast use of
:func:`_ll_to_cart()`.
* sample_point_coord_names[coord]:
list of n coord names
Returns:
list of [x,y,z,t,etc] positions, formatted for kdtree.
"""
# Find lat and lon coord indices
i_lat = i_lon = None
i_non_latlon = list(range(len(sample_point_coord_names)))
for i, name in enumerate(sample_point_coord_names):
if "latitude" in name:
i_lat = i
i_non_latlon.remove(i_lat)
if "longitude" in name:
i_lon = i
i_non_latlon.remove(i_lon)
if i_lat is None or i_lon is None:
return sample_points.transpose()
num_points = len(sample_points[0])
cartesian_points = [None] * num_points
# Get the point coordinates without the latlon
for p in range(num_points):
cartesian_points[p] = [sample_points[c][p] for c in i_non_latlon]
# Add cartesian xyz coordinates from latlon
x, y, z = _ll_to_cart(sample_points[i_lon], sample_points[i_lat])
for p in range(num_points):
cartesian_point = cartesian_points[p]
cartesian_point.append(x[p])
cartesian_point.append(y[p])
cartesian_point.append(z[p])
return cartesian_points
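# For example (illustrative): with coords (time, latitude, longitude), each
# returned position is [t, x, y, z] -- the non-latlon values first, in their
# original order, then the appended cartesian triple derived from lat/lon.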
def _nearest_neighbour_indices_ndcoords(cube, sample_points, cache=None):
"""
Returns the indices to select the data value(s) closest to the given
coordinate point values.
'sample_points' is of the form [[coord-or-coord-name, point-value(s)]*].
The lengths of all the point-values sequences must be equal.
This function is adapted for points sampling a multi-dimensional coord,
and can currently only do nearest neighbour interpolation.
Because this function can be slow for multidimensional coordinates,
a 'cache' dictionary can be provided by the calling code.
.. Note::
If the points are longitudes/latitudes, these are handled correctly as
points on the sphere, but the values must be in 'degrees'.
Developer notes:
A "sample space cube" is made which only has the coords and dims we are
sampling on.
We get the nearest neighbour using this sample space cube.
"""
if sample_points:
try:
coord, value = sample_points[0]
except (KeyError, ValueError):
emsg = (
"Sample points must be a list of "
"(coordinate, value) pairs, got {!r}."
)
raise TypeError(emsg.format(sample_points))
# Convert names to coords in sample_point and reformat sample point values
# for use in `_cartesian_sample_points()`.
coord_values = []
sample_point_coords = []
sample_point_coord_names = []
ok_coord_ids = set(map(id, cube.dim_coords + cube.aux_coords))
for coord, value in sample_points:
coord = cube.coord(coord)
if id(coord) not in ok_coord_ids:
msg = (
"Invalid sample coordinate {!r}: derived coordinates are"
" not allowed.".format(coord.name())
)
raise ValueError(msg)
sample_point_coords.append(coord)
sample_point_coord_names.append(coord.name())
value = np.array(value, ndmin=1)
coord_values.append(value)
coord_point_lens = np.array([len(value) for value in coord_values])
if not np.all(coord_point_lens == coord_point_lens[0]):
msg = "All coordinates must have the same number of sample points."
raise ValueError(msg)
coord_values = np.array(coord_values)
# Which dims are we sampling?
sample_dims = set()
for coord in sample_point_coords:
for dim in cube.coord_dims(coord):
sample_dims.add(dim)
sample_dims = sorted(list(sample_dims))
# Extract a sub cube that lives in just the sampling space.
sample_space_slice = [0] * cube.ndim
for sample_dim in sample_dims:
sample_space_slice[sample_dim] = slice(None, None)
sample_space_slice = tuple(sample_space_slice)
sample_space_cube = cube[sample_space_slice]
# Just the sampling coords.
for coord in sample_space_cube.coords():
        if coord.name() not in sample_point_coord_names:
sample_space_cube.remove_coord(coord)
# Order the sample point coords according to the sample space cube coords.
sample_space_coord_names = [
coord.name() for coord in sample_space_cube.coords()
]
new_order = [
sample_space_coord_names.index(name)
for name in sample_point_coord_names
]
coord_values = np.array([coord_values[i] for i in new_order])
sample_point_coord_names = [sample_point_coord_names[i] for i in new_order]
sample_space_coords = (
sample_space_cube.dim_coords + sample_space_cube.aux_coords
)
sample_space_coords_and_dims = [
(coord, sample_space_cube.coord_dims(coord))
for coord in sample_space_coords
]
if cache is not None and cube in cache:
kdtree = cache[cube]
else:
        # Create a "sample space position" for each datum:
        # sample_space_data_positions[coord_index][datum_index]
sample_space_data_positions = np.empty(
(len(sample_space_coords_and_dims), sample_space_cube.data.size),
dtype=float,
)
for d, ndi in enumerate(np.ndindex(sample_space_cube.data.shape)):
for c, (coord, coord_dims) in enumerate(
sample_space_coords_and_dims
):
# Index of this datum along this coordinate (could be nD).
if coord_dims:
keys = tuple(ndi[ind] for ind in coord_dims)
else:
keys = slice(None, None)
# Position of this datum along this coordinate.
sample_space_data_positions[c][d] = coord.points[keys]
# Convert to cartesian coordinates. Flatten for kdtree compatibility.
cartesian_space_data_coords = _cartesian_sample_points(
sample_space_data_positions, sample_point_coord_names
)
# Create a kdtree for the nearest-distance lookup to these 3d points.
kdtree = cKDTree(cartesian_space_data_coords)
# This can find the nearest datum point to any given target point,
# which is the goal of this function.
# Update cache.
if cache is not None:
cache[cube] = kdtree
# Convert the sample points to cartesian (3d) coords.
# If there is no latlon within the coordinate there will be no change.
# Otherwise, geographic latlon is replaced with cartesian xyz.
cartesian_sample_points = _cartesian_sample_points(
coord_values, sample_point_coord_names
)
# Use kdtree to get the nearest sourcepoint index for each target point.
_, datum_index_lists = kdtree.query(cartesian_sample_points)
# Convert flat indices back into multidimensional sample-space indices.
sample_space_dimension_indices = np.unravel_index(
datum_index_lists, sample_space_cube.data.shape
)
# Convert this from "pointwise list of index arrays for each dimension",
# to "list of cube indices for each point".
sample_space_ndis = np.array(sample_space_dimension_indices).transpose()
# For the returned result, we must convert these indices into the source
# (sample-space) cube, to equivalent indices into the target 'cube'.
# Make a result array: (cube.ndim * <index>), per sample point.
n_points = coord_values.shape[-1]
main_cube_slices = np.empty((n_points, cube.ndim), dtype=object)
# Initialise so all unused indices are ":".
main_cube_slices[:] = slice(None)
# Move result indices according to the source (sample) and target (cube)
# dimension mappings.
for sample_coord, sample_coord_dims in sample_space_coords_and_dims:
# Find the coord in the main cube
main_coord = cube.coord(sample_coord.name())
main_coord_dims = cube.coord_dims(main_coord)
# Fill nearest-point data indices for each coord dimension.
for sample_i, main_i in zip(sample_coord_dims, main_coord_dims):
main_cube_slices[:, main_i] = sample_space_ndis[:, sample_i]
# Return as a list of **tuples** : required for correct indexing usage.
result = [tuple(inds) for inds in main_cube_slices]
return result
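# Usage sketch (illustrative; coord names and index values assumed): for a
# cube with dims (time, y, x) and 2-d latitude/longitude coords over (y, x),
#   inds = _nearest_neighbour_indices_ndcoords(
#       cube, [('latitude', [lat0]), ('longitude', [lon0])])
# returns one tuple per sample point, e.g. [(slice(None), 3, 7)], which can
# be applied directly as cube[inds[0]].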
class UnstructuredNearestNeigbourRegridder:
"""
Encapsulate the operation of :meth:`iris.analysis.trajectory.interpolate`
with given source and target grids.
This is the type used by the :class:`~iris.analysis.UnstructuredNearest`
regridding scheme.
"""
# TODO: cache the necessary bits of the operation so re-use can actually
# be more efficient.
def __init__(self, src_cube, target_grid_cube):
"""
A nearest-neighbour regridder to perform regridding from the source
grid to the target grid.
This can then be applied to any source data with the same structure as
the original 'src_cube'.
Args:
* src_cube:
The :class:`~iris.cube.Cube` defining the source grid.
The X and Y coordinates can have any shape, but must be mapped over
the same cube dimensions.
* target_grid_cube:
A :class:`~iris.cube.Cube`, whose X and Y coordinates specify a
desired target grid.
The X and Y coordinates must be one-dimensional dimension
coordinates, mapped to different dimensions.
All other cube components are ignored.
Returns:
regridder : (object)
A callable object with the interface:
`result_cube = regridder(data)`
where `data` is a cube with the same grid as the original
`src_cube`, that is to be regridded to the `target_grid_cube`.
.. Note::
For latitude-longitude coordinates, the nearest-neighbour distances
are computed on the sphere, otherwise flat Euclidean distances are
used.
The source and target X and Y coordinates must all have the same
coordinate system, which may also be None.
If any X and Y coordinates are latitudes or longitudes, they *all*
must be. Otherwise, the corresponding X and Y coordinates must
have the same units in the source and grid cubes.
"""
# Make a copy of the source cube, so we can convert coordinate units.
src_cube = src_cube.copy()
# Snapshot the target grid and check it is a "normal" grid.
tgt_x_coord, tgt_y_coord = snapshot_grid(target_grid_cube)
# Check that the source has unique X and Y coords over common dims.
if not src_cube.coords(axis="x") or not src_cube.coords(axis="y"):
msg = "Source cube must have X- and Y-axis coordinates."
raise ValueError(msg)
src_x_coord = src_cube.coord(axis="x")
src_y_coord = src_cube.coord(axis="y")
if src_cube.coord_dims(src_x_coord) != src_cube.coord_dims(
src_y_coord
):
msg = (
"Source cube X and Y coordinates must have the same "
"cube dimensions."
)
raise ValueError(msg)
# Record *copies* of the original grid coords, in the desired
# dimension order.
# This lets us convert the actual ones in use to units of "degrees".
self.src_grid_coords = [src_y_coord.copy(), src_x_coord.copy()]
self.tgt_grid_coords = [tgt_y_coord.copy(), tgt_x_coord.copy()]
# Check that all XY coords have suitable coordinate systems and units.
coords_all = [src_x_coord, src_y_coord, tgt_x_coord, tgt_y_coord]
cs = coords_all[0].coord_system
if not all(coord.coord_system == cs for coord in coords_all):
msg = (
"Source and target cube X and Y coordinates must all have "
"the same coordinate system."
)
raise ValueError(msg)
# Check *all* X and Y coords are lats+lons, if any are.
latlons = [
"latitude" in coord.name() or "longitude" in coord.name()
for coord in coords_all
]
if any(latlons) and not all(latlons):
msg = (
"If any X and Y coordinates are latitudes/longitudes, "
"then they all must be."
)
raise ValueError(msg)
self.grid_is_latlon = any(latlons)
if self.grid_is_latlon:
# Convert all XY coordinates to units of "degrees".
# N.B. already copied the target grid, so the result matches that.
for coord in coords_all:
try:
coord.convert_units("degrees")
except ValueError:
msg = (
"Coordinate {!r} has units of {!r}, which does not "
'convert to "degrees".'
)
raise ValueError(
msg.format(coord.name(), str(coord.units))
)
else:
# Check that source and target have the same X and Y units.
if (
src_x_coord.units != tgt_x_coord.units
or src_y_coord.units != tgt_y_coord.units
):
msg = (
"Source and target cube X and Y coordinates must "
"have the same units."
)
raise ValueError(msg)
# Record the resulting grid shape.
self.tgt_grid_shape = tgt_y_coord.shape + tgt_x_coord.shape
# Calculate sample points as 2d arrays, like broadcast (NY,1)*(1,NX).
x_2d, y_2d = _meshgrid(tgt_x_coord.points, tgt_y_coord.points)
# Cast as a "trajectory", to suit the method used.
self.trajectory = (
(tgt_x_coord.name(), x_2d.flatten()),
(tgt_y_coord.name(), y_2d.flatten()),
)
def __call__(self, src_cube):
# Check the source cube X and Y coords match the original.
# Note: for now, this is sufficient to ensure a valid trajectory
# interpolation, but if in future we save + re-use the cache context
# for the 'interpolate' call, we may need more checks here.
# Check the given cube against the original.
x_cos = src_cube.coords(axis="x")
y_cos = src_cube.coords(axis="y")
if (
not x_cos
or not y_cos
or y_cos != [self.src_grid_coords[0]]
or x_cos != [self.src_grid_coords[1]]
):
msg = (
"The given cube is not defined on the same source "
"grid as this regridder."
)
raise ValueError(msg)
# Convert source XY coordinates to degrees if required.
if self.grid_is_latlon:
src_cube = src_cube.copy()
src_cube.coord(axis="x").convert_units("degrees")
src_cube.coord(axis="y").convert_units("degrees")
# Get the basic interpolated results.
result_trajectory_cube = interpolate(
src_cube, self.trajectory, method="nearest"
)
# Reconstruct this as a cube "like" the source data.
# TODO: handle all aux-coords, cell measures ??
# The shape is that of the basic result, minus the trajectory (last)
# dimension, plus the target grid dimensions.
target_shape = result_trajectory_cube.shape[:-1] + self.tgt_grid_shape
data_2d_x_and_y = result_trajectory_cube.data.reshape(target_shape)
# Make a new result cube with the reshaped data.
result_cube = iris.cube.Cube(data_2d_x_and_y)
result_cube.metadata = src_cube.metadata
# Copy all the coords from the trajectory result.
i_trajectory_dim = result_trajectory_cube.ndim - 1
for coord in result_trajectory_cube.dim_coords:
dims = result_trajectory_cube.coord_dims(coord)
if i_trajectory_dim not in dims:
result_cube.add_dim_coord(coord.copy(), dims)
for coord in result_trajectory_cube.aux_coords:
dims = result_trajectory_cube.coord_dims(coord)
if i_trajectory_dim not in dims:
result_cube.add_aux_coord(coord.copy(), dims)
# Add the X+Y grid coords from the grid cube, mapped to the new Y and X
# dimensions, i.e. the last 2.
for i_dim, coord in enumerate(self.tgt_grid_coords):
result_cube.add_dim_coord(coord.copy(), i_dim + i_trajectory_dim)
return result_cube
| pp-mo/iris | lib/iris/analysis/trajectory.py | Python | lgpl-3.0 | 35,223 |
import os
import signal
import yaml
from gi.repository import Gtk
from gi.repository import GLib
from pylsner import gui
class Loader(yaml.Loader):
def __init__(self, stream):
self._root = os.path.split(stream.name)[0]
super(Loader, self).__init__(stream)
def include(self, node):
filename = os.path.join(self._root, self.construct_scalar(node))
with open(filename, 'r') as f:
return yaml.load(f, Loader)
Loader.add_constructor('!include', Loader.include)
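# Example (illustrative): with the '!include' constructor registered above, a
# config file can pull in another YAML document by a path resolved relative to
# the including file, e.g.
#
#   widgets: !include widgets.yml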
def main():
main_win = gui.Window()
reload_config(main_win)
GLib.timeout_add(1, main_win.refresh)
GLib.timeout_add(1000, reload_config, main_win)
signal.signal(signal.SIGINT, signal.SIG_DFL)
Gtk.main()
def reload_config(window):
if 'mtime' not in reload_config.__dict__:
reload_config.mtime = 0
config_path = 'etc/pylsner/config.yml'
config_mtime = os.path.getmtime(config_path)
widgets_path = 'etc/pylsner/widgets.yml'
widgets_mtime = os.path.getmtime(widgets_path)
reload_required = False
if config_mtime > reload_config.mtime:
reload_config.mtime = config_mtime
reload_required = True
if widgets_mtime > reload_config.mtime:
reload_config.mtime = widgets_mtime
reload_required = True
if reload_required:
with open(config_path) as config_file:
config = yaml.load(config_file, Loader)
window.widgets = init_widgets(config, window)
window.refresh(True)
return True
def init_widgets(config, window):
widgets = []
for wid_spec in config['widgets']:
wid = gui.Widget(**wid_spec)
wid.indicator.position[0] = ((window.width / 2)
+ wid.indicator.position[0]
)
wid.indicator.position[1] = ((window.height / 2)
+ (wid.indicator.position[1] * -1)
)
widgets.append(wid)
return widgets
| mrmrwat/pylsner | pylsner/__init__.py | Python | mit | 2,028 |
# -*- coding: utf-8 -*-
from distutils.version import LooseVersion
from classytags.core import Tag, Options
import django
from django import template
from django.core.serializers.json import DjangoJSONEncoder
from django.utils.text import javascript_quote
DJANGO_1_4 = LooseVersion(django.get_version()) < LooseVersion('1.5')
if DJANGO_1_4:
from django.utils import simplejson as json
else:
import json
register = template.Library()
@register.filter
def js(value):
return json.dumps(value, cls=DjangoJSONEncoder)
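# Template usage sketch (illustrative): {{ value|js }} serializes a Python
# object to JSON via DjangoJSONEncoder, so dates/decimals are handled; note
# the result is still subject to Django's normal template autoescaping.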
@register.filter
def bool(value):
if value:
return 'true'
else:
return 'false'
class JavascriptString(Tag):
name = 'javascript_string'
options = Options(
blocks=[
('end_javascript_string', 'nodelist'),
]
)
def render_tag(self, context, **kwargs):
rendered = self.nodelist.render(context)
return u"'%s'" % javascript_quote(rendered.strip())
register.tag(JavascriptString)
| mpetyx/palmdrop | venv/lib/python2.7/site-packages/cms/templatetags/cms_js_tags.py | Python | apache-2.0 | 1,001 |
""" Sahana Eden Module Automated Tests - INV001 Send Items
@copyright: 2011-2012 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from helper import InvTestFunctions
class SendItem(InvTestFunctions):
"""
Inventory Test - Send Workflow (Send items)
@param items: This test sends a specific item to another party.
    This test assumes that test/inv-mngt has been added to prepop
- e.g. via demo/IFRC_Train
@Case: INV001
@TestDoc: https://docs.google.com/spreadsheet/ccc?key=0AmB3hMcgB-3idG1XNGhhRG9QWF81dUlKLXpJaFlCMFE
@Test Wiki: http://eden.sahanafoundation.org/wiki/DeveloperGuidelines/Testing
"""
# -------------------------------------------------------------------------
def test_inv001_send_items(self):
""" Tests for Send Workflow """
user = "admin"
self.login(account="admin", nexturl="inv/send/create")
send_data = [("site_id",
"Timor-Leste Red Cross Society (CVTL) National Warehouse (Warehouse)",
),
("type",
"Internal Shipment",
),
("to_site_id",
"Lospalos Warehouse (Warehouse)",
),
("sender_id",
"Beatriz de Carvalho",
),
("recipient_id",
"Liliana Otilia",
)
]
item_data = [
[("send_inv_item_id",
"Blankets - Australian Red Cross",
"inv_widget",
),
("quantity",
"3",
),
],
]
result = self.send(user, send_data)
send_id = self.send_get_id(result)
for data in item_data:
result = self.track_send_item(user, send_id, data)
# Send the shipment
self.send_shipment(user, send_id)
| gallifrey17/eden | modules/tests/inv/send_item.py | Python | mit | 3,148 |
# vim: set et nosi ai ts=2 sts=2 sw=2:
# coding: utf-8
from __future__ import absolute_import, print_function, unicode_literals
import unittest
from schwa import tokenizer
import six
INPUT = r'''
<h1>This is a page title</h1>
<p>Once upon a time, there was a "sentence" with:</p>
<ul>
<li>One dot point</li>
<li>And another dot point</li>
</ul>
<p>It has a concluding paragraph too. With more than one sentence. Tschüß. 再见。</p>
'''.strip()
class TestCase__callback_object(unittest.TestCase):
METHODS = (
b'begin_sentence',
b'end_sentence',
b'begin_paragraph',
b'end_paragraph',
b'begin_heading',
b'end_heading',
b'begin_list',
b'end_list',
b'begin_item',
b'end_item',
b'begin_document',
b'end_document',
b'add',
b'error',
)
def setUp(self):
self.tokenizer = tokenizer.Tokenizer()
self.expected_called = {m: True for m in TestCase__callback_object.METHODS}
self.expected_called[b'error'] = False
self.maxDiff = 1000
def test_all_methods(self):
class X(object):
def __init__(self):
self.called = {m: False for m in TestCase__callback_object.METHODS}
self.raw = self.norm = None
self.tokens = []
def begin_sentence(self):
self.called[b'begin_sentence'] = True
def end_sentence(self):
self.called[b'end_sentence'] = True
def begin_paragraph(self):
self.called[b'begin_paragraph'] = True
def end_paragraph(self):
self.called[b'end_paragraph'] = True
def begin_heading(self, depth):
self.called[b'begin_heading'] = True
def end_heading(self, depth):
self.called[b'end_heading'] = True
def begin_list(self):
self.called[b'begin_list'] = True
def end_list(self):
self.called[b'end_list'] = True
def begin_item(self):
self.called[b'begin_item'] = True
def end_item(self):
self.called[b'end_item'] = True
def begin_document(self):
self.called[b'begin_document'] = True
def end_document(self):
self.called[b'end_document'] = True
def add(self, begin, raw, norm=None):
self.called[b'add'] = True
if self.raw is None:
self.raw = raw
if self.norm is None:
self.norm = norm
self.tokens.append((begin, raw, norm))
def error(self):
self.called[b'error'] = True
x = X()
self.tokenizer.tokenize(INPUT.encode('utf-8'), dest=x)
self.assertIsNotNone(x.raw)
self.assertIsInstance(x.raw, six.binary_type)
self.assertIsNotNone(x.norm)
self.assertIsInstance(x.norm, six.binary_type)
self.assertDictEqual(self.expected_called, x.called)
self.assertEqual(42, len(x.tokens))
self.assertEqual('再见'.encode('utf-8'), x.tokens[-2][1])
def test_missing_methods(self):
class X(object):
def __init__(self):
self.called = {m: False for m in TestCase__callback_object.METHODS}
self.raw = self.norm = None
self.tokens = []
def add(self, begin, raw, norm=None):
self.called[b'add'] = True
if self.raw is None:
self.raw = raw
if self.norm is None:
self.norm = norm
self.tokens.append((begin, raw, norm))
def error(self):
self.called[b'error'] = True
def unhandled(self, method_name, *args):
self.called[method_name] = True
x = X()
self.tokenizer.tokenize(INPUT.encode('utf-8'), dest=x)
self.assertDictEqual(self.expected_called, x.called)
self.assertEqual(42, len(x.tokens))
def test_only_unhandled(self):
class X(object):
def __init__(self):
self.called = {m: False for m in TestCase__callback_object.METHODS}
self.tokens = []
self.values = []
def unhandled(self, method_name, *args):
self.called[method_name] = True
self.values.append((method_name, args))
if method_name == b'add':
if len(args) == 2:
begin, raw = args
norm = None
else:
begin, raw, norm = args
self.tokens.append((begin, raw, norm))
x = X()
self.tokenizer.tokenize(INPUT.encode('utf-8'), dest=x)
self.assertEqual(70, len(x.values))
self.assertDictEqual(self.expected_called, x.called)
self.assertEqual(42, len(x.tokens))
def test_invalid(self):
class X(object):
pass
x = X()
with self.assertRaises(TypeError):
self.tokenizer.tokenize(INPUT.encode('utf-8'), dest=x)
class X(object):
unhandled = b'this is not callable'
x = X()
with self.assertRaises(TypeError):
self.tokenizer.tokenize(INPUT.encode('utf-8'), dest=x)
| schwa-lab/libschwa-python | tests/test_tokenizer.py | Python | mit | 4,752 |
@micropython.asm_thumb
def f(r0, r1, r2):
push({r0})
push({r1, r2})
pop({r0})
pop({r1, r2})
print(f(0, 1, 2))
| kerneltask/micropython | tests/inlineasm/asmpushpop.py | Python | mit | 128 |
from pyperator.decorators import inport, outport, component, run_once
from pyperator.nodes import Component
from pyperator.DAG import Multigraph
from pyperator.utils import InputPort, OutputPort
import pyperator.components
| baffelli/pyperator | pyperator/__init__.py | Python | mit | 222 |
import mock
from decimal import Decimal as D
from xml.dom.minidom import parseString
from django.test import TestCase
from oscar.apps.payment.datacash.utils import Gateway
class AuthResponseHandlingTests(TestCase):
success_response_xml = """<?xml version="1.0" encoding="UTF-8" ?>
<Response>
<CardTxn>
<authcode>060642</authcode>
<card_scheme>Switch</card_scheme>
<country>United Kingdom</country>
<issuer>HSBC</issuer>
</CardTxn>
<datacash_reference>3000000088888888</datacash_reference>
<merchantreference>1000001</merchantreference>
<mode>LIVE</mode>
<reason>ACCEPTED</reason>
<status>1</status>
<time>1071567305</time>
</Response>"""
def setUp(self):
self.gateway = Gateway(client="DUMMY", password="123456", host="dummyhost.com",)
self.gateway.do_request = mock.Mock()
def test_success_auth_response(self):
self.gateway.do_request.return_value = self.success_response_xml
response = self.gateway.auth(card_number='1000010000000007',
expiry_date='01/12',
merchant_reference='1000001',
currency='GBP',
amount=D('12.99'))
self.assertEquals(1, response['status'])
self.assertEquals('3000000088888888', response['datacash_reference'])
self.assertEquals('1000001', response['merchant_reference'])
self.assertEquals('060642', response['auth_code'])
self.assertEquals('ACCEPTED', response['reason'])
def test_success_pre_response(self):
self.gateway.do_request.return_value = self.success_response_xml
response = self.gateway.pre(card_number='1000010000000007',
expiry_date='01/12',
merchant_reference='1000001',
currency='GBP',
amount=D('12.99'))
self.assertEquals(1, response['status'])
self.assertEquals('3000000088888888', response['datacash_reference'])
self.assertEquals('1000001', response['merchant_reference'])
self.assertEquals('060642', response['auth_code'])
self.assertEquals('ACCEPTED', response['reason'])
def test_declined_auth_response(self):
response_xml = """<?xml version="1.0" encoding="UTF-8" ?>
<Response>
<CardTxn>
<authcode>DECLINED</authcode>
<card_scheme>Mastercard</card_scheme>
<country>United Kingdom</country>
</CardTxn>
<datacash_reference>4400200045583767</datacash_reference>
<merchantreference>AA004630</merchantreference>
<mode>TEST</mode>
<reason>DECLINED</reason>
<status>7</status>
<time>1169223906</time>
</Response>"""
self.gateway.do_request.return_value = response_xml
response = self.gateway.auth(card_number='1000010000000007',
expiry_date='01/12',
merchant_reference='1000001',
currency='GBP',
amount=D('12.99'))
self.assertEquals(7, response['status'])
self.assertEquals('4400200045583767', response['datacash_reference'])
self.assertEquals('AA004630', response['merchant_reference'])
self.assertEquals('DECLINED', response['auth_code'])
self.assertEquals('DECLINED', response['reason'])
def test_successful_cancel_response(self):
response_xml = """<?xml version="1.0" encoding="UTF-8" ?>
<Response>
<datacash_reference>4900200000000001</datacash_reference>
<merchantreference>4900200000000001</merchantreference>
<mode>TEST</mode>
<reason>CANCELLED OK</reason>
<status>1</status>
<time>1151567456</time>
</Response>"""
self.gateway.do_request.return_value = response_xml
response = self.gateway.cancel(txn_reference='4900200000000001')
self.assertEquals(1, response['status'])
self.assertEquals('4900200000000001', response['datacash_reference'])
self.assertEquals('4900200000000001', response['merchant_reference'])
self.assertEquals('CANCELLED OK', response['reason'])
def test_successful_fulfil_response(self):
response_xml = """<?xml version="1.0" encoding="UTF-8" ?>
<Response>
<datacash_reference>3900200000000001</datacash_reference>
<merchantreference>3900200000000001</merchantreference>
<mode>LIVE</mode>
<reason>FULFILLED OK</reason>
<status>1</status>
<time>1071567356</time>
</Response>
"""
self.gateway.do_request.return_value = response_xml
response = self.gateway.fulfil(txn_reference='3900200000000001',
merchant_reference='1000001',
currency='GBP',
amount=D('12.99'),
auth_code='asdf')
self.assertEquals(1, response['status'])
self.assertEquals('3900200000000001', response['datacash_reference'])
self.assertEquals('3900200000000001', response['merchant_reference'])
self.assertEquals('FULFILLED OK', response['reason'])
| aykut/django-oscar | oscar/apps/payment/tests/datacash_responses.py | Python | bsd-3-clause | 5,345 |
from django.conf.urls import include, url
from django.views.generic.base import TemplateView
from rest_framework import routers
from . import views
router = routers.DefaultRouter()
router.register(r'users', views.UserViewSet)
router.register(r'groups', views.GroupViewSet)
router.register(r'books', views.BookViewSet)
router.register(r'tags', views.TagViewSet)
router.register(r'categories', views.CategoryViewSet)
urlpatterns = [
url(r'^api/v1/', include(router.urls)),
url(r'^api/v1/scan', views.scan, name='scan'),
url(r'^api/v1/createBook', views.createBook, name='createBook'),
url(r'^api/v1/startBook', views.startBook, name='startBook'),
url(r'^api/v1/stopBook', views.stopBook, name='stopBook'),
url(r'^api/v1/test', views.test, name='test'),
url(r'^api/v1/autoScan', views.auto_scan, name='auto_scan'),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
| narsi84/digilib | gui/urls.py | Python | mit | 929 |
class Solution:
def longestMountain(self, A: List[int]) -> int:
if not A or len(A) < 3:
return 0
result = 0
for i in range(1, len(A)-1):
total = 1
if A[i-1] < A[i] and A[i] > A[i+1]:
pre = i - 1
nxt = i + 1
total += 2
while pre > 0 and A[pre] > A[pre-1]:
pre -= 1
total += 1
while nxt < (len(A) - 1) and A[nxt] > A[nxt+1]:
nxt += 1
total += 1
if total > result:
result = total
return result
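# Worked example (illustrative): A = [2,1,4,7,3,2,5] -> 5, the longest
# mountain being [1,4,7,3,2]: each strict peak A[i-1] < A[i] > A[i+1] is
# expanded outward in both directions and the best total length is kept.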
| MingfeiPan/leetcode | two_pointers/845.py | Python | apache-2.0 | 767 |
from PyQt5 import QtCore, QtGui, QtWidgets
import os
import scanner
import utils
class MainWindow(QtWidgets.QMainWindow):
def __init__(self, root, parent=None):
super(MainWindow, self).__init__(parent)
datadir = os.path.dirname(os.path.abspath(__file__))
self.setWindowIcon(QtGui.QIcon(os.path.join(datadir, 'icon.png')))
self.root = root
self.setWindowTitle(root)
self.setMinimumSize(QtCore.QSize(800, 600))
self.timer = QtCore.QTimer(self)
self.timer.timeout.connect(self.update)
self.timer.start(2000)
self.fileTable = QtWidgets.QTableWidget(self)
self.fileTable.setColumnCount(3)
# self.fileTable.horizontalHeader().setResizeMode(2, QtWidgets.QHeaderView.Stretch)
self.fileTable.setHorizontalHeaderLabels(['File', 'Status', 'Comment'])
self.fileTable.cellDoubleClicked.connect(self.doubleClickItem)
self.dataDict = {}
self.update()
self.setCentralWidget(self.fileTable)
self.dock = QtWidgets.QDockWidget("Tools", self)
self.dock.setAllowedAreas(QtCore.Qt.TopDockWidgetArea | QtCore.Qt.BottomDockWidgetArea)
import tools
self.dock.setWidget(tools.ToolsDialog(self, self.fileTable))
self.dock.setMinimumHeight(150)
self.addDockWidget(QtCore.Qt.BottomDockWidgetArea, self.dock)
self.dock.widget().show()
self.setupMenu()
def setupMenu(self):
m = self.menuBar().addMenu("&Settings")
m.addAction(QtWidgets.QAction('&General', self, triggered=self.generalSettings))
def generalSettings(self):
from tools import GeneralSettingsDialog
d = GeneralSettingsDialog()
d.exec_()
def resizeEvent(self, event):
s = event.size()
newSize = self.fileTable.size()
newSize.setWidth(s.width())
self.fileTable.resize(newSize)
def update(self):
self.updating = True
newRows = scanner.scan(self.root, self.dataDict)
modified = False
if len(newRows) < len(self.dataDict):
modified = True
else:
for row in newRows:
if row[0] in self.dataDict:
if row != self.dataDict.get(row[0]):
modified = True
break
else:
modified = True
break
if modified:
self.dataDict = {}
self.fileTable.setRowCount(len(newRows))
flags = [QtCore.Qt.ItemIsUserCheckable, 0, QtCore.Qt.ItemIsEditable]
for j in range(0, 3):
flags[j] = flags[j] | QtCore.Qt.ItemIsEnabled
for i in range(0, len(newRows)):
row = newRows[i]
for j in range(0, 3):
item = QtWidgets.QTableWidgetItem(row[j])
item.setFlags(flags[j])
if j == 0:
item.setCheckState(QtCore.Qt.Unchecked)
self.fileTable.setItem(i, j, item)
self.dataDict[row[0]] = row
self.fileTable.resizeColumnToContents(0)
self.fileTable.resizeColumnToContents(1)
self.updating = False
def doubleClickItem(self, row, col):
if col == 0:
s = QtCore.QSettings()
            # QSettings.value returns a plain Python object under PyQt5;
            # request a str directly instead of the PyQt4-era QVariant API.
            diff = s.value('diff', '', type=str)
if diff:
# xterm=(s.value('xterm')=='True')
cmdlist = diff.split(' ')
filename = self.fileTable.item(row, col).text()
# cmdlist.append("git diff {} ; echo Press Enter to close ; read".format(filename))
cmdlist.append(filename)
# print cmdlist
utils.runcmd(self.root, cmdlist)
| amirgeva/commit | mainwindow.py | Python | gpl-2.0 | 3,793 |
# coding: utf-8
# pylint: disable=too-few-public-methods, no-self-use, missing-docstring, unused-argument
from flask_restful import reqparse, Resource
from flask import abort
from main import API
import task
import config
from api.helpers import ArgumentValidator, make_empty_ok_response
from api.decorators import verify_captcha
from model import UserValidator
@API.resource('/api/v1/feedback')
class FeedbackAPI(Resource):
@verify_captcha('feedbackForm')
def post(self):
"""Sends feedback email to admin"""
if not config.CONFIG_DB.feedback_email:
return abort(418)
parser = reqparse.RequestParser()
parser.add_argument('message', type=ArgumentValidator.create('feedback'), required=True)
parser.add_argument('email', type=UserValidator.create('email', required=False))
args = parser.parse_args()
body = '%s\n\n%s' % (args.message, args.email)
kwargs = {'reply_to': args.email} if args.email else {}
task.send_mail_notification('%s...' % body[:48].strip(), body, **kwargs)
return make_empty_ok_response()
| madvas/gae-angular-material-starter | main/api/v1/feedback_api.py | Python | mit | 1,108 |
# Copyright 2020 Camptocamp SA
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl)
from odoo.exceptions import UserError
from odoo.tests import SavepointCase
class TestSaleOrderLinePackagingQty(SavepointCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.env = cls.env(context=dict(cls.env.context, tracking_disable=True))
cls.partner = cls.env.ref("base.res_partner_12")
cls.product = cls.env.ref("product.product_product_9")
cls.packaging = cls.env["product.packaging"].create(
{"name": "Test packaging", "product_id": cls.product.id, "qty": 5.0}
)
cls.product_no_pckg = cls.env.ref("product.product_product_10")
def test_product_packaging_qty(self):
order = self.env["sale.order"].create({"partner_id": self.partner.id})
order_line = self.env["sale.order.line"].create(
{
"order_id": order.id,
"product_id": self.product.id,
"product_uom": self.product.uom_id.id,
"product_uom_qty": 3.0,
}
)
order_line.write({"product_packaging": self.packaging})
# This is what actually happened when user adds so line
order_line._onchange_product_uom_qty()
order_line._compute_product_packaging_qty()
order_line._onchange_product_packaging()
order_line._onchange_product_packaging_qty()
self.assertEqual(order_line.product_uom_qty, 5.0)
self.assertEqual(order_line.product_packaging_qty, 1.0)
order_line.write({"product_packaging_qty": 3.0})
order_line._onchange_product_packaging_qty()
self.assertEqual(order_line.product_uom_qty, 15.0)
order_line.write({"product_uom_qty": 9.0})
self.assertEqual(order_line.product_packaging_qty, 0.0)
order_line.write({"product_uom_qty": 15.0})
dozen = self.env.ref("uom.product_uom_dozen")
order_line.product_uom = dozen
order_line._compute_product_packaging_qty()
self.assertEqual(order_line.product_uom_qty, 180.0)
self.assertEqual(order_line.product_uom, self.product.uom_id)
self.packaging.qty = 0
with self.assertRaises(UserError):
order_line.write({"product_packaging_qty": 0.0})
order_line.product_packaging = False
order_line._onchange_product_packaging()
self.assertEqual(order_line.product_packaging_qty, 0.0)
def test_product_packaging_qty_wo_packaging(self):
order = self.env["sale.order"].create({"partner_id": self.partner.id})
order_line = self.env["sale.order.line"].create(
{
"order_id": order.id,
"product_id": self.product_no_pckg.id,
"product_uom": self.product_no_pckg.uom_id.id,
"product_uom_qty": 3.0,
}
)
order_line._compute_product_packaging_qty()
self.assertEqual(order_line.product_uom_qty, 3.0)
self.assertEqual(order_line.product_packaging_qty, 0.0)
with self.assertRaises(UserError):
order_line.write({"product_packaging_qty": 3.0})
def test_product_packaging_qty_from_external(self):
"""The previous ones have product_uom_qty of 3, which is less than and
not divisible by packaging qty of 5. This test is to increase coverage
for the case that product_uom_qty of 15 is divisible by 5.
"""
order = self.env["sale.order"].create({"partner_id": self.partner.id})
order_line = self.env["sale.order.line"].create(
{
"order_id": order.id,
"product_id": self.product.id,
"product_uom": self.product.uom_id.id,
"product_uom_qty": 15.0,
}
)
order_line.write({"product_packaging": self.packaging})
order_line._onchange_product_packaging()
self.assertEqual(order_line.product_packaging_qty, 3.0)
| OCA/sale-workflow | sale_order_line_packaging_qty/tests/test_sale_order_line_packaging_qty.py | Python | agpl-3.0 | 4,006 |
import json
import time
from _md5 import md5
import requests
import RolevPlayer as r
def now_playing_last_fm(artist, track):
update_now_playing_sig = md5(("api_key" + r.API_KEY +
"artist" + artist +
"method" + "track.updateNowPlaying" +
"sk" + r.SK +
"track" + track +
r.SECRET).encode('utf-8')).hexdigest()
url = "http://ws.audioscrobbler.com/2.0/?method=track.updateNowPlaying" + \
"&api_key=" + r.API_KEY + \
"&api_sig=" + update_now_playing_sig + \
"&artist=" + artist + \
"&format=json" + \
"&sk=" + r.SK + \
"&track=" + track
req = requests.post(url).text
json_obj = json.loads(req)
def scrobble(artist, track):
    # this gives us a Unix timestamp, cast to integer
    # (Last.fm expects whole seconds)
    ts = int(time.time())
scrobbling_sig = md5(("api_key" + r.API_KEY +
"artist" + artist +
"method" + "track.scrobble" +
"sk" + r.SK +
"timestamp" + str(ts) +
"track" + track +
r.SECRET).encode('utf-8')).hexdigest()
    req = requests.post(
        "http://ws.audioscrobbler.com/2.0/?method=track.scrobble" +
        "&api_key=" + r.API_KEY +
        "&api_sig=" + scrobbling_sig +
        "&artist=" + artist +
        "&format=json" +
        "&sk=" + r.SK +
        "&timestamp=" + str(ts) +
        "&track=" + track).text
json_obj = json.loads(req)
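# Signature note (per the Last.fm API docs): api_sig is the md5 hex digest of
# all call parameters sorted alphabetically by name, concatenated as
# name + value pairs, with the shared secret appended -- which is why the
# strings above interleave keys and values in alphabetical order.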
| SimeonRolev/RolevPlayerQT | RolevPlayer/Scrobbler.py | Python | gpl-3.0 | 1,642 |
# Jiao Lin <[email protected]>
"""
make phonon Density of States nicer by fitting starting curve to a parabolic
"""
import numpy as np
def nice_dos(E, g):
    # .. increase number of points if necessary
if len(E) < 500:
dE = E[-1]/500.
E1 = np.arange(0, E[-1], dE)
g1 = np.interp(E1, E, g)
E, g = E1, g1
# .. fit parabolic
try:
E,g = fitparabolic(E,g)
except ParabolicFittingError:
g = smooth(g, window_len=21)
E,g = fitparabolic(E,g)
# normalize
g /= g.sum() * (E[1] - E[0])
return E,g
class ParabolicFittingError(Exception): pass
def fitparabolic(E, g, N=100, minN = 20):
"""fit the near zero portion of the dos curve to parabolic
"""
"""
NOTE TO DEVELOPER:
      default minN matches the minimum requirement for
number of "fittable" points in the low-E region
in c++ implementation of DOS curve.
"""
def fit(N):
E1 = E[:N]; g1 = g[:N]
return linear_regression(E1*E1, g1)
badfit = True
while N > minN:
c, R2 = fit(N)
if R2 < 0.9: N-=1
else: badfit = False; break
continue
if badfit:
# import pylab; pylab.plot(E, g); pylab.show()
raise ParabolicFittingError("Unable to fit DOS to parabolic")
    print("DOS: fit first %s points to parabolic" % N)
E1 = E[:N]
g[:N] = c * E1*E1
return E,g
def linear_regression(x,y):
""" fit y = cx. return c and R**2
"""
c = (x*y).sum() / (x*x).sum()
y_ave = np.average(y)
SS_tot = ((y - y_ave)**2).sum()
SS_err = ((y - c*x)**2).sum()
R2 = 1-SS_err/SS_tot
return c, R2
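# Worked example (illustrative): x = [1, 2, 3], y = [2, 4, 6] gives
# c = (2 + 8 + 18) / (1 + 4 + 9) = 2 and a perfect fit, so R2 = 1.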
# copied from
# http://www.scipy.org/Cookbook/SignalSmooth
def smooth(x,window_len=11,window='hanning'):
"""smooth the data using a window with requested size.
This method is based on the convolution of a scaled window with the signal.
The signal is prepared by introducing reflected copies of the signal
(with the window size) in both ends so that transient parts are minimized
in the begining and end part of the output signal.
input:
x: the input signal
window_len: the dimension of the smoothing window; should be an odd integer
window: the type of window from 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'
flat window will produce a moving average smoothing.
output:
the smoothed signal
example:
    t=linspace(-2,2,50)
x=sin(t)+randn(len(t))*0.1
y=smooth(x)
see also:
numpy.hanning, numpy.hamming, numpy.bartlett, numpy.blackman, numpy.convolve
scipy.signal.lfilter
TODO: the window parameter could be the window itself if an array instead of a string
NOTE: length(output) != length(input), to correct this: return y[(window_len/2-1):-(window_len/2)] instead of just y.
"""
import numpy
if x.ndim != 1:
raise ValueError("smooth only accepts 1 dimension arrays.")
if x.size < window_len:
raise ValueError("Input vector needs to be bigger than window size.")
if window_len<3:
return x
if not window in ['flat', 'hanning', 'hamming', 'bartlett', 'blackman']:
        raise ValueError("Window must be one of 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'")
s=numpy.r_[x[window_len-1:0:-1],x,x[-1:-window_len:-1]]
#print(len(s))
if window == 'flat': #moving average
w=numpy.ones(window_len,'d')
else:
w=eval('numpy.'+window+'(window_len)')
y=numpy.convolve(w/w.sum(),s,mode='valid')
return y[(window_len//2-1):-(window_len//2)-1]
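# Usage sketch (illustrative): the slice above keeps len(output) == len(input):
#   t = numpy.linspace(-2, 2, 50)
#   y = smooth(numpy.sin(t) + numpy.random.randn(50) * 0.1, window_len=11)
#   assert len(y) == 50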
# End of file
| sns-chops/multiphonon | multiphonon/dos/nice.py | Python | mit | 3,668 |
# Python example script that uses the RcalculatorFilter to compute the mean
# vertex degree of the input graph and the distance from the mean for each
# vertex in the entire graph. The computed result is then used to label
# the displayed graph in VTK.
# VTK must be built with VTK_USE_GNU_R turned on for this example to work!
from vtk import *
if __name__ == "__main__":
# Generate a random graph with 20 vertices and a random number of edges
source = vtkRandomGraphSource()
source.SetNumberOfVertices(20)
source.SetEdgeProbability(0.1)
source.SetUseEdgeProbability(True)
source.SetStartWithTree(True)
source.IncludeEdgeWeightsOn()
source.AllowParallelEdgesOn()
# Connect to the vtkVertexDegree filter to compute vertex degree
degree_filter = vtkVertexDegree()
degree_filter.SetOutputArrayName("vertex_degree")
degree_filter.SetInputConnection(source.GetOutputPort())
# Pass the vertex degree data to R and compute the distance from the mean
# vertex degree for every vertex in the graph.
rcalculator = vtkRCalculatorFilter()
rcalculator.SetInputConnection(degree_filter.GetOutputPort())
# Shows R output on the terminal
rcalculator.SetRoutput(1)
# Copy vertex_degree array to R as variable vd
rcalculator.PutArray("vertex_degree","vd")
# Run R script to perform mean and distance calculation for each vertex
rcalculator.SetRscript("vd = abs(mean(vd) - vd)\n")
# Copy new vertex degree array back into the VTK graph
rcalculator.GetArray("vertex_degree_mean_dist","vd")
# Create a graph layout view
view = vtkGraphLayoutView()
view.AddRepresentationFromInputConnection(rcalculator.GetOutputPort())
view.SetVertexLabelArrayName("vertex_degree_mean_dist")
view.SetVertexLabelVisibility(True)
view.SetVertexColorArrayName("vertex_degree_mean_dist")
view.SetColorVertices(True)
view.SetLayoutStrategyToSimple2D()
view.SetVertexLabelFontSize(14)
view.SetEdgeLabelFontSize(12)
# Set the theme on the view
theme = vtkViewTheme.CreateMellowTheme()
theme.SetLineWidth(5)
theme.SetPointSize(10)
theme.SetCellOpacity(1)
theme.SetSelectedCellColor(1,0,1)
view.ApplyViewTheme(theme)
theme.FastDelete()
view.GetRenderWindow().SetSize(600, 600)
view.ResetCamera()
view.Render()
view.GetInteractor().Start()
| berendkleinhaneveld/VTK | Examples/Infovis/Python/Rcalculator_vd.py | Python | bsd-3-clause | 2,298 |
"""
This is an example plugin about how to use triggers
## Using
### Add the regex
* ```self.api('triggers.add')('testtrig', "^some test$")```
### Register a function to the event
* ```self.api('events.register')('trigger_testtrig', somefunc)```
"""
from plugins._baseplugin import BasePlugin
NAME = 'Trigger Example'
SNAME = 'triggerex'
PURPOSE = 'examples for using triggers'
AUTHOR = 'Bast'
VERSION = 1
AUTOLOAD = False
class Plugin(BasePlugin):
"""
a plugin to show how to use triggers
"""
def __init__(self, *args, **kwargs):
"""
initialize the instance
"""
BasePlugin.__init__(self, *args, **kwargs)
def load(self):
"""
load the plugins
"""
BasePlugin.load(self)
self.api('triggers.add')(
'example_trigger',
r"^(?P<name>.*) flicks a (?P<insect>.*) off his bar\.$")
self.api('events.register')('trigger_example_trigger', self.testtrigger)
def testtrigger(self, args):
"""
show that the trigger fired
"""
self.api('send.client')('Trigger fired: args returned %s' % args)
| endavis/bastproxy | plugins/example/triggerex.py | Python | gpl-2.0 | 1,063 |
# -*- encoding: utf-8 -*-
from openerp import models, fields, api
class Wizard(models.TransientModel):
_name = 'openacademy.wizard'
def _default_sessions(self):
return self.env['openacademy.session'].browse(self._context.get('active_ids'))
session_ids = fields.Many2many('openacademy.session',
string="Sessions", required=True, default=_default_sessions)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
@api.multi
def subscribe(self):
for session in self.session_ids:
session.attendee_ids |= self.attendee_ids
        return {}
| cmexiatyp/open_academy | model/wizard.py | Python | apache-2.0 | 611 |
# LANGUAGE: Python
# ENV: Python3.5
# AUTHOR: Artur Baruchi
# GITHUB: https://github.com/abaruchi
a = "Hello"
b = "World"
print("{} {}".format(a, b))
| bikoheke/hacktoberfest | scripts/hello_world_abaruchi.py | Python | gpl-3.0 | 151 |
#!/usr/bin/env python
## \file square.py
# \brief Python script for creating grid for freesurface channels.
# \author Aerospace Design Laboratory (Stanford University) <http://su2.stanford.edu>.
# \version 2.0.6
#
# Stanford University Unstructured (SU2) Code
# Copyright (C) 2012 Aerospace Design Laboratory
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from optparse import OptionParser
parser=OptionParser()
parser.add_option("-f", "--file", dest="filename", default="Channel.su2",
help="write mesh to FILE", metavar="FILE")
parser.add_option("-n", "--nNode", dest="nNode", default=125,
help="use this NNODE in x direction", metavar="NNODE")
parser.add_option("-m", "--mNode", dest="mNode", default=100,
help="use this MNODE in y direction", metavar="MNODE")
parser.add_option("-x", "--xLength", dest="xLength", default=7.0,
help="use this XLENGTH", metavar="XLENGTH")
parser.add_option("--offsetx", dest="offsetx", default=1.0,
help="use this OFFSETX", metavar="OFFSETX")
parser.add_option("--xAdapt", dest="xAdapt", default="True",
help="Adapt the grid XADAPT", metavar="XADAPT")
parser.add_option("-y", "--yLength", dest="yLength", default=1.0,
help="use this YLENGTH", metavar="YLENGTH")
parser.add_option("--offsety", dest="offsety", default=0.5,
help="use this OFFSETY", metavar="OFFSETY")
parser.add_option("--yAdapt", dest="yAdapt", default="True",
help="Adapt the grid YADAPT", metavar="YADAPT")
(options, args)=parser.parse_args()
nNode = int(options.nNode)
mNode = int(options.mNode)
xLength = float(options.xLength)
yLength = float(options.yLength)
xAdapt = options.xAdapt
yAdapt = options.yAdapt
Mesh_File = open(options.filename,"w")
Mesh_File.write( "%\n" )
Mesh_File.write( "% Problem dimension\n" )
Mesh_File.write( "%\n" )
Mesh_File.write( "NDIME=2\n" )
Mesh_File.write( "%\n" )
Mesh_File.write( "% Inner elements\n" )
Mesh_File.write( "%\n" )
Mesh_File.write( "NELEM=%s\n" % ((nNode-1)*(mNode-1)))
iElem = 0
for jNode in range(mNode-1):
for iNode in range(nNode-1):
iPoint = jNode*nNode + iNode
jPoint = jNode*nNode + iNode + 1
kPoint = (jNode + 1)*nNode + iNode
mPoint = (jNode + 1)*nNode + (iNode + 1)
Mesh_File.write( "9 \t %s \t %s \t %s \t %s \t %s\n" % (iPoint, jPoint, mPoint, kPoint, iElem) )
iElem = iElem + 1
nPoint = (nNode)*(mNode)
Mesh_File.write( "%\n" )
Mesh_File.write( "NPOIN=%s\n" % ((nNode)*(mNode)) )
iPoint = 0
for jNode in range(mNode):
for iNode in range(nNode):
xCoord = float(iNode)/float(nNode-1)
yCoord = float(jNode)/float(mNode-1)
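        # Grid stretching (sketch of the scheme below): each uniform coordinate
        # is remapped through a cubic x' = a + b*x + c*x^2 + d*x^3 per end
        # segment, with coefficients chosen to match a prescribed slope (fp)
        # and curvature (fpp) where the segment joins the identity-mapped
        # middle band; after rescaling to [0,1], the steep end segments
        # (fp = 10) coarsen spacing at the ends and cluster nodes in the band.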
if xAdapt == "True":
x0 = 0.0; x1 = 0.12; x2 = 0.30; x3 = 1.0;
fp3 = 10.0; fp0 = fp3; fpp2 = 0.0; fpp1 = -fpp2;
if (xCoord >= x0) and (xCoord < x1):
incr = x1-x0; incr2 = x1*x1-x0*x0; incr3 = x1*x1*x1-x0*x0*x0
d = ((1.0 - fp0) - fpp1*incr)/ (3.0*incr2-6.0*x1*incr); c = (fpp1/2.0) -3.0*d*x1
b = 1.0 - 2.0*c*x1 - 3.0*d*x1*x1; a = x1 - b*x1-c*x1*x1-d*x1*x1*x1
x = a + b*xCoord + c*xCoord*xCoord + d*xCoord*xCoord*xCoord
if (xCoord >= x1) and (xCoord <= x2):
x = xCoord
if (xCoord > x2) and (xCoord <= x3):
incr = x2-x3; incr2 = x2*x2-x3*x3; incr3 = x2*x2*x2-x3*x3*x3
d = ((1.0 - fp3) - fpp2*incr)/ (3.0*incr2-6.0*x2*incr); c = (fpp2/2.0) - 3.0*d*x2
b = 1.0 - 2.0*c*x2 - 3.0*d*x2*x2; a = x2 - b*x2-c*x2*x2-d*x2*x2*x2
x = a + b*xCoord + c*xCoord*xCoord + d*xCoord*xCoord*xCoord
incr = x1-x0; incr2 = x1*x1-x0*x0; incr3 = x1*x1*x1-x0*x0*x0
d = ((1.0 - fp0) - fpp1*incr)/ (3.0*incr2-6.0*x1*incr); c = (fpp1/2.0) -3.0*d*x1
b = 1.0 - 2.0*c*x1 - 3.0*d*x1*x1; a = x1 - b*x1-c*x1*x1-d*x1*x1*x1
x_min = a + b*x0 + c*x0*x0 + d*x0*x0*x0
incr = x2-x3; incr2 = x2*x2-x3*x3; incr3 = x2*x2*x2-x3*x3*x3
d = ((1.0 - fp3) - fpp2*incr)/ (3.0*incr2-6.0*x2*incr); c = (fpp2/2.0) - 3.0*d*x2
b = 1.0 - 2.0*c*x2 - 3.0*d*x2*x2; a = x2 - b*x2-c*x2*x2-d*x2*x2*x2
x_max = a + b*x3 + c*x3*x3 + d*x3*x3*x3
xCoord_new = xLength*((x-x_min)/(x_max-x_min)) - float(options.offsetx)
else:
xCoord_new = xLength*xCoord - float(options.offsetx)
if yAdapt == "True":
y0 = 0.0; y1 = 0.000001; y2 = 0.001; y3 = 1.0;
fp3 = 10.0; fp0 = fp3; fpp2 = 0.0; fpp1 = -fpp2;
if (yCoord >= y0) and (yCoord < y1):
incr = y1-y0; incr2 = y1*y1-y0*y0; incr3 = y1*y1*y1-y0*y0*y0
d = ((1.0 - fp0) - fpp1*incr)/ (3.0*incr2-6.0*y1*incr); c = (fpp1/2.0) -3.0*d*y1
b = 1.0 - 2.0*c*y1 - 3.0*d*y1*y1; a = y1 - b*y1-c*y1*y1-d*y1*y1*y1
y = a + b*yCoord + c*yCoord*yCoord + d*yCoord*yCoord*yCoord
if (yCoord >= y1) and (yCoord <= y2):
y = yCoord
if (yCoord > y2) and (yCoord <= y3):
incr = y2-y3; incr2 = y2*y2-y3*y3; incr3 = y2*y2*y2-y3*y3*y3
d = ((1.0 - fp3) - fpp2*incr)/ (3.0*incr2-6.0*y2*incr); c = (fpp2/2.0) - 3.0*d*y2
b = 1.0 - 2.0*c*y2 - 3.0*d*y2*y2; a = y2 - b*y2-c*y2*y2-d*y2*y2*y2
y = a + b*yCoord + c*yCoord*yCoord + d*yCoord*yCoord*yCoord
incr = y1-y0; incr2 = y1*y1-y0*y0; incr3 = y1*y1*y1-y0*y0*y0
d = ((1.0 - fp0) - fpp1*incr)/ (3.0*incr2-6.0*y1*incr); c = (fpp1/2.0) -3.0*d*y1
b = 1.0 - 2.0*c*y1 - 3.0*d*y1*y1; a = y1 - b*y1-c*y1*y1-d*y1*y1*y1
y_min = a + b*y0 + c*y0*y0 + d*y0*y0*y0
incr = y2-y3; incr2 = y2*y2-y3*y3; incr3 = y2*y2*y2-y3*y3*y3
d = ((1.0 - fp3) - fpp2*incr)/ (3.0*incr2-6.0*y2*incr); c = (fpp2/2.0) - 3.0*d*y2
b = 1.0 - 2.0*c*y2 - 3.0*d*y2*y2; a = y2 - b*y2-c*y2*y2-d*y2*y2*y2
y_max = a + b*y3 + c*y3*y3 + d*y3*y3*y3
yCoord_new = yLength*((y-y_min)/(y_max-y_min)) - float(options.offsety)
else:
yCoord_new = yLength*yCoord - float(options.offsety)
Mesh_File.write( "%15.14f \t %15.14f \t %s\n" % (xCoord_new, yCoord_new, iPoint) )
iPoint = iPoint + 1
Mesh_File.write( "%\n" )
Mesh_File.write( "% Boundary elements\n" )
Mesh_File.write( "%\n" )
Mesh_File.write( "NMARK=4\n" )
Mesh_File.write( "MARKER_TAG= lower\n" )
Mesh_File.write( "MARKER_ELEMS=%s\n" % (nNode-1))
for iNode in range(nNode-1):
Mesh_File.write( "3 \t %s \t %s\n" % (iNode, iNode + 1) )
Mesh_File.write( "MARKER_TAG= outlet\n" )
Mesh_File.write( "MARKER_ELEMS=%s\n" % (mNode-1))
for jNode in range(mNode-1):
Mesh_File.write( "3 \t %s \t %s\n" % (jNode*nNode + (nNode - 1), (jNode + 1)*nNode + (nNode - 1) ) )
Mesh_File.write( "MARKER_TAG= upper\n" )
Mesh_File.write( "MARKER_ELEMS=%s\n" % (nNode-1))
for iNode in range(nNode-1):
Mesh_File.write( "3 \t %s \t %s\n" % ((nNode*mNode - 1) - iNode, (nNode*mNode - 1) - (iNode + 1)) )
Mesh_File.write( "MARKER_TAG= inlet\n" )
Mesh_File.write( "MARKER_ELEMS=%s\n" % (mNode-1))
for jNode in range(mNode-2, -1, -1):
Mesh_File.write( "3 \t %s \t %s\n" % ((jNode + 1)*nNode, jNode*nNode ) )
Mesh_File.close()
| AmritaLonkar/trunk | SU2_PY/2DChannel.py | Python | gpl-2.0 | 7,901 |
# Import the Video model we created in our video_app/models.py
from .models import Video
from rest_framework import serializers
class VideoSerializer(serializers.ModelSerializer):
""" Class to serialize data between Django models and database. """
class Meta:
model = Video
fields = ('author_name', 'author_webpage',
'license', 'video_file', 'video_poster')
| NazimAli2017/Django-Rest-API-Videos | video_api/video_app/serializers.py | Python | gpl-3.0 | 404 |
# -*- coding: utf-8 -*-
# © 2016 Chafique DELLI @ Akretion
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'Purchase Picking State',
    'summary': 'Add the status of all the incoming pickings'
               ' in the purchase order',
'version': '9.0.1.0.0',
'category': 'Purchase Management',
'website': 'http://akretion.com',
'author': 'Akretion',
'license': 'AGPL-3',
'application': False,
'installable': True,
'depends': [
'purchase',
],
'data': [
'purchase_view.xml',
]
}
| SerpentCS/purchase-workflow | purchase_picking_state/__openerp__.py | Python | agpl-3.0 | 561 |
#!/usr/bin/env python
#
# Use the LasarScan to compute the distance to any obstacle in a region in front
# of the robot. The region has a width specified as a parameter.
#
# Package: teleop_service. Width parameter = "robot/width"
# Refresh interval used to slow down the publication rate
#
import rospy
import sys
import math
from sensor_msgs.msg import LaserScan
from std_msgs.msg import String
from teleop_service.msg import ObstacleDistance
DEFAULT_WIDTH = 1000. # mm
REFRESH_INTERVAL = 2
REPORT_INTERVAL = 20
INFINITY = 100000.
# The message instance and publisher for the computed obstacle distance.
msg = ObstacleDistance()
pub = rospy.Publisher('sb_obstacle_distance',ObstacleDistance,queue_size=1)
rospy.init_node('sb_publish_obstacle_distance')
count = 0
# Callback for every new LaserScan message
def callback(laser):
    # 'count' is module-level state; declare it global before incrementing.
    global count
    count = count+1
if count%REFRESH_INTERVAL == 0:
width = float(rospy.get_param("/robot/width",DEFAULT_WIDTH))
distance = INFINITY
# Only consider -90 to 90. (0 is straight ahead)
delta = laser.angle_increment
angle = laser.angle_min - delta
for d in laser.ranges:
angle = angle + delta
            if angle>-math.pi/2 and angle<math.pi/2:
                # A ray hits the corridor ahead of the robot only if its
                # lateral offset (d*sin(angle)) is within half the width;
                # the obstacle's forward distance is then d*cos(angle).
                if abs(d*math.sin(angle)) <= width/2:
                    obstacle = d*math.cos(angle)
                    if obstacle<distance:
                        distance = obstacle
msg.distance = distance
pub.publish(msg)
if count%REPORT_INTERVAL == 0:
rospy.loginfo("Obstacle distance: %4.2f "%(distance))
sub = rospy.Subscriber("/sensor_msgs",LaserScan,callback)
rospy.spin()
| chuckcoughlin/sarah-bella | robot/src/audio_locator/src/monitor_audio_signal.py | Python | mit | 1,473 |
from hc.api.models import Channel, Check
from hc.test import BaseTestCase
class ApiAdminTestCase(BaseTestCase):
def setUp(self):
super().setUp()
self.check = Check.objects.create(project=self.project, tags="foo bar")
self.alice.is_staff = True
self.alice.is_superuser = True
self.alice.save()
def test_it_shows_channel_list_with_pushbullet(self):
self.client.login(username="[email protected]", password="password")
Channel.objects.create(
project=self.project, kind="pushbullet", value="test-token"
)
r = self.client.get("/admin/api/channel/")
self.assertContains(r, "Pushbullet")
| healthchecks/healthchecks | hc/api/tests/test_admin.py | Python | bsd-3-clause | 688 |
#!/usr/bin/env python
from datetime import datetime, timedelta
import time
import dns
from dnsdisttests import DNSDistTest
class TestResponseRuleNXDelayed(DNSDistTest):
_config_template = """
newServer{address="127.0.0.1:%s"}
addResponseAction(RCodeRule(DNSRCode.NXDOMAIN), DelayResponseAction(1000))
"""
def testNXDelayed(self):
"""
Responses: Delayed on NXDomain
Send an A query to "delayed.responses.tests.powerdns.com.",
check that the response delay is longer than 1000 ms
for a NXDomain response over UDP, shorter for a NoError one.
"""
name = 'delayed.responses.tests.powerdns.com.'
query = dns.message.make_query(name, 'A', 'IN')
response = dns.message.make_response(query)
# NX over UDP
response.set_rcode(dns.rcode.NXDOMAIN)
begin = datetime.now()
(receivedQuery, receivedResponse) = self.sendUDPQuery(query, response)
end = datetime.now()
receivedQuery.id = query.id
self.assertEqual(query, receivedQuery)
self.assertEqual(response, receivedResponse)
self.assertTrue((end - begin) > timedelta(0, 1))
# NoError over UDP
response.set_rcode(dns.rcode.NOERROR)
begin = datetime.now()
(receivedQuery, receivedResponse) = self.sendUDPQuery(query, response)
end = datetime.now()
receivedQuery.id = query.id
self.assertEqual(query, receivedQuery)
self.assertEqual(response, receivedResponse)
self.assertTrue((end - begin) < timedelta(0, 1))
# NX over TCP
response.set_rcode(dns.rcode.NXDOMAIN)
begin = datetime.now()
(receivedQuery, receivedResponse) = self.sendTCPQuery(query, response)
end = datetime.now()
receivedQuery.id = query.id
self.assertEqual(query, receivedQuery)
self.assertEqual(response, receivedResponse)
self.assertTrue((end - begin) < timedelta(0, 1))
class TestResponseRuleERCode(DNSDistTest):
_config_template = """
newServer{address="127.0.0.1:%s"}
addResponseAction(ERCodeRule(DNSRCode.BADVERS), DelayResponseAction(1000))
"""
def testBADVERSDelayed(self):
"""
Responses: Delayed on BADVERS
Send an A query to "delayed.responses.tests.powerdns.com.",
check that the response delay is longer than 1000 ms
for a BADVERS response over UDP, shorter for BADKEY and NoError.
"""
name = 'delayed.responses.tests.powerdns.com.'
query = dns.message.make_query(name, 'A', 'IN')
response = dns.message.make_response(query)
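        # BADVERS is an extended RCode carried in the EDNS OPT record,
        # so EDNS must be enabled for it to be encoded in the response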
response.use_edns(edns=True)
# BADVERS over UDP
# BADVERS == 16, so rcode==0, ercode==1
response.set_rcode(dns.rcode.BADVERS)
begin = datetime.now()
(receivedQuery, receivedResponse) = self.sendUDPQuery(query, response)
end = datetime.now()
receivedQuery.id = query.id
self.assertEqual(query, receivedQuery)
self.assertEqual(response, receivedResponse)
self.assertTrue((end - begin) > timedelta(0, 1))
# BADKEY (17, an ERCode) over UDP
response.set_rcode(17)
begin = datetime.now()
(receivedQuery, receivedResponse) = self.sendUDPQuery(query, response)
end = datetime.now()
receivedQuery.id = query.id
self.assertEqual(query, receivedQuery)
self.assertEqual(response, receivedResponse)
self.assertTrue((end - begin) < timedelta(0, 1))
# NoError (non-ERcode, basic RCode bits match BADVERS) over UDP
response.set_rcode(dns.rcode.NOERROR)
begin = datetime.now()
(receivedQuery, receivedResponse) = self.sendUDPQuery(query, response)
end = datetime.now()
receivedQuery.id = query.id
self.assertEqual(query, receivedQuery)
self.assertEqual(response, receivedResponse)
self.assertTrue((end - begin) < timedelta(0, 1))
class TestResponseRuleQNameDropped(DNSDistTest):
_config_template = """
newServer{address="127.0.0.1:%s"}
addResponseAction("drop.responses.tests.powerdns.com.", DropResponseAction())
"""
def testDropped(self):
"""
Responses: Dropped on QName
Send an A query to "drop.responses.tests.powerdns.com.",
check that the response (not the query) is dropped.
"""
name = 'drop.responses.tests.powerdns.com.'
query = dns.message.make_query(name, 'A', 'IN')
response = dns.message.make_response(query)
for method in ("sendUDPQuery", "sendTCPQuery"):
sender = getattr(self, method)
(receivedQuery, receivedResponse) = sender(query, response)
receivedQuery.id = query.id
self.assertEqual(query, receivedQuery)
self.assertEqual(receivedResponse, None)
def testNotDropped(self):
"""
Responses: NOT Dropped on QName
Send an A query to "dontdrop.responses.tests.powerdns.com.",
check that the response is not dropped.
"""
name = 'dontdrop.responses.tests.powerdns.com.'
query = dns.message.make_query(name, 'A', 'IN')
response = dns.message.make_response(query)
for method in ("sendUDPQuery", "sendTCPQuery"):
sender = getattr(self, method)
(receivedQuery, receivedResponse) = sender(query, response)
receivedQuery.id = query.id
self.assertEqual(query, receivedQuery)
self.assertEqual(response, receivedResponse)
class TestResponseRuleQNameAllowed(DNSDistTest):
_config_template = """
newServer{address="127.0.0.1:%s"}
addResponseAction("allow.responses.tests.powerdns.com.", AllowResponseAction())
addResponseAction(AllRule(), DropResponseAction())
"""
def testAllowed(self):
"""
Responses: Allowed on QName
Send an A query to "allow.responses.tests.powerdns.com.",
check that the response is allowed.
"""
name = 'allow.responses.tests.powerdns.com.'
query = dns.message.make_query(name, 'A', 'IN')
response = dns.message.make_response(query)
for method in ("sendUDPQuery", "sendTCPQuery"):
sender = getattr(self, method)
(receivedQuery, receivedResponse) = sender(query, response)
receivedQuery.id = query.id
self.assertEqual(query, receivedQuery)
self.assertEqual(response, receivedResponse)
def testNotAllowed(self):
"""
Responses: Not allowed on QName
Send an A query to "dontallow.responses.tests.powerdns.com.",
check that the response is dropped.
"""
name = 'dontallow.responses.tests.powerdns.com.'
query = dns.message.make_query(name, 'A', 'IN')
response = dns.message.make_response(query)
for method in ("sendUDPQuery", "sendTCPQuery"):
sender = getattr(self, method)
(receivedQuery, receivedResponse) = sender(query, response)
receivedQuery.id = query.id
self.assertEqual(query, receivedQuery)
self.assertEqual(receivedResponse, None)
class TestResponseRuleEditTTL(DNSDistTest):
_ttl = 5
_config_params = ['_testServerPort', '_ttl']
_config_template = """
newServer{address="127.0.0.1:%s"}
function editTTLCallback(section, class, type, ttl)
return %d
end
function editTTLFunc(dr)
dr:editTTLs(editTTLCallback)
return DNSAction.None, ""
end
addResponseAction(AllRule(), LuaResponseAction(editTTLFunc))
"""
def testTTLEdited(self):
"""
Responses: Alter the TTLs
"""
name = 'editttl.responses.tests.powerdns.com.'
query = dns.message.make_query(name, 'A', 'IN')
response = dns.message.make_response(query)
rrset = dns.rrset.from_text(name,
3600,
dns.rdataclass.IN,
dns.rdatatype.A,
'192.0.2.1')
response.answer.append(rrset)
for method in ("sendUDPQuery", "sendTCPQuery"):
sender = getattr(self, method)
(receivedQuery, receivedResponse) = sender(query, response)
receivedQuery.id = query.id
self.assertEqual(query, receivedQuery)
self.assertEqual(response, receivedResponse)
self.assertNotEqual(response.answer[0].ttl, receivedResponse.answer[0].ttl)
self.assertEqual(receivedResponse.answer[0].ttl, self._ttl)
class TestResponseLuaActionReturnSyntax(DNSDistTest):
_config_template = """
newServer{address="127.0.0.1:%s"}
function customDelay(dr)
return DNSResponseAction.Delay, "1000"
end
function customDrop(dr)
return DNSResponseAction.Drop
end
addResponseAction("drop.responses.tests.powerdns.com.", LuaResponseAction(customDrop))
addResponseAction(RCodeRule(DNSRCode.NXDOMAIN), LuaResponseAction(customDelay))
"""
def testResponseActionDelayed(self):
"""
Responses: Delayed via LuaResponseAction
Send an A query to "delayed.responses.tests.powerdns.com.",
check that the response delay is longer than 1000 ms
        for a NXDomain response over UDP.
"""
name = 'delayed.responses.tests.powerdns.com.'
query = dns.message.make_query(name, 'A', 'IN')
response = dns.message.make_response(query)
# NX over UDP
response.set_rcode(dns.rcode.NXDOMAIN)
begin = datetime.now()
(receivedQuery, receivedResponse) = self.sendUDPQuery(query, response)
end = datetime.now()
receivedQuery.id = query.id
self.assertEqual(query, receivedQuery)
self.assertEqual(response, receivedResponse)
self.assertTrue((end - begin) > timedelta(0, 1))
def testDropped(self):
"""
Responses: Dropped via user defined LuaResponseAction
Send an A query to "drop.responses.tests.powerdns.com.",
check that the response (not the query) is dropped.
"""
name = 'drop.responses.tests.powerdns.com.'
query = dns.message.make_query(name, 'A', 'IN')
response = dns.message.make_response(query)
for method in ("sendUDPQuery", "sendTCPQuery"):
sender = getattr(self, method)
(receivedQuery, receivedResponse) = sender(query, response)
receivedQuery.id = query.id
self.assertEqual(query, receivedQuery)
self.assertEqual(receivedResponse, None)
| shinsterneck/pdns | regression-tests.dnsdist/test_Responses.py | Python | gpl-2.0 | 10,719 |
from typing import Tuple, Union
import moderngl
from demosys.resources.meta import ProgramDescription
from demosys import context
VERTEX_SHADER = 'VERTEX_SHADER'
GEOMETRY_SHADER = 'GEOMETRY_SHADER'
FRAGMENT_SHADER = 'FRAGMENT_SHADER'
TESS_CONTROL_SHADER = 'TESS_CONTROL_SHADER'
TESS_EVALUATION_SHADER = 'TESS_EVALUATION_SHADER'
COMPUTE_SHADER = 'COMPUTE_SHADER'
class ProgramShaders:
"""Helper class preparing shader source strings for a program"""
def __init__(self, meta: ProgramDescription):
self.meta = meta
self.vertex_source = None
self.geometry_source = None
self.fragment_source = None
self.tess_control_source = None
self.tess_evaluation_source = None
@property
def ctx(self) -> moderngl.Context:
"""The moderngl context"""
return context.ctx()
@classmethod
def from_single(cls, meta: ProgramDescription, source: str):
"""Initialize a single glsl string containing all shaders"""
instance = cls(meta)
instance.vertex_source = ShaderSource(
VERTEX_SHADER,
meta.path or meta.vertex_shader,
source
)
if GEOMETRY_SHADER in source:
instance.geometry_source = ShaderSource(
GEOMETRY_SHADER,
meta.path or meta.geometry_shader,
source,
)
if FRAGMENT_SHADER in source:
instance.fragment_source = ShaderSource(
FRAGMENT_SHADER,
meta.path or meta.fragment_shader,
source,
)
if TESS_CONTROL_SHADER in source:
instance.tess_control_source = ShaderSource(
TESS_CONTROL_SHADER,
meta.path or meta.tess_control_shader,
source,
)
if TESS_EVALUATION_SHADER in source:
instance.tess_evaluation_source = ShaderSource(
TESS_EVALUATION_SHADER,
meta.path or meta.tess_evaluation_shader,
source,
)
return instance
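    # A sketch of the single-source layout this supports: ShaderSource
    # injects "#define <STAGE> 1" right after "#version", so one file can
    # select a stage with the GLSL preprocessor, e.g.
    #
    #     #version 330
    #     #if defined VERTEX_SHADER
    #     void main() { gl_Position = vec4(0.0); }
    #     #elif defined FRAGMENT_SHADER
    #     out vec4 fragColor;
    #     void main() { fragColor = vec4(1.0); }
    #     #endif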
@classmethod
def from_separate(cls, meta: ProgramDescription, vertex_source, geometry_source=None, fragment_source=None,
tess_control_source=None, tess_evaluation_source=None):
"""Initialize multiple shader strings"""
instance = cls(meta)
instance.vertex_source = ShaderSource(
VERTEX_SHADER,
meta.path or meta.vertex_shader,
vertex_source,
)
if geometry_source:
instance.geometry_source = ShaderSource(
GEOMETRY_SHADER,
meta.path or meta.geometry_shader,
geometry_source,
)
if fragment_source:
instance.fragment_source = ShaderSource(
FRAGMENT_SHADER,
meta.path or meta.fragment_shader,
fragment_source,
)
if tess_control_source:
instance.tess_control_source = ShaderSource(
TESS_CONTROL_SHADER,
meta.path or meta.tess_control_shader,
tess_control_source,
)
        if tess_evaluation_source:
            instance.tess_evaluation_source = ShaderSource(
                TESS_EVALUATION_SHADER,
                meta.path or meta.tess_evaluation_shader,
                tess_evaluation_source,
            )
return instance
def create(self):
"""
Creates a shader program.
Returns:
ModernGL Program instance
"""
# Get out varyings
out_attribs = []
# If no fragment shader is present we are doing transform feedback
if not self.fragment_source:
            # Out attributes come from the geometry shader if one is present
            if self.geometry_source:
                out_attribs = self.geometry_source.find_out_attribs()
            # Otherwise they are specified in the vertex shader
else:
out_attribs = self.vertex_source.find_out_attribs()
program = self.ctx.program(
vertex_shader=self.vertex_source.source,
geometry_shader=self.geometry_source.source if self.geometry_source else None,
fragment_shader=self.fragment_source.source if self.fragment_source else None,
tess_control_shader=self.tess_control_source.source if self.tess_control_source else None,
tess_evaluation_shader=self.tess_evaluation_source.source if self.tess_evaluation_source else None,
varyings=out_attribs,
)
program.extra = {'meta': self.meta}
return program
class ShaderSource:
"""
Helper class representing a single shader type
"""
def __init__(self, shader_type: str, name: str, source: str):
self.type = shader_type
self.name = name
self.source = source.strip()
self.lines = self.source.split('\n')
# Make sure version is present
if not self.lines[0].startswith("#version"):
self.print()
raise ShaderError(
"Missing #version in {}. A version must be defined in the first line".format(self.name),
)
# Add preprocessors to source VERTEX_SHADER, FRAGMENT_SHADER etc.
self.lines.insert(1, "#define {} 1".format(self.type))
self.source = '\n'.join(self.lines)
def find_out_attribs(self):
"""
Get all out attributes in the shader source.
:return: List of attribute names
"""
names = []
for line in self.lines:
if line.strip().startswith("out "):
names.append(line.split()[2].replace(';', ''))
return names
def print(self):
"""Print the shader lines"""
print("---[ START {} ]---".format(self.name))
for i, line in enumerate(self.lines):
print("{}: {}".format(str(i).zfill(3), line))
print("---[ END {} ]---".format(self.name))
class ShaderError(Exception):
pass
class ReloadableProgram:
"""
    Programs we want to be reloadable must be created with this wrapper
"""
def __init__(self, meta: ProgramDescription, program: moderngl.Program):
"""
        Create a program wrapper from a description and a compiled program
        :param meta: The ProgramDescription
        :param program: The moderngl Program instance
"""
self.program = program
self.meta = meta
@property
def name(self):
return self.meta.path or self.meta.vertex_shader
@property
def _members(self):
return self.program._members
@property
def ctx(self) -> moderngl.Context:
return self.program.ctx
def __getitem__(self, key) -> Union[moderngl.Uniform, moderngl.UniformBlock, moderngl.Subroutine,
moderngl.Attribute, moderngl.Varying]:
return self.program[key]
def get(self, key, default):
return self.program.get(key, default)
@property
def mglo(self):
"""The ModernGL Program object"""
return self.program.mglo
@property
def glo(self) -> int:
"""
int: The internal OpenGL object.
        This value is provided for debug purposes only.
"""
return self.program.glo
@property
def subroutines(self) -> Tuple[str, ...]:
'''
tuple: The subroutine uniforms.
'''
return self.program.subroutines
@property
def geometry_input(self) -> int:
"""
int: The geometry input primitive.
The GeometryShader's input primitive if the GeometryShader exists.
The geometry input primitive will be used for validation.
"""
return self.program.geometry_input
@property
def geometry_output(self) -> int:
"""
int: The geometry output primitive.
The GeometryShader's output primitive if the GeometryShader exists.
"""
return self.program.geometry_output
@property
def geometry_vertices(self) -> int:
"""
int: The maximum number of vertices that
the geometry shader will output.
"""
return self.program.geometry_vertices
def __repr__(self):
return '<ReloadableProgram: {} id={}>'.format(self.name, self.mglo.glo)
| Contraz/demosys-py | demosys/opengl/program.py | Python | isc | 8,649 |
#!/usr/bin/python2.4
# Gscreen a GUI for linuxcnc cnc controller
# Chris Morley copyright 2012
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# Gscreen is made for running linuxcnc CNC machines
# currently only machines with XYZA or fewer axes are usable.
# Gscreen was built with touchscreens in mind though a mouse works too.
# a keyboard is necessary for editing gcode
# Gscreen is, at its heart, a gladevcp program though loaded in a non-standard way.
# one can also use a second monitor to display a second glade panel
# this would probably be most useful for users' custom status widgets.
# you would need to calibrate your touchscreen to just work on a single screen
import pygtk
pygtk.require("2.0")
import gtk
import gtk.glade
import gobject
import hal
import sys,os,subprocess
from optparse import Option, OptionParser
import gladevcp.makepins
from gladevcp.gladebuilder import GladeBuilder
import pango
import traceback
import atexit
import vte
import time
from time import strftime,localtime
import hal_glib
# try to add a notify system so messages use the
# nice integrated pop-ups
# Ubuntu kinda wrecks this by not following the
# standard - you can't set how long the message stays up for.
# I suggest fixing this with a PPA off the net
# https://launchpad.net/~leolik/+archive/leolik?field.series_filter=lucid
try:
NOTIFY_AVAILABLE = False
import pynotify
if not pynotify.init("Gscreen"):
print "**** GSCREEN INFO: There was a problem initializing the pynotify module"
else:
NOTIFY_AVAILABLE = True
except:
print "**** GSCREEN INFO: You don't seem to have pynotify installed"
# try to add ability for audio feedback to user.
try:
_AUDIO_AVAIALBLE = False
import pygst
pygst.require("0.10")
import gst
_AUDIO_AVAIALBLE = True
print "**** GSCREEN INFO: audio available!"
except:
print "**** GSCREEN INFO: no audio alerts available - PYGST libray not installed?"
# BASE is the absolute path to linuxcnc base
# libdir is the path to Gscreen python files
# datadir is where the standard GLADE files are
# imagedir is for icons
# themesdir is path to system's GTK2 theme folder
BASE = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), ".."))
libdir = os.path.join(BASE, "lib", "python")
datadir = os.path.join(BASE, "share", "linuxcnc")
imagedir = os.path.join(BASE, "share","gscreen","images")
sys.path.insert(0, libdir)
themedir = "/usr/share/themes"
xmlname = os.path.join(datadir,"gscreen.glade")
xmlname2 = os.path.join(datadir,"gscreen2.glade")
ALERT_ICON = os.path.join(imagedir,"applet-critical.png")
INFO_ICON = os.path.join(imagedir,"std_info.gif")
# internationalization and localization
import locale, gettext
# path to TCL for external programs eg. halshow
TCLPATH = os.environ['LINUXCNC_TCL_DIR']
# path to the configuration the user requested
# used to see if there are local GLADE files to use
CONFIGPATH = os.environ['CONFIG_DIR']
import linuxcnc
from gscreen import emc_interface
from gscreen import mdi
from gscreen import preferences
# this is for hiding the pointer when using a touch screen
pixmap = gtk.gdk.Pixmap(None, 1, 1, 1)
color = gtk.gdk.Color()
INVISABLE = gtk.gdk.Cursor(pixmap, pixmap, color, color, 0, 0)
# to help with debugging new screens
verbose_debug = False
# print debug messages if debug is true
gscreen_debug = False
def dbg(str):
global gscreen_debug
if not gscreen_debug: return
print str
# Throws up a dialog with debug info when an error is encountered
def excepthook(exc_type, exc_obj, exc_tb):
try:
w = app.widgets.window1
except KeyboardInterrupt:
sys.exit(0)
except NameError:
w = None
lines = traceback.format_exception(exc_type, exc_obj, exc_tb)
m = gtk.MessageDialog(w,
gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
gtk.MESSAGE_ERROR, gtk.BUTTONS_OK,
("Gscreen encountered an error. The following "
"information may be useful in troubleshooting:\n\n")
+ "".join(lines))
m.show()
m.run()
m.destroy()
sys.excepthook = excepthook
# constants
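# axis indices, DRO display orders, operating modes, unit systems and
# numeric-entry types referenced throughout the screen code below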
X = 0;Y = 1;Z = 2;A = 3;B = 4;C = 5;U = 6;V = 7;W = 8
_ABS = 0;_REL = 1;_DTG = 2
_MAN = 0;_MDI = 1;_AUTO = 2
_MM = 1;_IMPERIAL = 0
_SPINDLE_INPUT = 1;_PERCENT_INPUT = 2;_VELOCITY_INPUT = 3;_DEGREE_INPUT = 4
# the player class does the work of playing the audio hints
# http://pygstdocs.berlios.de/pygst-tutorial/introduction.html
class Player:
def __init__(self):
        #Element playbin automatically plays any file
self.player = gst.element_factory_make("playbin", "player")
#Enable message bus to check for errors in the pipeline
bus = self.player.get_bus()
bus.add_signal_watch()
bus.connect("message", self.on_message)
self.loop = gobject.MainLoop()
def run(self):
self.player.set_state(gst.STATE_PLAYING)
self.loop.run()
def set_sound(self,file):
#Set the uri to the file
self.player.set_property("uri", "file://" + file)
def on_message(self, bus, message):
t = message.type
if t == gst.MESSAGE_EOS:
#file ended, stop
self.player.set_state(gst.STATE_NULL)
self.loop.quit()
elif t == gst.MESSAGE_ERROR:
                #Error occurred, print and stop
self.player.set_state(gst.STATE_NULL)
err, debug = message.parse_error()
print "Error: %s" % err, debug
self.loop.quit()
# a class for holding the glade widgets rather than searching for them each time
class Widgets:
def __init__(self, xml):
self._xml = xml
def __getattr__(self, attr):
r = self._xml.get_object(attr)
if r is None: raise AttributeError, "No widget %r" % attr
return r
def __getitem__(self, attr):
r = self._xml.get_object(attr)
if r is None: raise IndexError, "No widget %r" % attr
return r
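# usage: self.widgets.window1 and self.widgets["window1"] both return the
# GTK object named "window1" from the Glade file (raising if it is missing)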
# a class for holding data
# here we initialize the data
class Data:
def __init__(self):
self.use_screen2 = False
self.theme_name = "Follow System Theme"
self.abs_textcolor = ""
self.rel_textcolor = ""
self.dtg_textcolor = ""
self.err_textcolor = ""
self.window_geometry = ""
self.window_max = ""
self.axis_list = []
self.rotary_joints = False
self.active_axis_buttons = [(None,None)] # axis letter,axis number
self.abs_color = (0, 65535, 0)
self.rel_color = (65535, 0, 0)
self.dtg_color = (0, 0, 65535)
self.highlight_color = (65535,65535,65535)
self.highlight_major = False
self.display_order = (_REL,_DTG,_ABS)
self.mode_order = (_MAN,_MDI,_AUTO)
self.mode_labels = ["Manual Mode","MDI Mode","Auto Mode"]
self.plot_view = ("p","x","y","y2","z","z2")
self.task_mode = 0
self.active_gcodes = []
self.active_mcodes = []
for letter in ('x','y','z','a','b','c','u','v','w'):
self['%s_abs'%letter] = 0.0
self['%s_rel'%letter] = 0.0
self['%s_dtg'%letter] = 0.0
self['%s_is_homed'%letter] = False
self.spindle_request_rpm = 0
self.spindle_dir = 0
self.spindle_speed = 0
self.spindle_start_rpm = 300
self.spindle_preset = 300
self.active_spindle_command = "" # spindle command setting
self.active_feed_command = "" # feed command setting
self.system = 1
self.estopped = True
self.dro_units = _IMPERIAL
self.machine_units = _IMPERIAL
self.tool_in_spindle = 0
self.flood = False
self.mist = False
self.machine_on = False
self.or_limits = False
self.op_stop = False
self.block_del = False
self.all_homed = False
self.jog_rate = 15
self.jog_rate_inc = 1
self.jog_rate_max = 60
        self.jog_increments = ['.001 in', '.01 in', '.1 in']
self.current_jogincr_index = 0
self.angular_jog_adjustment_flag = False
self.angular_jog_increments = []
self.angular_jog_rate = 1800
self.angular_jog_rate_inc = 60
self.angular_jog_rate_max = 7200
self.current_angular_jogincr_index = 0
self.feed_override = 1.0
self.feed_override_inc = .05
self.feed_override_max = 2.0
self.spindle_override = 1.0
self.spindle_override_inc = .05
self.spindle_override_max = 1.2
self.spindle_override_min = .50
self.maxvelocity = 1
self.velocity_override = 1.0
self.velocity_override_inc = .05
self.edit_mode = False
self.full_graphics = False
self.graphic_move_inc = 20
self.plot_hidden = False
self.file = ""
self.file_lines = 0
self.line = 0
self.last_line = 0
self.motion_line = 0
self.id = 0
self.dtg = 0.0
self.show_dtg = False
self.velocity = 0.0
self.delay = 0.0
self.preppedtool = None
self.lathe_mode = False
self.diameter_mode = True
self.tooleditor = ""
self.tooltable = ""
self.alert_sound = "/usr/share/sounds/ubuntu/stereo/bell.ogg"
self.error_sound = "/usr/share/sounds/ubuntu/stereo/dialog-question.ogg"
self.ob = None
self.index_tool_dialog = None
self.keyboard_dialog = None
self.preset_spindle_dialog = None
self.spindle_control_dialog = None
self.entry_dialog = None
self.restart_dialog = None
self.key_event_last = None,0
def __getitem__(self, item):
return getattr(self, item)
def __setitem__(self, item, value):
return setattr(self, item, value)
# trampoline and load_handlers are used for custom displays
class Trampoline(object):
def __init__(self,methods):
self.methods = methods
def __call__(self, *a, **kw):
for m in self.methods:
m(*a, **kw)
def load_handlers(usermod,halcomp,builder,useropts,gscreen):
hdl_func = 'get_handlers'
mod = object = None
def add_handler(method, f):
if method in handlers:
handlers[method].append(f)
else:
handlers[method] = [f]
handlers = {}
for u in usermod:
(directory,filename) = os.path.split(u)
(basename,extension) = os.path.splitext(filename)
if directory == '':
directory = '.'
if directory not in sys.path:
sys.path.insert(0,directory)
print _('adding import dir %s' % directory)
try:
mod = __import__(basename)
except ImportError,msg:
print ("module '%s' skipped - import error: %s" %(basename,msg))
continue
print _("module '%s' imported OK" % mod.__name__)
try:
# look for functions
for temp in ("periodic","connect_signals","initialize_widgets"):
h = getattr(mod,temp,None)
if h and callable(h):
print ("module '%s' : '%s' function found" % (mod.__name__,temp))
# look for 'get_handlers' function
h = getattr(mod,hdl_func,None)
if h and callable(h):
print ("module '%s' : '%s' function found" % (mod.__name__,hdl_func))
objlist = h(halcomp,builder,useropts,gscreen)
else:
                # the module has no get_handlers() callable.
                # in this case we permit any callable in the module (except class objects) to register as a handler
dbg("module '%s': no '%s' function - registering only functions as callbacks" % (mod.__name__,hdl_func))
objlist = [mod]
# extract callback candidates
for object in objlist:
dbg("Registering handlers in module %s object %s" % (mod.__name__, object))
                if isinstance(object, dict):
                    methods = object.items()
else:
methods = map(lambda n: (n, getattr(object, n, None)), dir(object))
for method,f in methods:
if method.startswith('_'):
continue
if callable(f):
dbg("Register callback '%s' in %s" % (method, basename))
add_handler(method, f)
except Exception, e:
print ("**** GSCREEN ERROR: trouble looking for handlers in '%s': %s" %(basename, e))
traceback.print_exc()
# Wrap lists in Trampoline, unwrap single functions
for n,v in list(handlers.items()):
if len(v) == 1:
handlers[n] = v[0]
else:
handlers[n] = Trampoline(v)
return handlers,mod,object
# ok here is the Gscreen class
# there are also three other files:
# mdi.py for mdi commands (which include non-obvious mdi commands done in manual mode)
# preference.py for keeping track of stored user preferences
# emc_interface.py which does most of the commands and status of linuxcnc
# keep in mind some of the gladeVCP widgets send-commands-to/monitor linuxcnc also
class Gscreen:
def __init__(self):
global xmlname
global xmlname2
global gscreen_debug
global verbose_debug
skinname = "gscreen"
self.inipath = sys.argv[2]
(progdir, progname) = os.path.split(sys.argv[0])
# linuxcnc adds -ini to display name and optparse
# can't understand that, so we do it manually
for num,temp in enumerate(sys.argv):
if temp == '-c':
try:
print ("**** GSCREEN INFO: Optional component name ="),sys.argv[num+1]
skinname = sys.argv[num+1]
except:
pass
if temp == '-d': gscreen_debug = True
if temp == '-v': verbose_debug = True
# check for a local translation folder
locallocale = os.path.join(CONFIGPATH,"locale")
if os.path.exists(locallocale):
LOCALEDIR = locallocale
domain = skinname
print ("**** GSCREEN INFO: local locale name =",LOCALEDIR,skinname)
else:
LOCALEDIR = os.path.join(BASE, "share", "locale")
domain = "linuxcnc"
locale.setlocale(locale.LC_ALL, '')
locale.bindtextdomain(domain, LOCALEDIR)
gettext.install(domain, localedir=LOCALEDIR, unicode=True)
gettext.bindtextdomain(domain, LOCALEDIR)
# main screen
localglade = os.path.join(CONFIGPATH,"%s.glade"%skinname)
if os.path.exists(localglade):
print _("\n**** GSCREEN INFO: Using LOCAL custom glade file from %s ****"% localglade)
xmlname = localglade
try:
self.xml = gtk.Builder()
self.xml.set_translation_domain(domain) # for locale translations
self.xml.add_from_file(xmlname)
except:
print _("**** Gscreen GLADE ERROR: With main screen xml file: %s"% xmlname)
sys.exit(0)
# second screen
localglade = os.path.join(CONFIGPATH,"%s2.glade"%skinname)
if os.path.exists(localglade):
print _("\n**** GSCREEN INFO: Using LOCAL glade file from %s ****"% localglade)
xmlname2 = localglade
else:
print _("\n**** GSCREEN INFO: using STOCK glade file from: %s ****"% xmlname2)
try:
self.xml.add_from_file(xmlname2)
self.screen2 = True
except:
print _("**** Gscreen GLADE ERROR: With screen 2's xml file: %s"% xmlname)
self.screen2 = False
self.widgets = Widgets(self.xml)
self.data = Data()
if _AUDIO_AVAIALBLE:
self.audio = Player()
# access to EMC control
self.emc = emc_interface.emc_control(linuxcnc)
# access to EMC status
self.status = emc_interface.emc_status( self.data, linuxcnc)
# access to MDI
mdi_labels = mdi_eventboxes = []
self.mdi_control = mdi.mdi_control(gtk, linuxcnc, mdi_labels, mdi_eventboxes)
# pull info from the INI file
self.inifile = self.emc.emc.ini(self.inipath)
# change the display based on the requested axis
temp = self.inifile.find("TRAJ","COORDINATES")
if temp == None:
self.add_alarm_entry("No coordinates entry found in [TRAJ] of INI file")
self.data.axis_list = []
for letter in temp:
if letter.lower() in self.data.axis_list: continue
if not letter.lower() in ["x","y","z","a","b","c","u","v","w"]: continue
self.data.axis_list.append(letter.lower())
# check for rotary joints
for i in("a","b","c"):
if i in self.data.axis_list:
self.data.rotary_joints = True
break
# check the ini file if UNITS are set to mm"
# first check the global settings
units=self.inifile.find("TRAJ","LINEAR_UNITS")
if units==None:
# else then the X axis units
units=self.inifile.find("AXIS_0","UNITS")
if units==None:
self.add_alarm_entry(_("No UNITS entry found in [TRAJ] or [AXIS_0] of INI file"))
if units=="mm" or units=="metric" or units == "1.0":
self.machine_units_mm=1
conversion=[1.0/25.4]*3+[1]*3+[1.0/25.4]*3
else:
self.machine_units_mm=0
conversion=[25.4]*3+[1]*3+[25.4]*3
self.status.set_machine_units(self.machine_units_mm,conversion)
# set-up HAL component
try:
self.halcomp = hal.component("gscreen")
except:
print _("*** Gscreen ERROR: Asking for a HAL component using a name that already exists.")
sys.exit(0)
panel = gladevcp.makepins.GladePanel( self.halcomp, xmlname, self.xml, None)
# at this point, any glade HAL widgets and their pins are set up.
# look for custom handler files:
HANDLER_FN = "%s_handler.py"%skinname
local_handler_path = os.path.join(CONFIGPATH,HANDLER_FN)
if os.path.exists(local_handler_path):
temp = [HANDLER_FN]
else:
temp = []
handlers,self.handler_module,self.handler_instance = load_handlers(temp,self.halcomp,self.xml,[],self)
self.xml.connect_signals(handlers)
        # Look for an optional preference file path otherwise it uses ~/.gscreen_preferences
        # then initiate access to saved preferences
temp = self.inifile.find("DISPLAY","PREFERENCE_FILE_PATH")
dbg("**** GSCREEN INFO: Preference file path: %s"%temp)
self.prefs = preferences.preferences(temp)
        # Initialize preferences either from the handler file or from Gscreen
if "initialize_preferences" in dir(self.handler_instance):
self.handler_instance.initialize_preferences()
else:
self.initialize_preferences()
# check for ladder loaded
self.data.is_ladder = hal.component_exists('classicladder_rt')
# get the system wide theme
settings = gtk.settings_get_default()
settings.props.gtk_button_images = True
self.data.system_theme = settings.get_property("gtk-theme-name")
# jogging increments
increments = self.inifile.find("DISPLAY", "INCREMENTS")
if increments:
if not "continuous" in increments:
increments +=",continuous"
if "," in increments:
self.data.jog_increments = [i.strip() for i in increments.split(",")]
else:
self.data.jog_increments = increments.split()
else:
if self.machine_units_mm ==_MM:
self.data.jog_increments = [".01 mm",".1 mm","1 mm","continuous"]
else:
self.data.jog_increments = [".001 in",".01 in",".1 in","continuous"]
self.add_alarm_entry(_("No default jog increments entry found in [DISPLAY] of INI file"))
# angular jogging increments
increments = self.inifile.find("DISPLAY", "ANGULAR_INCREMENTS")
if increments:
if not "continuous" in increments:
increments +=",continuous"
if "," in increments:
self.data.angular_jog_increments = [i.strip() for i in increments.split(",")]
else:
self.data.angular_jog_increments = increments.split()
else:
self.data.angular_jog_increments = ["1","45","180","360","continuous"]
self.add_alarm_entry(_("No default angular jog increments entry found in [DISPLAY] of INI file"))
# set default jog rate
# must convert from INI's units per second to gscreen's units per minute
temp = self.inifile.find("DISPLAY","DEFAULT_LINEAR_VELOCITY")
if temp:
temp = float(temp)*60
else:
temp = self.data.jog_rate
self.add_alarm_entry(_("No DEFAULT_LINEAR_VELOCITY entry found in [DISPLAY] of INI file: using internal default of %s"%temp))
self.data.jog_rate = float(temp)
self.emc.continuous_jog_velocity(float(temp),None)
# set max jog rate
# must convert from INI's units per second to gscreen's units per minute
temp = self.inifile.find("DISPLAY","MAX_LINEAR_VELOCITY")
if temp:
temp = float(temp)*60
else:
temp = self.data.jog_rate_max
self.add_alarm_entry(_("No MAX_LINEAR_VELOCITY entry found in [DISPLAY] of INI file: using internal default of %s"%temp))
self.data.jog_rate_max = float(temp)
        # max velocity settings: more than one place to check
# This is the maximum velocity of the machine
temp = self.inifile.find("TRAJ","MAX_VELOCITY")
if temp == None:
self.add_alarm_entry(_("No MAX_VELOCITY found in [TRAJ] of the INI file"))
temp = 1.0
self.data._maxvelocity = float(temp)
# look for angular defaults if there is angular axis
if "a" in self.data.axis_list or "b" in self.data.axis_list or "c" in self.data.axis_list:
# set default angular jog rate
# must convert from INI's units per second to gscreen's units per minute
temp = self.inifile.find("DISPLAY","DEFAULT_ANGULAR_VELOCITY")
if temp:
temp = float(temp)*60
else:
temp = self.data.angular_jog_rate
self.add_alarm_entry(_("No DEFAULT_ANGULAR_VELOCITY entry found in [DISPLAY] of INI file: using internal default of %s"%temp))
self.data.angular_jog_rate = float(temp)
self.emc.continuous_jog_velocity(None,float(temp))
            # set max angular jog rate
            # must convert from INI's units per second to gscreen's units per minute
temp = self.inifile.find("DISPLAY","MAX_ANGULAR_VELOCITY")
if temp:
temp = float(temp)*60
else:
temp = self.data.angular_jog_rate_max
self.add_alarm_entry(_("No MAX_ANGULAR_VELOCITY entry found in [DISPLAY] of INI file: using internal default of %s"%temp))
self.data.angular_jog_rate_max = float(temp)
# check for override settings
temp = self.inifile.find("DISPLAY","MAX_SPINDLE_OVERRIDE")
if temp:
self.data.spindle_override_max = float(temp)
else:
self.add_alarm_entry(_("No MAX_SPINDLE_OVERRIDE entry found in [DISPLAY] of INI file"))
temp = self.inifile.find("DISPLAY","MIN_SPINDLE_OVERRIDE")
if temp:
self.data.spindle_override_min = float(temp)
else:
self.add_alarm_entry(_("No MIN_SPINDLE_OVERRIDE entry found in [DISPLAY] of INI file"))
temp = self.inifile.find("DISPLAY","MAX_FEED_OVERRIDE")
if temp:
self.data.feed_override_max = float(temp)
else:
self.add_alarm_entry(_("No MAX_FEED_OVERRIDE entry found in [DISPLAY] of INI file"))
# if it's a lathe config, set the tooleditor style
self.data.lathe_mode = bool(self.inifile.find("DISPLAY", "LATHE"))
if self.data.lathe_mode:
self.add_alarm_entry(_("This screen will be orientated for Lathe options"))
# get the path to the tool table
self.data.tooltable = self.inifile.find("EMCIO","TOOL_TABLE")
# see if the user specified a tool editor
self.data.tooleditor = self.inifile.find("DISPLAY","TOOL_EDITOR")
        # get the path to the parameter (var) file
self.data.varfile = self.inifile.find("RS274NGC","PARAMETER_FILE")
# toolsetting reference type
if self.prefs.getpref('toolsetting_fixture', False):
self.g10l11 = 1
else:
self.g10l11 = 0
# set the display options from preference file
if self.prefs.getpref('dro_is_metric', False):
self.status.dro_mm(0)
else:
self.status.dro_inch(0)
if self.prefs.getpref('dro_actual', False):
self.status.dro_actual(0)
else:
self.status.dro_commanded(0)
if "initialize_keybindings" in dir(self.handler_instance):
self.handler_instance.initialize_keybindings()
else:
self.initialize_keybindings()
        # TODO the user should be able to invoke this so they know what methods are available
        # and what handlers are registered
#print handlers
if "initialize_pins" in dir(self.handler_instance):
self.handler_instance.initialize_pins()
else:
self.initialize_pins()
self.initialize_manual_toolchange()
if "connect_signals" in dir(self.handler_instance):
self.handler_instance.connect_signals(handlers)
else:
self.connect_signals(handlers)
# dynamic tabs setup
self._dynamic_childs = {}
atexit.register(self.kill_dynamic_childs)
self.set_dynamic_tabs()
# set title and display everything including the second screen if requested
if skinname == "gscreen":
title = "Gscreen"
else:
title = "Gscreen-%s"% skinname
self.widgets.window1.set_title("%s for linuxcnc"% title)
if self.screen2:
self.widgets.window2.show()
self.widgets.window2.move(0,0)
if not self.data.use_screen2:
self.widgets.window2.hide()
self.widgets.window1.show()
# Set up the widgets
if "initialize_widgets" in dir(self.handler_instance):
self.handler_instance.initialize_widgets()
else:
self.initialize_widgets()
# see if there are user messages in the ini file
self.message_setup()
# ok everything that might make HAL pins should be done now - let HAL know that
self.halcomp.ready()
try:
self.widgets._terminal.feed_child('halcmd show pin gscreen\n')
except:
pass
# timers for display updates
temp = self.inifile.find("DISPLAY","CYCLE_TIME")
if not temp:
self.add_alarm_entry(_("CYCLE_TIME in [DISPLAY] of INI file is missing: defaulting to 100ms"))
temp = 100
elif float(temp) < 50:
self.add_alarm_entry(_("CYCLE_TIME in [DISPLAY] of INI file is too small: defaulting to 100ms"))
temp = 100
print _("timeout %d" % int(temp))
if "timer_interrupt" in dir(self.handler_instance):
gobject.timeout_add(int(temp), self.handler_instance.timer_interrupt)
else:
gobject.timeout_add(int(temp), self.timer_interrupt)
def initialize_keybindings(self):
self.widgets.window1.connect('key_press_event', self.on_key_event,1)
self.widgets.window1.connect('key_release_event', self.on_key_event,0)
def initialize_preferences(self):
self.init_dro_pref()
self.init_theme_pref()
self.init_window_geometry_pref()
self.init_general_pref()
def init_dro_pref(self):
self.data.abs_textcolor = self.prefs.getpref('abs_textcolor', '#0000FFFF0000', str)
self.data.dtg_textcolor = self.prefs.getpref('dtg_textcolor', '#00000000FFFF', str)
self.data.rel_textcolor = self.prefs.getpref('rel_textcolor', '#FFFF00000000', str)
self.data.show_dtg = self.prefs.getpref('show_dtg', False, bool)
def init_theme_pref(self):
self.data.theme_name = self.prefs.getpref('gtk_theme', 'Redmond', str)
def init_window_geometry_pref(self):
self.data.fullscreen1 = self.prefs.getpref('fullscreen1', False, bool)
self.data.use_screen2 = self.prefs.getpref('use_screen2', False, bool)
self.data.window_geometry = self.prefs.getpref('window_geometry', 'default', str)
self.data.window_max = self.prefs.getpref('window_force_max', False, bool)
self.data.window2_geometry = self.prefs.getpref('window2_geometry', 'default', str)
self.data.window2_max = self.prefs.getpref('window2_force_max', False, bool)
def init_general_pref(self):
self.data.alert_sound = self.prefs.getpref('audio_alert', self.data.alert_sound, str)
self.data.desktop_notify = self.prefs.getpref('desktop_notify', True, bool)
self.data.diameter_mode = self.prefs.getpref('diameter_mode', False, bool)
self.data.display_order = self.prefs.getpref('display_order', (0,1,2), repr)
self.data.dro_units = self.prefs.getpref('dro_is_metric', False, bool)
self.data.error_sound = self.prefs.getpref('audio_error', self.data.error_sound, str)
self.data.error_font_name = self.prefs.getpref('error_font', 'Sans Bold 10', str)
self.data.err_textcolor = self.prefs.getpref('err_textcolor', 'default', str)
self.data.grid_size = self.prefs.getpref('grid_size', 1.0 , float)
self.data.hide_cursor = self.prefs.getpref('hide_cursor', False, bool)
self.data.plot_view = self.prefs.getpref('view', ("p","x","y","y2","z","z2"), repr)
self.data.show_offsets = self.prefs.getpref('show_offsets', True, bool)
self.data.spindle_start_rpm = self.prefs.getpref('spindle_start_rpm', 300 , float)
self.data.unlock_code = self.prefs.getpref('unlock_code', '123', str)
self.data.embedded_keyboard = self.prefs.getpref('embedded_keyboard', True, bool)
# initialize default widgets
def initialize_widgets(self):
self.init_axis_frames()
self.init_dro_colors()
self.init_screen2()
self.init_fullscreen1()
self.init_gremlin()
self.init_manual_spindle_controls()
self.init_dro()
self.init_audio()
self.init_desktop_notify()
self.init_statusbar()
self.init_entry()
self.init_tooleditor()
self.init_offsetpage()
self.init_embeded_terminal()
self.init_themes()
self.init_screen1_geometry()
self.init_running_options()
self.init_hide_cursor()
self.init_mode()
self.init_sensitive_on_off()
self.init_sensitive_run_idle()
self.init_sensitive_all_homed()
self.init_sensitive_edit_mode()
self.init_sensitive_override_mode()
self.init_sensitive_graphics_mode()
self.init_sensitive_origin_mode()
self.init_state()
def show_try_errors(self):
global verbose_debug
if verbose_debug:
exc_type, exc_value, exc_traceback = sys.exc_info()
formatted_lines = traceback.format_exc().splitlines()
print
print "****Gscreen verbose debugging:",formatted_lines[0]
traceback.print_tb(exc_traceback, limit=1, file=sys.stdout)
print formatted_lines[-1]
def init_axis_frames(self):
temp = self.data.axis_list
try:
if "a" in temp:
self.widgets.frame_a.show()
self.widgets.image6.hide() # make more room for axis display
except:
self.show_try_errors()
try:
if "b" in temp:
self.widgets.frame_b.show()
self.widgets.image6.hide() # make more room for axis display
except:
self.show_try_errors()
try:
if "c" in temp:
self.widgets.frame_c.show()
self.widgets.image6.hide() # make more room for axis display
except:
self.show_try_errors()
try:
if "y" in temp:
self.widgets.frame_y.show()
except:
self.show_try_errors()
if self.data.rotary_joints:
try:
self.widgets.button_select_rotary_adjust.show()
self.widgets.angular_jog_increments.show()
self.widgets.angular_jog_rate.show()
except:
self.show_try_errors()
def init_dro_colors(self):
self.widgets.abs_colorbutton.set_color(gtk.gdk.color_parse(self.data.abs_textcolor))
self.set_abs_color()
self.widgets.rel_colorbutton.set_color(gtk.gdk.color_parse(self.data.rel_textcolor))
self.set_rel_color()
self.widgets.dtg_colorbutton.set_color(gtk.gdk.color_parse(self.data.dtg_textcolor))
self.set_dtg_color()
def init_screen2(self):
self.widgets.use_screen2.set_active(self.data.use_screen2)
def init_fullscreen1(self):
self.widgets.fullscreen1.set_active(self.data.fullscreen1)
def init_gremlin(self):
self.widgets.show_offsets.set_active( self.data.show_offsets )
self.widgets.gremlin.show_offsets = self.data.show_offsets
self.widgets.grid_size.set_value(self.data.grid_size)
self.widgets.gremlin.grid_size = self.data.grid_size
self.widgets.gremlin.set_property('view',self.data.plot_view[0])
self.widgets.gremlin.set_property('metric_units',(self.data.dro_units == _MM))
def init_manual_spindle_controls(self):
self.widgets.spindle_start_rpm.set_value(self.data.spindle_start_rpm)
self.block("s_display_fwd")
self.widgets.s_display_fwd.set_active(True)
self.unblock("s_display_fwd")
self.preset_spindle_speed(self.data.spindle_start_rpm)
def init_dro(self):
self.widgets.diameter_mode.set_active(self.data.diameter_mode)
self.widgets.dro_units.set_active(self.data.dro_units)
def init_audio(self):
self.widgets.audio_alert_chooser.set_filename(self.data.alert_sound)
self.widgets.audio_error_chooser.set_filename(self.data.error_sound)
def init_desktop_notify(self):
self.widgets.desktop_notify.set_active(self.data.desktop_notify)
def init_statusbar(self):
self.statusbar_id = self.widgets.statusbar1.get_context_id("Statusbar1")
self.homed_status_message = self.widgets.statusbar1.push(1,"Ready For Homing")
def init_entry(self):
return
    # first we hide all the axis columns then unhide the ones we want
# if it's a lathe config we show lathe related columns
# and we load the tooltable data
def init_tooleditor(self):
self.widgets.tooledit1.set_visible("abcxyzuvwijq",False)
for axis in self.data.axis_list:
self.widgets.tooledit1.set_visible("%s"%axis,True)
if self.data.lathe_mode:
self.widgets.tooledit1.set_visible("ijq",True)
path = os.path.join(CONFIGPATH,self.data.tooltable)
self.widgets.tooledit1.set_filename(path)
# Only show the rows of the axes we use
# set the var path so offsetpage can fill in all the user system offsets
def init_offsetpage(self):
self.widgets.offsetpage1.set_col_visible('xyzabcuvw',False)
temp =""
for axis in self.data.axis_list:
temp=temp+axis
self.widgets.offsetpage1.set_col_visible(temp,True)
path = os.path.join(CONFIGPATH,self.data.varfile)
self.widgets.offsetpage1.set_filename(path)
def init_embeded_terminal(self):
# add terminal window
self.widgets._terminal = vte.Terminal ()
self.widgets._terminal.connect ("child-exited", lambda term: gtk.main_quit())
self.widgets._terminal.fork_command()
self.widgets._terminal.show()
window = self.widgets.terminal_window.add(self.widgets._terminal)
self.widgets.terminal_window.connect('delete-event', lambda window, event: gtk.main_quit())
self.widgets.terminal_window.show()
def init_themes(self):
# If there are themes then add them to combo box
if os.path.exists(themedir):
model = self.widgets.theme_choice.get_model()
model.clear()
model.append(("Follow System Theme",))
temp = 0
names = os.listdir(themedir)
names.sort()
for search,dirs in enumerate(names):
model.append((dirs,))
if dirs == self.data.theme_name:
temp = search+1
self.widgets.theme_choice.set_active(temp)
settings = gtk.settings_get_default()
if not self.data.theme_name == "Follow System Theme":
settings.set_string_property("gtk-theme-name", self.data.theme_name, "")
def init_screen1_geometry(self):
# maximize window or set geometry and optionally maximize
if "max" in self.data.window_geometry:
self.widgets.window1.maximize()
elif self.data.window_geometry == "default":
self.show_try_errors()
else:
good = self.widgets.window1.parse_geometry(self.data.window_geometry)
if self.data.window_max:
self.widgets.window1.maximize()
if not good:
print _("**** WARNING GSCREEN: could not understand the window geometry info in hidden preference file")
if self.widgets.fullscreen1.get_active():
self.widgets.window1.fullscreen()
def init_running_options(self):
self.widgets.button_block_delete.set_active(self.prefs.getpref('blockdel', False))
self.emc.blockdel(self.data.block_del)
self.widgets.button_option_stop.set_active(self.prefs.getpref('opstop', False))
self.emc.opstop(self.data.op_stop)
def init_hide_cursor(self):
self.widgets.hide_cursor.set_active(self.data.hide_cursor)
# hide cursor if requested
# that also sets the graphics to use touchscreen controls
if self.data.hide_cursor:
self.widgets.window1.window.set_cursor(INVISABLE)
self.widgets.gremlin.set_property('use_default_controls',False)
else:
self.widgets.window1.window.set_cursor(None)
self.widgets.gremlin.set_property('use_default_controls',True)
def init_mode(self):
label = self.data.mode_labels
self.widgets.button_mode.set_label(label[self.data.mode_order[0]])
# set to 'manual mode'
self.mode_changed(self.data.mode_order[0])
# buttons that need to be sensitive based on the machine being on or off
def init_sensitive_on_off(self):
self.data.sensitive_on_off = ["vmode0","mode0","mode1","button_homing","button_override","button_graphics","frame_s","button_mode","button_restart"]
for axis in self.data.axis_list:
self.data.sensitive_on_off.append("axis_%s"% axis)
    # buttons that need to be sensitive based on the interpreter running or being idle
def init_sensitive_run_idle(self):
self.data.sensitive_run_idle = ["button_edit","button_load","button_mode","button_restart"]
for axis in self.data.axis_list:
self.data.sensitive_run_idle.append("axis_%s"% axis)
def init_sensitive_all_homed(self):
self.data.sensitive_all_homed = ["button_zero_origin","button_offset_origin","button_select_system","button_tool_set"]
def init_sensitive_edit_mode(self):
self.data.sensitive_edit_mode = ["button_mode","button_menu","button_graphics","button_override","button_restart","button_single_step","button_run",
"ignore_limits"]
def init_sensitive_override_mode(self):
self.data.sensitive_override_mode = ["spindle_preset","spindle_control","spindle_increase","spindle_decrease","s_display_fwd",
"s_display_rev","button_graphics","button_homing","button_mode","button_jog_mode","button_flood_coolant",
"button_mist_coolant","button_tool_editor","button_tool_set"]
for axis in self.data.axis_list:
self.data.sensitive_override_mode.append("axis_%s"% axis)
def init_sensitive_graphics_mode(self):
self.data.sensitive_graphics_mode = ["button_override","button_homing","button_mode",
"button_zero_origin","button_offset_origin","button_plus","button_minus","vmode0","button_tool_set"]
for axis in self.data.axis_list:
self.data.sensitive_graphics_mode.append("axis_%s"% axis)
def init_sensitive_origin_mode(self):
self.data.sensitive_origin_mode = ["button_override","button_graphics","button_homing","button_mode",
"button_zero_origin","button_offset_origin","button_jog_mode","button_flood_coolant","button_mist_coolant","button_tool_editor","button_tool_set"]
for axis in self.data.axis_list:
self.data.sensitive_origin_mode.append("axis_%s"% axis)
# this needs to be last as it causes methods to be called (eg to sensitize buttons)
def init_state(self):
for num,i in enumerate(self.data.jog_increments):
if i == "continuous": break
self.data.current_jogincr_index = num
try:
jogincr = self.data.jog_increments[self.data.current_jogincr_index]
self.widgets.jog_increments.set_text(jogincr)
except:
self.show_try_errors()
try:
for num,i in enumerate(self.data.angular_jog_increments):
if i == "continuous": break
self.data.current_angular_jogincr_index = num
jogincr = self.data.angular_jog_increments[self.data.current_angular_jogincr_index]
self.widgets.angular_jog_increments.set_text(jogincr)
except:
self.show_try_errors()
self.on_hal_status_state_off(None)
self.add_alarm_entry(_("Control powered up and initialized"))
def init_unlock_code(self):
print "unlock code #",int(self.data.unlock_code)
self.widgets.unlock_number.set_value(int(self.data.unlock_code))
# general call to initialize HAL pins
    # call this if you want all the default pins, or call the individual
    # init methods to choose which ones you want
def initialize_pins(self):
self.init_spindle_pins()
self.init_coolant_pins()
self.init_jog_pins()
self.init_override_pins()
self.init_control_pins()
def init_spindle_pins(self):
self.halcomp.newpin("spindle-readout-in", hal.HAL_FLOAT, hal.HAL_IN)
def init_coolant_pins(self):
self.halcomp.newpin("aux-coolant-m7-out", hal.HAL_BIT, hal.HAL_OUT)
self.halcomp.newpin("aux-coolant-m8-out", hal.HAL_BIT, hal.HAL_OUT)
self.halcomp.newpin("mist-coolant-out", hal.HAL_BIT, hal.HAL_OUT)
self.halcomp.newpin("flood-coolant-out", hal.HAL_BIT, hal.HAL_OUT)
def init_jog_pins(self):
for axis in self.data.axis_list:
self.halcomp.newpin("jog-enable-%s-out"% (axis), hal.HAL_BIT, hal.HAL_OUT)
self.halcomp.newpin("jog-enable-out", hal.HAL_BIT, hal.HAL_OUT)
self.halcomp.newpin("jog-increment-out", hal.HAL_FLOAT, hal.HAL_OUT)
#self.data['jog-increment-in'] = hal_glib.GPin(self.halcomp.newpin('jog-increment-in', hal.HAL_S32, hal.HAL_IN))
#self.data['jog-increment-in'].connect('value-changed', self.on_hal_jog_increments_changed)
#self.data['jog-rate-in'] = hal_glib.GPin(self.halcomp.newpin('jog-rate-in', hal.HAL_S32, hal.HAL_IN))
#self.data['jog-rate-in'].connect('value-changed', self.on_hal_jog_rate_changed)
# pins used for selecting an encoder to adjust overrides
def init_override_pins(self):
self.halcomp.newpin("s-override-enable-out", hal.HAL_BIT, hal.HAL_OUT)
self.halcomp.newpin("f-override-enable-out", hal.HAL_BIT, hal.HAL_OUT)
self.halcomp.newpin("mv-override-enable-out", hal.HAL_BIT, hal.HAL_OUT)
def init_control_pins(self):
self.data['cycle_start'] = hal_glib.GPin(self.halcomp.newpin('cycle-start', hal.HAL_BIT, hal.HAL_IN))
self.data['cycle_start'].connect('value-changed', self.on_cycle_start_changed)
self.data['abort'] = hal_glib.GPin(self.halcomp.newpin('abort', hal.HAL_BIT, hal.HAL_IN))
self.data['abort'].connect('value-changed', self.on_abort_changed)
self.data['feed_hold'] = hal_glib.GPin(self.halcomp.newpin('feed-hold', hal.HAL_BIT, hal.HAL_IN))
self.data['feed_hold'].connect('value-changed', self.on_feed_hold_changed)
def initialize_manual_toolchange(self):
# for manual tool change dialog
self.halcomp.newpin("tool-number", hal.HAL_S32, hal.HAL_IN)
self.halcomp.newpin("tool-changed", hal.HAL_BIT, hal.HAL_OUT)
self.data['change-tool'] = hal_glib.GPin(self.halcomp.newpin('change-tool', hal.HAL_BIT, hal.HAL_IN))
# you can override manual tool change
if "on_tool_change" in dir(self.handler_instance):
self.data['change-tool'].connect('value-changed', self.handler_instance.on_tool_change)
else:
self.data['change-tool'].connect('value-changed', self.on_tool_change)
# *** GLADE callbacks ****
def on_button_spindle_controls_clicked(self,widget):
self.spindle_dialog()
def on_button_select_rotary_adjust_clicked(self,widget):
self.data.angular_jog_adjustment_flag = widget.get_active()
print self.data.angular_jog_adjustment_flag
def search_fwd(self,widget):
self.widgets.gcode_view.text_search(direction=True,text=self.widgets.search_entry.get_text())
def search_bwd(self,widget):
self.widgets.gcode_view.text_search(direction=False,text=self.widgets.search_entry.get_text())
def undo_edit(self,widget):
self.widgets.gcode_view.undo()
def redo_edit(self,widget):
self.widgets.gcode_view.redo()
def keypress(self,accelgroup, acceleratable, accel_key, accel_mods):
print gtk.accelerator_name(accel_key,accel_mods),acceleratable,accel_mods,
return True
def on_key_event(self,widget, event,signal):
keyname = gtk.gdk.keyval_name(event.keyval)
print "Key %s (%d) was pressed" % (keyname, event.keyval),signal, self.data.key_event_last
if event.state & gtk.gdk.CONTROL_MASK:
print "Control was being held down"
if event.state & gtk.gdk.MOD1_MASK:
print "Alt was being held down"
if event.state & gtk.gdk.SHIFT_MASK:
print "Shift was being held down"
try:
if keyname =="F1" and signal:
self.widgets.button_estop.emit("clicked")
elif keyname =="F2" and signal:
self.widgets.button_machine_on.emit("clicked")
elif keyname =="Escape" and signal:
self.widgets.hal_action_stop.emit("activate")
except:
pass
if keyname in( "Shift_L","Shift_R"): return True
if self.data.key_event_last[0] == keyname and self.data.key_event_last[1] == signal : return True
if self.data.mode_order[0] == _MAN and self.widgets.notebook_main.get_current_page() == 0:
if keyname == "Up":
self.do_key_jog(1,1,signal)
elif keyname == "Down":
self.do_key_jog(1,0,signal)
elif keyname == "Left":
self.do_key_jog(0,0,signal)
elif keyname == "Right":
self.do_key_jog(0,1,signal)
elif keyname == "Page_Down":
self.do_key_jog(2,0,signal)
elif keyname == "Page_Up":
self.do_key_jog(2,1,signal)
elif keyname == "bracketleft":
self.do_key_jog(3,0,signal)
elif keyname == "bracketright":
self.do_key_jog(3,1,signal)
elif keyname in ("I","i"):
if signal:
if event.state & gtk.gdk.SHIFT_MASK:
self.set_jog_increments(index_dir = -1)
else:
self.set_jog_increments(index_dir = 1)
self.data.key_event_last = keyname,signal
return True
def on_cycle_start_changed(self,hal_object):
print "cycle start change"
h = self.halcomp
if not h["cycle-start"]: return
if self.data.mode_order[0] == _AUTO:
self.add_alarm_entry(_("Cycle start pressed in AUTO mode"))
self.widgets.hal_toggleaction_run.emit('activate')
elif self.data.mode_order[0] == _MDI:
self.add_alarm_entry(_("Cycle start pressed in MDI mode"))
self.widgets.hal_mdihistory.submit()
def on_abort_changed(self,hal_object):
print "abort change"
h = self.halcomp
if not h["abort"]: return
self.widgets.hal_action_stop.emit("activate")
def on_feed_hold_changed(self,hal_object):
print "feed-hold change"
h = self.halcomp
self.widgets.hal_toggleaction_pause.set_active(h["feed-hold"])
# Here we create a manual tool change dialog
# This can be overridden in a handler file
def on_tool_change(self,widget):
h = self.halcomp
c = h['change-tool']
n = h['tool-number']
cd = h['tool-changed']
print "tool change",c,cd,n
if c:
message = _("Please change to tool # %s, then click OK."% n)
self.data.tool_message = self.notify(_("INFO:"),message,None)
self.warning_dialog(message, True,pinname="TOOLCHANGE")
else:
h['tool-changed'] = False
def on_spindle_speed_adjust(self,widget):
# spindle increase /decrease controls
if self.mdi_control.mdi_is_reading():
self.notify(_("INFO:"),_("Can't start spindle manually while MDI busy"),INFO_ICON)
return
elif self.data.mode_order[0] == _AUTO:
self.notify(_("INFO:"),_("can't start spindle manually in Auto mode"),INFO_ICON)
return
if widget == self.widgets.spindle_increase:
self.spindle_adjustment(True,True)
elif widget == self.widgets.spindle_decrease:
self.spindle_adjustment(False,True)
# start the spindle according to preset rpm and direction buttons, unless interp is busy
def on_spindle_control_clicked(self,*args):
if self.mdi_control.mdi_is_reading():
self.notify(_("INFO:"),_("Can't start spindle manually while MDI busy"),INFO_ICON)
return
elif self.data.mode_order[0] == _AUTO:
self.notify(_("INFO:"),_("can't start spindle manually in Auto mode"),INFO_ICON)
return
if not self.data.spindle_speed == 0:
self.emc.spindle_off(1)
return
if not self.widgets.s_display_fwd.get_active() and not self.widgets.s_display_rev.get_active():
self.notify(_("INFO:"),_("No direction selected for spindle"),INFO_ICON)
return
if self.widgets.s_display_fwd.get_active():
self.adjust_spindle_rpm(self.data.spindle_preset,1)
else:
self.adjust_spindle_rpm(self.data.spindle_preset,-1)
# dialog for setting the spindle preset speed
def on_preset_spindle(self,*args):
if self.data.preset_spindle_dialog: return
label = gtk.Label(_("Spindle Speed Preset Entry"))
label.modify_font(pango.FontDescription("sans 20"))
self.data.preset_spindle_dialog = gtk.Dialog(_("Spindle Speed Preset Entry"),
self.widgets.window1,
gtk.DIALOG_DESTROY_WITH_PARENT,
(gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT,
gtk.STOCK_OK, gtk.RESPONSE_ACCEPT))
calc = gladevcp.Calculator()
self.data.preset_spindle_dialog.vbox.pack_start(label)
self.data.preset_spindle_dialog.vbox.add(calc)
calc.set_value("")
calc.set_property("font","sans 20")
calc.set_editable(True)
calc.entry.connect("activate", lambda w : self.data.preset_spindle_dialog.emit('response',gtk.RESPONSE_ACCEPT))
self.data.preset_spindle_dialog.parse_geometry("400x400")
self.data.preset_spindle_dialog.set_decorated(False)
self.data.preset_spindle_dialog.show_all()
self.data.preset_spindle_dialog.connect("response", self.on_preset_spindle_return,calc)
def on_preset_spindle_return(self,widget,result,calc):
if result == gtk.RESPONSE_ACCEPT:
data = calc.get_value()
if data == None:
return
self.preset_spindle_speed(data)
widget.destroy()
self.data.preset_spindle_dialog = None
# dialog for manually calling a tool
def on_index_tool(self,*args):
if self.data.index_tool_dialog: return
self.data.index_tool_dialog = gtk.Dialog(_("Manual Tool Index Entry"),
self.widgets.window1,
gtk.DIALOG_DESTROY_WITH_PARENT,
(gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT,
gtk.STOCK_OK, gtk.RESPONSE_ACCEPT))
label = gtk.Label(_("Manual Tool Index Entry"))
label.modify_font(pango.FontDescription("sans 20"))
self.data.index_tool_dialog.vbox.pack_start(label)
calc = gladevcp.Calculator()
self.data.index_tool_dialog.vbox.add(calc)
calc.set_value("")
calc.set_property("font","sans 20")
calc.set_editable(True)
calc.entry.connect("activate", lambda w : self.data.index_tool_dialog.emit('response',gtk.RESPONSE_ACCEPT))
self.data.index_tool_dialog.parse_geometry("400x400")
self.data.index_tool_dialog.show_all()
self.data.index_tool_dialog.connect("response", self.on_index_tool_return,calc)
def on_index_tool_return(self,widget,result,calc):
if result == gtk.RESPONSE_ACCEPT:
raw = calc.get_value()
try:
                tool = abs(int(raw))
self.mdi_control.index_tool(tool)
except:
return
widget.destroy()
self.data.index_tool_dialog = None
def set_grid_size(self,widget):
data = widget.get_value()
self.widgets.gremlin.set_property('grid_size',data)
self.prefs.putpref('grid_size', data,float)
    # from preference page
def set_spindle_start_rpm(self,widget):
data = widget.get_value()
self.data.spindle_start_rpm = data
self.prefs.putpref('spindle_start_rpm', data,float)
self.preset_spindle_speed(data)
def update_preview(self,widget):
file = widget.get_filename()
if file:
try:
test = Player()
test.set_sound(file)
test.run()
except:pass
def change_sound(self,widget,sound):
file = widget.get_filename()
if file:
self.data[sound+"_sound"] = file
temp = "audio_"+ sound
self.prefs.putpref(temp, file, str)
# manual spindle control
def on_s_display_fwd_toggled(self,widget):
if widget.get_active():
            if self.widgets.s_display_rev.get_active():
self.emc.spindle_off(1)
self.block("s_display_rev")
self.widgets.s_display_rev.set_active(False)
self.unblock("s_display_rev")
else:
self.block("s_display_fwd")
widget.set_active(True)
self.unblock("s_display_fwd")
# manual spindle control
def on_s_display_rev_toggled(self,widget):
if widget.get_active():
if self.widgets.s_display_fwd.get_active():
self.emc.spindle_off(1)
self.block("s_display_fwd")
self.widgets.s_display_fwd.set_active(False)
self.unblock("s_display_fwd")
else:
self.block("s_display_rev")
widget.set_active(True)
self.unblock("s_display_rev")
# for plot view controls with touchscreen
def on_eventbox_gremlin_enter_notify_event(self,widget,event):
if self.widgets.button_graphics.get_active():
if self.widgets.button_zoom.get_active():
self.widgets.gremlin.start_continuous_zoom(event.y)
elif self.widgets.button_rotate_v.get_active():
self.widgets.gremlin.select_prime(event.x,event.y)
self.widgets.gremlin.set_mouse_start(event.x,event.y)
# for plot view controls with touchscreen
def on_eventbox_gremlin_leave_notify_event(self,widget,event):
self.widgets.gremlin.select_fire(event.x,event.y)
# for plot view controls with touchscreen or mouse
    # when in graphics adjustment mode the default mouse controls are disabled;
    # the selected button (zoom / pan / rotate) decides what pointer motion does
def on_gremlin_motion(self,widget,event):
if self.widgets.button_graphics.get_active():
self.widgets.gremlin.set_property('use_default_controls',False)
if self.data.hide_cursor:
if self.widgets.button_zoom.get_active():
self.widgets.gremlin.continuous_zoom(event.y)
elif self.widgets.button_pan_v.get_active():
self.pan(event.x,event.y)
elif self.widgets.button_pan_h.get_active():
self.pan(event.x,event.y)
elif self.widgets.button_rotate_v.get_active():
self.rotate(event.x,event.y)
elif self.widgets.button_rotate_h.get_active():
self.rotate(event.x,event.y)
elif self.widgets.button_zoom.get_active() or self.widgets.button_pan_v.get_active():
return
elif self.widgets.button_pan_h.get_active() or self.widgets.button_rotate_v.get_active():
return
elif self.widgets.button_rotate_h.get_active():
return
else:
self.widgets.gremlin.set_property('use_default_controls',True)
else:
self.widgets.gremlin.set_property('use_default_controls',not self.data.hide_cursor)
# display calculator for input
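    # the named callback is looked up on self and is called with
    # (dialog, response id, calculator widget, data, data2) when the dialog responds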
def launch_numerical_input(self,callback="on_numerical_entry_return",data=None,data2=None,title=_("Entry dialog")):
if self.data.entry_dialog: return
label = gtk.Label(title)
label.modify_font(pango.FontDescription("sans 20"))
self.data.entry_dialog = gtk.Dialog(title,
self.widgets.window1,
gtk.DIALOG_DESTROY_WITH_PARENT,
(gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT,
gtk.STOCK_OK, gtk.RESPONSE_ACCEPT))
calc = gladevcp.Calculator()
calc.set_editable(True)
self.data.entry_dialog.vbox.pack_start(label)
self.data.entry_dialog.vbox.add(calc)
calc.set_value("")
calc.set_property("font","sans 20")
calc.entry.connect("activate", lambda w : self.data.entry_dialog.emit('response',gtk.RESPONSE_ACCEPT))
self.data.entry_dialog.parse_geometry("400x400")
#self.data.entry_dialog.set_decorated(False)
self.data.entry_dialog.connect("response", self[callback],calc,data,data2)
self.data.entry_dialog.show_all()
def on_numerical_entry_return(self,widget,result,calc,userdata,userdata2):
data = calc.get_value()
if result == gtk.RESPONSE_ACCEPT:
print "accept",data
if data == None:
data = 0
self.widgets.statusbar1.push(1,"Last Calculation: %f"%data)
widget.destroy()
self.data.entry_dialog = None
def on_offset_origin_entry_return(self,widget,result,calc,userdata,userdata2):
value = calc.get_value()
if result == gtk.RESPONSE_ACCEPT:
if value == None:
return
# if an axis is selected then set it
for axis in self.data.axis_list:
if self.widgets["axis_%s"%axis].get_active():
print "set %s axis" %axis
if not axis == "s":
if axis in('a','b','c'):
pos = self.get_qualified_input(value,switch=_DEGREE_INPUT)
else:
pos = self.get_qualified_input(value)
self.mdi_control.set_axis(axis,pos)
self.reload_plot()
widget.destroy()
self.data.entry_dialog = None
def on_tool_offset_entry_return(self,widget,result,calc,userdata,userdata2):
value = calc.get_value()
if result == gtk.RESPONSE_ACCEPT:
if value == None:
return
# if an axis is selected then set it
for axis in self.data.axis_list:
if self.widgets["axis_%s"%axis].get_active():
print "tool %d, set in %s axis to- %f" %(self.data.tool_in_spindle,axis,value)
if axis in('a','b','c'):
pos = self.get_qualified_input(value,switch=_DEGREE_INPUT)
else:
pos = self.get_qualified_input(value)
self.mdi_control.touchoff(self.data.tool_in_spindle,axis,pos)
widget.destroy()
self.data.entry_dialog = None
def on_adj_overrides_entry_return(self,widget,result,calc,userdata,userdata2):
data = calc.get_value()
if result == gtk.RESPONSE_ACCEPT:
print "accept",data
if data == None:
return None
self.adjustment_buttons(userdata,userdata2,data)
widget.destroy()
self.data.entry_dialog = None
# shows the cursor and warps it to the origin before exiting
def hack_leave(self,*args):
if not self.data.hide_cursor: return
w = self.widgets.window1.window
d = w.get_display()
s = w.get_screen()
x, y = w.get_origin()
d.warp_pointer(s, x, y)
def on_hide_cursor(self,*args):
print "hide cursor change"
if self.widgets.hide_cursor.get_active():
self.prefs.putpref('hide_cursor', True)
self.data.hide_cursor = True
self.widgets.window1.window.set_cursor(INVISABLE)
else:
self.prefs.putpref('hide_cursor', False)
self.data.hide_cursor = False
self.widgets.window1.window.set_cursor(None)
# opens halshow
def on_halshow(self,*args):
print "halshow",TCLPATH
p = os.popen("tclsh %s/bin/halshow.tcl -- -ini %s &" % (TCLPATH,self.inipath))
# opens the calibration program
def on_calibration(self,*args):
print "calibration --%s"% self.inipath
p = os.popen("tclsh %s/bin/emccalib.tcl -- -ini %s > /dev/null &" % (TCLPATH,self.inipath),"w")
# opens the linuxcnc status program
def on_status(self,*args):
p = os.popen("linuxcnctop > /dev/null &","w")
# opens a halmeter
def on_halmeter(self,*args):
print "halmeter"
p = os.popen("halmeter &")
# opens the halscope
def on_halscope(self,*args):
p = os.popen("halscope > /dev/null &","w")
def on_ladder(self,*args):
if hal.component_exists('classicladder_rt'):
p = os.popen("classicladder &","w")
else:
self.notify(_("INFO:"),_("Classicladder realtime component not detected"),INFO_ICON)
self.add_alarm_entry(_("ladder not available - is the realtime component loaded?"))
# estop machine before closing
def on_window1_destroy(self, widget, data=None):
print "estopping / killing gscreen"
self.emc.machine_off(1)
self.emc.estop(1)
time.sleep(2)
gtk.main_quit()
def on_axis_selection_clicked(self,widget):
self.update_active_axis_buttons(widget)
def on_mode_clicked(self,widget,event):
# only change machine modes on click
if event.type == gtk.gdk.BUTTON_PRESS:
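            # rotate the three-entry mode list so the next mode becomes current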
a,b,c = self.data.mode_order
self.data.mode_order = b,c,a
label = self.data.mode_labels
self.widgets.button_mode.set_label(label[self.data.mode_order[0]])
self.mode_changed(self.data.mode_order[0])
def on_button_show_offsets_clicked(self,widget):
self.toggle_offset_view()
# Horizontal buttons
def on_button_home_all_clicked(self,widget):
self.home_all()
def on_button_unhome_all_clicked(self,widget):
self.unhome_all()
def on_button_home_axis_clicked(self,widget):
self.home_selected()
def on_button_unhome_axis_clicked(self,widget):
self.unhome_selected()
def on_button_toggle_readout_clicked(self,widget):
self.dro_toggle()
def on_button_jog_mode_clicked(self,widget):
self.jog_mode()
def on_button_select_system_clicked(self,widget):
self.origin_system()
def on_button_flood_coolant_clicked(self,widget):
self.toggle_flood()
def on_button_mist_coolant_clicked(self,widget):
self.toggle_mist()
def on_button_tool_editor_clicked(self,widget):
self.reload_tooltable()
def on_button_block_delete_clicked(self,widget):
self.toggle_block_delete()
def on_button_option_stop_clicked(self,widget):
self.toggle_optional_stop()
def on_button_next_tab_clicked(self,widget):
self.next_tab()
def on_button_overrides_clicked(self,widget,button):
self.toggle_overrides(widget,button)
def on_button_clear_view_clicked(self,widget):
self.clear_plot()
def on_graphic_overrides_clicked(self,widget,button):
self.toggle_graphic_overrides(widget,button)
# vertical buttons
def on_button_plus_pressed(self,widget):
self.adjustment_buttons(widget,True)
def on_button_plus_released(self,widget):
self.adjustment_buttons(widget,False)
def on_button_minus_pressed(self,widget):
self.adjustment_buttons(widget,True)
def on_button_minus_released(self,widget):
self.adjustment_buttons(widget,False)
def on_offset_origin_clicked(self,widget):
        # adjust overrides
if self.widgets.button_override.get_active():
self.launch_numerical_input("on_adj_overrides_entry_return",widget,True,title=_("Override Entry"))
# offset origin
else:
self.set_axis_checks()
def on_move_to_clicked(self,widget):
# Move-to button
# manual mode and jog mode active
if self.data.mode_order[0] == _MAN and self.widgets.button_jog_mode.get_active():
self.launch_numerical_input("on_adj_overrides_entry_return",widget,True)
def on_tool_touchoff_clicked(self,widget):
print "touch"
self.tool_touchoff_checks()
def on_mode_select_clicked(self,widget,event):
maxpage = self.widgets.notebook_main.get_n_pages()
page = self.widgets.notebook_main.get_current_page()
nextpage = page + 1
print "mode select",maxpage,page,nextpage
if nextpage == maxpage:nextpage = 0
self.widgets.notebook_main.set_current_page(nextpage)
def on_estop_clicked(self,*args):
if self.data.estopped:
self.emc.estop_reset(1)
elif not self.data.machine_on:
self.emc.machine_on(1)
self.widgets.on_label.set_text("Machine On")
self.add_alarm_entry(_("Machine powered on"))
else:
self.emc.machine_off(1)
self.emc.estop(1)
self.widgets.on_label.set_text("Machine Off")
self.add_alarm_entry(_("Machine Estopped!"))
def on_calc_clicked(self,widget):
self.launch_numerical_input(title=_("Calculator"))
def on_theme_choice_changed(self, widget):
self.change_theme(widget.get_active_text())
# True is fullscreen
def on_fullscreen1_pressed(self, widget):
self.set_fullscreen1(widget.get_active())
def on_use_screen2_pressed(self,*args):
self.toggle_screen2()
# True is metric
def on_dro_units_pressed(self, widget):
self.set_dro_units(widget.get_active())
# True is diameter mode
def on_diameter_mode_pressed(self, widget):
self.set_diameter_mode(widget.get_active())
def on_rel_colorbutton_color_set(self,*args):
self.set_rel_color()
def on_abs_colorbutton_color_set(self,*args):
self.set_abs_color()
def on_dtg_colorbutton_color_set(self,*args):
self.set_dtg_color()
# True for showing full offsets
def on_show_offsets_pressed(self, widget):
self.set_show_offsets(widget.get_active())
def on_unlock_number_value_changed(self,widget):
self.data.unlock_code = str(int(widget.get_value()))
self.set_unlock_code()
# True is for showing DTG
def on_show_dtg_pressed(self, widget):
self.set_show_dtg(widget.get_active())
# True will use notify
def on_desktop_notify_toggled(self,widget):
self.set_desktop_notify( widget.get_active())
def on_pop_statusbar_clicked(self, *args):
self.widgets.statusbar1.pop(self.statusbar_id)
    # This is part of the user message system
    # 'status' prints to the status bar
    # 'okdialog' pops a dialog that the user must acknowledge
    # 'dialog' pops a yes/no dialog where the user must choose yes or no
    # status and dialog messages can be combined so they print to the status bar
    # and pop a dialog
def on_printmessage(self, pin, pinname,boldtext,text,type):
if not pin.get(): return
if boldtext == "NONE": boldtext = None
if "status" in type:
if boldtext:
statustext = boldtext
else:
statustext = text
self.notify(_("INFO:"),statustext,INFO_ICON)
if "dialog" in type or "okdialog" in type:
if pin.get():
self.halcomp[pinname + "-waiting"] = True
if "okdialog" in type:
self.warning_dialog(boldtext,True,text,pinname)
else:
if pin.get():
self.halcomp[pinname + "-response"] = 0
self.warning_dialog(boldtext,False,text,pinname)
def toggle_overrides(self,widget,data):
print "overrides - button_h_%s"% data
        buttons = ('jog_speed','jog_increments','feed_override','spindle_override','rapid_override')
        for i in buttons:
print i,data
if i == data:continue
button = "button_%s"% (i)
print "block",button
self.block(button)
self.widgets[button].set_active(False)
self.unblock(button)
self.update_hal_override_pins()
def toggle_graphic_overrides(self,widget,data):
print "graphic overrides - button_%s"%data
        buttons = ('zoom','pan_v','pan_h','rotate_v','rotate_h')
        for i in buttons:
if i == data:continue
button = "button_%s"% (i)
self.block(button)
self.widgets[button].set_active(False)
self.unblock(button)
def on_hal_status_interp_run(self,widget):
print "run"
self.sensitize_widgets(self.data.sensitive_run_idle,False)
def on_hal_status_interp_idle(self,widget):
print "idle"
self.sensitize_widgets(self.data.sensitive_run_idle,True)
state = self.data.all_homed
self.sensitize_widgets(self.data.sensitive_all_homed,state)
mode = self.emc.get_mode()
print "mode",mode,self.data.mode_order[0]
if self.data.mode_order[0] == _MAN and not mode == 1:
print "set to manual"
self.emc.set_manual_mode()
def on_hal_status_state_on(self,widget):
print "on"
self.sensitize_widgets(self.data.sensitive_on_off,True)
state = self.data.all_homed
self.sensitize_widgets(self.data.sensitive_all_homed,state)
if not state:
self.widgets.button_homing.emit("clicked")
def on_hal_status_state_off(self,widget):
print "off"
self.sensitize_widgets(self.data.sensitive_on_off,False)
def on_hal_status_axis_homed(self,widget,data):
print "homed list",data
temp=[]
for letter in(self.data.axis_list):
count = "xyzabcuvws".index(letter)
if str(count) in data:
temp.append(" %s"%letter.upper())
self.add_alarm_entry(_("Axes %s are homed"%temp))
def on_hal_status_all_homed(self,widget):
print "all-homed"
self.data.all_homed = True
self.widgets.button_homing.set_active(False)
self.widgets.statusbar1.remove_message(self.statusbar_id,self.homed_status_message)
self.add_alarm_entry(_("All the axes have been homed"))
def on_hal_status_not_all_homed(self,widget,data):
print "not-all-homed",data
self.data.all_homed = False
temp =[]
for letter in(self.data.axis_list):
count = "xyzabcuvws".index(letter)
if str(count) in data:
temp.append(" %s"%letter.upper())
self.add_alarm_entry(_("There are unhomed axes: %s"%temp))
def on_hal_status_file_loaded(self,widget,filename):
path,name = os.path.split(filename)
self.widgets.gcode_tab.set_text(name)
self.add_alarm_entry(_("Program loaded: %s"%filename))
def on_toggle_keyboard(self,widget,args="",x="",y=""):
if self.data.ob:
self.kill_keyboard()
else:
self.launch_keyboard()
if self.data.mode_order[0] == _MDI:
try:
self.widgets.hal_mdihistory.entry.grab_focus()
except:
dbg("**** GSCREEN ERROR: can't set focus to hal_mdihistory when Onboard launched - Is this widget name in glade file?")
elif self.data.mode_order[0] == _AUTO:
try:
self.widgets.gcode_view.grab_focus()
except:
dbg("**** GSCREEN ERROR: can't set focus to gcode_view when Onboard launched - Is this widget name in glade file?")
def on_hal_jog_increments_changed(self,halpin):
print halpin.get()
data = halpin.get()
self.set_jog_increments(vector=data)
def on_hal_jog_rate_changed(self,halpin):
print halpin.get()
data = halpin.get()
self.set_jog_rate(absolute=data)
    # highlight the gcode one line lower
# used for run-at-line restart
def restart_down(self,widget,calc):
self.widgets.gcode_view.line_down()
line = int(self.widgets.gcode_view.get_line_number())
calc.set_value(line)
self.update_restart_line(line,line)
    # highlight the gcode one line higher
# used for run-at-line restart
def restart_up(self,widget,calc):
self.widgets.gcode_view.line_up()
line = int(self.widgets.gcode_view.get_line_number())
calc.set_value(line)
self.update_restart_line(line,line)
# highlight the gcode line specified
# used for run-at-line restart
def restart_set_line(self,widget,calc):
try:
line = int(calc.get_value())
except:
calc.set_value("0.0")
line = 0
self.widgets.gcode_view.set_line_number(line)
self.update_restart_line(line,line)
    # This toggles the DRO units for the current session only;
    # the preference page unit button is what saves the state for startup
def on_metric_select_clicked(self,widget):
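        # dro_units holds 0 or 1; this flips it to the other value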
data = (self.data.dro_units -1) * -1
self.set_dro_units(data,False)
def on_button_edit_clicked(self,widget):
state = widget.get_active()
if not state:
self.edited_gcode_check()
self.widgets.notebook_main.set_current_page(0)
self.widgets.notebook_main.set_show_tabs(not (state))
self.edit_mode(state)
if not state and self.widgets.button_full_view.get_active():
self.set_full_graphics_view(True)
if state:
self.widgets.search_box.show()
else:
self.widgets.search_box.hide()
def on_button_change_view_clicked(self,widget):
self.toggle_view()
def on_button_full_view_clicked(self,widget):
self.set_full_graphics_view(widget.get_active())
# ****** do stuff *****
def spindle_dialog(self):
if not self.data.spindle_control_dialog:
self.data.spindle_control_dialog = gtk.Dialog(_("Manual Spindle Control"),
self.widgets.window1,
gtk.DIALOG_DESTROY_WITH_PARENT,
(gtk.STOCK_CLOSE, gtk.RESPONSE_REJECT))
self.data.spindle_control_dialog.vbox.add(self.widgets.frame_s)
self.data.spindle_control_dialog.parse_geometry("200x200")
self.data.spindle_control_dialog.connect("delete_event", self.spindle_dialog_return)
self.data.spindle_control_dialog.connect("response", self.spindle_dialog_return)
self.data.spindle_control_dialog.show_all()
def spindle_dialog_return(self,widget,signal):
self.data.spindle_control_dialog.hide()
return True
def update_restart_line(self,line,reset_line):
if "set_restart_line" in dir(self.handler_instance):
self.handler_instance.set_restart_line(line,reset_line)
else:
self.set_restart_line(line,reset_line)
def set_restart_line(self,line,reset_line):
self.widgets.hal_toggleaction_run.set_restart_line(line,reset_line)
def edited_gcode_check(self):
if self.widgets.gcode_view.buf.get_modified():
dialog = gtk.MessageDialog(self.widgets.window1,
gtk.DIALOG_DESTROY_WITH_PARENT,
gtk.MESSAGE_QUESTION, gtk.BUTTONS_YES_NO,"You edited the File. save edits?\n Choosing No will erase the edits.")
dialog.show_all()
result = dialog.run()
dialog.destroy()
if result == gtk.RESPONSE_YES:
self.widgets.hal_action_saveas.emit("activate")
else:
self.widgets.gcode_view.load_file()
def set_desktop_notify(self,data):
self.data.desktop_notify = data
self.prefs.putpref('desktop_notify', data, bool)
    # shows the 'Onboard' virtual keyboard if available
    # check for a key_box widget - if there is one and the embedded flag is set, embed Onboard in it,
    # else launch an independent Onboard inside a dialog so it works in fullscreen
    # (otherwise it hides when the main screen is touched),
    # else print an error message
def launch_keyboard(self,args="",x="",y=""):
print args,x,y
def dialog_keyboard():
if self.data.keyboard_dialog:
self.data.keyboard_dialog.show()
self.data.ob = True
else:
self.data.ob = subprocess.Popen(["onboard","--xid",args,x,y],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
close_fds=True)
sid = self.data.ob.stdout.readline()
self.data.keyboard_dialog = gtk.Dialog(_("Keyboard"),
self.widgets.window1,
gtk.DIALOG_DESTROY_WITH_PARENT)
self.data.keyboard_dialog.set_accept_focus(False)
self.data.keyboard_dialog.set_deletable(False)
socket = gtk.Socket()
socket.show()
self.data.keyboard_dialog.vbox.add(socket)
socket.add_id(long(sid))
self.data.keyboard_dialog.parse_geometry("800x200")
self.data.keyboard_dialog.show_all()
self.data.keyboard_dialog.connect("destroy", self.keyboard_return)
try:
if self.widgets.key_box and self.data.embedded_keyboard:
self.widgets.rightside_box.show()
self.widgets.key_box.show()
self.data.ob = subprocess.Popen(["onboard","--xid",args,x,y],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
close_fds=True)
sid = self.data.ob.stdout.readline()
print"keyboard", sid # skip header line
socket = gtk.Socket()
socket.show()
self.widgets.key_box.add(socket)
socket.add_id(long(sid))
else:
dialog_keyboard()
except:
try:
dialog_keyboard()
except:
print _("Error with launching 'Onboard' on-screen keyboard program")
# seems the only way to trap the destroy signal
def keyboard_return(self,widget):
self.data.keyboard_dialog = None
self.data.ob = None
# if keyboard in dialog just hide it
# else kill it and if needed hide the key_box
def kill_keyboard(self):
if not self.data.keyboard_dialog == None:
self.data.keyboard_dialog.hide()
self.data.ob = None
return
try:
self.widgets.key_box.hide()
self.data.ob.kill()
self.data.ob.terminate()
self.data.ob = None
except:
try:
self.data.ob.kill()
self.data.ob.terminate()
self.data.ob = None
except:
self.show_try_errors()
    # this installs local signals unless overridden by custom handlers
    # HAL pin signal callbacks are covered in the HAL pin initialization functions
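    # each signal_list entry below is:
    # [blockable flag, widget name, signal name, handler method name, optional user data]
    # entries flagged "block" get their connection id stored in self.data as
    # "_sighandler_<widget name>" so the signal can be blocked/unblocked later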
def connect_signals(self, handlers):
signal_list = [
["","button_estop","clicked", "on_estop_clicked"],
["","gremlin","motion-notify-event", "on_gremlin_motion"],
["","button_mode","button_press_event", "on_mode_clicked"],
["","button_menu","button_press_event", "on_mode_select_clicked"],
["","button_plus","pressed", "on_button_plus_pressed"],
["","button_plus","released", "on_button_plus_released"],
["","button_minus","pressed", "on_button_minus_pressed"],
["","button_minus","released", "on_button_minus_released"],
["","button_zero_origin","clicked", "adjustment_buttons",True],
["","button_offset_origin","clicked", "on_offset_origin_clicked"],
["","button_move_to","clicked", "on_move_to_clicked"],
["","button_tool_set","clicked", "on_tool_touchoff_clicked"],
["","button_change_view","clicked", "on_button_change_view_clicked"],
["","button_full_view","clicked", "on_button_full_view_clicked"],
["","button_home_all","clicked", "on_button_home_all_clicked"],
["","button_unhome_all","clicked", "on_button_unhome_all_clicked"],
["","button_home_axis","clicked", "on_button_home_axis_clicked"],
["","button_unhome_axis","clicked", "on_button_unhome_axis_clicked"],
["","button_toggle_readout","clicked", "on_button_toggle_readout_clicked"],
["","button_jog_mode","clicked", "on_button_jog_mode_clicked"],
["","button_spindle_controls","clicked", "on_button_spindle_controls_clicked"],
["","button_select_system","clicked", "on_button_select_system_clicked"],
["","button_mist_coolant","clicked", "on_button_mist_coolant_clicked"],
["","button_flood_coolant","clicked", "on_button_flood_coolant_clicked"],
["","button_tool_editor","clicked", "on_button_tool_editor_clicked"],
["","button_toggle_readout2","clicked", "on_button_toggle_readout_clicked"],
["","button_show_offsets","clicked", "on_button_show_offsets_clicked"],
["","button_block_delete","clicked", "on_button_block_delete_clicked"],
["","button_option_stop","clicked", "on_button_option_stop_clicked"],
["","button_next_tab","clicked", "on_button_next_tab_clicked"],
["","button_calc","clicked", "on_calc_clicked"],
["block","button_jog_speed","clicked", "on_button_overrides_clicked","jog_speed"],
["","button_select_rotary_adjust","clicked", "on_button_select_rotary_adjust_clicked"],
["block","button_jog_increments","clicked", "on_button_overrides_clicked","jog_increments"],
["block","button_feed_override","clicked", "on_button_overrides_clicked","feed_override"],
["block","button_spindle_override","clicked", "on_button_overrides_clicked","spindle_override"],
["block","button_rapid_override","clicked", "on_button_overrides_clicked","rapid_override"],
["block","button_zoom","clicked", "on_graphic_overrides_clicked","zoom"],
["block","button_pan_v","clicked", "on_graphic_overrides_clicked","pan_v"],
["block","button_pan_h","clicked", "on_graphic_overrides_clicked","pan_h"],
["block","button_rotate_v","clicked", "on_graphic_overrides_clicked","rotate_v"],
["block","button_rotate_h","clicked", "on_graphic_overrides_clicked","rotate_h"],
["","button_clear_view","clicked", "on_button_clear_view_clicked"],
["","theme_choice","changed", "on_theme_choice_changed"],
["","use_screen2","clicked", "on_use_screen2_pressed"],
["","dro_units","clicked", "on_dro_units_pressed"],
["","diameter_mode","clicked", "on_diameter_mode_pressed"],
["","button_edit","toggled", "on_button_edit_clicked"],
["","show_offsets","clicked", "on_show_offsets_pressed"],
["","show_dtg","clicked", "on_show_dtg_pressed"],
["","fullscreen1","clicked", "on_fullscreen1_pressed"],
["","shut_down","clicked", "on_window1_destroy"],
["","shut_down",'released',"hack_leave"],
["","run_halshow","clicked", "on_halshow"],
["","run_calibration","clicked", "on_calibration"],
["","run_status","clicked", "on_status"],
["","run_halmeter","clicked", "on_halmeter"],
["","run_halscope","clicked", "on_halscope"],
["","run_ladder","clicked", "on_ladder"],
["","hide_cursor","clicked", "on_hide_cursor"],
["","button_homing","clicked", "homing"],
["","button_override","clicked", "override"],
["","button_graphics","clicked", "graphics"],
["","desktop_notify","toggled", "on_desktop_notify_toggled"],
["","grid_size","value_changed", "set_grid_size"],
["","spindle_start_rpm","value_changed", "set_spindle_start_rpm"],
["","spindle_control","clicked", "on_spindle_control_clicked"],
["","spindle_preset","clicked", "on_preset_spindle"],
["","spindle_increase","clicked", "on_spindle_speed_adjust"],
["","spindle_decrease","clicked", "on_spindle_speed_adjust"],
["","audio_error_chooser","update-preview", "update_preview"],
["","audio_alert_chooser","update-preview", "update_preview"],
["","hal_status","interp-idle", "on_hal_status_interp_idle"],
["","hal_status","interp-run", "on_hal_status_interp_run"],
["","hal_status","state-on", "on_hal_status_state_on"],
["","hal_status","state-off", "on_hal_status_state_off"],
["","hal_status","homed", "on_hal_status_axis_homed"],
["","hal_status","all-homed", "on_hal_status_all_homed"],
["","hal_status","not-all-homed", "on_hal_status_not_all_homed"],
["","hal_status","file-loaded", "on_hal_status_file_loaded"],
["","window1","destroy", "on_window1_destroy"],
["","pop_statusbar","clicked", "on_pop_statusbar_clicked"],
["","dtg_colorbutton","color-set", "on_dtg_colorbutton_color_set"],
["","abs_colorbutton","color-set", "on_abs_colorbutton_color_set"],
["","rel_colorbutton","color-set", "on_rel_colorbutton_color_set"],
["","eventbox_gremlin","leave_notify_event", "on_eventbox_gremlin_leave_notify_event"],
["","eventbox_gremlin","enter_notify_event", "on_eventbox_gremlin_enter_notify_event"],
["block","s_display_rev","toggled", "on_s_display_rev_toggled"],
["block","s_display_fwd","toggled", "on_s_display_fwd_toggled"],
["","ignore_limits","clicked", "toggle_ignore_limits"],
["","audio_error_chooser","selection_changed","change_sound","error"],
["","audio_alert_chooser","selection_changed","change_sound","alert"],
["","toggle_keyboard","clicked", "on_toggle_keyboard"],
["","metric_select","clicked","on_metric_select_clicked"],
["","button_restart","clicked", "launch_restart_dialog"],
["","button_index_tool","clicked", "on_index_tool"],
["","button_search_fwd","clicked", "search_fwd"],
["","button_search_bwd","clicked", "search_bwd"],
["","button_undo","clicked", "undo_edit"],
["","button_redo","clicked", "redo_edit"],]
# check to see if the calls in the signal list are in the custom handler's list of calls
# if so skip the call in the signal list
# else connect the signals based on how many arguments they have and if blockable
for i in signal_list:
if i[3] in handlers:
print _("**** GSCREEN INFO: Overriding internal signal call to %s"% i[3])
continue
try:
# add id # for blockable signals
if i[0] == "block":
j = "_sighandler_%s"% i[1]
if len(i) == 4:
self.data[j] = int(self.widgets[i[1]].connect(i[2], self[i[3]]))
if len(i) == 5:
self.data[j] = int(self.widgets[i[1]].connect(i[2], self[i[3]],i[4]))
elif len(i) == 4:
self.widgets[i[1]].connect(i[2], self[i[3]])
elif len(i) == 5:
self.widgets[i[1]].connect(i[2], self[i[3]],i[4])
except:
print ("**** GSCREEN WARNING: could not connect %s to %s"% (i[1],i[3]))
        # setup signals that can be blocked but not overridden
for axis in self.data.axis_list:
cb = "axis_%s"% axis
i = "_sighandler_axis_%s"% axis
try:
self.data[i] = int(self.widgets[cb].connect("clicked", self.on_axis_selection_clicked))
except:
self.show_try_errors()
def toggle_offset_view(self):
data = self.data.plot_hidden
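        # flip the 0/1 flag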
data = (data * -1) +1
self.widgets.dro_frame.set_visible(not data)
self.widgets.gremlin.set_property('enable_dro', data)
self.widgets.gremlin.set_property('show_program', not data)
self.widgets.gremlin.set_property('show_limits', not data)
self.widgets.gremlin.set_property('show_extents_option', not data)
self.widgets.gremlin.set_property('show_live_plot', not data)
self.widgets.gremlin.set_property('show_tool', not data)
self.widgets.gremlin.show_offsets = data
self.data.plot_hidden = data
def preset_spindle_speed(self,rpm):
self.data.spindle_preset = rpm
self.widgets.spindle_preset.set_label(" S %d"% rpm)
def sensitize_widgets(self, widgetlist, value):
for name in widgetlist:
try:
self.widgets[name].set_sensitive(value)
except:
print "**** GSCREEN WARNING: No widget named: %s to sensitize"%name
def from_internal_linear_unit(self,v, unit=None):
if unit is None:
unit = self.status.get_linear_units()
lu = (unit or 1) * 25.4
return v*lu
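    # parse a jog increment string such as "0.100 in", "1/8 mil" or ".05mm"
    # into a distance in machine units; unit suffixes and fractions are handled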
def parse_increment(self,jogincr):
if jogincr.endswith("mm"):
scale = self.from_internal_linear_unit(1/25.4)
elif jogincr.endswith("cm"):
scale = self.from_internal_linear_unit(10/25.4)
elif jogincr.endswith("um"):
scale = self.from_internal_linear_unit(.001/25.4)
elif jogincr.endswith("in") or jogincr.endswith("inch"):
scale = self.from_internal_linear_unit(1.)
elif jogincr.endswith("mil"):
scale = self.from_internal_linear_unit(.001)
else:
scale = 1
jogincr = jogincr.rstrip(" inchmuil")
if "/" in jogincr:
p, q = jogincr.split("/")
jogincr = float(p) / float(q)
else:
jogincr = float(jogincr)
return jogincr * scale
# This prints a message in the status bar, the system notifier (if available)
# and makes a sound (if available)
# It returns a statusbar message id reference so one can directly erase the message later.
    # Ubuntu patches the system notification program so it does not follow timeouts;
    # there is a PPA on the net to fix this - I suggest using it.
# https://launchpad.net/~leolik/+archive/leolik?field.series_filter=lucid
def notify(self,title,message,icon="",timeout=2):
messageid = None
try:
messageid = self.widgets.statusbar1.push(self.statusbar_id,message)
except:
self.show_try_errors()
self.add_alarm_entry(message)
if NOTIFY_AVAILABLE and self.data.desktop_notify:
uri = ""
if icon:
uri = "file://" + icon
n = pynotify.Notification(title, message, uri)
n.set_hint_string("x-canonical-append","True")
n.set_urgency(pynotify.URGENCY_CRITICAL)
n.set_timeout(int(timeout * 1000) )
n.show()
if _AUDIO_AVAIALBLE:
if icon == ALERT_ICON:
self.audio.set_sound(self.data.error_sound)
else:
self.audio.set_sound(self.data.alert_sound)
self.audio.run()
return messageid
def add_alarm_entry(self,message):
try:
textbuffer = self.widgets.alarm_history.get_buffer()
textbuffer.insert_at_cursor(strftime("%a, %d %b %Y %H:%M:%S -", localtime())+message+"\n" )
except:
self.show_try_errors()
def next_tab(self):
maxpage = self.widgets.notebook_mode.get_n_pages()
page = self.widgets.notebook_mode.get_current_page()
nextpage = page + 1
print "mode select",maxpage,page,nextpage
if nextpage == maxpage:nextpage = 0
self.widgets.notebook_mode.set_current_page(nextpage)
def set_feed_override(self,percent_rate,absolute=False):
if absolute:
rate = percent_rate
else:
rate = self.data.feed_override + percent_rate
if rate > self.data.feed_override_max: rate = self.data.feed_override_max
self.emc.feed_override(rate)
def set_spindle_override(self,percent_rate,absolute=False):
if absolute:
rate = percent_rate
else:
rate = self.data.spindle_override + percent_rate
if rate > self.data.spindle_override_max: rate = self.data.spindle_override_max
elif rate < self.data.spindle_override_min: rate = self.data.spindle_override_min
self.emc.spindle_override(rate)
def set_velocity_override(self,percent_rate,absolute=False):
if absolute:
rate = percent_rate
else:
rate = self.data.velocity_override + percent_rate
if rate > 1.0: rate = 1.0
self.emc.max_velocity(rate * self.data._maxvelocity)
def set_jog_rate(self,step=None,absolute=None):
if self.data.angular_jog_adjustment_flag:
j_rate = "angular_jog_rate"
else:
j_rate = "jog_rate"
# in units per minute
print "jog rate =",step,absolute,self.data[j_rate]
if not absolute == None:
rate = absolute
elif not step == None:
rate = self.data[j_rate] + step
else:return
if rate < 0: rate = 0
if rate > self.data[j_rate+"_max"]: rate = self.data[j_rate+"_max"]
rate = round(rate,1)
if self.data.angular_jog_adjustment_flag:
self.emc.continuous_jog_velocity(None,rate)
else:
self.emc.continuous_jog_velocity(rate,None)
self.data[j_rate] = rate
    # This sets the jog increments - there are three ways:
    # ABSOLUTE:
    # set absolute to the absolute increment wanted
    # INDEX from INI:
    # self.data.jog_increments holds the increments from the INI file;
    # leave absolute unset and pass index_dir = 1 or -1 to step
    # higher or lower through that list
    # VECTOR:
    # pass vector = the list index to select directly
def set_jog_increments(self,vector=None,index_dir=None,absolute=None):
print "set jog incr"
if self.data.angular_jog_adjustment_flag:
incr = "angular_jog_increments"
incr_index = "current_angular_jogincr_index"
else:
incr = "jog_increments"
incr_index = "current_jogincr_index"
if not absolute == None:
distance = absolute
self.widgets[incr].set_text("%f"%distance)
self.halcomp["jog-increment-out"] = distance
print "index jog increments",distance
return
elif not index_dir == None:
next = self.data[incr_index] + index_dir
elif not vector == None:
next = vector
else: return
end = len(self.data[incr])-1
if next < 0: next = 0
if next > end: next = end
self.data[incr_index] = next
jogincr = self.data[incr][next]
try:
if 'angular' in incr and not jogincr == 'continuous':
label = jogincr + ' Degs'
else:
label = jogincr
self.widgets[incr].set_text(label)
except:
self.show_try_errors()
if jogincr == ("continuous"):
distance = 0
else:
distance = self.parse_increment(jogincr)
print "index jog increments",jogincr,distance
self.halcomp["jog-increment-out"] = distance
def adjustment_buttons(self,widget,action,change=0):
print "adjustment buttons"
# is over ride adjustment selection active?
if self.widgets.button_override.get_active():
print "override"
if widget == self.widgets.button_zero_origin:
print "zero button",action
change = 0
absolute = True
elif widget == self.widgets.button_offset_origin:
print "set at button",action
absolute = True
elif widget == self.widgets.button_plus:
print "up button",action
change = 1
absolute = False
elif widget == self.widgets.button_minus:
print "down button",action
change = -1
absolute = False
else:return
self.adjust_overrides(widget,action,change,absolute)
# graphics adjustment
elif self.widgets.button_graphics.get_active():
inc = self.data.graphic_move_inc
if widget == self.widgets.button_plus:
print "up button",action
change = 1
elif widget == self.widgets.button_minus:
print "down button",action
change = -1
if self.widgets.button_zoom.get_active() and action:
print "zoom"
if change == 1: self.zoom_in()
else: self.zoom_out()
elif self.widgets.button_pan_v.get_active() and action:
print "pan vertical"
self.widgets.gremlin.set_mouse_start(0,0)
if change == 1: self.pan(0,-inc)
else: self.pan(0,inc)
elif self.widgets.button_pan_h.get_active() and action:
print "pan horizontal"
self.widgets.gremlin.set_mouse_start(0,0)
if change == 1: self.pan(-inc,0)
else: self.pan(inc,0)
elif self.widgets.button_rotate_v.get_active() and action:
print "rotate horiontal"
self.widgets.gremlin.set_mouse_start(0,0)
if change == 1: self.rotate(-inc,0)
else: self.rotate(inc,0)
elif self.widgets.button_rotate_h.get_active() and action:
print "rotate horiontal"
self.widgets.gremlin.set_mouse_start(0,0)
if change == 1: self.rotate(0,-inc)
else: self.rotate(0,inc)
# user coordinate system
elif self.widgets.button_select_system.get_active():
if widget == self.widgets.button_plus and action:
print "up button",action
change = 1
elif widget == self.widgets.button_minus and action:
print "down button",action
change = -1
else: return
self.change_origin_system(None,change)
# Jogging mode (This needs to be last)
elif self.data.mode_order[0] == _MAN and self.widgets.button_jog_mode.get_active(): # manual mode and jog mode active
# what axis is set
if widget == self.widgets.button_zero_origin:
print "zero button",action
self.zero_axis()
elif widget == self.widgets.button_move_to:
print "move to button",action
self.move_to(change)
elif widget == self.widgets.button_plus:
print "up button",action
self.do_jog(True,action)
elif widget == self.widgets.button_minus:
print "down button",action
self.do_jog(False,action)
elif widget == self.widgets.button_offset_origin:
self.set_axis_checks()
elif widget == self.widgets.button_zero_origin:
print "zero buttons"
self.zero_axis()
elif widget == self.widgets.button_offset_origin:
print "set axis buttons"
self.set_axis_checks()
def adjust_overrides(self,widget,action,number,absolute):
print "adjust overrides",action,number,absolute
# what override is selected
if absolute:
change = self.get_qualified_input(number,_PERCENT_INPUT)/100
else:
change = number
if self.widgets.button_feed_override.get_active() and action:
print "feed override"
if absolute:
self.set_feed_override(change,absolute)
else:
self.set_feed_override((change * self.data.feed_override_inc),absolute)
elif self.widgets.button_spindle_override.get_active() and action:
print "spindle override"
if absolute:
self.set_spindle_override(change,absolute)
else:
self.set_spindle_override((change * self.data.spindle_override_inc),absolute)
elif self.widgets.button_rapid_override.get_active() and action:
print "velocity override"
if absolute:
self.set_velocity_override(change,absolute)
else:
self.set_velocity_override((change * self.data.velocity_override_inc),absolute)
elif self.widgets.button_jog_speed.get_active() and action:
print "jog speed adjustment"
if widget == self.widgets.button_offset_origin:
change = self.get_qualified_input(number)
if absolute:
self.set_jog_rate(absolute = change)
else:
if self.data.angular_jog_adjustment_flag:
self.set_jog_rate(step = (change * self.data.angular_jog_rate_inc))
else:
self.set_jog_rate(step = (change * self.data.jog_rate_inc))
elif self.widgets.button_jog_increments.get_active() and action:
print "jog increments adjustment"
if widget == self.widgets.button_offset_origin:
change = self.get_qualified_input(number)
if absolute:
self.set_jog_increments(absolute = change)
else:
self.set_jog_increments(index_dir = change)
def origin_system(self,*args):
print "origin system button"
value = self.widgets.button_select_system.get_active()
self.sensitize_widgets(self.data.sensitive_origin_mode,not value)
def change_origin_system(self,system,direction=None):
print system,direction
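        # index 0 is a placeholder: self.data.system counts from 1,
        # so 1 = G54 ... 9 = G59.3 index the list directly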
system_list = (0,54,55,56,57,58,59,59.1,59.2,59.3)
current = system_list[self.data.system]
if not system:
if direction > 0 and not current == 59.3: self.mdi_control.set_user_system(system_list[self.data.system+1])
elif direction < 0 and not current == 54: self.mdi_control.set_user_system(system_list[self.data.system-1])
self.reload_plot()
def homing(self,*args):
print "show/hide homing buttons"
if self.widgets.button_homing.get_active():
if len(self.data.active_axis_buttons) > 1:
for i in self.data.axis_list:
self.widgets["axis_%s"%i].set_active(False)
for i in ('zero_origin','offset_origin','plus','minus'):
self.widgets["button_%s"% i].set_sensitive(False)
self.widgets.mode0.hide()
self.widgets.mode3.show()
#self.widgets.button_mode.set_sensitive(False)
self.widgets.button_override.set_sensitive(False)
self.widgets.button_graphics.set_sensitive(False)
else:
for i in ('zero_origin','offset_origin','plus','minus'):
self.widgets["button_%s"% i].set_sensitive(True)
self.widgets.mode3.hide()
self.widgets.mode0.show()
state = self.data.all_homed
self.sensitize_widgets(self.data.sensitive_all_homed,state)
self.widgets.button_override.set_sensitive(True)
self.widgets.button_graphics.set_sensitive(True)
def graphics(self,*args):
print "show/hide graphics buttons"
if self.widgets.button_graphics.get_active():
for i in range(0,3):
self.widgets["mode%d"% i].hide()
self.widgets.mode5.show()
self.widgets.vmode0.show()
self.widgets.vmode1.hide()
self._tempholder = []
for name in (self.data.sensitive_graphics_mode):
self._tempholder.append(self.widgets[name].get_sensitive())
self.widgets[name].set_sensitive(False)
self.widgets.vmode0.set_sensitive(True)
self.widgets.button_plus.set_sensitive(True)
self.widgets.button_minus.set_sensitive(True)
else:
self.widgets.mode5.hide()
self.mode_changed(self.data.mode_order[0])
for num,name in enumerate(self.data.sensitive_graphics_mode):
if self.data.machine_on:
self.widgets[name].set_sensitive(True)
else:
self.widgets[name].set_sensitive(self._tempholder[num])
def override(self,*args):
print "show/hide override buttons"
value = self.widgets.button_override.get_active()
self.sensitize_widgets(self.data.sensitive_override_mode,not value)
if self.widgets.button_override.get_active():
for i in range(0,3):
self.widgets["mode%d"% i].hide()
self.widgets.mode4.show()
self.widgets.vmode0.show()
self.widgets.vmode1.hide()
self.widgets.button_zero_origin.set_label("Zero\n ")
self.widgets.button_offset_origin.set_label("Set At\n ")
else:
self.widgets.mode4.hide()
self.mode_changed(self.data.mode_order[0])
self.widgets.button_zero_origin.set_label(_(" Zero Origin"))
self.widgets.button_offset_origin.set_label(_("Offset Origin"))
    # search for and set up the user requested message system.
    # status displays on the statusbar and requires no acknowledgement.
    # dialog displays a GTK dialog box with yes or no buttons
    # okdialog displays a GTK dialog box with an ok button
    # dialogs require an answer before focus is sent back to the main screen
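    # a minimal INI entry might look like this (values are illustrative,
    # the pin name is user-chosen):
    # [DISPLAY]
    # MESSAGE_BOLDTEXT = NONE
    # MESSAGE_TEXT = This is a statusbar message
    # MESSAGE_TYPE = status
    # MESSAGE_PINNAME = statusmessage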
def message_setup(self):
if not self.inifile:
return
m_boldtext = self.inifile.findall("DISPLAY", "MESSAGE_BOLDTEXT")
m_text = self.inifile.findall("DISPLAY", "MESSAGE_TEXT")
m_type = self.inifile.findall("DISPLAY", "MESSAGE_TYPE")
m_pinname = self.inifile.findall("DISPLAY", "MESSAGE_PINNAME")
if len(m_text) != len(m_type):
print _("**** Gscreen ERROR: Invalid message configuration (missing text or type) in INI File [DISPLAY] section")
if len(m_text) != len(m_pinname):
print _("**** Gscreen ERROR: Invalid message configuration (missing pinname) in INI File [DISPLAY] section")
if len(m_text) != len(m_boldtext):
print _("**** Gscreen ERROR: Invalid message configuration (missing boldtext) in INI File [DISPLAY] section")
for bt,t,c ,name in zip(m_boldtext,m_text, m_type,m_pinname):
#print bt,t,c,name
if not ("status" in c) and not ("dialog" in c) and not ("okdialog" in c):
print _("**** Gscreen ERROR: invalid message type (%s)in INI File [DISPLAY] section"% c)
continue
if not name == None:
# this is how we make a pin that can be connected to a callback
                self.data[name] = hal_glib.GPin(self.halcomp.newpin(name, hal.HAL_BIT, hal.HAL_IN))
                self.data[name].connect('value-changed', self.on_printmessage,name,bt,t,c)
if ("dialog" in c):
self.halcomp.newpin(name+"-waiting", hal.HAL_BIT, hal.HAL_OUT)
if not ("ok" in c):
self.halcomp.newpin(name+"-response", hal.HAL_BIT, hal.HAL_OUT)
# display dialog
def warning_dialog(self,message, displaytype, secondary=None,pinname=None):
if displaytype:
dialog = gtk.MessageDialog(self.widgets.window1,
gtk.DIALOG_DESTROY_WITH_PARENT,
gtk.MESSAGE_INFO, gtk.BUTTONS_OK,message)
else:
dialog = gtk.MessageDialog(self.widgets.window1,
gtk.DIALOG_DESTROY_WITH_PARENT,
gtk.MESSAGE_QUESTION, gtk.BUTTONS_YES_NO,message)
# if there is a secondary message then the first message text is bold
if secondary:
dialog.format_secondary_text(secondary)
dialog.show_all()
try:
if "dialog_return" in dir(self.handler_instance):
dialog.connect("response", self.handler_instance.dialog_return,self,displaytype,pinname)
else:
dialog.connect("response", self.dialog_return,displaytype,pinname)
except:
dialog.destroy()
raise NameError (_('Dialog error - Is the dialog handler missing from the handler file?'))
if pinname == "TOOLCHANGE":
dialog.set_title(_("Manual Toolchange"))
else:
dialog.set_title(_("Operator Message"))
# message dialog returns a response here
# This includes the manual tool change dialog
# We know this by the pinname being called 'TOOLCHANGE'
def dialog_return(self,widget,result,dialogtype,pinname):
if pinname == "TOOLCHANGE":
self.halcomp["tool-changed"] = True
widget.destroy()
try:
self.widgets.statusbar1.remove_message(self.statusbar_id,self.data.tool_message)
except:
self.show_try_errors()
return
if not dialogtype: # yes/no dialog
if result == gtk.RESPONSE_YES:result = True
else: result = False
if pinname:
self.halcomp[pinname + "-response"] = result
if pinname:
self.halcomp[pinname + "-waiting"] = False
widget.destroy()
# dialog is used for choosing the run-at-line position
def launch_restart_dialog(self,widget):
self.restart_dialog()
    # dialog for choosing the run-at-line restart position
def restart_dialog(self):
if self.data.restart_dialog: return
self.data.restart_dialog = gtk.Dialog(_("Restart Entry"),
self.widgets.window1,
gtk.DIALOG_DESTROY_WITH_PARENT,
(gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT))
label = gtk.Label(_("Restart Entry"))
label.modify_font(pango.FontDescription("sans 20"))
self.data.restart_dialog.vbox.pack_start(label)
calc = gladevcp.Calculator()
self.data.restart_dialog.vbox.add(calc)
calc.set_value("%d"%self.data.last_line)
calc.set_property("font","sans 20")
calc.set_editable(True)
box = gtk.HButtonBox()
upbutton = gtk.Button(label = _("Up"))
box.add(upbutton)
enterbutton = gtk.Button(label = _("Enter"))
box.add(enterbutton)
downbutton = gtk.Button(label = _("Down"))
box.add(downbutton)
calc.calc_box.pack_end(box, expand=False, fill=False, padding=0)
upbutton.connect("clicked",self.restart_up,calc)
downbutton.connect("clicked",self.restart_down,calc)
enterbutton.connect("clicked",lambda w:calc.entry.emit('activate'))
calc.entry.connect("activate",self.restart_set_line,calc)
self.data.restart_dialog.parse_geometry("400x400+0+0")
self.data.restart_dialog.show_all()
calc.num_pad_only(True)
self.data.restart_dialog.connect("response", self.restart_dialog_return,calc)
# either start the gcode at the line specified or cancel
def restart_dialog_return(self,widget,result,calc):
value = calc.get_value()
if value == None:value = 0
self.add_alarm_entry(_("Restart program from line %d"%value))
self.update_restart_line(0,0)
widget.destroy()
self.data.restart_dialog = None
# adds the embedded object to a notebook tab or box
def _dynamic_tab(self, widget, text):
s = gtk.Socket()
try:
widget.append_page(s, gtk.Label(" " + text + " "))
except:
try:
widget.pack_end(s,True,True,0)
except:
return None
return s.get_id()
# Check INI file for embed commands
# NAME is used as the tab label if a notebook is used
# LOCATION is the widgets name from the gladefile.
# COMMAND is the actual program command
# if no location is specified the main notebook is used
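    # an illustrative INI entry (the command is user-supplied; {XID} is
    # replaced with the id of the GtkSocket the program should embed into):
    # [DISPLAY]
    # EMBED_TAB_NAME = Camera
    # EMBED_TAB_LOCATION = notebook_mode
    # EMBED_TAB_COMMAND = camview-emc -w {XID}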
def set_dynamic_tabs(self):
from subprocess import Popen
if not self.inifile:
return
tab_names = self.inifile.findall("DISPLAY", "EMBED_TAB_NAME")
tab_location = self.inifile.findall("DISPLAY", "EMBED_TAB_LOCATION")
tab_cmd = self.inifile.findall("DISPLAY", "EMBED_TAB_COMMAND")
if len(tab_names) != len(tab_cmd):
print _("Invalid embeded tab configuration") # Complain somehow
if len(tab_location) != len(tab_names):
for num,i in enumerate(tab_names):
try:
if tab_location[num]:
continue
except:
tab_location.append("notebook_mode")
for t,c ,name in zip(tab_names, tab_cmd,tab_location):
nb = self.widgets[name]
xid = self._dynamic_tab(nb, t)
if not xid: continue
cmd = c.replace('{XID}', str(xid))
child = Popen(cmd.split())
self._dynamic_childs[xid] = child
nb.show_all()
# Gotta kill the embedded processes when gscreen closes
def kill_dynamic_childs(self):
for c in self._dynamic_childs.values():
c.terminate()
# finds the postgui file name and INI file path
def postgui(self):
postgui_halfile = self.inifile.find("HAL", "POSTGUI_HALFILE")
return postgui_halfile,sys.argv[2]
# zooms in a set amount (set deep in gremlin)
def zoom_in(self,*args):
self.widgets.gremlin.zoom_in()
def zoom_out(self,*args):
self.widgets.gremlin.zoom_out()
def set_fullscreen1(self, data):
self.prefs.putpref('fullscreen1', data, bool)
self.data.fullscreen1 = data
if data:
self.widgets.window1.fullscreen()
else:
self.widgets.window1.unfullscreen()
def set_show_offsets(self, data):
self.prefs.putpref('show_offsets', data, bool)
self.data.show_offsets = data
try:
self.widgets.gremlin.show_offsets = data
except:
self.show_try_errors()
def set_show_dtg(self, data):
self.prefs.putpref('show_dtg', data, bool)
try:
self.widgets.gremlin.set_property('show_dtg',data)
except:
self.show_try_errors()
def set_diameter_mode(self, data):
print "toggle diameter mode"
self.data.diameter_mode = data
self.prefs.putpref('diameter_mode', data, bool)
try:
self.widgets.gremlin.set_property('show_lathe_radius',not data)
except:
self.show_try_errors()
# returns the separate RGB color numbers from the color widget
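    # gtk color strings are 16 bits per channel: "#rrrrggggbbbb"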
def convert_to_rgb(self,spec):
color = spec.to_string()
temp = color.strip("#")
r = temp[0:4]
g = temp[4:8]
b = temp[8:]
return (int(r,16),int(g,16),int(b,16))
def set_rel_color(self):
self.data.rel_color = self.convert_to_rgb(self.widgets.rel_colorbutton.get_color())
self.prefs.putpref('rel_textcolor', self.widgets.rel_colorbutton.get_color(),str)
def set_abs_color(self):
self.data.abs_color = self.convert_to_rgb(self.widgets.abs_colorbutton.get_color())
self.prefs.putpref('abs_textcolor', self.widgets.abs_colorbutton.get_color(),str)
def set_dtg_color(self):
self.data.dtg_color = self.convert_to_rgb(self.widgets.dtg_colorbutton.get_color())
self.prefs.putpref('dtg_textcolor', self.widgets.dtg_colorbutton.get_color(),str)
def set_unlock_code(self):
self.prefs.putpref('unlock_code', self.data.unlock_code,str)
# toggles gremlin's different views
# if in lathe mode only P, Y and Y2 available
def toggle_view(self):
dist = self.widgets.gremlin.get_zoom_distance()
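        # rotate the stored view order one step so a new view comes up front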
def shift():
a = self.data.plot_view[0]
b = self.data.plot_view[1]
c = self.data.plot_view[2]
d = self.data.plot_view[3]
e = self.data.plot_view[4]
f = self.data.plot_view[5]
self.data.plot_view = (b,c,d,e,f,a)
shift()
if self.data.lathe_mode:
while not self.data.plot_view[0].lower() in("p","y","y2"):
shift()
elif self.data.plot_view[0].lower() == "y2":
shift()
self.widgets.gremlin.set_property('view',self.data.plot_view[0])
self.prefs.putpref('view', self.data.plot_view, tuple)
self.widgets.gremlin.set_zoom_distance(dist)
# toggle a large graphics view / gcode view
def set_full_graphics_view(self,data):
print "full view",data
if data:
print "enlarge"
self.data.full_graphics = True
self.widgets.notebook_mode.hide()
self.widgets.dro_frame.hide()
self.widgets.gremlin.set_property('enable_dro',True)
else:
print "shrink"
self.data.full_graphics = False
self.widgets.notebook_mode.show()
self.widgets.dro_frame.show()
self.widgets.gremlin.set_property('enable_dro',False)
    # enlarge the Gcode box while in edit mode
def edit_mode(self,data):
print "edit mode pressed",data
self.sensitize_widgets(self.data.sensitive_edit_mode,not data)
if data:
self.widgets.mode2.hide()
self.widgets.mode6.show()
self.widgets.dro_frame.hide()
self.widgets.gcode_view.set_sensitive(1)
self.data.edit_mode = True
self.widgets.show_box.hide()
self.widgets.notebook_mode.show()
else:
self.widgets.mode6.hide()
self.widgets.mode2.show()
self.widgets.dro_frame.show()
self.widgets.gcode_view.set_sensitive(0)
self.data.edit_mode = False
self.widgets.show_box.show()
def set_dro_units(self, data, save=True):
print "toggle dro units",self.data.dro_units,data
if data == _IMPERIAL:
print "switch to imperial"
self.status.dro_inch(1)
self.widgets.gremlin.set_property('metric_units',False)
try:
self.widgets.offsetpage1.set_to_inch()
except:
self.show_try_errors()
else:
print "switch to mm"
self.status.dro_mm(1)
self.widgets.gremlin.set_property('metric_units',True)
try:
self.widgets.offsetpage1.set_to_mm()
except:
self.show_try_errors()
self.data.dro_units = data
if save:
self.prefs.putpref('dro_is_metric', data, bool)
def toggle_optional_stop(self):
print "option stop"
self.set_optional_stop(self.widgets.button_option_stop.get_active())
def set_optional_stop(self,data):
self.prefs.putpref('opstop', data, bool)
self.data.op_stop = data
self.emc.opstop(data)
def toggle_block_delete(self):
self.set_block_delete(self.widgets.button_block_delete.get_active())
def set_block_delete(self,data):
print "block delete"
self.prefs.putpref('blockdel', data, bool)
self.data.block_del = data
self.emc.blockdel(data)
def save_edit(self):
print "edit"
# helper method to block and unblock GTK widget signals
def block(self,widget_name):
self.widgets["%s"%(widget_name)].handler_block(self.data["_sighandler_%s"% (widget_name)])
def unblock(self,widget_name):
self.widgets["%s"%(widget_name)].handler_unblock(self.data["_sighandler_%s"% (widget_name)])
# update the global variable of active axis buttons
# if in jogging or homing mode, only one axis can be active at once
# update the related axis HAL pins
def update_active_axis_buttons(self,widget):
count = 0;temp = []
self.data.active_axis_buttons = []
for i in self.data.axis_list:
num = "xyzabcuvws".index(i)
if self.widgets.button_jog_mode.get_active() or self.widgets.button_homing.get_active():
if not self.widgets["axis_%s"%i] == widget:
# unselect axis / HAL pin
self.block("axis_%s"%i)
self.widgets["axis_%s"%i].set_active(False)
self.unblock("axis_%s"%i)
continue
if self.widgets["axis_%s"%i].get_active():
count +=1
axisnum = num
self.data.active_axis_buttons.append((i,num))
if count == 0: self.data.active_axis_buttons.append((None,None))
# check and update jogging buttons
self.jog_mode()
# adjust sensitivity and labels of buttons
def jog_mode(self):
print "jog mode:",self.widgets.button_jog_mode.get_active()
        # if multiple axes are selected - unselect all of them
if len(self.data.active_axis_buttons) > 1 and self.widgets.button_jog_mode.get_active():
for i in self.data.axis_list:
self.widgets["axis_%s"%i].set_active(False)
if self.widgets.button_jog_mode.get_active():
self.widgets.button_move_to.set_label("Goto Position")
self.emc.set_manual_mode()
else:
self.widgets.button_move_to.set_label("")
self.update_hal_jog_pins()
self.update_hal_override_pins()
# do some checks then jog selected axis or start spindle
def do_jog(self,direction,action):
# if manual mode, if jogging
# if only one axis button pressed
# jog positive at selected rate
if self.data.mode_order[0] == _MAN:
if len(self.data.active_axis_buttons) > 1:
self.notify(_("INFO:"),_("Can't jog multiple axis"),INFO_ICON)
print self.data.active_axis_buttons
elif self.data.active_axis_buttons[0][0] == None:
self.notify(_("INFO:"),_("No axis selected to jog"),INFO_ICON)
else:
print "Jog axis %s" % self.data.active_axis_buttons[0][0]
if not self.data.active_axis_buttons[0][0] == "s":
if not action: cmd = 0
elif direction: cmd = 1
else: cmd = -1
self.emc.jogging(1)
if self.data.active_axis_buttons[0][0] in('a','b','c'):
jogincr = self.data.angular_jog_increments[self.data.current_angular_jogincr_index]
else:
jogincr = self.data.jog_increments[self.data.current_jogincr_index]
print jogincr
if jogincr == ("continuous"): # continuous jog
print "active axis jog:",self.data.active_axis_buttons[0][1]
self.emc.continuous_jog(self.data.active_axis_buttons[0][1],cmd)
else:
print "jog incremental"
if cmd == 0: return # don't want release of button to stop jog
distance = self.parse_increment(jogincr)
self.emc.incremental_jog(self.data.active_axis_buttons[0][1],cmd,distance)
def do_key_jog(self,axis,direction,action):
if self.data.mode_order[0] == _MAN and self.widgets.button_jog_mode.get_active(): # jog mode active:
if not action: cmd = 0
elif direction: cmd = 1
else: cmd = -1
self.emc.jogging(1)
print self.data.jog_increments[self.data.current_jogincr_index]
            if self.data.jog_increments[self.data.current_jogincr_index] == "continuous": # continuous jog
print "active axis jog:",axis
self.emc.continuous_jog(axis,cmd)
else:
print "jog incremental"
if cmd == 0: return # don't want release of button to stop jog
self.mdi_control.mdi.emcstat.poll()
if self.mdi_control.mdi.emcstat.state != 1: return
jogincr = self.data.jog_increments[self.data.current_jogincr_index]
distance = self.parse_increment(jogincr)
self.emc.incremental_jog(axis,cmd,distance)
# spindle control
def spindle_adjustment(self,direction,action):
if action and not self.widgets.s_display_fwd.get_active() and not self.widgets.s_display_rev.get_active():
self.notify(_("INFO:"),_("No direction selected for spindle"),INFO_ICON)
return
if direction and action:
if self.data.spindle_speed:
self.emc.spindle_faster(1)
elif self.widgets.s_display_fwd.get_active():
self.emc.spindle_forward(1,self.data.spindle_start_rpm)
else:
self.emc.spindle_reverse(1,self.data.spindle_start_rpm)
print direction,action
elif not direction and action:
if self.data.spindle_speed:
if self.data.spindle_speed >100:
self.emc.spindle_slower(1)
else:
self.emc.spindle_off(1)
# feeds to a position (while in manual mode)
def do_jog_to_position(self,data):
if len(self.data.active_axis_buttons) > 1:
self.notify(_("INFO:"),_("Can't jog multiple axis"),INFO_ICON)
print self.data.active_axis_buttons
elif self.data.active_axis_buttons[0][0] == None:
self.notify(_("INFO:"),_("No axis selected to move"),INFO_ICON)
else:
if not self.data.active_axis_buttons[0][0] == "s":
if self.data.active_axis_buttons[0][0] in('a','b','c'):
rate = self.data.angular_jog_rate
pos = self.get_qualified_input(data,switch=_DEGREE_INPUT)
else:
rate = self.data.jog_rate
pos = self.get_qualified_input(data)
self.mdi_control.go_to_position(self.data.active_axis_buttons[0][0],pos,rate)
def adjust_spindle_rpm(self, rpm, direction=None):
# spindle control
if direction == None:
direction = self.data.spindle_dir
if direction > 0:
print "forward"
self.emc.spindle_forward(1, float(rpm))
elif direction < 0:
print "reverse"
self.emc.spindle_reverse(1, float(rpm))
else:
self.emc.spindle_off(1)
# shows the second glade file panel
def toggle_screen2(self):
self.data.use_screen2 = self.widgets.use_screen2.get_active()
if self.screen2:
if self.data.use_screen2:
self.widgets.window2.show()
else:
self.widgets.window2.hide()
self.prefs.putpref('use_screen2', self.data.use_screen2, bool)
# This converts and qualifies the input
# eg for diameter, metric or percentage
def get_qualified_input(self,raw = 0,switch = None):
print "RAW input:",raw
if switch in(_DEGREE_INPUT, _SPINDLE_INPUT):
return raw
elif switch == _PERCENT_INPUT:
return round(raw,2)
else:
g21 = False
if "G21" in self.data.active_gcodes: g21 = True
# metric DRO - imperial mode
if self.data.dro_units == _MM:
if not g21:
raw = raw / 25.4
# imperial DRO - metric mode
elif g21:
raw = raw * 25.4
if switch == "x" and self.data.diameter_mode:
print "convert from diameter"
raw = raw / 2.0
print "Qualified input:",raw
return raw
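    # Worked example for get_qualified_input (numbers are illustrative):
    # with a metric DRO (_MM) while G20/inch mode is active, raw = 25.4
    # becomes 25.4 / 25.4 = 1.0; with an imperial DRO while G21 is active,
    # raw = 1.0 becomes 25.4. If switch == "x" and diameter mode is on,
    # the result is halved again, because the operator entered a diameter
    # but the control positions in radius.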
def unhome_all(self):
self.emc.unhome_all(1)
def home_all(self):
self.emc.home_all(1)
    # do some checks first, then home the selected axis
def home_selected(self):
print "home selected"
if len(self.data.active_axis_buttons) > 1:
self.notify(_("INFO:"),_("Can't home multiple axis - select HOME ALL instead"),INFO_ICON)
print self.data.active_axis_buttons
elif self.data.active_axis_buttons[0][0] == None:
self.notify(_("INFO:"),_("No axis selected to home"),INFO_ICON)
else:
print "home axis %s" % self.data.active_axis_buttons[0][0]
self.emc.home_selected(self.data.active_axis_buttons[0][1])
def unhome_selected(self):
if len(self.data.active_axis_buttons) > 1:
self.notify(_("INFO:"),_("Can't unhome multiple axis"),INFO_ICON)
print self.data.active_axis_buttons
elif self.data.active_axis_buttons[0][0] == None:
self.notify(_("INFO:"),_("No axis selected to unhome"),INFO_ICON)
else:
print "unhome axis %s" % self.data.active_axis_buttons[0][0]
self.emc.unhome_selected(self.data.active_axis_buttons[0][1])
    # Touch off the axis, zeroing it
# reload the plot to update the display
def zero_axis(self):
if self.data.active_axis_buttons[0][0] == None:
self.notify(_("INFO:"),_("No axis selected for origin zeroing"),INFO_ICON)
# if an axis is selected then set it
for i in self.data.axis_list:
if self.widgets["axis_%s"%i].get_active():
print "zero %s axis" %i
self.mdi_control.set_axis(i,0)
self.reload_plot()
# touchoff - setting the axis to the input
def set_axis_checks(self):
if self.data.active_axis_buttons[0][0] == None:
self.notify(_("INFO:"),_("No axis selected for origin touch-off"),INFO_ICON)
return
self.launch_numerical_input("on_offset_origin_entry_return")
def tool_touchoff_checks(self):
if len(self.data.active_axis_buttons) > 1:
self.notify(_("INFO:"),_("Can't tool touch-off multiple axes"),INFO_ICON)
return
if self.data.active_axis_buttons[0][0] == None:
self.notify(_("INFO:"),_("No axis selected for tool touch-off"),INFO_ICON)
return
self.launch_numerical_input("on_tool_offset_entry_return")
# move axis to a position (while in manual mode)
def move_to(self,data):
if self.data.mode_order[0] == _MAN:# if in manual mode
if self.widgets.button_jog_mode.get_active(): # jog mode active
print "jog to position"
self.do_jog_to_position(data)
def clear_plot(self):
self.widgets.gremlin.clear_live_plotter()
def pan(self,x,y):
self.widgets.gremlin.pan(x,y)
def rotate(self,x,y):
self.widgets.gremlin.rotate_view(x,y)
def reload_plot(self):
print "reload plot"
self.widgets.hal_action_reload.emit("activate")
def toggle_mist(self):
if self.data.mist:
self.emc.mist_off(1)
else:
self.emc.mist_on(1)
def toggle_flood(self):
if self.data.flood:
self.emc.flood_off(1)
else:
self.emc.flood_on(1)
def toggle_ignore_limits(self,*args):
print "over ride limits"
self.emc.override_limits(1)
# toggle the tool editor page forward
# reload the page when doing this
# If the user specified a tool editor spawn it.
def reload_tooltable(self):
# show the tool table page or return to the main page
if not self.widgets.notebook_main.get_current_page() == 3:
self.widgets.notebook_main.set_current_page(3)
else:
self.widgets.notebook_main.set_current_page(0)
return
# set the tooltable path from the INI file and reload it
path = os.path.join(CONFIGPATH,self.data.tooltable)
print "tooltable:",path
self.widgets.tooledit1.set_filename(path)
# see if user requested an external editor and spawn it
editor = self.data.tooleditor
        if editor is not None:
res = os.spawnvp(os.P_WAIT, editor, [editor, path])
if res:
self.notify(_("Error Message"),_("Tool editor error - is the %s editor available?"% editor,ALERT_ICON,3))
# tell linuxcnc that the tooltable may have changed
self.emc.reload_tooltable(1)
# toggle thru the DRO large display
def dro_toggle(self):
print "toggle axis display"
a = self.data.display_order[0]
b = self.data.display_order[1]
c = self.data.display_order[2]
self.data.display_order = (c,a,b)
self.prefs.putpref('display_order', self.data.display_order, tuple)
if self.data.display_order[2] == _ABS:
self.widgets.gremlin.set_property('use_relative',False)
else:
self.widgets.gremlin.set_property('use_relative',True)
# adjust the screen as per each mode toggled
def mode_changed(self,mode):
if mode == _MAN:
self.widgets.vmode0.show()
self.widgets.vmode1.hide()
self.widgets.notebook_mode.hide()
self.widgets.hal_mdihistory.hide()
self.widgets.button_homing.show()
self.widgets.dro_frame.show()
self.widgets.spare.hide()
elif mode == _MDI:
if self.widgets.button_homing.get_active():
self.widgets.button_homing.emit("clicked")
if self.data.plot_hidden:
self.toggle_offset_view()
self.emc.set_mdi_mode()
self.widgets.hal_mdihistory.show()
self.widgets.vmode0.show()
self.widgets.vmode1.hide()
self.widgets.notebook_mode.hide()
elif mode == _AUTO:
self.widgets.vmode0.hide()
self.widgets.vmode1.show()
if self.data.full_graphics:
self.widgets.notebook_mode.hide()
else:
self.widgets.notebook_mode.show()
self.widgets.hal_mdihistory.hide()
if not mode == _MAN:
self.widgets.button_jog_mode.set_active(False)
self.widgets.button_homing.set_active(False)
self.widgets.button_homing.hide()
self.widgets.spare.show()
for i in range(0,3):
if i == mode:
self.widgets["mode%d"% i].show()
else:
self.widgets["mode%d"% i].hide()
def change_theme(self, theme):
self.prefs.putpref('gtk_theme', theme, str)
        if theme is None: return
if theme == "Follow System Theme":
theme = self.data.system_theme
settings = gtk.settings_get_default()
settings.set_string_property("gtk-theme-name", theme, "")
# check linuxcnc for status, error and then update the readout
def timer_interrupt(self):
self.emc.mask()
self.emcstat = linuxcnc.stat()
self.emcerror = linuxcnc.error_channel()
self.emcstat.poll()
self.data.task_mode = self.emcstat.task_mode
self.status.periodic()
self.data.system = self.status.get_current_system()
e = self.emcerror.poll()
if e:
kind, text = e
print kind,text
if "joint" in text:
for letter in self.data.axis_list:
axnum = "xyzabcuvws".index(letter)
text = text.replace( "joint %d"%axnum,"Axis %s"%letter.upper() )
if kind in (linuxcnc.NML_ERROR, linuxcnc.OPERATOR_ERROR):
self.notify(_("Error Message"),text,ALERT_ICON,3)
elif kind in (linuxcnc.NML_TEXT, linuxcnc.OPERATOR_TEXT):
self.notify(_("Message"),text,INFO_ICON,3)
elif kind in (linuxcnc.NML_DISPLAY, linuxcnc.OPERATOR_DISPLAY):
self.notify(_("Message"),text,INFO_ICON,3)
self.emc.unmask()
if "periodic" in dir(self.handler_instance):
self.handler_instance.periodic()
else:
self.update_position()
return True
# update the whole display
def update_position(self,*args):
self.update_mdi_spindle_button()
self.update_spindle_bar()
self.update_dro()
self.update_active_gcodes()
self.update_active_mcodes()
self.update_aux_coolant_pins()
self.update_feed_speed_label()
self.update_tool_label()
self.update_coolant_leds()
self.update_estop_led()
self.update_machine_on_led()
self.update_limit_override()
self.update_override_label()
self.update_jog_rate_label()
self.update_mode_label()
self.update_units_button_label()
# spindle controls
def update_mdi_spindle_button(self):
try:
self.widgets.at_speed_label.set_label(_("%d RPM"%abs(self.data.spindle_speed)))
except:
pass
label = self.widgets.spindle_control.get_label()
speed = self.data.spindle_speed
if speed == 0 and not label == _("Start"):
temp = _("Start")
elif speed and not label == _("Stop"):
temp = _("Stop")
else: return
self.widgets.spindle_control.set_label(temp)
def update_spindle_bar(self):
self.widgets.s_display.set_value(abs(self.halcomp["spindle-readout-in"]))
self.widgets.s_display.set_target_value(abs(self.data.spindle_speed))
try:
self.widgets.s_display2.set_value(abs(self.data.spindle_speed))
except:
self.show_try_errors()
def update_dro(self):
# DRO
for i in self.data.axis_list:
for j in range (0,3):
current = self.data.display_order[j]
attr = pango.AttrList()
if current == _ABS:
color = self.data.abs_color
data = self.data["%s_abs"%i]
#text = "%+ 10.4f"% self.data["%s_abs"%i]
label = "ABS"
elif current == _REL:
color = self.data.rel_color
data = self.data["%s_rel"%i]
#text = "%+ 10.4f"% self.data["%s_rel"%i]
label= "REL"
elif current == _DTG:
color = self.data.dtg_color
data = self.data["%s_dtg"%i]
#text = "%+ 10.4f"% self.data["%s_dtg"%i]
label = "DTG"
if j == 2:
if self.data.highlight_major:
hlcolor = self.data.highlight_color
bg_color = pango.AttrBackground(hlcolor[0],hlcolor[1],hlcolor[2], 0, -1)
attr.insert(bg_color)
size = pango.AttrSize(30000, 0, -1)
attr.insert(size)
weight = pango.AttrWeight(600, 0, -1)
attr.insert(weight)
fg_color = pango.AttrForeground(color[0],color[1],color[2], 0, 11)
attr.insert(fg_color)
self.widgets["%s_display_%d"%(i,j)].set_attributes(attr)
h = " "
if current == _ABS and self.data["%s_is_homed"% i]: h = "*"
if self.data.diameter_mode and i == 'x': data = data * 2.0
if self.data.dro_units == _MM:
text = "%s% 10.3f"% (h,data)
else:
text = "%s% 9.4f"% (h,data)
self.widgets["%s_display_%d"%(i,j)].set_text(text)
self.widgets["%s_display_%d"%(i,j)].set_alignment(0,.5)
self.widgets["%s_display_%d_label"%(i,j)].set_alignment(1,.5)
self.widgets["%s_display_%d_label"%(i,j)].set_text(label)
def update_active_gcodes(self):
# active codes
active_g = " ".join(self.data.active_gcodes)
self.widgets.active_gcodes_label.set_label("%s "% active_g)
def update_active_mcodes(self):
self.widgets.active_mcodes_label.set_label(" ".join(self.data.active_mcodes))
def update_aux_coolant_pins(self):
# control aux_coolant - For Dave Armstrong
m7 = m8 = False
self.halcomp["aux-coolant-m8-out"] = False
self.halcomp["mist-coolant-out"] = False
self.halcomp["aux-coolant-m7-out"] = False
self.halcomp["flood-coolant-out"] = False
if self.data.mist:
if self.widgets.aux_coolant_m7.get_active():
self.halcomp["aux-coolant-m7-out"] = True
else:
self.halcomp["mist-coolant-out"] = True
if self.data.flood:
if self.widgets.aux_coolant_m8.get_active():
self.halcomp["aux-coolant-m8-out"] = True
else:
self.halcomp["flood-coolant-out"] = True
def update_feed_speed_label(self):
data = self.data.velocity
if self.data.dro_units == _MM:
text = "%.2f"% (data)
else:
text = "%.3f"% (data)
self.widgets.active_feed_speed_label.set_label("F%s S%s V%s"% (self.data.active_feed_command,
self.data.active_spindle_command,text))
def update_tool_label(self):
        # coordinate system:
systemlabel = (_("Machine"),"G54","G55","G56","G57","G58","G59","G59.1","G59.2","G59.3")
        tool = str(self.data.tool_in_spindle)  # str(None) yields "None" for an empty spindle
self.widgets.system.set_text(("Tool %s %s"%(tool,systemlabel[self.data.system])))
def update_coolant_leds(self):
# coolant
self.widgets.led_mist.set_active(self.data.mist)
self.widgets.led_flood.set_active(self.data.flood)
def update_estop_led(self):
# estop
self.widgets.led_estop.set_active(self.data.estopped)
def update_machine_on_led(self):
self.widgets.led_on.set_active(self.data.machine_on)
def update_limit_override(self):
        # ignore limits LED
self.widgets.led_ignore_limits.set_active(self.data.or_limits)
def update_override_label(self):
# overrides
self.widgets.fo.set_text("FO: %d%%"%(round(self.data.feed_override,2)*100))
self.widgets.so.set_text("SO: %d%%"%(round(self.data.spindle_override,2)*100))
self.widgets.mv.set_text("VO: %d%%"%(round((self.data.velocity_override),2) *100))
    # we need to check if the current units match the basic machine units - convert if necessary.
    # then set the display according to the current display units.
def update_jog_rate_label(self):
rate = round(self.status.convert_units(self.data.jog_rate),2)
if self.data.dro_units == _MM:
text = "%4.2f mm/min"% (rate)
else:
text = "%3.2f IPM"% (rate)
self.widgets.jog_rate.set_text(text)
try:
text = "%4.2f DPM"% (self.data.angular_jog_rate)
self.widgets.angular_jog_rate.set_text(text)
except:
pass
def update_mode_label(self):
# Mode / view
modenames = self.data.mode_labels
time = strftime("%a, %d %b %Y %I:%M:%S %P ", localtime())
self.widgets.mode_label.set_label( "%s View -%s %s"% (modenames[self.data.mode_order[0]],self.data.plot_view[0],time) )
def update_units_button_label(self):
label = self.widgets.metric_select.get_label()
data = self.data.dro_units
if data and not label == " mm ":
temp = " mm "
elif data == 0 and not label == "Inch":
temp = "Inch"
else: return
self.widgets.metric_select.set_label(temp)
def update_hal_jog_pins(self):
for i in self.data.axis_list:
if self.widgets.button_jog_mode.get_active() and self.widgets["axis_%s"%i].get_active():
self.halcomp["jog-enable-%s-out"%i] = True
else:
self.halcomp["jog-enable-%s-out"%i] = False
if self.widgets.button_jog_mode.get_active():
self.halcomp["jog-enable-out"] = True
else:
self.halcomp["jog-enable-out"] = False
try:
self.widgets.led_jog_mode.set_active(self.halcomp["jog-enable-out"])
except:
pass
# These pins set and unset enable pins for override adjustment
# only true when the screen button is true and not in jog mode
# (because jog mode may use the encoder for jogging)
def update_hal_override_pins(self):
jogmode = not(self.widgets.button_jog_mode.get_active())
fo = self.widgets.button_feed_override.get_active() and jogmode
so = self.widgets.button_spindle_override.get_active() and jogmode
mv = self.widgets.button_rapid_override.get_active() and jogmode
self.halcomp["f-override-enable-out"] = fo
self.halcomp["s-override-enable-out"] = so
self.halcomp["mv-override-enable-out"] = mv
def __getitem__(self, item):
return getattr(self, item)
def __setitem__(self, item, value):
return setattr(self, item, value)
# calls a postgui file if there is one.
# then starts Gscreen
if __name__ == "__main__":
try:
print "**** GSCREEN INFO ini:", sys.argv[2]
app = Gscreen()
except KeyboardInterrupt:
sys.exit(0)
postgui_halfile,inifile = Gscreen.postgui(app)
print "**** GSCREEN INFO: postgui filename:",postgui_halfile
if postgui_halfile:
res = os.spawnvp(os.P_WAIT, "halcmd", ["halcmd", "-i",inifile,"-f", postgui_halfile])
if res: raise SystemExit, res
gtk.main()
| CalvinHsu1223/LinuxCNC-EtherCAT-HAL-Driver | src/emc/usr_intf/gscreen/gscreen.py | Python | gpl-2.0 | 151,811 |
from django.contrib.auth.models import User as AuthUser
from django.core.urlresolvers import resolve
from django.http import HttpRequest
from django.template.loader import render_to_string
from django.test import TestCase
from django.utils.html import escape
import json
from mock import patch
from server.models import Player, Song, Request
class SongRequestTest(TestCase):
def test_saving_a_new_song_request(self):
self.user = AuthUser.objects.create_user(username='barry', email='[email protected]',
password='myeverything')
self.song = Song.songs.create(pk=43, title='Title 1',
artist='Artist 1', last_time_play=0)
self.client.login(username='barry', password='myeverything')
self.client.post(
'/request/',
data={'id_song': 43}
)
self.assertEqual(Request.requests.count(), 1)
new_request = Request.requests.first()
self.assertEqual(new_request.song.pk, 43)
@patch('server.models.time')
def test_get_next_request(self, mock_time):
self.user = AuthUser.objects.create_user(username='barry', email='[email protected]',
password='myeverything')
self.player = Player.objects.create(user=self.user)
self.song = Song.songs.create(pk=43, title='Title 1',
artist='Artist 1', last_time_play=0)
mock_time.time.return_value = 2000
Request.requests.create(user=self.player, song=self.song)
mock_time.time.return_value = 2100
Request.requests.create(user=self.player, song=self.song)
self.client.get(
'/songs/next/'
)
self.assertEqual(Request.requests.count(), 2)
self.client.get(
'/songs/next/'
)
self.assertEqual(Request.requests.count(), 1)
self.client.get(
'/songs/next/'
)
self.assertEqual(Request.requests.count(), 0)
class PlaylistTest(TestCase):
def setUp(self):
self.user = AuthUser.objects.create_user(username='barry', email='[email protected]',
password='myeverything')
self.player = Player.objects.create(user=self.user)
self.song1 = Song.songs.create(title='Title 1',
artist='Artist 1', last_time_play=0)
self.song2 = Song.songs.create(title='Title 2',
artist='Artist 2', last_time_play=0)
Request.requests.create(user=self.player, song=self.song1)
Request.requests.create(user=self.player, song=self.song2)
self.client.login(username='barry', password='myeverything')
def test_playlist_read(self):
resp = self.client.get(
'/playlist/current'
)
self.assertEqual(len(json.loads(resp.content)), 2)
def test_playlist_after_new_request(self):
resp = self.client.get(
'/playlist/current'
)
playlist = json.loads(resp.content)
self.assertFalse(playlist[0]['now_play'])
self.assertEqual(len(playlist), 2)
self.client.get(
'/songs/next/'
)
resp = self.client.get(
'/playlist/current'
)
playlist = json.loads(resp.content)
self.assertTrue(playlist[0]['now_play'])
self.client.get(
'/songs/next/'
)
resp = self.client.get(
'/playlist/current'
)
playlist = json.loads(resp.content)
self.assertEqual(len(playlist), 1)
self.assertTrue(playlist[0]['now_play'])
def test_playlist_empty(self):
for i in range(3):
self.client.get(
'/songs/next/'
)
resp = self.client.get(
'/playlist/current'
)
playlist = json.loads(resp.content)
self.assertEqual(len(playlist), 0)
| raspberrywhite/raspberrywhite | server/tests/test_views.py | Python | bsd-3-clause | 3,820 |
from utils import *
from config import *
from .flightblock import FlightBlockList
from .waypointobject import Waypoint
import config
import utm
import logging
import threading
from collections import OrderedDict
from PyQt5.QtCore import QObject, pyqtSignal
logg = logging.getLogger(__name__)
class BagOfHolding(object):
def __init__(self):
self.airplane = AirplaneTelemetry()
self.waypoints = OrderedDict()
self.allMissions = OrderedDict()
self.tasks = OrderedDict()
self.airMissionStatus = fancyList()
self.groundMissionStatus = fancyList()
        self.remainingMissionTime = 0
self.signals = dbSignals()
self.flightBlocks = FlightBlockList()
self.currentFlightBlock = None
def groundMissionStatusUpdated(self):
pass
#self.sync.missionUpdateSent()
def updateTelemetry(self, msg):
self.airplane.updateFromWaldo(msg)
def updateAirMissionStatus(self, msg):
        self.remainingMissionTime = msg.fieldvalues[0]
if(msg.fieldvalues[1].split(",")[0] != 0):
mission_array = msg.fieldvalues[1].split(",")
mission_list = fancyList()
#--------------------------------------------------------------------------------[6/9/2018]----------------
#task_array = msg.fieldvalues[2].split(",")
# Parse Missions and Task
for iv_miss_id in mission_array:
miss = self.findMissionById(iv_miss_id)
if miss != None :
mission_list.append(miss)
if (len(mission_list)>0)and(len(self.airMissionStatus)>0)and(mission_list[0].name != self.airMissionStatus[0].name):
self.signals.updateStagedMissionsinUI()
self.airMissionStatus = mission_list
self.groundMissionStatus.replaceAll(mission_list)
self.signals.updateUASinUI()
def findMissionById(self, idStr):
for miss in self.allMissions.items():
if (str(miss[1].index) == idStr):
return miss[1]
return None
def addWaypoint(self, wpTuple):
self.waypoints.update(wpTuple)
def updateWaypoint(self, msg):
east = msg.fieldvalues[1]
north = msg.fieldvalues[2]
alt = msg.fieldvalues[3]
zone = msg.fieldvalues[4]
wp = list(self.waypoints.items())[int(msg.fieldvalues[0])][1]
if (wp != None)and((east != wp.east)or(north != wp.north)or(alt != wp.alt)):
            tmpest = str(east)
            if (tmpest != str(wp.east)) and (WP_DEBUG):
print(str(msg.fieldvalues[0]))
print(wp.name)
print('Updating a Waypoint!')
print("New Easting is :" + tmpest)
print("Old Easting is :" + str(wp.east))
print('------------------------------------------------------')
wp.update_utm(east, north, zone, northern=True, alt=alt, name=None)
#Waypoint has updated, check if it is part of a comitted mission
for mission in self.groundMissionStatus :
for committedWaypoint in mission.waypoints:
if wp.name == committedWaypoint:
self.signals.resendMissions()
def getWaypoint(self, index):
return self.waypoints.get(index)
def addMission(self, missionTuple):
self.allMissions.update(missionTuple)
def addTask(self, taskTuple):
self.tasks.update(taskTuple)
def updateCurrentFlightBlock(self, currentBlockID):
'''
Sends the signal to UI to update to the selected flight block
'''
newBlock = self.flightBlocks.getFlightBlockByID(currentBlockID)
if (newBlock != self.currentFlightBlock):
self.currentFlightBlock = newBlock
self.signals.updateCurrentBlock()
class AirplaneTelemetry(object):
'''
Stores the airplane's position, altitude and current heading.
    This is meant to be updated from the Ivybus; it updates the Interop Server
    whenever a value changes.
    We need to submit at a minimum of 1 Hz to the server to receive points
Note the interop server REQUIRES position in Lat/Lon and height in ft msl
(feet above mean sea level). Conversion is done here and saved in the server's units.
'''
def __init__(self):
self.position = (0, 0) # Tuple of lat-lon
self.altitude = 0
self.heading = 0
self.teleAvail = threading.Event()
self.teleAvail.clear()
self.positionFlag = False
self.altitudeFlag = False
self.headingFlag = False
def updateFromWaldo(self, msg):
easting = float(msg.fieldvalues[4]) / 100 # cm to m
northing = float(msg.fieldvalues[5]) / 100 # cm to m
zone_num = int(msg.fieldvalues[6])
try:
self.position = utm.to_latlon(easting, northing, zone_num, northern=UTM_NORTHERN_HEMISPHERE)
except utm.error.OutOfRangeError:
logg.warning('Out Of Range Error, GPS is probably disconnected. Defaulting to NULL ISLAND (0,0) \n GPS Easting: ' +str(easting)+ ' Northing: ' + str(northing))
self.position = ('0', '0') #Plane defaults to NULL ISLAND in the Atlantic Ocean
self.altitude = str((float(msg.fieldvalues[10]) + Waypoint.flightParams['ground_alt'])*feetInOneMeter)
if (float(self.altitude) < 0):
logg.warning('Altitude reported as negative. Flipping Altitude:' + self.altitude + ' to prevent further errors')
self.altitude = str(-1*float(self.altitude))
self.heading = float(msg.fieldvalues[1]) * 180 / PI + 90
self.teleAvail.set()
if TELEM_DEBUG:
print(self.position)
    # Updates each variable individually. This isn't really used; can we discard?
def newPosition(self, newPos):
if (self.position != newPos):
self.position = newPos
self.positionFlag = True
return True
else:
return False
def newAltitude(self, newAlt):
if (self.altitude != newAlt):
self.altitude = newAlt
self.altitudeFlag = True
return True
else:
return False
def newHeading(self, newHead):
newHead = self.checkHeading(newHead)
if (self.heading != newHead):
self.heading = newHead
self.headingFlag = True
return True
else:
return False
def checkHeading(self, value):
'''
Ensures heading is a value the interop server accepts.
'''
counter = 0
while (value > 360.0) or (value < 0.0):
if (value > 360.0):
value = value - 360.0
else:
value = value + 360.0
counter = counter + 1
if counter > 1000:
                logg.critical('Breaking infinite loop of heading checker')
break
return value
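    # Illustrative checks: checkHeading(450.0) -> 90.0 and
    # checkHeading(-90.0) -> 270.0, so the heading handed to the interop
    # server always lands inside [0.0, 360.0].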
# Interop server code will call this when new data is recieved
def getTelemetry(self):
self.teleAvail.clear()
self.newHeading(self.heading) # probably a better way to do this, but I want to be sure were sending a valid heading
tele = {
'latitude':float(self.position[0]),
'longitude':float(self.position[1]),
'altitude_msl':float(self.altitude),
'uas_heading':float(self.heading)
}
return tele
class dbSignals(QObject):
'''This class is to be used to create QT signal objects which can then be connected to the UI'''
uas_update = pyqtSignal()
stagedListUpdate = pyqtSignal()
resendMissionstoUI = pyqtSignal()
updateFlightBlock = pyqtSignal()
def __init__(self):
# Initialize as a QObject
QObject.__init__(self)
def updateUASinUI(self):
self.uas_update.emit()
def updateStagedMissionsinUI(self):
self.stagedListUpdate.emit()
def resendMissions(self):
self.resendMissionstoUI.emit()
def updateCurrentBlock(self):
self.updateFlightBlock.emit()
| uaarg/missioncommander | database/database.py | Python | gpl-2.0 | 8,144 |
#
# Copyright (c) 2011 Ginkgo Bioworks Inc.
# Copyright (c) 2011 Daniel Taub
#
# This file is part of Scantelope.
#
# Scantelope is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Data matrix decoding module for Scantelope.
"""
from os.path import exists as fexist
#curdir, sep, path
EXPECTED_LEN = 10
from time import time
from pydmtx import DataMatrix, Image
import findcode
import cv
class DMDecoder():
do_display = False
verbose = False
totalTime = time()
def resettime(self):
self.totalTime = time()
def __init__(self,myDir = None,files = None):
if myDir != None:
self.myDir = myDir
if files != None:
self.files = [f for f in files if fexist(self.myDir+f)]
self.output = {}
self.failed = []
self.status = ''
self.resettime()
def parseImages(self, files = None):
if files != None:
self.files = files
n=0
m=0
failNum=0
lastCVTime = 0
timeForCV = 0
print "\nFiles to decode: ",len(self.files)
stop = False
# import pdb;pdb.set_trace()
for filename in self.files:
is_found = False
# if filename.find('/') != -1:
# self.myDir,filename = path.split(filename)
# self.myDir += '/'
lastCVTime = time()
cv_orig,cv_smoo,cv_final = findcode.findAndOrient(self.myDir,
filename,
self.do_display,
self.verbose)
timeForCV += (time() - lastCVTime)
cv.SaveImage(self.myDir+filename.replace('tif','jpg'),cv_final)
test = cv.Avg(cv_final)
            if stop:
                import pdb; pdb.set_trace()
if test[0] < 130 and test[0] > 40: # hard threshold works for avision
for img,name in [#[cv_smoo,"smooth"], #seems to introduce many errors
[cv_final,"clipped"],
[cv_orig,"original"]]:
if is_found:
break
dmtx_im = Image.fromstring("L", cv.GetSize(img), img.tostring())
if name != "original":
padding = 0.1
else:
padding = 0
ncols,nrows = dmtx_im.size
padw = (ncols)*padding
padh = (nrows)*padding
isize = (int(round(ncols+2*padw)),int(round(nrows+2*padh)))
# Create a new color image onto which we can paste findcode output
dmtx_image = Image.new('RGB',isize,0)
bbox = (round(padw),round(padh),ncols+round(padw),nrows+round(padh))
dmtx_image.paste(dmtx_im,map(int,bbox))
(width, height) = dmtx_image.size
# Send to datamatrix library
if findcode.low_res:
dm_read = DataMatrix(max_count = 1, timeout = 300, min_edge = 20, max_edge = 32, threshold = 5, deviation = 10)
else:
dm_read = DataMatrix(max_count = 1, timeout = 300, min_edge = 20, max_edge = 32, threshold = 5, deviation = 10, shrink = 2)
#dm_read = DataMatrix(max_count = 1, timeout = 300, shape = DataMatrix.DmtxSymbol12x12, min_edge = 20, max_edge = 32, threshold = 5, deviation = 10)
dmtx_code = dm_read.decode (width, height, buffer(dmtx_image.tostring()))
if dmtx_code is not None and len(dmtx_code) == EXPECTED_LEN:
how = "Quick Search: "+str(name)
is_found = True
out = dmtx_code
if not is_found:
self.failed.append(filename)
else:
failNum+=1
if is_found:
n+=1
self.output[filename] = out
#print filename, out, test[0]
else:
#print filename, None, test[0]
pass
#self.failed.append(filename)
m+=1
print failNum, "failed to produce images worth decoding"
print n,"of the",m-failNum,"remaining were successfully decoded."
self.status += "\nFound %d of %d in "%(n,m)
self.status += str(time()-self.totalTime)+" seconds.\n"
self.status += "(OpenCV: "+str(timeForCV)+" sec)\n"
if not(len(self.failed) == 0):# and verbose:
dirr = ' '+self.myDir
self.status+= "\nmissing: "+str(self.failed)+'\n'
return self.output,self.failed,self.status
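# Minimal usage sketch (directory and file names here are hypothetical):
# dec = DMDecoder(myDir='/tmp/scans/', files=['A01.tif', 'A02.tif'])
# output, failed, status = dec.parseImages()
# 'output' maps each decoded filename to its EXPECTED_LEN-character code;
# 'failed' lists the files that produced no decodable image.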
| dmtaub/scantelope | decode.py | Python | gpl-3.0 | 5,157 |
# Copyright 2020 Google Research. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Common keras utils."""
import collections
from typing import Optional, Text
from absl import logging
import tensorflow as tf
from tensorflow_examples.lite.model_maker.third_party.efficientdet import utils
# Prefix variable name mapping from keras model to the hub module checkpoint.
HUB_CPT_NAME = collections.OrderedDict([('class_net/class-predict/', 'classes'),
('box_net/box-predict/', 'boxes'),
('', 'base_model')])
def build_batch_norm(is_training_bn: bool,
beta_initializer: Text = 'zeros',
gamma_initializer: Text = 'ones',
data_format: Text = 'channels_last',
momentum: float = 0.99,
epsilon: float = 1e-3,
strategy: Optional[Text] = None,
name: Text = 'tpu_batch_normalization'):
"""Build a batch normalization layer.
Args:
is_training_bn: `bool` for whether the model is training.
beta_initializer: `str`, beta initializer.
gamma_initializer: `str`, gamma initializer.
data_format: `str` either "channels_first" for `[batch, channels, height,
width]` or "channels_last for `[batch, height, width, channels]`.
momentum: `float`, momentume of batch norm.
epsilon: `float`, small value for numerical stability.
strategy: `str`, whether to use tpu, gpus or other version of batch norm.
name: the name of the batch normalization layer
Returns:
A normalized `Tensor` with the same `data_format`.
"""
axis = 1 if data_format == 'channels_first' else -1
batch_norm_class = utils.batch_norm_class(is_training_bn, strategy)
bn_layer = batch_norm_class(
axis=axis,
momentum=momentum,
epsilon=epsilon,
center=True,
scale=True,
beta_initializer=beta_initializer,
gamma_initializer=gamma_initializer,
name=name)
return bn_layer
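# Minimal usage sketch for build_batch_norm (the input tensor and training
# flag are illustrative assumptions):
#   bn = build_batch_norm(is_training_bn=True, strategy=None)
#   y = bn(x, training=True)  # x: a `channels_last` feature-map tensor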
def get_ema_vars(model):
"""Get all exponential moving average (ema) variables."""
ema_vars = model.trainable_weights
for v in model.weights:
# We maintain mva for batch norm moving mean and variance as well.
if 'moving_mean' in v.name or 'moving_variance' in v.name:
ema_vars.append(v)
ema_vars_dict = dict()
# Remove duplicate vars
for var in ema_vars:
ema_vars_dict[var.ref()] = var
return ema_vars_dict
def average_name(ema, var):
"""Returns the name of the `Variable` holding the average for `var`.
A hacker for tf2.
Args:
ema: A `ExponentialMovingAverage` object.
var: A `Variable` object.
Returns:
A string: The name of the variable that will be used or was used
by the `ExponentialMovingAverage class` to hold the moving average of `var`.
"""
if var.ref() in ema._averages: # pylint: disable=protected-access
return ema._averages[var.ref()].name.split(':')[0] # pylint: disable=protected-access
return tf.compat.v1.get_default_graph().unique_name(
var.name.split(':')[0] + '/' + ema.name, mark_as_used=False)
def load_from_hub_checkpoint(model, ckpt_path_or_file):
"""Loads EfficientDetNet weights from EfficientDetNetTrainHub checkpoint."""
def _get_cpt_var_name(var_name):
for name_prefix, hub_name_prefix in HUB_CPT_NAME.items():
if var_name.startswith(name_prefix):
cpt_var_name = var_name[len(name_prefix):] # remove the name_prefix
cpt_var_name = cpt_var_name.replace('/', '.S')
cpt_var_name = hub_name_prefix + '/' + cpt_var_name
if name_prefix:
cpt_var_name = cpt_var_name.replace(':0', '')
break
return cpt_var_name + '/.ATTRIBUTES/VARIABLE_VALUE'
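  # For example, a keras variable named 'class_net/class-predict/bias:0'
  # maps to the hub checkpoint key 'classes/bias/.ATTRIBUTES/VARIABLE_VALUE';
  # variables matching no listed prefix fall through to 'base_model/...'.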
for var in model.weights:
cpt_var_name = _get_cpt_var_name(var.name)
var.assign(tf.train.load_variable(ckpt_path_or_file, cpt_var_name))
logging.log_first_n(
logging.INFO,
'Init %s from %s (%s)' % (var.name, cpt_var_name, ckpt_path_or_file),
10)
def restore_ckpt(model,
ckpt_path_or_file,
ema_decay=0.9998,
skip_mismatch=True,
exclude_layers=None):
"""Restore variables from a given checkpoint.
Args:
model: the keras model to be restored.
ckpt_path_or_file: the path or file for checkpoint.
ema_decay: ema decay rate. If None or zero or negative value, disable ema.
skip_mismatch: whether to skip variables if shape mismatch,
only works with tf1 checkpoint.
exclude_layers: string list exclude layer's variables,
only works with tf2 checkpoint.
Raises:
KeyError: if access unexpected variables.
"""
if ckpt_path_or_file == '_':
logging.info('Running test: do not load any ckpt.')
return
if tf.io.gfile.isdir(ckpt_path_or_file):
ckpt_path_or_file = tf.train.latest_checkpoint(ckpt_path_or_file)
var_list = tf.train.list_variables(ckpt_path_or_file)
  if var_list[0][0] == '_CHECKPOINTABLE_OBJECT_GRAPH':
try:
# Use custom checkpoint solves mismatch shape issue.
keys = {var[0].split('/')[0] for var in var_list}
keys.discard('_CHECKPOINTABLE_OBJECT_GRAPH')
if exclude_layers:
exclude_layers = set(exclude_layers)
keys = keys.difference(exclude_layers)
ckpt = tf.train.Checkpoint(**{key: getattr(model, key, None)
for key in keys
if getattr(model, key, None)})
status = ckpt.restore(ckpt_path_or_file)
status.assert_nontrivial_match()
except AssertionError:
# The checkpoint for EfficientDetNetTrainHub and EfficientDetNet are not
# the same. If we trained from EfficientDetNetTrainHub using hub module
# and then want to use the weight in EfficientDetNet, it needed to
# manually load the model checkpoint.
load_from_hub_checkpoint(model, ckpt_path_or_file)
else:
if ema_decay > 0:
ema = tf.train.ExponentialMovingAverage(decay=0.0)
ema_vars = get_ema_vars(model)
var_dict = {
average_name(ema, var): var for (ref, var) in ema_vars.items()
}
else:
ema_vars = get_ema_vars(model)
var_dict = {
var.name.split(':')[0]: var for (ref, var) in ema_vars.items()
}
# add variables that not in var_dict
for v in model.weights:
if v.ref() not in ema_vars:
var_dict[v.name.split(':')[0]] = v
# try to load graph-based checkpoint with ema support,
# else load checkpoint via keras.load_weights which doesn't support ema.
reader = tf.train.load_checkpoint(ckpt_path_or_file)
var_shape_map = reader.get_variable_to_shape_map()
for key, var in var_dict.items():
if key in var_shape_map:
if var_shape_map[key] != var.shape:
msg = 'Shape mismatch: %s' % key
if skip_mismatch:
logging.warning(msg)
else:
raise ValueError(msg)
else:
var.assign(reader.get_tensor(key), read_value=False)
logging.log_first_n(
logging.INFO, f'Init {var.name} from {key} ({ckpt_path_or_file})',
10)
else:
msg = 'Not found %s in %s' % (key, ckpt_path_or_file)
if skip_mismatch:
logging.warning(msg)
else:
raise KeyError(msg)
def fp16_to_fp32_nested(input_nested):
"""Convert fp16 tensors in a nested structure to fp32.
Args:
input_nested: A Python dict, values being Tensor or Python list/tuple of
Tensor or Non-Tensor.
Returns:
A Python dict with the same structure as `tensor_dict`,
with all bfloat16 tensors converted to float32.
"""
if isinstance(input_nested, tf.Tensor):
if input_nested.dtype in (tf.bfloat16, tf.float16):
return tf.cast(input_nested, dtype=tf.float32)
else:
return input_nested
elif isinstance(input_nested, (list, tuple)):
out_tensor_dict = [fp16_to_fp32_nested(t) for t in input_nested]
elif isinstance(input_nested, dict):
out_tensor_dict = {
k: fp16_to_fp32_nested(v) for k, v in input_nested.items()
}
else:
return input_nested
return out_tensor_dict
### The following code breaks COCO training.
# def get_batch_norm(bn_class):
# def _wrapper(*args, **kwargs):
# if not kwargs.get('name', None):
# kwargs['name'] = 'tpu_batch_normalization'
# return bn_class(*args, **kwargs)
# return _wrapper
# if tf.compat.v1.executing_eagerly_outside_functions():
# utils.BatchNormalization = get_batch_norm(
# tf.keras.layers.BatchNormalization)
# utils.SyncBatchNormalization = get_batch_norm(
# tf.keras.layers.experimental.SyncBatchNormalization)
# utils.TpuBatchNormalization = get_batch_norm(
# tf.keras.layers.experimental.SyncBatchNormalization)
| tensorflow/examples | tensorflow_examples/lite/model_maker/third_party/efficientdet/keras/util_keras.py | Python | apache-2.0 | 9,468 |
from .particles import (
drift_kick,
process_sink,
produce_ptcls,
Ensemble,
json_to_ensemble,
ensemble_to_json,
Sink,
SinkPlane,
Source,
)
from .coulomb import (
CoulombForce
)
from .bend_kick import (
bend_kick
)
from .radiation_pressure import (
RadiationPressure
)
from .penning_trap import (
HarmonicTrapPotential
)
from .version import (
__version__
)
from coldatoms_lib import (
Rng
)
| d-meiser/cold-atoms | src/coldatoms/__init__.py | Python | gpl-3.0 | 459 |
# -*- coding: utf-8 -*-
from flask.ext.sqlalchemy import SQLAlchemy
db = SQLAlchemy()
from flask.ext.mail import Mail
mail = Mail()
from flask.ext.login import LoginManager
login_manager = LoginManager()
from flask.ext.babel import Babel
babel = Babel()
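# These instances are typically bound to the app later via the application-
# factory pattern (a sketch; `create_app` is an assumption, not part of this
# module):
#
# def create_app():
#     app = Flask(__name__)
#     db.init_app(app)
#     mail.init_app(app)
#     login_manager.init_app(app)
#     babel.init_app(app)
#     return app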
| vovantics/flask-bluebone | app/extensions.py | Python | mit | 258 |
#!/usr/bin/env python3
"""Tests bsp's -c' and '-C' options
TODO: test error cases ('-c' set to non-existent file; invalid values for
'--bool-config-value', '--int-config-value', '--float-config-value';
config conflicts; etc.)
"""
import datetime
import json
import logging
import os
import subprocess
import sys
import tempfile
from py.test import raises
from bluesky import datetimeutils
from bluesky.config import DEFAULTS
ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../..'))
sys.path.insert(0, ROOT_DIR) # in case this script is run outside of py.test
BSP = os.path.join(ROOT_DIR, 'bin/bsp')
INPUT = {
"run_id": 'abcdefg123',
"fires": [],
"run_config": { # 'run_config' is ignored in input data
"foobar": 12312
}
}
CONFIG_0 = {
"config": {
"foo": {
"a": 111,
"b": 222,
"c": 333,
"d": 444
},
'echo_run_id': '{run_id}'
}
}
CONFIG_1 = {
"config": {
"foo": {
"b": 2222,
"c": 3333,
"d": 4444,
"bb": "bb"
},
"bar": {
"b": "b"
},
"b": "b",
"e": '{run_id}__{_run_id}'
}
}
CONFIG_2 = {
"run_config": { # either 'config' or 'run_config' is allowed
"foo": {
"c": 33333,
"d": 44444,
"cc": "cc"
},
"baz": {
"c": "c"
},
"c": "c"
}
}
# Note: "-C foo.d=444444 -C foo.dd=dd -C boo.d=d -C d=d "
# and "-B dbt=true -B dbf=0 -I di=23 -F df=123.23"
# and "-C dci=23 -C dcf=123.23"
# will be specified on the command line
EXPECTED = {
"run_config": dict(DEFAULTS, **{
"foo": {
"a": 111,
"b": 2222,
"c": 33333,
"d": "444444", # because it was set on command line
"bb": "bb",
"cc": "cc",
"dd": "dd"
},
'echo_run_id': 'abcdefg123',
"bar": {
"b": "b"
},
"baz": {
"c": "c"
},
"boo": {
"d": "d"
},
"b": "b",
"c": "c",
"d": "d",
"dbt": True,
"dbf": False,
"di": 23,
"df": 123.23,
"dci": "23",
"dcf": "123.23",
"e": 'abcdefg123__{_run_id}',
#"f": 'sdfdsf__abcdefg123'
}),
"fires": []
}
# smoke ready config defaults have datetime wildcards; call fill-in code
# on all of EXPECTED, in case future config params also have wildcards
def replace_config_wildcards(val):
if isinstance(val, dict):
for k in val:
val[k] = replace_config_wildcards(val[k])
elif isinstance(val, list):
val = [replace_config_wildcards(v) for v in val]
elif hasattr(val, 'lower'): # i.e. it's a string
if val:
# first, fill in any datetime control codes or wildcards
val = datetimeutils.fill_in_datetime_strings(val)
return val
EXPECTED['run_config'] = replace_config_wildcards(EXPECTED['run_config'])
input_file = tempfile.NamedTemporaryFile(mode='w+t')
input_file.write(json.dumps(INPUT))
input_file.flush()
config_0_file = tempfile.NamedTemporaryFile(mode='w+t')
config_0_file.write(json.dumps(CONFIG_0))
config_0_file.flush()
config_1_file = tempfile.NamedTemporaryFile(mode='w+t')
config_1_file.write(json.dumps(CONFIG_1))
config_1_file.flush()
config_2_file = tempfile.NamedTemporaryFile(mode='w+t')
config_2_file.write(json.dumps(CONFIG_2))
config_2_file.flush()
cmd_args = [
BSP, '-i', input_file.name,
'--log-level', 'DEBUG',
'-c', config_0_file.name,
'-c', config_1_file.name,
'-c', config_2_file.name,
'-C', 'foo.d=444444',
'-C', 'foo.dd=dd',
'-C', 'boo.d=d',
'-C', 'd=d',
#'-C', 'f="sdfdsf__{run_id}"'
'-B', 'dbt=true',
'-B', 'dbf=0',
'-I', 'di=23',
'-F', 'df=123.23',
'-C', 'dci=23',
'-C', 'dcf=123.23'
]
output = subprocess.check_output(cmd_args)
actual = json.loads(output.decode())
actual.pop('runtime')
logging.basicConfig(level=logging.INFO)
logging.info("actual: {}".format(actual))
logging.info("expected: {}".format(EXPECTED))
today = actual.pop('today')
assert today == datetime.datetime.utcnow().strftime('%Y-%m-%dT00:00:00')
#assert actual == EXPECTED
assert set(actual.keys()) == set(['run_config', 'fires', 'run_id', 'counts', 'bluesky_version'])
assert actual['fires'] == EXPECTED['fires']
assert set(actual['run_config'].keys()) == set(EXPECTED['run_config'].keys())
for k in actual['run_config'].keys():
# if actual['run_config'][k] != EXPECTED['run_config'][k]:
# import pdb;pdb.set_trace()
logging.info('Checking output config key %s', k)
assert actual['run_config'][k] == EXPECTED['run_config'][k]
# Now check that specifying 'config' in input data causes failure
INPUT['config'] = INPUT.pop('run_config')
invalid_input_file = tempfile.NamedTemporaryFile(mode='w+t')
invalid_input_file.write(json.dumps(INPUT))
invalid_input_file.flush()
cmd_args[2] = invalid_input_file.name
with raises(subprocess.CalledProcessError) as e:
output = subprocess.check_output(cmd_args)
print("\n*** Correctly failed due to 'config' in input data ***\n")
print("\n*** PASSED ***\n") | pnwairfire/bluesky | test/adhoc/test_bsp_config_options.py | Python | gpl-3.0 | 5,273 |
r"""
Diametrically point loaded 2-D disk with postprocessing and probes. See
:ref:`sec-primer`.
Use it as follows (assumes running from the sfepy directory; on Windows, you
may need to prefix all the commands with "python " and remove "./"):
1. solve the problem::
./simple.py examples/linear_elasticity/its2D_4.py
2. optionally, view the results::
./postproc.py its2D.h5 -b
3. optionally, convert results to VTK, and view again::
./extractor.py -d its2D.h5
./postproc.py its2D.vtk -b
4. probe the data::
./probe.py examples/linear_elasticity/its2D_4.py its2D.h5
Find :math:`\ul{u}` such that:
.. math::
\int_{\Omega} D_{ijkl}\ e_{ij}(\ul{v}) e_{kl}(\ul{u})
= 0
\;, \quad \forall \ul{v} \;,
where
.. math::
D_{ijkl} = \mu (\delta_{ik} \delta_{jl}+\delta_{il} \delta_{jk}) +
\lambda \ \delta_{ij} \delta_{kl}
\;.
"""
from __future__ import absolute_import
from examples.linear_elasticity.its2D_1 import *
from sfepy.mechanics.matcoefs import stiffness_from_youngpoisson
from six.moves import range
def stress_strain(out, pb, state, extend=False):
"""
Calculate and output strain and stress for given displacements.
"""
from sfepy.base.base import Struct
ev = pb.evaluate
strain = ev('ev_cauchy_strain.2.Omega(u)', mode='el_avg')
stress = ev('ev_cauchy_stress.2.Omega(Asphalt.D, u)', mode='el_avg')
out['cauchy_strain'] = Struct(name='output_data', mode='cell',
data=strain, dofs=None)
out['cauchy_stress'] = Struct(name='output_data', mode='cell',
data=stress, dofs=None)
return out
def gen_lines(problem):
from sfepy.discrete.probes import LineProbe
ps0 = [[0.0, 0.0], [ 0.0, 0.0]]
ps1 = [[75.0, 0.0], [ 0.0, 75.0]]
# Use adaptive probe with 10 inital points.
n_point = -10
labels = ['%s -> %s' % (p0, p1) for p0, p1 in zip(ps0, ps1)]
probes = []
for ip in range(len(ps0)):
p0, p1 = ps0[ip], ps1[ip]
probes.append(LineProbe(p0, p1, n_point))
return probes, labels
def probe_hook(data, probe, label, problem):
import matplotlib.pyplot as plt
import matplotlib.font_manager as fm
def get_it(name, var_name):
var = problem.create_variables([var_name])[var_name]
var.set_data(data[name].data)
pars, vals = probe(var)
vals = vals.squeeze()
return pars, vals
results = {}
results['u'] = get_it('u', 'u')
results['cauchy_strain'] = get_it('cauchy_strain', 's')
results['cauchy_stress'] = get_it('cauchy_stress', 's')
fig = plt.figure()
plt.clf()
fig.subplots_adjust(hspace=0.4)
plt.subplot(311)
pars, vals = results['u']
for ic in range(vals.shape[1]):
plt.plot(pars, vals[:,ic], label=r'$u_{%d}$' % (ic + 1),
lw=1, ls='-', marker='+', ms=3)
plt.ylabel('displacements')
plt.xlabel('probe %s' % label, fontsize=8)
plt.legend(loc='best', prop=fm.FontProperties(size=10))
sym_indices = ['11', '22', '12']
plt.subplot(312)
pars, vals = results['cauchy_strain']
for ic in range(vals.shape[1]):
plt.plot(pars, vals[:,ic], label=r'$e_{%s}$' % sym_indices[ic],
lw=1, ls='-', marker='+', ms=3)
plt.ylabel('Cauchy strain')
plt.xlabel('probe %s' % label, fontsize=8)
plt.legend(loc='best', prop=fm.FontProperties(size=8))
plt.subplot(313)
pars, vals = results['cauchy_stress']
for ic in range(vals.shape[1]):
plt.plot(pars, vals[:,ic], label=r'$\sigma_{%s}$' % sym_indices[ic],
lw=1, ls='-', marker='+', ms=3)
plt.ylabel('Cauchy stress')
plt.xlabel('probe %s' % label, fontsize=8)
plt.legend(loc='best', prop=fm.FontProperties(size=8))
return plt.gcf(), results
materials['Asphalt'][0].update({'D' : stiffness_from_youngpoisson(2, young, poisson)})
# Update fields and variables to be able to use probes for tensors.
fields.update({
'sym_tensor': ('real', 3, 'Omega', 0),
})
variables.update({
's' : ('parameter field', 'sym_tensor', None),
})
options.update({
'output_format' : 'h5', # VTK reader cannot read cell data yet for probing
'post_process_hook' : 'stress_strain',
'gen_probes' : 'gen_lines',
'probe_hook' : 'probe_hook',
})
| sfepy/sfepy | examples/linear_elasticity/its2D_4.py | Python | bsd-3-clause | 4,331 |
import random
import numpy as np
from deap import base
from deap import creator
from deap import tools
from deap import algorithms
import argparse
import pandas
import os
import matplotlib.pyplot as plt
import functools
from collections import Counter
import math
def accuracy(decision_vector):
ok = len([s[2] for s in decision_vector if confusion_matrix_label(s) in ['TP', 'TN']])
return float(ok) / len(decision_vector)
def mcc(decision_vector):
tp = len([s[2] for s in decision_vector if confusion_matrix_label(s) in ['TP']])
tn = len([s[2] for s in decision_vector if confusion_matrix_label(s) in ['TN']])
fp = len([s[2] for s in decision_vector if confusion_matrix_label(s) in ['FP']])
fn = len([s[2] for s in decision_vector if confusion_matrix_label(s) in ['FN']])
den = 0
if (tp + fp) == 0 or (tp + fn) == 0 or (tn + fp) == 0 or (tn + fn) == 0:
den = 1
else:
den = (tp + fp) * (tp + fn) * (tn + fp) * (tn + fn)
den = math.sqrt(den)
return ((tp*tn) - (fp * fn)) / den
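# Sanity check for mcc() (rows are (prediction, truth, score); values are
# illustrative): one each of TP, TN, FP and FN, e.g.
# [(1,1,.7), (0,0,-.3), (1,0,.2), (0,1,-.1)], yields 0.0, while a perfectly
# classified vector such as [(1,1,.7), (0,0,-.3)] yields 1.0.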
def determine_bias(S, mu0, mu1, weights, J, f=accuracy):
min_S = min(S)
max_S = max(S)
bias = min_S + abs(max_S - min_S) / 2
dr = functools.partial(decision_rule, bias=bias)
predictor = np.vectorize(dr)
D, S, S0, S1 = calculate_decision_vector(predictor, mu1, mu0, weights, J)
decision_vector = np.column_stack((D,J,S,S,D))
confusion_matrix, labels = calculate_confusion_matrix(decision_vector)
max_v = 0
max_i = 0
for i, e in enumerate(decision_vector):
if labels[i] in ['FP', 'FN']:
dr = functools.partial(decision_rule, bias=e[3])
predictor = np.vectorize(dr)
D, S, S0, S1 = calculate_decision_vector(predictor, mu1, mu0, weights, J)
dv = np.column_stack((D,J,S,S,D))
confusion_matrix, labels = calculate_confusion_matrix(dv)
v = f(dv)
max_v = max_v if max_v > v else v
max_i = max_i if max_v > v else i
return decision_vector[max_i][3]
def adjust_intrinsic_strength_full_loop(S, J, D, mu0, mu1, W, k_max=1):
for k in range(k_max):
for i, C in enumerate(W):
if J[i] != int(D[i]):
#print('Case {} misclassified!'.format(i))
for j, w in enumerate(C):
err = abs(mu1[j] - mu0[j])
if w != 0:
#print(' ERROR E{} = {}'.format(j, err))
err *= w
if int(D[i]) == 1:
mu0[j] += err
mu1[j] -= err
else:
mu0[j] -= err
mu1[j] += err
return mu0, mu1
def adjust_intrinsic_strength(S, J, D, mu0, mu1, W, k_max=1):
for it in range(k_max):
for i, ps in enumerate(S):
C = W[i]
s = np.dot(C, mu1 - mu0)
pred = decision_rule(s)
if J[i] != pred:
#print('Case {} misclassified!'.format(i))
for j, w in enumerate(C):
err = abs(mu1[j] - mu0[j])
if w != 0:
#print(' ERROR E{} = {}'.format(j, err))
err *= w
if int(D[i]) == 1:
mu0[j] += err
mu1[j] -= err
else:
mu0[j] -= err
mu1[j] += err
return mu0, mu1
def decision_rule(s, eta1=0, eta0=0, l1=1, l0=0, bias=0):
if s > bias:
if s > eta1:
return 1
else:
return l1
else:
if s < eta0:
return 0
else:
return l0
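# With the defaults (eta1=eta0=bias=0, l1=1, l0=0) decision_rule reduces to a
# sign test: decision_rule(0.5) -> 1 and decision_rule(-0.5) -> 0. Raising the
# bias shifts the operating point, e.g. decision_rule(0.5, bias=1.0) -> 0.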
def confusion_matrix_label(o,i=0):
if o[1] == 1:
return 'TP' if o[i] == 1 else 'FN'
else:
return 'TN' if o[i] == 0 else 'FP'
def calculate_decision_vector(predictor, mu1, mu0, weights, J):
#delta_mu = mu1.subtract(mu0)
S1 = np.matmul(weights, mu1)
S0 = np.matmul(weights, mu0)
S = S1 - S0
D = predictor(S)
#for i, s1 in enumerate(S1):
# print('{} - {} = {} | P : {} | {} | {}'.format(s1, S0[i], S[i], D[i], i, J[i]))
return D, S, S0, S1
def calculate_confusion_matrix(decision_vector):
    cf_label = np.array([confusion_matrix_label(o) for o in decision_vector])
return Counter(cf_label), cf_label
def calculate_fitness(predictor, mu1, mu0, weights, J, individual):
#print(mu1)
mu1_d = np.array(mu1)
mu0_d = np.array(mu0)
for i,_ in enumerate(mu1_d):
mu1_d[i] += individual[i] / 2.
mu0_d[i] -= individual[i] / 2.
#mu0_d /= np.linalg.norm(mu0_d, ord=1)
#mu1_d /= np.linalg.norm(mu1_d, ord=1)
D, S, S0, S1 = calculate_decision_vector(predictor, mu1_d, mu0_d, weights, J)
OS = S
OD = D
decision_vector = np.column_stack((D,J,S,OS,OD))
confusion_matrix, labels = calculate_confusion_matrix(decision_vector)
#fitness = accuracy(decision_vector) - 0.1 * np.linalg.norm(individual, ord=2)
fitness = mcc(decision_vector) - 0.1 * np.linalg.norm(individual, ord=2) ** 2
print(fitness, mcc(decision_vector), 0.1 * np.linalg.norm(individual, ord=2) ** 2)
return fitness,
def calculate_final_mcc(predictor, mu1, mu0, weights, J, individual):
#print(mu1)
mu1_d = np.array(mu1)
mu0_d = np.array(mu0)
for i,_ in enumerate(mu1_d):
mu1_d[i] += individual[i] / 2.
mu0_d[i] -= individual[i] / 2.
#mu0_d /= np.linalg.norm(mu0_d, ord=1)
#mu1_d /= np.linalg.norm(mu1_d, ord=1)
D, S, S0, S1 = calculate_decision_vector(predictor, mu1_d, mu0_d, weights, J)
OS = S
OD = D
decision_vector = np.column_stack((D,J,S,OS,OD))
confusion_matrix, labels = calculate_confusion_matrix(decision_vector)
#fitness = accuracy(decision_vector) - 0.1 * np.linalg.norm(individual, ord=2)
fitness = mcc(decision_vector)
return fitness
def calculate_final_accuracy(predictor, mu1, mu0, weights, J, individual):
mu1_d = np.array(mu1)
mu0_d = np.array(mu0)
for i,_ in enumerate(mu1_d):
mu1_d[i] += individual[i] / 2.
mu0_d[i] -= individual[i] / 2.
#mu0_d /= np.linalg.norm(mu0_d, ord=1)
#mu1_d /= np.linalg.norm(mu1_d, ord=1)
D, S, S0, S1 = calculate_decision_vector(predictor, mu1_d, mu0_d, weights, J)
OS = S
OD = D
decision_vector = np.column_stack((D,J,S,OS,OD))
confusion_matrix, labels = calculate_confusion_matrix(decision_vector)
#fitness = accuracy(decision_vector) - 0.1 * np.linalg.norm(individual, ord=2)
fitness = accuracy(decision_vector)
return fitness
def main(args):
weights = pandas.read_table(args.weights, delim_whitespace=True, header=None)
mu0 = pandas.read_table(args.mu0, delim_whitespace=True, header=None)
mu1 = pandas.read_table(args.mu1, delim_whitespace=True, header=None)
J = pandas.read_table(args.outcomes, delim_whitespace=True, header=None)[:len(weights)]
#J = pandas.read_table(args.outcomes, delim_whitespace=True, header=None)[-len(weights):]
weights = weights.values
mu0 = mu0.values
mu1 = mu1.values
J = J.values
dr_simple = functools.partial(decision_rule)
dr_optimized = functools.partial(decision_rule, eta1=args.eta1, eta0=args.eta0, l1=args.l1, l0=args.l0)
predictor = np.vectorize(dr_simple)
predictor_opti = np.vectorize(dr_optimized)
fitness_function = functools.partial(calculate_fitness, predictor, mu1, mu0, weights, J)
print('# INITIALIZATION')
D, S, S0, S1 = calculate_decision_vector(predictor, mu1, mu0, weights, J)
OS = S
OD = D
decision_vector = np.column_stack((D,J,S,OS,OD))
confusion_matrix, labels = calculate_confusion_matrix(decision_vector)
print(confusion_matrix)
print('# Initial Accuracy: {}'.format(accuracy(decision_vector)))
print('# Initial MCC: {}'.format(mcc(decision_vector)))
def difference_matrix(a):
x = np.reshape(a, (len(a), 1))
return x - x.transpose()
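    # Illustrative behaviour of difference_matrix (hypothetical input):
    # for a = [1, 3, 6] it returns the pairwise differences a[i] - a[j]:
    #   [[ 0, -2, -5],
    #    [ 2,  0, -3],
    #    [ 5,  3,  0]]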
dmu = mu1 - mu0
vmu = difference_matrix(dmu)
min_vmu = vmu.min()
print('# Minimal variation in Mu: {}'.format(min_vmu))
print('# Min in Mu: {}'.format(min(dmu)))
min_vS = np.abs(S).min()
print('# Min variation in initial S: {}'.format(min_vS))
IND_SIZE = len(mu0)
creator.create("FitnessMax", base.Fitness, weights=(1.0,))
creator.create("Individual", np.ndarray, fitness=creator.FitnessMax)
creator.create("Strategy", np.ndarray, typecode="d", strategy=None)
def initES(icls, scls, size, imin, imax, smin, smax):
ind = icls(random.uniform(imin, imax) for _ in range(size))
ind.strategy = scls(random.uniform(smin, smax) for _ in range(size))
return ind
toolbox = base.Toolbox()
#toolbox.register("attr_float", random.uniform, -min_vmu / 10, min_vmu / 10)
#toolbox.register("attr_float", initES, -min_vmu / len(weights[0]), min_vmu / len(weights[0]), -1., 1.)
toolbox.register("individual", initES, creator.Individual,
creator.Strategy, IND_SIZE, -min_vmu / 10, min_vmu / 10, 0, 1.)
#creator.Strategy, IND_SIZE, -min(dmu) / 100 , min(dmu) / 100, -1., 1.)
toolbox.register("population", tools.initRepeat, list, toolbox.individual)
toolbox.register("evaluate", fitness_function)
toolbox.register("mate", tools.cxTwoPoint)
toolbox.register("mutate", tools.mutGaussian, mu=0.0, sigma=min_vmu / 10, indpb=0.05)
#toolbox.register("mutate", tools.mutGaussian, mu=0.0, sigma=min_vmu / len(weights[0]), indpb=0.05)
toolbox.register("select", tools.selTournament, tournsize=3)
pop = toolbox.population(n=200)
stats = tools.Statistics(key=lambda ind: ind.fitness.values)
stats.register("avg", np.mean)
stats.register("std", np.std)
stats.register("min", np.min)
stats.register("max", np.max)
#algorithms.eaSimple(pop, toolbox, cxpb=0.5, mutpb=0.05, ngen=1000, stats=stats, verbose=True)
algorithms.eaMuPlusLambda(pop, toolbox, mu=100, lambda_=50, cxpb=0.5, mutpb=0.2, ngen=100, stats=stats, verbose=True)
#algorithms.eaMuCommaLambda(pop, toolbox, mu=3, lambda_=5, cxpb=0.5, mutpb=0.2, ngen=100, stats=stats, verbose=True)
    # selBest returns a list; keep only the single best individual
    best = tools.selBest(pop, k=1)[0]
    mu1_d = np.array(mu1)
    mu0_d = np.array(mu0)
    for i, _ in enumerate(best):
        mu1_d[i] += best[i] / 2.
        mu0_d[i] -= best[i] / 2.
    f_accuracy = functools.partial(calculate_final_accuracy, predictor, mu1, mu0, weights, J)
    f_mcc = functools.partial(calculate_final_mcc, predictor, mu1, mu0, weights, J)
    print(f_accuracy(best))
    print(f_mcc(best))
np.savetxt('Mu_0_optimized.txt', mu0_d, fmt='%.15f')
np.savetxt('Mu_1_optimized.txt', mu1_d, fmt='%.15f')
def parse_args(parser):
    args = parser.parse_args()
    # Check path
    return args
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Adjust model')
parser.add_argument('--weights', type=str)
parser.add_argument('--mu0', type=str)
parser.add_argument('--mu1', type=str)
parser.add_argument('--outcomes', type=str)
parser.add_argument('--l1', default=1, type=int)
parser.add_argument('--l0', default=0, type=int)
parser.add_argument('--eta1', default=0., type=float)
parser.add_argument('--eta0', default=0., type=float)
args = parse_args(parser)
main(args)
| aquemy/HCBR | script/genetic_algorithm.py | Python | mit | 11,500 |
"""Shared OS X support functions."""
import os
import re
import sys
__all__ = [
'compiler_fixup',
'customize_config_vars',
'customize_compiler',
'get_platform_osx',
]
# configuration variables that may contain universal build flags,
# like "-arch" or "-isdkroot", that may need customization for
# the user environment
_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS',
'BLDSHARED', 'LDSHARED', 'CC', 'CXX',
'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS',
'PY_CORE_CFLAGS')
# configuration variables that may contain compiler calls
_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX')
# prefix added to original configuration variable names
_INITPRE = '_OSX_SUPPORT_INITIAL_'
def _find_executable(executable, path=None):
"""Tries to find 'executable' in the directories listed in 'path'.
A string listing directories separated by 'os.pathsep'; defaults to
os.environ['PATH']. Returns the complete filename or None if not found.
"""
if path is None:
path = os.environ['PATH']
paths = path.split(os.pathsep)
base, ext = os.path.splitext(executable)
if (sys.platform == 'win32' or os.name == 'os2') and (ext != '.exe'):
executable = executable + '.exe'
if not os.path.isfile(executable):
for p in paths:
f = os.path.join(p, executable)
if os.path.isfile(f):
# the file exists, we have a shot at spawn working
return f
return None
else:
return executable
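# Illustrative behaviour (paths are hypothetical): _find_executable('clang')
# walks $PATH and might return '/usr/bin/clang', while a name found nowhere
# on the search path yields None.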
def _read_output(commandstring):
"""Output from succesful command execution or None"""
# Similar to os.popen(commandstring, "r").read(),
# but without actually using os.popen because that
# function is not usable during python bootstrap.
# tempfile is also not available then.
import contextlib
try:
import tempfile
fp = tempfile.NamedTemporaryFile()
except ImportError:
fp = open("/tmp/_osx_support.%s"%(
os.getpid(),), "w+b")
with contextlib.closing(fp) as fp:
cmd = "%s 2>/dev/null >'%s'" % (commandstring, fp.name)
return fp.read().decode('utf-8').strip() if not os.system(cmd) else None
def _find_build_tool(toolname):
"""Find a build tool on current path or using xcrun"""
return (_find_executable(toolname)
or _read_output("/usr/bin/xcrun -find %s" % (toolname,))
or ''
)
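# For example (illustrative), _find_build_tool('clang') returns the first of:
# a 'clang' found on $PATH, the path printed by '/usr/bin/xcrun -find clang'
# for the selected Xcode, or '' when neither lookup succeeds.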
_SYSTEM_VERSION = None
def _get_system_version():
"""Return the OS X system version as a string"""
# Reading this plist is a documented way to get the system
# version (see the documentation for the Gestalt Manager)
# We avoid using platform.mac_ver to avoid possible bootstrap issues during
# the build of Python itself (distutils is used to build standard library
# extensions).
global _SYSTEM_VERSION
if _SYSTEM_VERSION is None:
_SYSTEM_VERSION = ''
try:
f = open('/System/Library/CoreServices/SystemVersion.plist')
except IOError:
# We're on a plain darwin box, fall back to the default
# behaviour.
pass
else:
try:
m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
r'<string>(.*?)</string>', f.read())
finally:
f.close()
if m is not None:
_SYSTEM_VERSION = '.'.join(m.group(1).split('.')[:2])
# else: fall back to the default behaviour
return _SYSTEM_VERSION
def _remove_original_values(_config_vars):
"""Remove original unmodified values for testing"""
# This is needed for higher-level cross-platform tests of get_platform.
for k in list(_config_vars):
if k.startswith(_INITPRE):
del _config_vars[k]
def _save_modified_value(_config_vars, cv, newvalue):
"""Save modified and original unmodified value of configuration var"""
oldvalue = _config_vars.get(cv, '')
if (oldvalue != newvalue) and (_INITPRE + cv not in _config_vars):
_config_vars[_INITPRE + cv] = oldvalue
_config_vars[cv] = newvalue
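# Sketch of the bookkeeping above (hypothetical values): calling
#   _save_modified_value(cv, 'CFLAGS', '-O2')
# on a mapping where CFLAGS was '-O2 -arch ppc' stores the new value under
# cv['CFLAGS'] and preserves the untouched original under
# cv['_OSX_SUPPORT_INITIAL_CFLAGS'] for later inspection.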
def _supports_universal_builds():
"""Returns True if universal builds are supported on this system"""
# As an approximation, we assume that if we are running on 10.4 or above,
# then we are running with an Xcode environment that supports universal
# builds, in particular -isysroot and -arch arguments to the compiler. This
# is in support of allowing 10.4 universal builds to run on 10.3.x systems.
osx_version = _get_system_version()
if osx_version:
try:
osx_version = tuple(int(i) for i in osx_version.split('.'))
except ValueError:
osx_version = ''
return bool(osx_version >= (10, 4)) if osx_version else False
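# E.g. (illustrative): a reported system version of '10.3' yields False,
# '10.6' yields True, and an unparsable version string falls back to False.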
def _find_appropriate_compiler(_config_vars):
"""Find appropriate C compiler for extension module builds"""
# Issue #13590:
# The OSX location for the compiler varies between OSX
# (or rather Xcode) releases. With older releases (up-to 10.5)
# the compiler is in /usr/bin, with newer releases the compiler
# can only be found inside Xcode.app if the "Command Line Tools"
# are not installed.
#
    # Furthermore, the compiler that can be used varies between
    # Xcode releases. Up to Xcode 4 it was possible to use 'gcc-4.2'
# as the compiler, after that 'clang' should be used because
# gcc-4.2 is either not present, or a copy of 'llvm-gcc' that
# miscompiles Python.
    # skip checks if the compiler was overridden with a CC env variable
if 'CC' in os.environ:
return _config_vars
# The CC config var might contain additional arguments.
# Ignore them while searching.
cc = oldcc = _config_vars['CC'].split()[0]
if not _find_executable(cc):
# Compiler is not found on the shell search PATH.
        # Now search for clang, first on PATH (if the Command Line
# Tools have been installed in / or if the user has provided
# another location via CC). If not found, try using xcrun
# to find an uninstalled clang (within a selected Xcode).
# NOTE: Cannot use subprocess here because of bootstrap
# issues when building Python itself (and os.popen is
# implemented on top of subprocess and is therefore not
# usable as well)
cc = _find_build_tool('clang')
elif os.path.basename(cc).startswith('gcc'):
# Compiler is GCC, check if it is LLVM-GCC
data = _read_output("'%s' --version"
% (cc.replace("'", "'\"'\"'"),))
if 'llvm-gcc' in data:
# Found LLVM-GCC, fall back to clang
cc = _find_build_tool('clang')
if not cc:
raise SystemError(
"Cannot locate working compiler")
if cc != oldcc:
# Found a replacement compiler.
        # Modify config vars using new compiler, if not already explicitly
        # overridden by an env variable, preserving additional arguments.
for cv in _COMPILER_CONFIG_VARS:
if cv in _config_vars and cv not in os.environ:
cv_split = _config_vars[cv].split()
cv_split[0] = cc if cv != 'CXX' else cc + '++'
_save_modified_value(_config_vars, cv, ' '.join(cv_split))
return _config_vars
def _remove_universal_flags(_config_vars):
"""Remove all universal build arguments from config vars"""
for cv in _UNIVERSAL_CONFIG_VARS:
        # Do not alter a config var explicitly overridden by env var
if cv in _config_vars and cv not in os.environ:
flags = _config_vars[cv]
            # re.ASCII must be passed via the 'flags' keyword; the fourth
            # positional argument of re.sub is 'count', not 'flags'
            flags = re.sub(r'-arch\s+\w+\s', ' ', flags, flags=re.ASCII)
            flags = re.sub(r'-isysroot [^ \t]*', ' ', flags)
_save_modified_value(_config_vars, cv, flags)
return _config_vars
def _remove_unsupported_archs(_config_vars):
"""Remove any unsupported archs from config vars"""
# Different Xcode releases support different sets for '-arch'
# flags. In particular, Xcode 4.x no longer supports the
# PPC architectures.
#
# This code automatically removes '-arch ppc' and '-arch ppc64'
# when these are not supported. That makes it possible to
# build extensions on OSX 10.7 and later with the prebuilt
# 32-bit installer on the python.org website.
    # skip checks if the compiler was overridden with a CC env variable
if 'CC' in os.environ:
return _config_vars
    if re.search(r'-arch\s+ppc', _config_vars['CFLAGS']) is not None:
# NOTE: Cannot use subprocess here because of bootstrap
# issues when building Python itself
status = os.system("'%s' -arch ppc -x c /dev/null 2>/dev/null"%(
_config_vars['CC'].replace("'", "'\"'\"'"),))
# The Apple compiler drivers return status 255 if no PPC
if (status >> 8) == 255:
# Compiler doesn't support PPC, remove the related
# '-arch' flags if not explicitly overridden by an
# environment variable
for cv in _UNIVERSAL_CONFIG_VARS:
if cv in _config_vars and cv not in os.environ:
flags = _config_vars[cv]
                    flags = re.sub(r'-arch\s+ppc\w*\s', ' ', flags)
_save_modified_value(_config_vars, cv, flags)
return _config_vars
def _override_all_archs(_config_vars):
"""Allow override of all archs with ARCHFLAGS env var"""
# NOTE: This name was introduced by Apple in OSX 10.5 and
# is used by several scripting languages distributed with
# that OS release.
if 'ARCHFLAGS' in os.environ:
arch = os.environ['ARCHFLAGS']
for cv in _UNIVERSAL_CONFIG_VARS:
if cv in _config_vars and '-arch' in _config_vars[cv]:
flags = _config_vars[cv]
                flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
flags = flags + ' ' + arch
_save_modified_value(_config_vars, cv, flags)
return _config_vars
def _check_for_unavailable_sdk(_config_vars):
"""Remove references to any SDKs not available"""
# If we're on OSX 10.5 or later and the user tries to
# compile an extension using an SDK that is not present
# on the current machine it is better to not use an SDK
# than to fail. This is particularly important with
    # the standalone Command Line Tools alternative to a
# full-blown Xcode install since the CLT packages do not
# provide SDKs. If the SDK is not present, it is assumed
# that the header files and dev libs have been installed
# to /usr and /System/Library by either a standalone CLT
# package or the CLT component within Xcode.
cflags = _config_vars.get('CFLAGS', '')
m = re.search(r'-isysroot\s+(\S+)', cflags)
if m is not None:
sdk = m.group(1)
if not os.path.exists(sdk):
for cv in _UNIVERSAL_CONFIG_VARS:
                # Do not alter a config var explicitly overridden by env var
if cv in _config_vars and cv not in os.environ:
flags = _config_vars[cv]
flags = re.sub(r'-isysroot\s+\S+(?:\s|$)', ' ', flags)
_save_modified_value(_config_vars, cv, flags)
return _config_vars
def compiler_fixup(compiler_so, cc_args):
"""
This function will strip '-isysroot PATH' and '-arch ARCH' from the
    compile flags if the user has specified one of them in extra_compile_flags.
This is needed because '-arch ARCH' adds another architecture to the
build, without a way to remove an architecture. Furthermore GCC will
barf if multiple '-isysroot' arguments are present.
"""
stripArch = stripSysroot = False
compiler_so = list(compiler_so)
if not _supports_universal_builds():
        # OS X before 10.4.0 doesn't support -arch and -isysroot at all.
stripArch = stripSysroot = True
else:
stripArch = '-arch' in cc_args
stripSysroot = '-isysroot' in cc_args
if stripArch or 'ARCHFLAGS' in os.environ:
while True:
try:
index = compiler_so.index('-arch')
# Strip this argument and the next one:
del compiler_so[index:index+2]
except ValueError:
break
if 'ARCHFLAGS' in os.environ and not stripArch:
# User specified different -arch flags in the environ,
# see also distutils.sysconfig
compiler_so = compiler_so + os.environ['ARCHFLAGS'].split()
if stripSysroot:
while True:
try:
index = compiler_so.index('-isysroot')
# Strip this argument and the next one:
del compiler_so[index:index+2]
except ValueError:
break
# Check if the SDK that is used during compilation actually exists,
# the universal build requires the usage of a universal SDK and not all
# users have that installed by default.
sysroot = None
if '-isysroot' in cc_args:
idx = cc_args.index('-isysroot')
sysroot = cc_args[idx+1]
elif '-isysroot' in compiler_so:
idx = compiler_so.index('-isysroot')
sysroot = compiler_so[idx+1]
if sysroot and not os.path.isdir(sysroot):
from distutils import log
log.warn("Compiling with an SDK that doesn't seem to exist: %s",
sysroot)
log.warn("Please check your Xcode installation")
return compiler_so
def customize_config_vars(_config_vars):
"""Customize Python build configuration variables.
Called internally from sysconfig with a mutable mapping
containing name/value pairs parsed from the configured
makefile used to build this interpreter. Returns
the mapping updated as needed to reflect the environment
in which the interpreter is running; in the case of
a Python from a binary installer, the installed
environment may be very different from the build
environment, i.e. different OS levels, different
    build tools, different available CPU architectures.
This customization is performed whenever
distutils.sysconfig.get_config_vars() is first
called. It may be used in environments where no
compilers are present, i.e. when installing pure
Python dists. Customization of compiler paths
and detection of unavailable archs is deferred
    until the first extension module build is
requested (in distutils.sysconfig.customize_compiler).
Currently called from distutils.sysconfig
"""
if not _supports_universal_builds():
# On Mac OS X before 10.4, check if -arch and -isysroot
# are in CFLAGS or LDFLAGS and remove them if they are.
# This is needed when building extensions on a 10.3 system
# using a universal build of python.
_remove_universal_flags(_config_vars)
# Allow user to override all archs with ARCHFLAGS env var
_override_all_archs(_config_vars)
# Remove references to sdks that are not found
_check_for_unavailable_sdk(_config_vars)
return _config_vars
def customize_compiler(_config_vars):
"""Customize compiler path and configuration variables.
This customization is performed when the first
extension module build is requested
    (in distutils.sysconfig.customize_compiler).
"""
# Find a compiler to use for extension module builds
_find_appropriate_compiler(_config_vars)
# Remove ppc arch flags if not supported here
_remove_unsupported_archs(_config_vars)
# Allow user to override all archs with ARCHFLAGS env var
_override_all_archs(_config_vars)
return _config_vars
def get_platform_osx(_config_vars, osname, release, machine):
"""Filter values for get_platform()"""
# called from get_platform() in sysconfig and distutils.util
#
# For our purposes, we'll assume that the system version from
# distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
# to. This makes the compatibility story a bit more sane because the
# machine is going to compile and link as if it were
# MACOSX_DEPLOYMENT_TARGET.
macver = _config_vars.get('MACOSX_DEPLOYMENT_TARGET', '')
macrelease = _get_system_version() or macver
macver = macver or macrelease
if macver:
release = macver
osname = "macosx"
# Use the original CFLAGS value, if available, so that we
# return the same machine type for the platform string.
# Otherwise, distutils may consider this a cross-compiling
# case and disallow installs.
cflags = _config_vars.get(_INITPRE+'CFLAGS',
_config_vars.get('CFLAGS', ''))
if ((macrelease + '.') >= '10.4.' and
'-arch' in cflags.strip()):
# The universal build will build fat binaries, but not on
# systems before 10.4
machine = 'fat'
            archs = re.findall(r'-arch\s+(\S+)', cflags)
archs = tuple(sorted(set(archs)))
if len(archs) == 1:
machine = archs[0]
elif archs == ('i386', 'ppc'):
machine = 'fat'
elif archs == ('i386', 'x86_64'):
machine = 'intel'
elif archs == ('i386', 'ppc', 'x86_64'):
machine = 'fat3'
elif archs == ('ppc64', 'x86_64'):
machine = 'fat64'
elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
machine = 'universal'
else:
raise ValueError(
"Don't know machine value for archs=%r" % (archs,))
elif machine == 'i386':
# On OSX the machine type returned by uname is always the
# 32-bit variant, even if the executable architecture is
# the 64-bit variant
if sys.maxsize >= 2**32:
machine = 'x86_64'
elif machine in ('PowerPC', 'Power_Macintosh'):
# Pick a sane name for the PPC architecture.
# See 'i386' case
if sys.maxsize >= 2**32:
machine = 'ppc64'
else:
machine = 'ppc'
return (osname, release, machine)
| lfcnassif/MultiContentViewer | release/modules/ext/libreoffice/program/python-core-3.3.0/lib/_osx_support.py | Python | lgpl-3.0 | 18,472 |
## This file is part of CDS Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008 CERN.
##
## CDS Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## CDS Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with CDS Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
__revision__ = "$Id$"
from invenio.config import \
CFG_SITE_NAME
## Description: function Print_Success_MBI
## This function displays a message telling the user the
## modification has been taken into account
## Author: T.Baron
## PARAMETERS: -
def Print_Success_MBI(parameters, curdir, form, user_info=None):
"""
This function simply displays a text on the screen, telling the
user the modification went fine. To be used in the Modify Record
(MBI) action.
"""
    # 'rn' (the document's report number) is injected into this function's
    # global scope by the WebSubmit engine.
    global rn
    t = "<b>Modification completed!</b><br /><br />"
    t += "These modifications on document %s will be processed as quickly as possible and made <br />available on the %s Server" % (rn, CFG_SITE_NAME)
return t
| ppiotr/Bibedit-some-refactoring | modules/websubmit/lib/functions/Print_Success_MBI.py | Python | gpl-2.0 | 1,591 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-09-14 17:48
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('member', '0014_auto_20170911_2147'),
]
operations = [
migrations.AlterField(
model_name='scholarship',
name='initials',
field=models.CharField(max_length=15),
),
]
| comcidis/portal | member/migrations/0015_auto_20170914_1748.py | Python | mit | 457 |
import unittest
import httplib
import plistlib
import os.path
from mock import Mock
from airpnp.AirPlayService import AirPlayService
from airpnp.airplayserver import IAirPlayServer
from cStringIO import StringIO
from twisted.web import http, server
from twisted.internet import defer
from twisted.test.proto_helpers import StringTransport
from zope.interface import implements
class IAirPlayServerMock(Mock):
implements(IAirPlayServer)
def __init__(self, *args, **kwargs):
Mock.__init__(self, IAirPlayServer.names(), *args, **kwargs)
class TestAirPlayProtocol(unittest.TestCase):
def setUp(self):
self.apserver = IAirPlayServerMock()
service = AirPlayService(self.apserver, "test")
service.deviceid = "01:00:17:44:60:d2"
self.apserver.features = 0x77
self.proto = http.HTTPChannel()
self.proto.requestFactory = server.Request
self.proto.site = server.Site(service.create_site())
self.proto.makeConnection(StringTransport())
def test_playback_info_method_calls(self):
self._send_playback_info((0.0, 0.0), False)
self.assertTrue(self.apserver.get_scrub.called)
self.assertTrue(self.apserver.is_playing.called)
def test_playback_info_response_content_type(self):
self._send_playback_info((0.0, 0.0), False)
resp = self._get_response()
self.assertEqual(resp.getheader("Content-Type"),
"text/x-apple-plist+xml")
def test_playback_info_response_data_not_playing(self):
self._send_playback_info((0.0, 0.0), False)
resp = self._get_response()
plist = plistlib.readPlist(resp)
self.assertEqual(plist["duration"], 0.0)
self.assertEqual(plist["position"], 0.0)
self.assertEqual(plist["rate"], 0.0)
self.assertEqual(plist["playbackBufferEmpty"], True)
self.assertEqual(plist["readyToPlay"], False)
self.assertEqual(plist["loadedTimeRanges"][0]["duration"], 0.0)
self.assertEqual(plist["seekableTimeRanges"][0]["duration"], 0.0)
def test_playback_info_response_data_playing(self):
self._send_playback_info((20.0, 2.0), True)
resp = self._get_response()
plist = plistlib.readPlist(resp)
self.assertEqual(plist["duration"], 20.0)
self.assertEqual(plist["position"], 2.0)
self.assertEqual(plist["rate"], 1.0)
self.assertEqual(plist["playbackBufferEmpty"], False)
self.assertEqual(plist["readyToPlay"], True)
self.assertEqual(plist["loadedTimeRanges"][0]["duration"], 20.0)
self.assertEqual(plist["seekableTimeRanges"][0]["duration"], 20.0)
def test_stop_method_calls(self):
data = "POST /stop HTTP/1.1\r\nHost: www.example.com\r\nContent-Length: 0\r\n\r\n"
self._send_data(data)
self.assertTrue(self.apserver.stop.called)
def test_get_scrub_method_calls(self):
self.apserver.get_scrub.return_value = defer.succeed((0.0, 0.0))
data = "GET /scrub HTTP/1.1\r\nHost: www.example.com\r\nContent-Length: 0\r\n\r\n"
self._send_data(data)
self.assertTrue(self.apserver.get_scrub.called)
def test_get_scrub_response_data(self):
self.apserver.get_scrub.return_value = defer.succeed((0.0, 0.0))
data = "GET /scrub HTTP/1.1\r\nHost: www.example.com\r\nContent-Length: 0\r\n\r\n"
self._send_data(data)
resp = self._get_response()
body = resp.read()
self.assertEqual(body, "duration: 0.0\nposition: 0.0")
def test_set_scrub_method_calls(self):
data = "POST /scrub?position=1.0 HTTP/1.1\r\nHost: www.example.com\r\nContent-Length: 0\r\n\r\n"
self._send_data(data)
self.apserver.set_scrub.assert_called_with(1.0)
def test_rate_method_calls(self):
data = "POST /rate?value=1.0 HTTP/1.1\r\nHost: www.example.com\r\nContent-Length: 0\r\n\r\n"
self._send_data(data)
self.apserver.rate.assert_called_with(1.0)
def test_server_info_content_type(self):
data = "GET /server-info HTTP/1.1\r\nHost: www.example.com\r\nContent-Length: 0\r\n\r\n"
self._send_data(data)
resp = self._get_response()
self.assertEqual(resp.getheader("Content-Type"),
"text/x-apple-plist+xml")
def test_server_info_response_data(self):
data = "GET /server-info HTTP/1.1\r\nHost: www.example.com\r\nContent-Length: 0\r\n\r\n"
self._send_data(data)
resp = self._get_response()
plist = plistlib.readPlist(resp)
self.assertEqual(plist["deviceid"], "01:00:17:44:60:d2")
self.assertEqual(plist["features"], 0x77)
self.assertEqual(plist["model"], "AppleTV2,1")
def test_play_with_strings_method_calls(self):
data = "POST /play HTTP/1.1\r\nHost: www.example.com\r\n" + \
"Content-Length: 59\r\n\r\nStart-Position: 1.0\n" + \
"Content-Location: http://localhost/test"
self._send_data(data)
self.apserver.play.assert_called_with("http://localhost/test", 1.0)
def test_play_without_position_method_calls(self):
data = "POST /play HTTP/1.1\r\nHost: www.example.com\r\n" + \
"Content-Length: 39\r\n\r\n" + \
"Content-Location: http://localhost/test"
self._send_data(data)
self.apserver.play.assert_called_with("http://localhost/test", 0.0)
def test_play_with_binary_plist_method_calls(self):
fn = os.path.join(os.path.dirname(__file__), "plist/airplay.bin")
fd = open(fn, "rb")
try:
bindata = fd.read()
finally:
fd.close()
data = "POST /play HTTP/1.1\r\nHost: www.example.com\r\n" + \
"Content-Type: application/x-apple-binary-plist\r\n" + \
"Content-Length: %d\r\n\r\n" % (len(bindata), )
data += bindata
self._send_data(data)
self.assertTrue(self.apserver.play.called)
# changed 1 -> 0 between mock 0.8.0beta1 and beta3
args = self.apserver.play.call_args[0]
self.assertTrue(args[0].startswith("http://"))
self.assertEqual(args[1], 0.0005364880198612809)
def test_setProperty_with_binary_plist_method_calls(self):
bindata = 'bplist00\xd1\x01\x02Uvalue\xd4\x03\x04\x05\x06\x07\x07\x07\x07YtimescaleUvalueUepochUflags\x10\x00\x08\x0b\x11\x1a$*06\x00\x00\x00\x00\x00\x00\x01\x01\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x008'
data = "PUT /setProperty?forwardEndTime HTTP/1.1\r\nHost: www.example.com\r\n" + \
"Content-Type: application/x-apple-binary-plist\r\n" + \
"Content-Length: %d\r\n\r\n" % (len(bindata), )
data += bindata
self._send_data(data)
self.assertTrue(self.apserver.set_property.called)
args = self.apserver.set_property.call_args[0]
self.assertEqual(args[0], "forwardEndTime")
self.assertTrue("epoch" in args[1])
def _send_playback_info(self, get_scrub_response, is_playing_response):
self.apserver.get_scrub.return_value = defer.succeed(get_scrub_response)
self.apserver.is_playing.return_value = defer.succeed(is_playing_response)
data = "GET /playback-info HTTP/1.1\r\nHost: www.example.com\r\nContent-Length: 0\r\n\r\n"
self._send_data(data)
def _send_data(self, data):
self.proto.dataReceived(data)
def _get_response(self):
resp = httplib.HTTPResponse(FakeSock(self.proto.transport.value()))
resp.begin()
return resp
class FakeSock(object):
def __init__(self, data):
self.data = data
def makefile(self, mode, bufsize=0):
return StringIO(self.data)
| provegard/airpnp | test/test_airplayservice.py | Python | bsd-3-clause | 7,784 |
#
# Copyright (c) 2005 Red Hat, Inc.
#
# Written by Joel Martin <[email protected]>
#
# This file is part of Smart Package Manager.
#
# Smart Package Manager is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# Smart Package Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Smart Package Manager; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
from smart.backends.solaris.pm import SolarisPackageManager
from solarisver import checkdep, vercmp
# globdistance is used by SolarisPackage.search below, so import it
# explicitly rather than relying on a wildcard re-export
from smart.util.strtools import isGlob, globdistance
from smart.cache import *
import fnmatch
import re
__all__ = ["SolarisPackage", "SolarisProvides", "SolarisDepends",
"SolarisUpgrades", "SolarisConflicts"]
class SolarisPackage(Package):
packagemanager = SolarisPackageManager
def isPatch(self):
return self.name.startswith("patch-solaris")
def isPatchCluster(self):
return self.name.startswith("patch-cluster-solaris")
def matches(self, relation, version):
if not relation:
return True
return checkdep(self.version, relation, version)
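    # Illustrative use of the version check above (hypothetical versions):
    # a package at version '1.2-3' matches the dependency ('>=', '1.0') and
    # fails ('<', '1.2'); the comparison is delegated to solarisver.checkdep.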
def search(self, searcher):
myname = self.name
myversion = self.version
ratio = 0
for nameversion, cutoff in searcher.nameversion:
_, ratio1 = globdistance(nameversion, myname, cutoff)
_, ratio2 = globdistance(nameversion,
"%s-%s" % (myname, myversion), cutoff)
_, ratio3 = globdistance(nameversion, "%s-%s" %
(myname, myversion.split("-", 1)[-1]),
cutoff)
ratio = max(ratio, ratio1, ratio2, ratio3)
if ratio:
searcher.addResult(self, ratio)
def coexists(self, other):
if not isinstance(other, SolarisPackage):
return True
return False
def __lt__(self, other):
rc = cmp(self.name, other.name)
if type(other) is SolarisPackage:
if rc == 0 and self.version != other.version:
rc = vercmp(self.version, other.version)
return rc == -1
class SolarisProvides(Provides): pass
class SolarisDepends(Depends):
def matches(self, prv):
if not isinstance(prv, SolarisProvides) and type(prv) is not Provides:
return False
if not self.version or not prv.version:
return True
return checkdep(prv.version, self.relation, self.version)
class SolarisUpgrades(SolarisDepends,Upgrades): pass
class SolarisConflicts(SolarisDepends,Conflicts): pass
# vim:ts=4:sw=4:et
| dmacvicar/spacewalk | client/solaris/smartpm/smart/backends/solaris/base.py | Python | gpl-2.0 | 3,072 |
import os
# Note: the imports below are assumed; as stored, this module uses Storage,
# client, and tools without importing them. These are the standard
# oauth2client locations used by the Google API quickstarts this function
# mirrors.
from oauth2client import client, tools
from oauth2client.file import Storage
def get_credentials(client_secret=None):
"""Gets valid user credentials from storage.
If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.
Returns:
Credentials, the obtained credential.
"""
    # Clear argv so the argparser used by oauth2client's tools.run_flow does
    # not choke on notebook/script arguments
    import sys
    sys.argv = ['']
SCOPES = 'https://www.googleapis.com/auth/drive.readonly'
if client_secret:
CLIENT_SECRET_FILE = client_secret
else:
CLIENT_SECRET_FILE = 'client_secret.json'
APPLICATION_NAME = 'Qualdocs'
home_dir = os.path.expanduser('~')
credential_dir = os.path.join(home_dir, '.credentials')
if not os.path.exists(credential_dir):
os.makedirs(credential_dir)
credential_path = os.path.join(credential_dir,
'drive-api-qualdocs.json')
store = Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
flow.user_agent = APPLICATION_NAME
credentials = tools.run_flow(flow, store)
print('Storing credentials to ' + credential_path)
return credentials
| qualdocs/qualdocs | qualdocs/get_credentials.py | Python | mit | 1,213 |
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'openlc.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^$', include('projects.urls', namespace="index")),
url(r'^blockly/', include('blockly.urls', namespace="blockly")),
url(r'^projects/', include('projects.urls', namespace="projects"))
)
if settings.DEBUG:
urlpatterns += patterns('',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,
}),
url(r'^static/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.STATIC_ROOT,
}),
)
| Gorgel/khd_projects | khd_projects/khd_projects/urls.py | Python | mit | 890 |
"""
mbed CMSIS-DAP debugger
Copyright (c) 2017 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import sys
import logging
log = logging.getLogger('progress')
## @brief Base progress report class.
#
# This base class implements the logic but no output.
class ProgressReport(object):
def __init__(self, file=None):
self._file = file or sys.stdout
self.prev_progress = 0
self.backwards_progress = False
self.done = False
self.last = 0
def __call__(self, progress):
assert progress >= 0.0
# assert progress <= 1.0 # TODO restore this assert when the progress > 1 bug is fixed
# assert (progress == 0 and self.prev_progress == 1.0) or (progress >= self.prev_progress)
if progress > 1.0:
log.debug("progress out of bounds: %.3f", progress)
# Reset state on 0.0
if progress == 0.0:
self._start()
# Check for backwards progress
if progress < self.prev_progress:
self.backwards_progress = True
self.prev_progress = progress
# print progress bar
if not self.done:
self._update(progress)
# Finish on 1.0
if progress >= 1.0:
self._finish()
if self.backwards_progress:
log.warning("Progress went backwards!")
def _start(self):
self.prev_progress = 0
self.backwards_progress = False
self.done = False
self.last = 0
    def _update(self, progress):
        raise NotImplementedError()
    def _finish(self):
        raise NotImplementedError()
## @brief Progress report subclass for TTYs.
#
# The progress bar is fully redrawn onscreen as progress is updated to give the
# impression of animation.
class ProgressReportTTY(ProgressReport):
# These width constants can't be changed yet without changing the code below to match.
WIDTH = 20
def _update(self, progress):
self._file.write('\r')
i = int(progress * self.WIDTH)
self._file.write("[%-20s] %3d%%" % ('=' * i, round(progress * 100)))
self._file.flush()
def _finish(self):
self.done = True
self._file.write("\n")
## @brief Progress report subclass for non-TTY output.
#
# A simpler progress bar is used than for the TTY version. Only the difference between
# the previous and current progress is drawn for each update, making the output suitable
# for piping to a file or similar output.
class ProgressReportNoTTY(ProgressReport):
# These width constants can't be changed yet without changing the code below to match.
WIDTH = 40
def _start(self):
super(ProgressReportNoTTY, self)._start()
self._file.write('[' + '---|' * 9 + '----]\n[')
self._file.flush()
def _update(self, progress):
i = int(progress * self.WIDTH)
delta = i - self.last
self._file.write('=' * delta)
self._file.flush()
self.last = i
def _finish(self):
self.done = True
self._file.write("]\n")
self._file.flush()
## @brief Progress printer factory.
#
# This factory function checks whether the output file is a TTY, and instantiates the
# appropriate subclass of ProgressReport.
#
# @param file The output file. Optional. If not provided, or if set to None, then sys.stdout
# will be used automatically.
def print_progress(file=None):
if file is None:
file = sys.stdout
try:
istty = os.isatty(file.fileno())
except (OSError, AttributeError):
# Either the file doesn't have a fileno method, or calling it returned an
# error. In either case, just assume we're not connected to a TTY.
istty = False
klass = ProgressReportTTY if istty else ProgressReportNoTTY
return klass(file)
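# Minimal usage sketch (illustrative): drive the returned callable with
# monotonically increasing fractions in [0, 1].
#
#   update_progress = print_progress()
#   for step in range(101):
#       update_progress(step / 100.0)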
| mesheven/pyOCD | pyocd/utility/progress.py | Python | apache-2.0 | 4,370 |
"""
Functional test
Big Share Admin Epic
Storyboard is defined within the comments of the program itself
"""
import unittest
from flask import url_for
from biblib.views.http_errors import NO_PERMISSION_ERROR
from biblib.tests.stubdata.stub_data import UserShop, LibraryShop, fake_biblist
from biblib.tests.base import MockEmailService, MockSolrBigqueryService,\
TestCaseDatabase, MockEndPoint
class TestBigShareAdminEpic(TestCaseDatabase):
"""
Base class used to test the Big Share Admin Epic
"""
def test_big_share_admin(self):
"""
        Carries out the epic 'Big Share Admin', where a user creates a library
        and wants one other user to have admin permissions, i.e., to add and
        remove other users' permissions (except the owner's) on the library.
:return: no return
"""
# Generate some stub data for Dave, Mary and the student
user_dave = UserShop()
user_mary = UserShop()
user_student = UserShop()
library_dave = LibraryShop()
# Librarian Dave makes a big library full of bibcodes
        # 1. Let's say 20 bibcodes
# Dave makes his library
url = url_for('userview')
response = self.client.post(
url,
data=library_dave.user_view_post_data_json,
headers=user_dave.headers
)
self.assertEqual(response.status_code, 200, response)
library_id_dave = response.json['id']
# Dave adds content to his library
libraries_added = []
number_of_documents = 20
for i in range(number_of_documents):
# Stub data
stub_library = LibraryShop()
libraries_added.append(stub_library)
# Add document
url = url_for('documentview', library=library_id_dave)
response = self.client.post(
url,
data=stub_library.document_view_post_data_json('add'),
headers=user_dave.headers
)
self.assertEqual(response.json['number_added'],
len(stub_library.bibcode))
self.assertEqual(response.status_code, 200, response)
canonical_bibcode = \
[i.get_bibcodes()[0] for i in libraries_added]
url = url_for('libraryview', library=library_id_dave)
with MockSolrBigqueryService(
canonical_bibcode=canonical_bibcode) as BQ, \
MockEndPoint([user_dave]) as EP:
response = self.client.get(
url,
headers=user_dave.headers
)
self.assertTrue(len(response.json['documents']) == number_of_documents)
        # Dave does not want to manage who can change content. He wants Mary
        # to administrate the library. Mary tries, but gets errors; she needs
        # to be granted access via the permissions endpoint:
        # /permissions/<uuid_library>
url = url_for('permissionview', library=library_id_dave)
with MockEmailService(user_student):
response = self.client.post(
url,
data=user_student.permission_view_post_data_json(
'write', True
),
headers=user_mary.headers
)
self.assertEqual(response.status_code, NO_PERMISSION_ERROR['number'])
self.assertEqual(response.json['error'], NO_PERMISSION_ERROR['body'])
        # Dave now adds her account to permissions. She already has an ADS
        # account, and so Dave adds her by her e-mail address with admin
        # permissions so that she can manage other users' access.
url = url_for('permissionview', library=library_id_dave)
with MockEmailService(user_mary):
response = self.client.post(
url,
data=user_mary.permission_view_post_data_json('admin', True),
headers=user_dave.headers
)
self.assertEqual(response.status_code, 200)
        # Mary then adds the student with write permissions
url = url_for('permissionview', library=library_id_dave)
with MockEmailService(user_student):
response = self.client.post(
url,
data=user_student.permission_view_post_data_json(
'write', True
),
headers=user_mary.headers
)
self.assertEqual(response.status_code, 200)
        # The student removes a few bibcodes and keeps a list of the ones she
        # removed just in case
        url = url_for('documentview', library=library_id_dave)
        libraries_removed = []
        for _ in range(number_of_documents // 2):
            # Remove documents. Popping from the front avoids mutating the
            # list while indexing into it, which would silently skip entries.
            library = libraries_added.pop(0)
            response = self.client.post(
                url,
                data=library.document_view_post_data_json('remove'),
                headers=user_student.headers
            )
            self.assertEqual(response.json['number_removed'],
                             len(library.bibcode))
            self.assertEqual(response.status_code, 200, response)
            libraries_removed.append(library)
# She checks that they got removed
canonical_bibcode = [i.get_bibcodes()[0] for i in libraries_added]
url = url_for('libraryview', library=library_id_dave)
with MockSolrBigqueryService(
canonical_bibcode=canonical_bibcode) as BQ, \
MockEndPoint([user_student, user_dave]) as EP:
response = self.client.get(
url,
headers=user_student.headers
)
self.assertTrue(
            len(response.json['documents']) == number_of_documents // 2
)
# Dave asks Mary to re-add the ones she removed because they were
# actually useful
url = url_for('documentview', library=library_id_dave)
for library in libraries_removed:
# Add documents
response = self.client.post(
url,
data=library.document_view_post_data_json('add'),
headers=user_mary.headers
)
self.assertEqual(response.json['number_added'],
len(library.bibcode))
self.assertEqual(response.status_code, 200, response)
libraries_added.append(library)
# She checks that they got added
canonical_bibcode = [i.get_bibcodes()[0] for i in libraries_added]
url = url_for('libraryview', library=library_id_dave)
with MockSolrBigqueryService(
canonical_bibcode=canonical_bibcode) as BQ, \
MockEndPoint([user_dave, user_student]) as EP:
response = self.client.get(
url,
headers=user_student.headers
)
self.assertTrue(
len(response.json['documents']) == number_of_documents
)
# Sanity check 1
# --------------
        # Remove the student's permissions; they should no longer be able to
        # do what they could before
# --------------
# Mary removes the students permissions and the student tries to modify
# the library content, but cannot
url = url_for('permissionview', library=library_id_dave)
with MockEmailService(user_student):
response = self.client.post(
url,
data=user_student.permission_view_post_data_json(
'write', False
),
headers=user_mary.headers
)
self.assertEqual(response.status_code, 200)
# The student tries to add content
url = url_for('documentview', library=library_id_dave)
response = self.client.post(
url,
data=stub_library.document_view_post_data_json('add'),
headers=user_student.headers
)
self.assertEqual(response.status_code, NO_PERMISSION_ERROR['number'])
self.assertEqual(response.json['error'], NO_PERMISSION_ERROR['body'])
# Sanity check 2
# --------------
# Check that you cannot modify owner permissions
# --------------
# Mary tries to give the student owner permissions
url = url_for('permissionview', library=library_id_dave)
with MockEmailService(user_student):
response = self.client.post(
url,
data=user_student.permission_view_post_data_json(
'owner', True
),
headers=user_mary.headers
)
self.assertEqual(response.status_code,
NO_PERMISSION_ERROR['number'],
response.json)
self.assertEqual(response.json['error'],
NO_PERMISSION_ERROR['body'],
response.json)
# Sanity check 3
# --------------
        # Mary tries to manipulate Dave's permissions
# --------------
        # Mary attempts to change the read, admin, write, and owner
        # permissions of Dave, but should fail
url = url_for('permissionview', library=library_id_dave)
for permission_type in ['read', 'write', 'admin', 'owner']:
with MockEmailService(user_dave):
response = self.client.post(
url,
data=user_dave.permission_view_post_data_json(
permission_type,
False
),
headers=user_mary.headers
)
self.assertEqual(response.status_code,
NO_PERMISSION_ERROR['number'])
self.assertEqual(response.json['error'],
NO_PERMISSION_ERROR['body'])
# Sanity check 4
# --------------
# Remove Mary's permissions so she cannot do what she was doing before
# --------------
# Dave removes Mary's permissions.
url = url_for('permissionview', library=library_id_dave)
with MockEmailService(user_mary):
response = self.client.post(
url,
data=user_mary.permission_view_post_data_json('admin', False),
headers=user_dave.headers
)
self.assertEqual(response.status_code, 200)
# Mary tries to change permissions for the student again but should
# not be able to
with MockEmailService(user_student):
response = self.client.post(
url,
data=user_student.permission_view_post_data_json(
'write', True
),
headers=user_mary.headers
)
self.assertEqual(response.status_code, NO_PERMISSION_ERROR['number'])
self.assertEqual(response.json['error'], NO_PERMISSION_ERROR['body'])
if __name__ == '__main__':
unittest.main(verbosity=2) | jonnybazookatone/biblib-service | biblib/tests/functional_tests/test_big_share_admin_epic.py | Python | mit | 11,039 |
"""Implements the Astropy TestRunner which is a thin wrapper around pytest."""
import inspect
import os
import glob
import copy
import shlex
import sys
import tempfile
import warnings
import importlib
from collections import OrderedDict
from importlib.util import find_spec
from functools import wraps
from astropy.config.paths import set_temp_config, set_temp_cache
from astropy.utils import find_current_module
from astropy.utils.exceptions import AstropyWarning, AstropyDeprecationWarning
__all__ = ['TestRunner', 'TestRunnerBase', 'keyword']
class keyword:
"""
A decorator to mark a method as keyword argument for the ``TestRunner``.
Parameters
----------
default_value : `object`
The default value for the keyword argument. (Default: `None`)
priority : `int`
keyword argument methods are executed in order of descending priority.
"""
def __init__(self, default_value=None, priority=0):
self.default_value = default_value
self.priority = priority
def __call__(self, f):
def keyword(*args, **kwargs):
return f(*args, **kwargs)
keyword._default_value = self.default_value
keyword._priority = self.priority
# Set __doc__ explicitly here rather than using wraps because we want
# to keep the function name as keyword so we can inspect it later.
keyword.__doc__ = f.__doc__
return keyword
class TestRunnerBase:
"""
The base class for the TestRunner.
A test runner can be constructed by creating a subclass of this class and
defining 'keyword' methods. These are methods that have the
:class:`~astropy.tests.runner.keyword` decorator, these methods are used to
construct allowed keyword arguments to the
``run_tests`` method as a way to allow
customization of individual keyword arguments (and associated logic)
without having to re-implement the whole
``run_tests`` method.
Examples
--------
A simple keyword method::
class MyRunner(TestRunnerBase):
@keyword('default_value'):
def spam(self, spam, kwargs):
\"\"\"
spam : `str`
The parameter description for the run_tests docstring.
\"\"\"
# Return value must be a list with a CLI parameter for pytest.
return ['--spam={}'.format(spam)]
"""
def __init__(self, base_path):
self.base_path = os.path.abspath(base_path)
def __new__(cls, *args, **kwargs):
# Before constructing the class parse all the methods that have been
# decorated with ``keyword``.
# The objective of this method is to construct a default set of keyword
# arguments to the ``run_tests`` method. It does this by inspecting the
# methods of the class for functions with the name ``keyword`` which is
# the name of the decorator wrapping function. Once it has created this
# dictionary, it also formats the docstring of ``run_tests`` to be
# comprised of the docstrings for the ``keyword`` methods.
# To add a keyword argument to the ``run_tests`` method, define a new
# method decorated with ``@keyword`` and with the ``self, name, kwargs``
# signature.
# Get all 'function' members as the wrapped methods are functions
functions = inspect.getmembers(cls, predicate=inspect.isfunction)
# Filter out anything that's not got the name 'keyword'
keywords = filter(lambda func: func[1].__name__ == 'keyword', functions)
# Sort all keywords based on the priority flag.
sorted_keywords = sorted(keywords, key=lambda x: x[1]._priority, reverse=True)
cls.keywords = OrderedDict()
doc_keywords = ""
for name, func in sorted_keywords:
# Here we test if the function has been overloaded to return
# NotImplemented which is the way to disable arguments on
# subclasses. If it has been disabled we need to remove it from the
# default keywords dict. We do it in the try except block because
# we do not have access to an instance of the class, so this is
# going to error unless the method is just doing `return
# NotImplemented`.
try:
# Second argument is False, as it is normally a bool.
# The other two are placeholders for objects.
if func(None, False, None) is NotImplemented:
continue
except Exception:
pass
# Construct the default kwargs dict and docstring
cls.keywords[name] = func._default_value
if func.__doc__:
doc_keywords += ' '*8
doc_keywords += func.__doc__.strip()
doc_keywords += '\n\n'
cls.run_tests.__doc__ = cls.RUN_TESTS_DOCSTRING.format(keywords=doc_keywords)
return super().__new__(cls)
def _generate_args(self, **kwargs):
# Update default values with passed kwargs
# but don't modify the defaults
keywords = copy.deepcopy(self.keywords)
keywords.update(kwargs)
# Iterate through the keywords (in order of priority)
args = []
for keyword in keywords.keys():
func = getattr(self, keyword)
result = func(keywords[keyword], keywords)
# Allow disabling of options in a subclass
if result is NotImplemented:
raise TypeError(f"run_tests() got an unexpected keyword argument {keyword}")
# keyword methods must return a list
if not isinstance(result, list):
raise TypeError(f"{keyword} keyword method must return a list")
args += result
return args
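    # Sketch of what _generate_args produces (illustrative, assuming the
    # TestRunner subclass below): verbose=True contributes ['-v'] and the
    # default remote_data contributes ['--remote-data=none'], concatenated
    # in descending keyword priority.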
RUN_TESTS_DOCSTRING = \
"""
Run the tests for the package.
This method builds arguments for and then calls ``pytest.main``.
Parameters
----------
{keywords}
"""
_required_dependencies = ['pytest', 'pytest_remotedata', 'pytest_doctestplus', 'pytest_astropy_header']
    _missing_dependency_error = (
"Test dependencies are missing: {module}. You should install the "
"'pytest-astropy' package (you may need to update the package if you "
"have a previous version installed, e.g., "
"'pip install pytest-astropy --upgrade' or the equivalent with conda).")
@classmethod
def _has_test_dependencies(cls): # pragma: no cover
# Using the test runner will not work without these dependencies, but
# pytest-openfiles is optional, so it's not listed here.
for module in cls._required_dependencies:
spec = find_spec(module)
# Checking loader accounts for packages that were uninstalled
if spec is None or spec.loader is None:
raise RuntimeError(
                    cls._missing_dependency_error.format(module=module))
def run_tests(self, **kwargs):
# The following option will include eggs inside a .eggs folder in
# sys.path when running the tests. This is possible so that when
# running pytest, test dependencies installed via e.g.
# tests_requires are available here. This is not an advertised option
# since it is only for internal use
if kwargs.pop('add_local_eggs_to_path', False):
# Add each egg to sys.path individually
for egg in glob.glob(os.path.join('.eggs', '*.egg')):
sys.path.insert(0, egg)
# We now need to force reload pkg_resources in case any pytest
# plugins were added above, so that their entry points are picked up
import pkg_resources
importlib.reload(pkg_resources)
self._has_test_dependencies() # pragma: no cover
# The docstring for this method is defined as a class variable.
# This allows it to be built for each subclass in __new__.
# Don't import pytest until it's actually needed to run the tests
import pytest
# Raise error for undefined kwargs
allowed_kwargs = set(self.keywords.keys())
passed_kwargs = set(kwargs.keys())
if not passed_kwargs.issubset(allowed_kwargs):
wrong_kwargs = list(passed_kwargs.difference(allowed_kwargs))
raise TypeError(f"run_tests() got an unexpected keyword argument {wrong_kwargs[0]}")
args = self._generate_args(**kwargs)
if kwargs.get('plugins', None) is not None:
plugins = kwargs.pop('plugins')
elif self.keywords.get('plugins', None) is not None:
plugins = self.keywords['plugins']
else:
plugins = []
# Override the config locations to not make a new directory nor use
# existing cache or config. Note that we need to do this here in
# addition to in conftest.py - for users running tests interactively
# in e.g. IPython, conftest.py would get read in too late, so we need
# to do it here - but at the same time the code here doesn't work when
# running tests in parallel mode because this uses subprocesses which
# don't know about the temporary config/cache.
astropy_config = tempfile.mkdtemp('astropy_config')
astropy_cache = tempfile.mkdtemp('astropy_cache')
# Have to use nested with statements for cross-Python support
# Note, using these context managers here is superfluous if the
# config_dir or cache_dir options to pytest are in use, but it's
# also harmless to nest the contexts
with set_temp_config(astropy_config, delete=True):
with set_temp_cache(astropy_cache, delete=True):
return pytest.main(args=args, plugins=plugins)
@classmethod
def make_test_runner_in(cls, path):
"""
Constructs a `TestRunner` to run in the given path, and returns a
``test()`` function which takes the same arguments as
``TestRunner.run_tests``.
The returned ``test()`` function will be defined in the module this
was called from. This is used to implement the ``astropy.test()``
function (or the equivalent for affiliated packages).
"""
runner = cls(path)
@wraps(runner.run_tests, ('__doc__',))
def test(**kwargs):
return runner.run_tests(**kwargs)
module = find_current_module(2)
if module is not None:
test.__module__ = module.__name__
# A somewhat unusual hack, but delete the attached __wrapped__
# attribute--although this is normally used to tell if the function
# was wrapped with wraps, on some version of Python this is also
# used to determine the signature to display in help() which is
# not useful in this case. We don't really care in this case if the
# function was wrapped either
if hasattr(test, '__wrapped__'):
del test.__wrapped__
test.__test__ = False
return test
class TestRunner(TestRunnerBase):
"""
A test runner for astropy tests
"""
def packages_path(self, packages, base_path, error=None, warning=None):
"""
Generates the path for multiple packages.
Parameters
----------
packages : str
Comma separated string of packages.
base_path : str
Base path to the source code or documentation.
error : str
Error message to be raised as ``ValueError``. Individual package
name and path can be accessed by ``{name}`` and ``{path}``
respectively. No error is raised if `None`. (Default: `None`)
warning : str
Warning message to be issued. Individual package
name and path can be accessed by ``{name}`` and ``{path}``
respectively. No warning is issues if `None`. (Default: `None`)
Returns
-------
paths : list of str
List of stings of existing package paths.
"""
packages = packages.split(",")
paths = []
for package in packages:
path = os.path.join(
base_path, package.replace('.', os.path.sep))
if not os.path.isdir(path):
info = {'name': package, 'path': path}
if error is not None:
raise ValueError(error.format(**info))
if warning is not None:
warnings.warn(warning.format(**info))
else:
paths.append(path)
return paths
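    # Illustrative call (hypothetical layout): with a base path of
    # '/src/astropy', packages_path('io.fits,utils', '/src/astropy') returns
    # ['/src/astropy/io/fits', '/src/astropy/utils'] for directories that
    # exist, and raises or warns for missing ones depending on error/warning.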
# Increase priority so this warning is displayed first.
@keyword(priority=1000)
def coverage(self, coverage, kwargs):
if coverage:
warnings.warn(
"The coverage option is ignored on run_tests, since it "
"can not be made to work in that context. Use "
"'python setup.py test --coverage' instead.",
AstropyWarning)
return []
# test_path depends on self.package_path so make sure this runs before
# test_path.
@keyword(priority=1)
def package(self, package, kwargs):
"""
package : str, optional
The name of a specific package to test, e.g. 'io.fits' or
'utils'. Accepts comma separated string to specify multiple
packages. If nothing is specified all default tests are run.
"""
if package is None:
self.package_path = [self.base_path]
else:
error_message = ('package to test is not found: {name} '
'(at path {path}).')
self.package_path = self.packages_path(package, self.base_path,
error=error_message)
if not kwargs['test_path']:
return self.package_path
return []
@keyword()
def test_path(self, test_path, kwargs):
"""
test_path : str, optional
Specify location to test by path. May be a single file or
directory. Must be specified absolutely or relative to the
calling directory.
"""
all_args = []
# Ensure that the package kwarg has been run.
self.package(kwargs['package'], kwargs)
if test_path:
base, ext = os.path.splitext(test_path)
if ext in ('.rst', ''):
if kwargs['docs_path'] is None:
# This shouldn't happen from "python setup.py test"
raise ValueError(
"Can not test .rst files without a docs_path "
"specified.")
abs_docs_path = os.path.abspath(kwargs['docs_path'])
abs_test_path = os.path.abspath(
os.path.join(abs_docs_path, os.pardir, test_path))
common = os.path.commonprefix((abs_docs_path, abs_test_path))
if os.path.exists(abs_test_path) and common == abs_docs_path:
# Turn on the doctest_rst plugin
all_args.append('--doctest-rst')
test_path = abs_test_path
# Check that the extensions are in the path and not at the end to
# support specifying the name of the test, i.e.
# test_quantity.py::test_unit
if not (os.path.isdir(test_path) or ('.py' in test_path or '.rst' in test_path)):
raise ValueError("Test path must be a directory or a path to "
"a .py or .rst file")
return all_args + [test_path]
return []
@keyword()
def args(self, args, kwargs):
"""
args : str, optional
Additional arguments to be passed to ``pytest.main`` in the ``args``
keyword argument.
"""
if args:
return shlex.split(args, posix=not sys.platform.startswith('win'))
return []
@keyword(default_value=[])
def plugins(self, plugins, kwargs):
"""
plugins : list, optional
Plugins to be passed to ``pytest.main`` in the ``plugins`` keyword
argument.
"""
# Plugins are handled independently by `run_tests` so we define this
# keyword just for the docstring
return []
@keyword()
def verbose(self, verbose, kwargs):
"""
verbose : bool, optional
Convenience option to turn on verbose output from pytest. Passing
True is the same as specifying ``-v`` in ``args``.
"""
if verbose:
return ['-v']
return []
@keyword()
def pastebin(self, pastebin, kwargs):
"""
pastebin : ('failed', 'all', None), optional
Convenience option for turning on pytest pastebin output. Set to
'failed' to upload info for failed tests, or 'all' to upload info
for all tests.
"""
if pastebin is not None:
if pastebin in ['failed', 'all']:
return [f'--pastebin={pastebin}']
else:
raise ValueError("pastebin should be 'failed' or 'all'")
return []
@keyword(default_value='none')
def remote_data(self, remote_data, kwargs):
"""
remote_data : {'none', 'astropy', 'any'}, optional
Controls whether to run tests marked with @pytest.mark.remote_data. This can be
set to run no tests with remote data (``none``), only ones that use
data from http://data.astropy.org (``astropy``), or all tests that
use remote data (``any``). The default is ``none``.
"""
if remote_data is True:
remote_data = 'any'
elif remote_data is False:
remote_data = 'none'
elif remote_data not in ('none', 'astropy', 'any'):
warnings.warn("The remote_data option should be one of "
"none/astropy/any (found {}). For backward-compatibility, "
"assuming 'any', but you should change the option to be "
"one of the supported ones to avoid issues in "
"future.".format(remote_data),
AstropyDeprecationWarning)
remote_data = 'any'
return [f'--remote-data={remote_data}']
@keyword()
def pep8(self, pep8, kwargs):
"""
pep8 : bool, optional
Turn on PEP8 checking via the pytest-pep8 plugin and disable normal
tests. Same as specifying ``--pep8 -k pep8`` in ``args``.
"""
if pep8:
try:
import pytest_pep8 # pylint: disable=W0611
except ImportError:
raise ImportError('PEP8 checking requires pytest-pep8 plugin: '
'https://pypi.org/project/pytest-pep8')
else:
return ['--pep8', '-k', 'pep8']
return []
@keyword()
def pdb(self, pdb, kwargs):
"""
pdb : bool, optional
Turn on PDB post-mortem analysis for failing tests. Same as
specifying ``--pdb`` in ``args``.
"""
if pdb:
return ['--pdb']
return []
@keyword()
def open_files(self, open_files, kwargs):
"""
open_files : bool, optional
Fail when any tests leave files open. Off by default, because
this adds extra run time to the test suite. Requires the
``psutil`` package.
"""
if open_files:
if kwargs['parallel'] != 0:
raise SystemError(
"open file detection may not be used in conjunction with "
"parallel testing.")
try:
import psutil # pylint: disable=W0611
except ImportError:
raise SystemError(
"open file detection requested, but psutil package "
"is not installed.")
            print("Checking for unclosed files")
            return ['--open-files']
        return []
@keyword(0)
def parallel(self, parallel, kwargs):
"""
parallel : int or 'auto', optional
When provided, run the tests in parallel on the specified
            number of CPUs. If parallel is ``'auto'``, it will use all
            the cores on the machine. Requires the ``pytest-xdist`` plugin.
"""
if parallel != 0:
try:
from xdist import plugin # noqa
except ImportError:
raise SystemError(
"running tests in parallel requires the pytest-xdist package")
return ['-n', str(parallel)]
return []
@keyword()
def docs_path(self, docs_path, kwargs):
"""
docs_path : str, optional
The path to the documentation .rst files.
"""
paths = []
if docs_path is not None and not kwargs['skip_docs']:
if kwargs['package'] is not None:
warning_message = ("Can not test .rst docs for {name}, since "
"docs path ({path}) does not exist.")
paths = self.packages_path(kwargs['package'], docs_path,
warning=warning_message)
elif not kwargs['test_path']:
paths = [docs_path, ]
if len(paths) and not kwargs['test_path']:
paths.append('--doctest-rst')
return paths
@keyword()
def skip_docs(self, skip_docs, kwargs):
"""
skip_docs : `bool`, optional
When `True`, skips running the doctests in the .rst files.
"""
# Skip docs is a bool used by docs_path only.
return []
@keyword()
def repeat(self, repeat, kwargs):
"""
repeat : `int`, optional
If set, specifies how many times each test should be run. This is
useful for diagnosing sporadic failures.
"""
if repeat:
return [f'--repeat={repeat}']
return []
# Override run_tests for astropy-specific fixes
def run_tests(self, **kwargs):
# This prevents cyclical import problems that make it
# impossible to test packages that define Table types on their
# own.
from astropy.table import Table # pylint: disable=W0611
return super().run_tests(**kwargs)
| StuartLittlefair/astropy | astropy/tests/runner.py | Python | bsd-3-clause | 22,724 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Grappler LayoutOptimizer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.core.protobuf import config_pb2
from tensorflow.core.protobuf import rewriter_config_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import random_ops
from tensorflow.python.platform import test
def weight(shape):
"""weights generates a weight of a given shape."""
return random_ops.truncated_normal(shape, seed=0, stddev=0.1)
def bias(shape):
"""bias generates a bias of a given shape."""
return constant_op.constant(0.1, shape=shape)
def conv2d(x, w):
"""conv2d returns a 2d convolution layer with full stride."""
return nn.conv2d(x, w, strides=[1, 1, 1, 1], padding='SAME')
def max_pool_2x2(x):
"""max_pool_2x2 downsamples a feature map by 2X."""
return nn.max_pool(
x, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
# Taken from tensorflow/examples/tutorials/mnist/mnist_deep.py
def two_layer_model():
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
x_image = array_ops.reshape(x, [-1, 28, 28, 1])
w_conv1 = weight([5, 5, 1, 32])
b_conv1 = bias([32])
h_conv1 = nn.relu(conv2d(x_image, w_conv1) + b_conv1)
h_pool1 = max_pool_2x2(h_conv1)
w_conv2 = weight([5, 5, 32, 64])
b_conv2 = bias([64])
h_conv2 = nn.relu(conv2d(h_pool1, w_conv2) + b_conv2)
h_pool2 = max_pool_2x2(h_conv2)
return h_pool2
class LayoutOptimizerTest(test.TestCase):
"""Tests the Grappler layout optimizer."""
def testTwoConvLayers(self):
if test.is_gpu_available(cuda_only=True):
output = two_layer_model()
with session.Session() as sess:
output_val_ref = sess.run(output)
rewrite_options = rewriter_config_pb2.RewriterConfig(
optimize_tensor_layout=True)
graph_options = config_pb2.GraphOptions(
rewrite_options=rewrite_options,
build_cost_model=1)
config = config_pb2.ConfigProto(graph_options=graph_options)
with session.Session(config=config) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if node.name.startswith('LayoutOptimizerTranspose'):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self.assertIn('LayoutOptimizerTransposeNHWCToNCHW-Conv2D-Reshape', nodes)
self.assertIn('LayoutOptimizerTransposeNCHWToNHWC-Relu_1-MaxPool_1',
nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
if __name__ == '__main__':
test.main()
| npuichigo/ttsflow | third_party/tensorflow/tensorflow/python/grappler/layout_optimizer_test.py | Python | apache-2.0 | 3,838 |
__author__ = 'parallels'
import pika
import logging
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.CRITICAL)
connection = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
channel = connection.channel()
channel.queue_declare(queue='hello')
def callback(ch, method, properties, body):
print " [x] Recieved %r" % (body, )
channel.basic_consume(callback,
queue='hello',
no_ack=True)
print ' [*] Waiting for messages. To exit press CTRL+C'
channel.start_consuming()
| bkolowitz/IADSS | ml/recieve.py | Python | apache-2.0 | 552 |
# Copyright 2014 Yajie Miao Carnegie Mellon University
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
# WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
# MERCHANTABLITY OR NON-INFRINGEMENT.
# See the Apache 2 License for the specific language governing permissions and
# limitations under the License.
import cPickle
import gzip
import os
import sys
import time
import numpy
import theano
import theano.tensor as T
from theano.tensor.shared_randomstreams import RandomStreams
from models.dnn import DNN
from models.dnn_2tower import DNN_2Tower
from models.dropout_nnet import DNN_Dropout
from io_func.model_io import _nnet2file, _cfg2file, _file2nnet, log
from utils.utils import parse_arguments
from utils.learn_rates import _lrate2file, _file2lrate
from utils.network_config import NetworkConfig
from learning.sgd import train_sgd, validate_by_minibatch
if __name__ == '__main__':
# check the arguments
arg_elements = [sys.argv[i] for i in range(1, len(sys.argv))]
arguments = parse_arguments(arg_elements)
required_arguments = ['train_data', 'valid_data', 'nnet_spec', 'nnet_spec_tower1', 'nnet_spec_tower2', 'wdir']
for arg in required_arguments:
if arguments.has_key(arg) == False:
print "Error: the argument %s has to be specified" % (arg); exit(1)
# mandatory arguments
train_data_spec = arguments['train_data']
valid_data_spec = arguments['valid_data']
nnet_spec = arguments['nnet_spec']
nnet_spec_tower1 = arguments['nnet_spec_tower1']
nnet_spec_tower2 = arguments['nnet_spec_tower2']
wdir = arguments['wdir']
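    # Illustrative invocation (flag spellings are assumptions; the exact
    # syntax is whatever utils.parse_arguments accepts):
    #   python run_DNN_2Tower.py --train-data train.pfile --valid-data valid.pfile \
    #       --nnet-spec 1024:1920 --nnet-spec-tower1 250:1024 \
    #       --nnet-spec-tower2 100:1024 --wdir ./wdir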
# parse network configuration from arguments, and initialize data reading
cfg_tower1 = NetworkConfig(); cfg_tower1.parse_config_dnn(arguments, nnet_spec_tower1 + ":0")
cfg_tower2 = NetworkConfig(); cfg_tower2.parse_config_dnn(arguments, nnet_spec_tower2 + ":0")
cfg = NetworkConfig(); cfg.parse_config_dnn(arguments, str(cfg_tower1.hidden_layers_sizes[-1] + cfg_tower2.hidden_layers_sizes[-1]) + ":" + nnet_spec)
cfg.init_data_reading(train_data_spec, valid_data_spec)
# parse pre-training options
# pre-training files and layer number (how many layers are set to the pre-training parameters)
ptr_layer_number = 0; ptr_file = ''
if arguments.has_key('ptr_file') and arguments.has_key('ptr_layer_number'):
ptr_file = arguments['ptr_file']
ptr_layer_number = int(arguments['ptr_layer_number'])
# check working dir to see whether it's resuming training
resume_training = False
if os.path.exists(wdir + '/nnet.tmp') and os.path.exists(wdir + '/training_state.tmp'):
resume_training = True
cfg.lrate = _file2lrate(wdir + '/training_state.tmp')
log('> ... found nnet.tmp and training_state.tmp, now resume training from epoch ' + str(cfg.lrate.epoch))
numpy_rng = numpy.random.RandomState(89677)
theano_rng = RandomStreams(numpy_rng.randint(2 ** 30))
log('> ... building the model')
# setup model
dnn = DNN_2Tower(numpy_rng=numpy_rng, theano_rng = theano_rng, cfg = cfg, cfg_tower1 = cfg_tower1, cfg_tower2 = cfg_tower2)
# initialize model parameters
# if not resuming training, initialized from the specified pre-training file
# if resuming training, initialized from the tmp model file
if (ptr_layer_number > 0) and (resume_training is False):
_file2nnet(dnn.layers, set_layer_num = ptr_layer_number, filename = ptr_file)
if resume_training:
_file2nnet(dnn.layers, filename = wdir + '/nnet.tmp')
# get the training, validation and testing function for the model
log('> ... getting the finetuning functions')
train_fn, valid_fn = dnn.build_finetune_functions(
(cfg.train_x, cfg.train_y), (cfg.valid_x, cfg.valid_y),
batch_size=cfg.batch_size)
log('> ... finetuning the model')
while (cfg.lrate.get_rate() != 0):
# one epoch of sgd training
train_error = train_sgd(train_fn, cfg)
log('> epoch %d, training error %f ' % (cfg.lrate.epoch, 100*numpy.mean(train_error)) + '(%)')
# validation
valid_error = validate_by_minibatch(valid_fn, cfg)
log('> epoch %d, lrate %f, validation error %f ' % (cfg.lrate.epoch, cfg.lrate.get_rate(), 100*numpy.mean(valid_error)) + '(%)')
cfg.lrate.get_next_rate(current_error = 100*numpy.mean(valid_error))
# output nnet parameters and lrate, for training resume
if cfg.lrate.epoch % cfg.model_save_step == 0:
_nnet2file(dnn.layers, filename=wdir + '/nnet.tmp')
_lrate2file(cfg.lrate, wdir + '/training_state.tmp')
# save the model and network configuration
if cfg.param_output_file != '':
_nnet2file(dnn.dnn.layers, filename = cfg.param_output_file,
input_factor = cfg.input_dropout_factor, factor = cfg.dropout_factor)
_nnet2file(dnn.dnn_tower1.layers, filename = cfg.param_output_file + '.tower1',
input_factor = cfg.input_dropout_factor, factor = cfg.dropout_factor)
_nnet2file(dnn.dnn_tower2.layers, filename = cfg.param_output_file + '.tower2',
input_factor = cfg.input_dropout_factor, factor = cfg.dropout_factor)
log('> ... the final PDNN model parameter is ' + cfg.param_output_file + '(, .tower1, .tower2)')
if cfg.cfg_output_file != '':
_cfg2file(cfg, filename=cfg.cfg_output_file)
_cfg2file(cfg_tower1, filename=cfg.cfg_output_file + '.tower1')
_cfg2file(cfg_tower2, filename=cfg.cfg_output_file + '.tower2')
log('> ... the final PDNN model config is ' + cfg.cfg_output_file + '(, .tower1, .tower2)')
# output the model into Kaldi-compatible format
if cfg.kaldi_output_file != '':
dnn.dnn.write_model_to_kaldi(cfg.kaldi_output_file)
dnn.dnn_tower1.write_model_to_kaldi(cfg.kaldi_output_file + '.tower1', with_softmax = False)
dnn.dnn_tower2.write_model_to_kaldi(cfg.kaldi_output_file + '.tower2', with_softmax = False)
log('> ... the final Kaldi model is ' + cfg.kaldi_output_file + '(, .tower1, .tower2)')
# remove the tmp files (which have been generated from resuming training)
os.remove(wdir + '/nnet.tmp')
os.remove(wdir + '/training_state.tmp')
| mclaughlin6464/pdnn | cmds2/run_DNN_2Tower.py | Python | apache-2.0 | 6,638 |
from __future__ import unicode_literals
class Status(object):
"""Sharing status.
ACCEPTED: An accepted and active share
    DECLINED: a declined share
    DELETED: a deleted share
    PENDING: a pending share awaiting the user's response
    INACTIVE: previously an accepted share, no longer active
    """
ACCEPTED = 'ACCEPTED'
DECLINED = 'DECLINED'
DELETED = 'DELETED'
PENDING = 'PENDING'
INACTIVE = 'INACTIVE'
CHOICES = ((ACCEPTED, 'Accepted'),
(DECLINED, 'Declined'),
(DELETED, 'Deleted'),
(PENDING, 'Pending'),
(INACTIVE, 'Inactive'))
@classmethod
def get_keys(cls):
"""Gets a tuple of all the status keys.
(ACCEPTED, DECLINED, DELETED, PENDING, INACTIVE)
"""
return (choice[0] for choice in cls.CHOICES)
| InfoAgeTech/django-shares | django_shares/constants.py | Python | mit | 814 |
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import pytest
from pkg_resources import WorkingSet
from pants.bin.goal_runner import GoalRunner
from pants.option.options_bootstrapper import OptionsBootstrapper
from pants.util.contextutil import temporary_dir
@pytest.mark.parametrize('version_flag', ['-V', '--version', '--pants-version'])
def test_version_request(version_flag):
class ExitException(Exception):
def __init__(self, exit_code):
self.exit_code = exit_code
with temporary_dir() as build_root:
def exiter(exit_code):
raise ExitException(exit_code)
goal_runner = GoalRunner(build_root, exiter=exiter)
options_bootstrapper = OptionsBootstrapper(args=[version_flag])
with pytest.raises(ExitException) as excinfo:
goal_runner.setup(options_bootstrapper=options_bootstrapper, working_set=WorkingSet())
assert 0 == excinfo.value.exit_code
| areitz/pants | tests/python/pants_test/bin/test_goal_runner.py | Python | apache-2.0 | 1,149 |
#!/usr/bin/python3
"""
Request the route /permissions/doctype/:doctype/sharedWithMe to get all the
permissions that apply to the documents of the provided doctype that were
shared to the user.
"""
import logging
import subprocess
import sys
import createoauthclientandtoken
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.DEBUG)
def get_shared_with_me_permissions():
"""
Request the route /permissions/doctype/:doctype/sharedWithMe to get all the
permissions that apply to the documents of the provided doctype that were
shared to the user.
"""
client = createoauthclientandtoken.create_oauth_client_and_token()
for doctype in client.doctypes:
cmd = 'curl -s -X GET -H "Authorization: Bearer {}" \
-H "Host: {}" -H "Accept: application/json" \
{}/permissions/doctype/{}/sharedWithMe'.format(client.token,
client.instance,
client.domain,
doctype)
logging.debug(cmd)
logging.info("Requesting for doctype: %s", doctype)
try:
res = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, check=True,
encoding="utf-8")
except subprocess.CalledProcessError as cpe:
logging.error("Request failed with error: %s", cpe.stderr)
sys.exit()
except subprocess.SubprocessError as spe:
logging.error('An unexpected error occurred: %s', spe)
sys.exit()
print("List of permissions for {}: {}".format(
doctype, res.stdout.rstrip()))
if __name__ == "__main__":
get_shared_with_me_permissions()
| Ljinod/Misc | scripts/python/getsharedwithmepermissions.py | Python | mit | 1,859 |
# Generated by Django 2.2.24 on 2021-11-11 08:49
import django.core.validators
from django.db import migrations, models
from django.db.models import Count
def add_shard_to_no_rule_configurations(apps, schema_editor):
Configuration = apps.get_model("santa", "Configuration")
for configuration in Configuration.objects.annotate(num_rules=Count('rule')).filter(num_rules=0):
configuration.allow_unknown_shard = 5
configuration.save()
class Migration(migrations.Migration):
dependencies = [
('santa', '0025_auto_20210921_1517'),
]
operations = [
migrations.AddField(
model_name='configuration',
name='allow_unknown_shard',
field=models.IntegerField(
default=100,
help_text="Restrict the reporting of 'Allow Unknown' events to a percentage (0-100) of hosts",
validators=[django.core.validators.MinValueValidator(0),
django.core.validators.MaxValueValidator(100)]
),
),
migrations.RunPython(add_shard_to_no_rule_configurations),
]
| zentralopensource/zentral | zentral/contrib/santa/migrations/0026_configuration_allow_unknown_shard.py | Python | apache-2.0 | 1,128 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for ListGameServerConfigs
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-game-servers
# [START gameservices_v1_generated_GameServerConfigsService_ListGameServerConfigs_sync]
from google.cloud import gaming_v1
def sample_list_game_server_configs():
# Create a client
client = gaming_v1.GameServerConfigsServiceClient()
# Initialize request argument(s)
request = gaming_v1.ListGameServerConfigsRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_game_server_configs(request=request)
# Handle the response
for response in page_result:
print(response)
# [END gameservices_v1_generated_GameServerConfigsService_ListGameServerConfigs_sync]
| googleapis/python-game-servers | samples/generated_samples/gameservices_v1_generated_game_server_configs_service_list_game_server_configs_sync.py | Python | apache-2.0 | 1,581 |
#!/usr/bin/env python
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
__doc__ = """generate_resource_whitelist.py [-o OUTPUT] INPUTS...
INPUTS are paths to unstripped binaries or PDBs containing references to
resources in their debug info.
This script generates a resource whitelist by reading debug info from
INPUTS and writes it to OUTPUT.
"""
# Whitelisted resources are identified by searching the input file for
# instantiations of the special function ui::WhitelistedResource (see
# ui/base/resource/whitelist.h).
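# For example, code that references a whitelisted resource id 123 pulls in an
# instantiation whose symbol demangles to
# "void ui::WhitelistedResource<123>(void)"; both extractors below recover the
# "123" from that pattern (the id shown here is hypothetical).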
import argparse
import os
import subprocess
import sys
llvm_bindir = os.path.join(
os.path.dirname(sys.argv[0]), '..', '..', 'third_party', 'llvm-build',
'Release+Asserts', 'bin')
def GetResourceWhitelistELF(path):
# Produce a resource whitelist by searching for debug info referring to
# WhitelistedResource. It's sufficient to look for strings in .debug_str
# rather than trying to parse all of the debug info.
readelf = subprocess.Popen(['readelf', '-p', '.debug_str', path],
stdout=subprocess.PIPE)
resource_ids = set()
for line in readelf.stdout:
# Read a line of the form " [ 123] WhitelistedResource<456>". We're
# only interested in the string, not the offset. We're also not interested
# in header lines.
split = line.split(']', 1)
if len(split) < 2:
continue
s = split[1][2:]
if s.startswith('WhitelistedResource<'):
try:
resource_ids.add(int(s[len('WhitelistedResource<'):-len('>')-1]))
except ValueError:
continue
exit_code = readelf.wait()
if exit_code != 0:
raise Exception('readelf exited with exit code %d' % exit_code)
return resource_ids
def GetResourceWhitelistPDB(path):
# Produce a resource whitelist by using llvm-pdbutil to read a PDB file's
# publics stream, which is essentially a symbol table, and searching for
# instantiations of WhitelistedResource. Any such instantiations are demangled
# to extract the resource identifier.
pdbutil = subprocess.Popen(
[os.path.join(llvm_bindir, 'llvm-pdbutil'), 'dump', '-publics', path],
stdout=subprocess.PIPE)
names = ''
for line in pdbutil.stdout:
# Read a line of the form
# "733352 | S_PUB32 [size = 56] `??$WhitelistedResource@$0BFGM@@ui@@YAXXZ`".
if '`' not in line:
continue
sym_name = line[line.find('`') + 1:line.rfind('`')]
# Under certain conditions such as the GN arg `use_clang_coverage = true` it
# is possible for the compiler to emit additional symbols that do not match
# the standard mangled-name format.
# Example: __profd_??$WhitelistedResource@$0BGPH@@ui@@YAXXZ
# C++ mangled names are supposed to begin with `?`, so check for that.
if 'WhitelistedResource' in sym_name and sym_name.startswith('?'):
names += sym_name + '\n'
exit_code = pdbutil.wait()
if exit_code != 0:
raise Exception('llvm-pdbutil exited with exit code %d' % exit_code)
undname = subprocess.Popen([os.path.join(llvm_bindir, 'llvm-undname')],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE)
stdout, _ = undname.communicate(names)
resource_ids = set()
for line in stdout.split('\n'):
# Read a line of the form
# "void __cdecl ui::WhitelistedResource<5484>(void)".
prefix = ' ui::WhitelistedResource<'
pos = line.find(prefix)
if pos == -1:
continue
try:
resource_ids.add(int(line[pos + len(prefix):line.rfind('>')]))
except ValueError:
continue
exit_code = undname.wait()
if exit_code != 0:
raise Exception('llvm-undname exited with exit code %d' % exit_code)
return resource_ids
def WriteResourceWhitelist(args):
resource_ids = set()
for input in args.inputs:
with open(input, 'r') as f:
magic = f.read(4)
if magic == '\x7fELF':
resource_ids = resource_ids.union(GetResourceWhitelistELF(input))
elif magic == 'Micr':
resource_ids = resource_ids.union(GetResourceWhitelistPDB(input))
else:
raise Exception('unknown file format')
if len(resource_ids) == 0:
raise Exception('No debug info was dumped. Ensure GN arg "symbol_level" '
'!= 0 and that the file is not stripped.')
for id in sorted(resource_ids):
args.output.write(str(id) + '\n')
def main():
parser = argparse.ArgumentParser(usage=__doc__)
parser.add_argument('inputs', nargs='+', help='An unstripped binary or PDB.')
parser.add_argument(
'-o', dest='output', type=argparse.FileType('w'), default=sys.stdout,
help='The resource list path to write (default stdout)')
args = parser.parse_args()
WriteResourceWhitelist(args)
if __name__ == '__main__':
main()
| endlessm/chromium-browser | tools/resources/generate_resource_whitelist.py | Python | bsd-3-clause | 4,853 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-03-02 12:48
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('libreosteoweb', '0031_paimentmean'),
]
operations = [
migrations.AlterModelOptions(
name='invoice',
options={'ordering': ['-date']},
),
]
| littlejo/Libreosteo | libreosteoweb/migrations/0032_auto_20180302_1348.py | Python | gpl-3.0 | 409 |
""" Test for the remove.py module in the vcontrol/rest/providers directory """
from os import remove as delete_file
from web import threadeddict
from vcontrol.rest.providers import remove
PROVIDERS_FILE_PATH = "../vcontrol/rest/providers/providers.txt"
class ContextDummy():
env = threadeddict()
env['HTTP_HOST'] = 'localhost:8080'
class WebDummy():
# dummy class to emulate the web.ctx.env call in remove.py
ctx = ContextDummy()
def test_successful_provider_removal():
""" Here we give the module a text file with PROVIDER: written in it,
it should remove that line in the file """
remove_provider = remove.RemoveProviderR()
remove.web = WebDummy() # override the web variable in remove.py
test_provider = "PROV"
expected_providers_contents = ['What:\n', 'Test:'] # what we expect to see in providers.txt after we call GET
# create the file
with open(PROVIDERS_FILE_PATH, 'w') as f:
f.writelines([
"What:",
"\n",
test_provider + ":",
"\n",
"Test:"
])
assert remove_provider.GET(test_provider) == "removed " + test_provider
# read the file and see if it has removed the line with the test_provider
with open(PROVIDERS_FILE_PATH, 'r') as f:
provider_contents = f.readlines()
delete_file(PROVIDERS_FILE_PATH) # delete the file
assert provider_contents == expected_providers_contents
def test_unsuccessful_provider_removal():
""" Here we give the module a text file without the provider written in it,
it should tell us that it couldn't find the provider we gave it as an argument"""
remove_provider = remove.RemoveProviderR()
remove.web = WebDummy() # override the web variable in remove.py
test_provider = "PROV"
expected_providers_contents = ['What:\n', 'NOTPROV:\n', 'Test:'] # what we expect to see in providers.txt after GET
# create the file
with open(PROVIDERS_FILE_PATH, 'w') as f:
f.writelines([
"What:",
"\n",
"NOTPROV:",
"\n",
"Test:"
])
assert remove_provider.GET(test_provider) == test_provider + " not found, couldn't remove"
# read the file and see if it's the same
with open(PROVIDERS_FILE_PATH, 'r') as f:
provider_contents = f.readlines()
delete_file(PROVIDERS_FILE_PATH) # delete the file
assert provider_contents == expected_providers_contents
| CyberReboot/vcontrol | tests/test_rest_providers_remove.py | Python | apache-2.0 | 2,487 |
# Copyright 2009 Kieran Elliott <[email protected]>
#
# Media Rover is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Media Rover is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
from mediarover.error import InvalidItemTitle, UnsupportedCategory
from mediarover.source import AbstractXmlSource
from mediarover.source.tvnzb.item import TvnzbItem
class TvnzbSource(AbstractXmlSource):
""" tvnzb source class """
# public methods - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def items(self):
""" return list of Item objects """
logger = logging.getLogger("mediarover.source.tvnzb")
# if item list hasn't been constructed yet, parse document tree
# and build list of available items.
try:
self.__items
except AttributeError:
self.__items = []
for rawItem in self.__document.getElementsByTagName("item"):
title = rawItem.getElementsByTagName("title")[0].childNodes[0].data
try:
item = TvnzbItem(rawItem, self.type(), self.priority(), self.quality(), self.delay())
except InvalidItemTitle:
logger.debug("skipping %r, unknown format" % title)
except UnsupportedCategory:
logger.debug("skipping %r, unsupported category type" % title)
else:
if item is not None:
self.__items.append(item)
# return item list to caller
return self.__items
| kierse/mediarover | mediarover/source/DEPRECATED/tvnzb/__init__.py | Python | gpl-3.0 | 1,854 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# (c) Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Generic linux Fibre Channel utilities."""
import errno
import executor
import linuxscsi
from cinder.openstack.common.gettextutils import _
from cinder.openstack.common import log as logging
from cinder.openstack.common import processutils as putils
LOG = logging.getLogger(__name__)
class LinuxFibreChannel(linuxscsi.LinuxSCSI):
def __init__(self, execute=putils.execute, root_helper="sudo",
*args, **kwargs):
super(LinuxFibreChannel, self).__init__(execute, root_helper,
*args, **kwargs)
def rescan_hosts(self, hbas):
for hba in hbas:
self.echo_scsi_command("/sys/class/scsi_host/%s/scan"
% hba['host_device'], "- - -")
def get_fc_hbas(self):
"""Get the Fibre Channel HBA information."""
out = None
try:
out, err = self._execute('systool', '-c', 'fc_host', '-v',
run_as_root=True,
root_helper=self._root_helper)
except putils.ProcessExecutionError as exc:
# This handles the case where rootwrap is used
# and systool is not installed
# 96 = nova.cmd.rootwrap.RC_NOEXECFOUND:
if exc.exit_code == 96:
LOG.warn(_("systool is not installed"))
return []
except OSError as exc:
# This handles the case where rootwrap is NOT used
# and systool is not installed
if exc.errno == errno.ENOENT:
LOG.warn(_("systool is not installed"))
return []
# No FC HBAs were found
if out is None:
return []
lines = out.split('\n')
# ignore the first 2 lines
lines = lines[2:]
hbas = []
hba = {}
lastline = None
for line in lines:
line = line.strip()
# 2 newlines denotes a new hba port
if line == '' and lastline == '':
if len(hba) > 0:
hbas.append(hba)
hba = {}
else:
val = line.split('=')
if len(val) == 2:
key = val[0].strip().replace(" ", "")
value = val[1].strip()
hba[key] = value.replace('"', '')
lastline = line
return hbas
def get_fc_hbas_info(self):
"""Get Fibre Channel WWNs and device paths from the system, if any."""
# Note(walter-boring) modern linux kernels contain the FC HBA's in /sys
# and are obtainable via the systool app
hbas = self.get_fc_hbas()
if not hbas:
return []
hbas_info = []
for hba in hbas:
wwpn = hba['port_name'].replace('0x', '')
wwnn = hba['node_name'].replace('0x', '')
device_path = hba['ClassDevicepath']
device = hba['ClassDevice']
hbas_info.append({'port_name': wwpn,
'node_name': wwnn,
'host_device': device,
'device_path': device_path})
return hbas_info
def get_fc_wwpns(self):
"""Get Fibre Channel WWPNs from the system, if any."""
# Note(walter-boring) modern linux kernels contain the FC HBA's in /sys
# and are obtainable via the systool app
hbas = self.get_fc_hbas()
wwpns = []
if hbas:
for hba in hbas:
if hba['port_state'] == 'Online':
wwpn = hba['port_name'].replace('0x', '')
wwpns.append(wwpn)
return wwpns
def get_fc_wwnns(self):
"""Get Fibre Channel WWNNs from the system, if any."""
# Note(walter-boring) modern linux kernels contain the FC HBA's in /sys
# and are obtainable via the systool app
hbas = self.get_fc_hbas()
if not hbas:
return []
wwnns = []
if hbas:
for hba in hbas:
if hba['port_state'] == 'Online':
wwnn = hba['node_name'].replace('0x', '')
wwnns.append(wwnn)
return wwnns
| inkerra/cinder | cinder/brick/initiator/linuxfc.py | Python | apache-2.0 | 4,945 |
import pandas as pd
import numpy as np
from scipy.spatial.distance import pdist, squareform  # used by fast_distmat_2
def preprocess(k, v):
tmp = pd.DataFrame(v)
tmp['id'] = ['%s-%d' %(k, i) for i in range(tmp.shape[0])]
tmp = pd.melt(tmp, id_vars = ['id'])
tmp['src'] = k
tmp['hash'] = tmp.variable.apply(lambda x: k + '-' + x)
tmp.rename(columns = {'value' : 'obj'}, inplace = True)
tmp.obj = tmp.obj.apply(str)
tmp = tmp[tmp.obj.apply(lambda x: 'nan' != x)]
tmp = tmp[tmp.obj.apply(lambda x: 'None' != x)]
return tmp
def make_distmat(preds, labs, levs):
spreds = [preds[labs == i] for i in range(len(levs))]
out = np.zeros( (len(levs), len(levs)) )
for i in range(len(levs)):
print i
for j in range(i + 1, len(levs)):
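            # fast_cosine_mmd is assumed to be defined elsewhere in this
            # package: an MMD-style distance between two sets of predictions.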
out[i, j] = fast_cosine_mmd(spreds[i], spreds[j])
out = out + out.T
dist = pd.DataFrame(out.copy())
dist.columns = dist.index = levs
return dist
def fast_distmat(preds, labs, levs):
centers = np.vstack([preds[labs == i].mean(axis = 0) for i in range(len(levs))])
sims = centers.dot(centers.T)
dg = np.diag(sims)
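    # Pairwise squared Euclidean distances between class centers via the
    # identity ||c_i - c_j||^2 = ||c_i||^2 + ||c_j||^2 - 2 * c_i . c_j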
out = np.vstack([dg[i] + dg for i in range(sims.shape[0])]) - 2 * sims
dist = pd.DataFrame(out.copy())
dist.columns = dist.index = levs
return dist
def fast_distmat_2(dpreds, dist_dist = False):
preds_ = dpreds[dpreds.columns[:-1]]
ulab = list(dpreds.lab.unique())
centers = np.vstack([preds_[dpreds.lab == i].mean(axis = 0) for i in ulab])
sims = centers.dot(centers.T)
dg = np.diag(sims)
out = np.vstack([dg[i] + dg for i in range(sims.shape[0])]) - 2 * sims
if dist_dist:
out = squareform(pdist(out.copy()))
dist = pd.DataFrame(out)
dist.columns = dist.index = ulab
return dist
| bkj/wit | wit/helper_functions.py | Python | apache-2.0 | 1,832 |
import pytest
from api.base.settings.defaults import API_BASE
from framework.auth.core import Auth
from website.util import disconnected_from_listeners
from website.project.signals import contributor_removed
from osf_tests.factories import (
NodeFactory,
AuthUserFactory,
RegistrationFactory
)
@pytest.fixture()
def user():
return AuthUserFactory()
@pytest.mark.django_db
class TestNodeRelationshipNodeLinks:
@pytest.fixture()
def contributor(self):
return AuthUserFactory()
@pytest.fixture()
def auth(self, user):
return Auth(user)
@pytest.fixture()
def private_node(self, user):
return NodeFactory(creator=user)
@pytest.fixture()
def admin_node(self, user):
return NodeFactory(creator=user)
@pytest.fixture()
def other_node(self):
return NodeFactory()
@pytest.fixture()
def public_node(self):
return NodeFactory(is_public=True)
@pytest.fixture()
def linking_node_source(self, user, auth, private_node, admin_node):
linking_node_source = NodeFactory(creator=user)
linking_node_source.add_pointer(private_node, auth=auth)
linking_node_source.add_pointer(admin_node, auth=auth)
return linking_node_source
@pytest.fixture()
def contributor_node(self, user, contributor):
contributor_node = NodeFactory(creator=contributor)
contributor_node.add_contributor(user, auth=Auth(contributor))
contributor_node.save()
return contributor_node
@pytest.fixture()
def public_linking_node_source(
self, contributor, private_node, public_node):
public_linking_node_source = NodeFactory(
is_public=True, creator=contributor)
public_linking_node_source.add_pointer(
private_node, auth=Auth(contributor))
public_linking_node_source.add_pointer(
public_node, auth=Auth(contributor))
public_linking_node_source.save()
return public_linking_node_source
@pytest.fixture()
def public_linking_node(self, public_linking_node_source, contributor):
return RegistrationFactory(
project=public_linking_node_source,
is_public=True,
creator=contributor)
@pytest.fixture()
def linking_node(self, user, linking_node_source):
return RegistrationFactory(project=linking_node_source, creator=user)
@pytest.fixture()
def url(self, linking_node):
return '/{}registrations/{}/relationships/linked_nodes/'.format(
API_BASE, linking_node._id)
@pytest.fixture()
def public_url(self, public_linking_node):
return '/{}registrations/{}/relationships/linked_nodes/'.format(
API_BASE, public_linking_node._id)
@pytest.fixture()
def payload(self, admin_node):
def payload(node_ids=None):
node_ids = node_ids or [admin_node._id]
return {'data': [{'type': 'linked_nodes', 'id': node_id}
for node_id in node_ids]}
return payload
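    # e.g. payload(['abc12']) -> {'data': [{'type': 'linked_nodes',
    # 'id': 'abc12'}]} (hypothetical id); the JSON-API relationship body
    # used by the requests below.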
def test_node_relationship_node_links(
self, app, user, url, public_url, linking_node,
private_node, admin_node, public_node,
contributor_node, other_node, payload):
# get_relationship_linked_nodes
res = app.get(url, auth=user.auth)
assert res.status_code == 200
assert linking_node.linked_nodes_self_url in res.json['links']['self']
assert linking_node.linked_nodes_related_url in res.json['links']['html']
assert private_node._id in [e['id'] for e in res.json['data']]
# get_linked_nodes_related_counts
res = app.get(
'/{}registrations/{}/?related_counts=linked_nodes'.format(API_BASE, linking_node._id),
auth=user.auth
)
assert res.json['data']['relationships']['linked_nodes']['links']['related']['meta']['count'] == 2
# get_public_relationship_linked_nodes_logged_out
res = app.get(public_url)
assert res.status_code == 200
assert len(res.json['data']) == 1
assert public_node._id in [e['id'] for e in res.json['data']]
# get_public_relationship_linked_nodes_logged_in
res = app.get(public_url, auth=user.auth)
assert res.status_code == 200
assert len(res.json['data']) == 2
# get_private_relationship_linked_nodes_logged_out
res = app.get(url, expect_errors=True)
assert res.status_code == 401
# post_contributing_node
res = app.post_json_api(
url, payload([contributor_node._id]),
auth=user.auth, expect_errors=True
)
assert res.status_code == 405
# post_public_node
res = app.post_json_api(
url, payload([public_node._id]),
auth=user.auth, expect_errors=True
)
assert res.status_code == 405
# post_private_node
res = app.post_json_api(
url, payload([other_node._id]),
auth=user.auth,
expect_errors=True
)
assert res.status_code == 405
res = app.get(
url, auth=user.auth
)
ids = [data['id'] for data in res.json['data']]
assert other_node._id not in ids
assert private_node._id in ids
# post_mixed_nodes
res = app.post_json_api(
url, payload([other_node._id, contributor_node._id]),
auth=user.auth,
expect_errors=True
)
assert res.status_code == 405
res = app.get(
url, auth=user.auth
)
ids = [data['id'] for data in res.json['data']]
assert other_node._id not in ids
assert contributor_node._id not in ids
assert private_node._id in ids
# post_node_already_linked
res = app.post_json_api(
url, payload([private_node._id]),
auth=user.auth, expect_errors=True
)
assert res.status_code == 405
# put_contributing_node
res = app.put_json_api(
url, payload([contributor_node._id]),
auth=user.auth, expect_errors=True
)
assert res.status_code == 405
# put_private_node
res = app.put_json_api(
url, payload([other_node._id]),
auth=user.auth,
expect_errors=True
)
assert res.status_code == 405
res = app.get(
url, auth=user.auth
)
ids = [data['id'] for data in res.json['data']]
assert other_node._id not in ids
assert private_node._id in ids
# put_mixed_nodes
res = app.put_json_api(
url, payload([other_node._id, contributor_node._id]),
auth=user.auth, expect_errors=True
)
assert res.status_code == 405
res = app.get(
url, auth=user.auth
)
ids = [data['id'] for data in res.json['data']]
assert other_node._id not in ids
assert contributor_node._id not in ids
assert private_node._id in ids
# delete_with_put_empty_array
new_payload = payload()
new_payload['data'].pop()
res = app.put_json_api(
url, new_payload, auth=user.auth, expect_errors=True
)
assert res.status_code == 405
# delete_one
res = app.delete_json_api(
url, payload([private_node._id]),
auth=user.auth, expect_errors=True
)
assert res.status_code == 405
res = app.get(url, auth=user.auth)
ids = [data['id'] for data in res.json['data']]
assert admin_node._id in ids
assert private_node._id in ids
# delete_multiple
res = app.delete_json_api(
url, payload([private_node._id, admin_node._id]),
auth=user.auth, expect_errors=True
)
assert res.status_code == 405
res = app.get(url, auth=user.auth)
assert len(res.json['data']) == 2
# delete_not_present
number_of_links = linking_node.linked_nodes.count()
res = app.delete_json_api(
url, payload([other_node._id]),
auth=user.auth, expect_errors=True
)
assert res.status_code == 405
res = app.get(
url, auth=user.auth
)
assert len(res.json['data']) == number_of_links
# node_doesnt_exist
res = app.post_json_api(
url, payload(['aquarela']),
auth=user.auth,
expect_errors=True
)
assert res.status_code == 405
# type_mistyped
res = app.post_json_api(
url,
{'data': [{
'type': 'not_linked_nodes',
'id': contributor_node._id}]},
auth=user.auth,
expect_errors=True)
assert res.status_code == 405
# creates_public_linked_node_relationship_logged_out
res = app.post_json_api(
public_url, payload([public_node._id]),
expect_errors=True
)
assert res.status_code == 401
# creates_public_linked_node_relationship_logged_in
res = app.post_json_api(
public_url, payload([public_node._id]),
auth=user.auth, expect_errors=True
)
assert res.status_code == 405
# creates_private_linked_node_relationship_logged_out
res = app.post_json_api(
url, payload([other_node._id]),
expect_errors=True
)
assert res.status_code == 401
# put_public_nodes_relationships_logged_out
res = app.put_json_api(
public_url, payload([public_node._id]),
expect_errors=True
)
assert res.status_code == 401
# put_public_nodes_relationships_logged_in
res = app.put_json_api(
public_url, payload([private_node._id]),
auth=user.auth, expect_errors=True
)
assert res.status_code == 405
# delete_public_nodes_relationships_logged_out
res = app.delete_json_api(
public_url, payload([public_node._id]),
expect_errors=True
)
assert res.status_code == 401
# delete_public_nodes_relationships_logged_in
res = app.delete_json_api(
public_url, payload([private_node._id]),
auth=user.auth, expect_errors=True
)
assert res.status_code == 405
@pytest.mark.django_db
class TestNodeLinkedNodes:
@pytest.fixture()
def auth(self, user):
return Auth(user)
@pytest.fixture()
def private_node_one(self, user):
return NodeFactory(creator=user)
@pytest.fixture()
def private_node_two(self, user):
return NodeFactory(creator=user)
@pytest.fixture()
def node_source(
self, user, auth, private_node_one, private_node_two,
public_node):
node_source = NodeFactory(creator=user)
node_source.add_pointer(private_node_one, auth=auth)
node_source.add_pointer(private_node_two, auth=auth)
node_source.add_pointer(public_node, auth=auth)
node_source.save()
return node_source
@pytest.fixture()
def public_node(self, user):
return NodeFactory(is_public=True, creator=user)
@pytest.fixture()
def linking_node(self, user, node_source):
return RegistrationFactory(project=node_source, creator=user)
@pytest.fixture()
def url(self, linking_node):
return '/{}registrations/{}/linked_nodes/'.format(
API_BASE, linking_node._id)
@pytest.fixture()
def node_ids(self, linking_node):
return list(
linking_node.nodes_pointer.values_list(
'guids___id', flat=True))
def test_linked_nodes_returns_everything(self, app, user, url, node_ids):
res = app.get(url, auth=user.auth)
assert res.status_code == 200
nodes_returned = [
linked_node['id']for linked_node in res.json['data']
]
assert len(nodes_returned) == len(node_ids)
for node_id in node_ids:
assert node_id in nodes_returned
def test_linked_nodes_only_return_viewable_nodes(
self, app, auth, private_node_one, private_node_two,
public_node, node_ids):
user = AuthUserFactory()
new_linking_node = NodeFactory(creator=user)
private_node_one.add_contributor(user, auth=auth, save=True)
private_node_two.add_contributor(user, auth=auth, save=True)
public_node.add_contributor(user, auth=auth, save=True)
new_linking_node.add_pointer(private_node_one, auth=Auth(user))
new_linking_node.add_pointer(private_node_two, auth=Auth(user))
new_linking_node.add_pointer(public_node, auth=Auth(user))
new_linking_node.save()
new_linking_registration = RegistrationFactory(
project=new_linking_node, creator=user)
res = app.get(
'/{}registrations/{}/linked_nodes/'.format(API_BASE, new_linking_registration._id),
auth=user.auth
)
assert res.status_code == 200
nodes_returned = [linked_node['id']
for linked_node in res.json['data']]
assert len(nodes_returned) == len(node_ids)
for node_id in node_ids:
assert node_id in nodes_returned
# Disconnect contributor_removed so that we don't check in files
# We can remove this when StoredFileNode is implemented in osf-models
with disconnected_from_listeners(contributor_removed):
private_node_two.remove_contributor(user, auth=auth)
public_node.remove_contributor(user, auth=auth)
res = app.get(
'/{}registrations/{}/linked_nodes/'.format(API_BASE, new_linking_registration._id),
auth=user.auth
)
nodes_returned = [
linked_node['id'] for linked_node in res.json['data']
]
assert len(nodes_returned) == len(node_ids) - 1
assert private_node_one._id in nodes_returned
assert public_node._id in nodes_returned
assert private_node_two._id not in nodes_returned
def test_linked_nodes_doesnt_return_deleted_nodes(
self, app, user, url, private_node_one,
private_node_two, public_node, node_ids):
private_node_one.is_deleted = True
private_node_one.save()
res = app.get(url, auth=user.auth)
assert res.status_code == 200
nodes_returned = [
linked_node['id'] for linked_node in res.json['data']
]
assert len(nodes_returned) == len(node_ids) - 1
assert private_node_one._id not in nodes_returned
assert private_node_two._id in nodes_returned
assert public_node._id in nodes_returned
def test_attempt_to_return_linked_nodes_logged_out(self, app, url):
res = app.get(url, auth=None, expect_errors=True)
assert res.status_code == 401
| leb2dg/osf.io | api_tests/registrations/views/test_registration_linked_nodes.py | Python | apache-2.0 | 15,112 |
import psycopg2
from psycopg2 import extras
import math
import string
import sys
import settingReader # Read the XML settings
import logging
class DBConnection(object):
def __init__(self,Options,ConnectToDefaultDB=False):
self.ConnectionOpened=False
self.QueriesCount=0
self.Options=Options
self.PrepareDatabaseInfo()
def PrepareDatabaseInfo(self):
self.serverscount=int(self.Options['PGDB:ServersCount'])
self.DBservers=[]
self.CurrentConnections=[]
self.ActiveCursors=[]
for i in range(0,self.serverscount):
serverinfo={}
serverinfo['serverip']=self.Options['PGDB:serverInfo'+str(i)+':serverip']
serverinfo['username']=self.Options['PGDB:serverInfo'+str(i)+':user']
serverinfo['password']=self.Options['PGDB:serverInfo'+str(i)+':password']
serverinfo['port']=int(self.Options['PGDB:serverInfo'+str(i)+':port'])
self.DBservers.append(serverinfo)
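    # Assumed Options layout (inferred from the keys read above; values are
    # hypothetical):
    #   Options['PGDB:ServersCount'] = '2'
    #   Options['PGDB:serverInfo0:serverip'] = '192.168.0.10'
    #   Options['PGDB:serverInfo0:user'] = 'postgres'
    #   Options['PGDB:serverInfo0:password'] = 'secret'
    #   Options['PGDB:serverInfo0:port'] = '5432'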
def ConnectDB(self,Current_DBName, ServerID):
serverinfo=self.DBservers[ServerID]
ConnectionStr="host="+serverinfo['serverip']+" user="+serverinfo['username']+" password="+serverinfo['password']+" port="+str(serverinfo['port'])+" dbname="+Current_DBName
CurrentConnection=psycopg2.connect(ConnectionStr)
CurrentConnection.autocommit=True
self.CurrentConnections=CurrentConnection
self.ActiveCursors=CurrentConnection.cursor(cursor_factory=psycopg2.extras.DictCursor)
print('Connection to DB is open...')
self.ConnectionOpened=True
def ChangeConnection(self,Current_DBName,ServerID):
self.CloseConnections()
self.ConnectDB(Current_DBName,ServerID)
def CloseConnections(self):
if self.ConnectionOpened==True:
self.ActiveCursors.close()
self.CurrentConnections.close()
self.ConnectionOpened=False
def ExecuteNoQuerySQLStatment(self,SQLStatment,SQLParamsDict=None):
try:
print(SQLStatment)
#SQLStatment=string.lower(SQLStatment)
if SQLParamsDict==None:
self.ActiveCursors.execute(SQLStatment)
else:
self.ActiveCursors.execute(SQLStatment,SQLParamsDict)
except Exception as Exp:
print(type(Exp))
print(Exp.args)
print(Exp)
print("Current SQL Statement =\n"+SQLStatment)
raw_input("PLease press enter to continue.....")
def ExecuteQuerySQLStatment(self,SQLStatment,SQLParamsDict={}):
try:
print(SQLStatment)
print(SQLParamsDict)
self.ActiveCursors.execute(SQLStatment,SQLParamsDict)
resultsList= self.ActiveCursors.fetchall()
return resultsList
except Exception as Exp:
print(type(Exp))
print(Exp.args)
print(Exp)
print("Current SQL Statement =\n"+SQLStatment)
raw_input("PLease press enter to continue.....")
if __name__ == '__main__':
if len(sys.argv)<2:
print("Error Not Enough Arguments")
exit()
SettingFile=sys.argv[1]
[CurrentSAGEStruct,Options]=settingReader.ParseParams(SettingFile)
DBConnectionObj=DBConnection(Options)
for i in range(0,DBConnectionObj.serverscount):
DBConnectionObj.ChangeConnection('postgres',i)
Results=DBConnectionObj.ExecuteQuerySQLStatment("select datname from pg_database where datistemplate=false;", i)
print Results
for DBName in Results:
print DBName[0]
if DBName[0]!='postgres':
try:
DBConnectionObj.ChangeConnection(str(DBName[0]),i)
print "Connection To"+str(DBName[0])+" .... Done"
DBConnectionObj.ExecuteNoQuerySQLStatment("GRANT CONNECT ON DATABASE "+DBName[0]+" to tao_sciencemodules_user;")
DBConnectionObj.ExecuteNoQuerySQLStatment("REVOKE ALL ON ALL TABLES IN SCHEMA public From tao_sciencemodules_user;")
DBConnectionObj.ExecuteNoQuerySQLStatment("GRANT SELECT ON ALL TABLES IN SCHEMA public to tao_sciencemodules_user;")
except Exception as Exp:
print Exp
| IntersectAustralia/asvo-tao | core/sageimport_mpi_HDF/DatabaseSecurity.py | Python | gpl-3.0 | 4,647 |
import sys
import smtplib
def output((code, msg)):
sys.stdout.write('%s %s\n' % (code, msg))
sys.stdout.flush()
smtp = smtplib.SMTP('localhost', 2500)
output(smtp.ehlo('moon.localdomain'))
print smtp.esmtp_features
output(smtp.mail('Damien Churchill <[email protected]>'))
output(smtp.rcpt('Damien Churchill <[email protected]>'))
output(smtp.data('Subject: Testing\n\nTest'))
output(smtp.quit())
| damoxc/vsmtpd | test_smtp.py | Python | gpl-3.0 | 406 |
# construct groups of connected images. The inclusion order favors
# images with the most connections (features matches) to neighbors.
import cv2
import json
import math
import numpy as np
import os
import sys
from props import getNode
from .logger import log
#min_group = 10
min_group = 7
min_connections = 25
max_wanted = 250 # possibly overridden later
def my_add(placed_matches, matches, group_level, i):
# print("adding feature:", i)
for m in matches[i][2:]:
placed_matches[m[0]] += 1
matches[i][1] = group_level
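# Assumed match record layout, inferred from the indexing above and in
# compute() below (not documented here): match = [<coord>, group_index,
# [image_id, feature_id], [image_id, feature_id], ...], so match[1] carries
# the group assignment and match[2:] the per-image observations.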
# NEW GROUPING TEST
def compute(image_list, matches):
# notice: we assume that matches have been previously sorted by
# longest chain first!
log("Start of grouping algorithm...")
matcher_node = getNode('/config/matcher', True)
min_chain_len = matcher_node.getInt("min_chain_len")
if min_chain_len == 0:
min_chain_len = 3
log("/config/matcher/min_chain_len:", min_chain_len)
use_single_pairs = (min_chain_len == 2)
max_wanted = int(8000 / math.sqrt(len(image_list)))
if max_wanted < 200:
max_wanted = 200
log("max features desired per image:", max_wanted)
print("Notice: I should really work on this formula ...")
# mark all features as unaffiliated
for match in matches:
match[1] = -1
# start with no placed images or features
placed_images = set()
groups = []
done = False
while not done:
group_level = len(groups)
log("Start of new group level:", group_level)
placed_matches = [0] * len(image_list)
# find the unused feature with the most connections to
# unplaced images
max_connections = 2
seed_index = -1
for i, match in enumerate(matches):
if match[1] < 0:
count = 0
connected = False
for m in match[2:]:
if m[0] in placed_images:
connected = True
else:
count += 1
if not connected and count > max_connections:
max_connections = count
seed_index = i
if seed_index == -1:
break
log("Seed index:", seed_index, "connections:", max_connections)
match = matches[seed_index]
m = match[3] # first image referenced by match
# group_images.add(m[0])
my_add(placed_matches, matches, group_level, seed_index)
seed_image = m[0]
log('Seeding group with:', image_list[seed_image].name)
still_working = True
iteration = 0
while still_working:
log("Iteration:", iteration)
still_working = False
for i, match in enumerate(matches):
if match[1] < 0 and (use_single_pairs or len(match[2:]) > 2):
# determine if we should add this feature
placed_count = 0
placed_need_count = 0
unplaced_count = 0
seed_connection = False
for m in match[2:]:
if m[0] in placed_images:
# placed in a previous grouping, skip
continue
if m[0] == seed_image:
seed_connection = True
if placed_matches[m[0]] >= max_wanted:
placed_count += 1
elif placed_matches[m[0]] >= min_connections:
placed_count += 1
placed_need_count += 1
elif placed_matches[m[0]] > 0:
placed_need_count += 1
else:
unplaced_count += 1
# print("Match:", i, placed_count, seed_connection, placed_need_count, unplaced_count)
if placed_count > 1 or (use_single_pairs and placed_count > 0) or seed_connection:
if placed_need_count > 0 or unplaced_count > 0:
my_add(placed_matches, matches, group_level, i)
still_working = True
iteration += 1
# count up the placed images in this group
group_images = set()
for i in range(len(image_list)):
if placed_matches[i] >= min_connections:
group_images.add(i)
group_list = []
for i in list(group_images):
placed_images.add(i)
group_list.append(image_list[i].name)
if len(group_images) >= min_group:
log(group_list)
groups.append(group_list)
if len(group_images) < 3:
done = True
return groups
def save(path, groups):
file = os.path.join(path, 'groups.json')
try:
fd = open(file, 'w')
json.dump(groups, fd, indent=4, sort_keys=True)
fd.close()
except:
log('{}: error saving file: {}'.format(file, str(sys.exc_info()[1])))
def load(path):
file = os.path.join(path, 'groups.json')
try:
fd = open(file, 'r')
groups = json.load(fd)
fd.close()
except:
log('{}: error loading file: {}'.format(file, str(sys.exc_info()[1])))
groups = []
return groups
| UASLab/ImageAnalysis | scripts/lib/groups.py | Python | mit | 5,436 |
from django.contrib import admin
from models import SensorType, SensorData
def clear_sensor_data(model_admin, request, queryset):
for sensor_type in queryset:
SensorData.objects.filter(type=sensor_type).delete()
clear_sensor_data.short_description = "Clear sensor data"
class SensorTypeAdmin(admin.ModelAdmin):
actions = [clear_sensor_data]
admin.site.register(SensorType, SensorTypeAdmin)
admin.site.register(SensorData)
| circuitar/SensorMonitor | SensorMonitorPanel/admin.py | Python | mit | 443 |
# -*- encoding: utf-8 -*-
#
# Copyright 2013 IBM Corp.
#
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ast
import collections
import os
from oslo.config import cfg
import re
from six.moves import configparser
from six.moves.urllib import parse as urlparse
from pycadf import cadftaxonomy as taxonomy
from pycadf import cadftype
from pycadf import credential
from pycadf import endpoint
from pycadf import eventfactory as factory
from pycadf import host
from pycadf import identifier
from pycadf import reason
from pycadf import reporterstep
from pycadf import resource
from pycadf import tag
from pycadf import timestamp
CONF = cfg.CONF
opts = [
cfg.StrOpt('api_audit_map',
default='api_audit_map.conf',
help='File containing mapping for api paths and '
'service endpoints'),
]
CONF.register_opts(opts, group='audit')
AuditMap = collections.namedtuple('AuditMap',
['path_kw',
'custom_actions',
'service_endpoints',
'default_target_endpoint_type'])
def _configure_audit_map():
"""Configure to recognize and map known api paths."""
path_kw = {}
custom_actions = {}
service_endpoints = {}
default_target_endpoint_type = None
cfg_file = CONF.audit.api_audit_map
if not os.path.exists(CONF.audit.api_audit_map):
cfg_file = cfg.CONF.find_file(CONF.audit.api_audit_map)
if cfg_file:
try:
            map_conf = configparser.SafeConfigParser()
            with open(cfg_file) as map_file:
                map_conf.readfp(map_file)
try:
default_target_endpoint_type = \
map_conf.get('DEFAULT', 'target_endpoint_type')
except configparser.NoOptionError:
pass
try:
custom_actions = dict(map_conf.items('custom_actions'))
except configparser.Error:
pass
try:
path_kw = dict(map_conf.items('path_keywords'))
except configparser.Error:
pass
try:
service_endpoints = dict(map_conf.items('service_endpoints'))
except configparser.Error:
pass
except configparser.ParsingError as err:
raise PycadfAuditApiConfigError(
'Error parsing audit map file: %s' % err)
return AuditMap(path_kw=path_kw, custom_actions=custom_actions,
service_endpoints=service_endpoints,
default_target_endpoint_type=default_target_endpoint_type)
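# Illustrative api_audit_map.conf as consumed by _configure_audit_map();
# the section names come from the parser above, the keys and values are
# assumptions:
#   [DEFAULT]
#   target_endpoint_type = compute
#   [custom_actions]
#   enable = enable
#   [path_keywords]
#   servers = server
#   [service_endpoints]
#   compute = service/compute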
class ClientResource(resource.Resource):
def __init__(self, project_id=None, **kwargs):
super(ClientResource, self).__init__(**kwargs)
if project_id is not None:
self.project_id = project_id
class KeystoneCredential(credential.Credential):
def __init__(self, identity_status=None, **kwargs):
super(KeystoneCredential, self).__init__(**kwargs)
if identity_status is not None:
self.identity_status = identity_status
class PycadfAuditApiConfigError(Exception):
"""Error raised when pyCADF fails to configure correctly."""
class OpenStackAuditApi(object):
_MAP = None
Service = collections.namedtuple('Service',
['id', 'name', 'type', 'admin_endp',
'public_endp', 'private_endp'])
def _get_action(self, req):
"""Take a given Request, parse url path to calculate action type.
Depending on req.method:
if POST: path ends with 'action', read the body and use as action;
path ends with known custom_action, take action from config;
request ends with known path, assume is create action;
request ends with unknown path, assume is update action.
if GET: request ends with known path, assume is list action;
request ends with unknown path, assume is read action.
if PUT, assume update action.
if DELETE, assume delete action.
if HEAD, assume read action.
"""
path = req.path[:-1] if req.path.endswith('/') else req.path
url_ending = path[path.rfind('/') + 1:]
method = req.method
if url_ending + '/' + method.lower() in self._MAP.custom_actions:
action = self._MAP.custom_actions[url_ending + '/' +
method.lower()]
elif url_ending in self._MAP.custom_actions:
action = self._MAP.custom_actions[url_ending]
elif method == 'POST':
if url_ending == 'action':
if req.json:
body_action = list(req.json.keys())[0]
action = taxonomy.ACTION_UPDATE + '/' + body_action
else:
action = taxonomy.ACTION_CREATE
elif url_ending not in self._MAP.path_kw:
action = taxonomy.ACTION_UPDATE
else:
action = taxonomy.ACTION_CREATE
elif method == 'GET':
if url_ending in self._MAP.path_kw:
action = taxonomy.ACTION_LIST
else:
action = taxonomy.ACTION_READ
elif method == 'PUT':
action = taxonomy.ACTION_UPDATE
elif method == 'DELETE':
action = taxonomy.ACTION_DELETE
elif method == 'HEAD':
action = taxonomy.ACTION_READ
else:
action = taxonomy.UNKNOWN
return action
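    # Illustrative outcomes of _get_action() (a sketch; 'servers' being a
    # configured path keyword is an assumption):
    #   POST   /v2/servers                           -> create
    #   POST   /v2/xyz/action, body {"reboot": ...}  -> update/reboot
    #   GET    /v2/servers                           -> list
    #   GET    /v2/servers/<id>                      -> read
    #   DELETE /v2/servers/<id>                      -> delete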
def _get_service_info(self, endp):
service = self.Service(
type=self._MAP.service_endpoints.get(
endp['type'],
taxonomy.UNKNOWN),
name=endp['name'],
id=identifier.norm_ns(endp['endpoints'][0]['id']),
admin_endp=endpoint.Endpoint(
name='admin',
url=endp['endpoints'][0]['adminURL']),
private_endp=endpoint.Endpoint(
name='private',
url=endp['endpoints'][0]['internalURL']),
public_endp=endpoint.Endpoint(
name='public',
url=endp['endpoints'][0]['publicURL']))
return service
def _build_typeURI(self, req, service_type):
type_uri = ''
prev_key = None
for key in re.split('/', req.path):
if key in self._MAP.path_kw:
type_uri += '/' + key
elif prev_key in self._MAP.path_kw:
type_uri += '/' + self._MAP.path_kw[prev_key]
prev_key = key
return service_type + type_uri
def create_event(self, req, correlation_id):
if not self._MAP:
self._MAP = _configure_audit_map()
action = self._get_action(req)
initiator_host = host.Host(address=req.client_addr,
agent=req.user_agent)
catalog = ast.literal_eval(req.environ['HTTP_X_SERVICE_CATALOG'])
service_info = self.Service(type=taxonomy.UNKNOWN,
name=taxonomy.UNKNOWN,
id=taxonomy.UNKNOWN,
admin_endp=None,
private_endp=None,
public_endp=None)
default_endpoint = None
for endp in catalog:
admin_urlparse = urlparse.urlparse(
endp['endpoints'][0]['adminURL'])
public_urlparse = urlparse.urlparse(
endp['endpoints'][0]['publicURL'])
req_url = urlparse.urlparse(req.host_url)
if (req_url.netloc == admin_urlparse.netloc
or req_url.netloc == public_urlparse.netloc):
service_info = self._get_service_info(endp)
break
elif (self._MAP.default_target_endpoint_type
and endp['type'] == self._MAP.default_target_endpoint_type):
default_endpoint = endp
else:
if default_endpoint:
service_info = self._get_service_info(default_endpoint)
initiator = ClientResource(
typeURI=taxonomy.ACCOUNT_USER,
id=identifier.norm_ns(str(req.environ['HTTP_X_USER_ID'])),
name=req.environ['HTTP_X_USER_NAME'],
host=initiator_host,
credential=KeystoneCredential(
token=req.environ['HTTP_X_AUTH_TOKEN'],
identity_status=req.environ['HTTP_X_IDENTITY_STATUS']),
project_id=identifier.norm_ns(req.environ['HTTP_X_PROJECT_ID']))
target_typeURI = (self._build_typeURI(req, service_info.type)
if service_info.type != taxonomy.UNKNOWN
else service_info.type)
target = resource.Resource(typeURI=target_typeURI,
id=service_info.id,
name=service_info.name)
if service_info.admin_endp:
target.add_address(service_info.admin_endp)
if service_info.private_endp:
target.add_address(service_info.private_endp)
if service_info.public_endp:
target.add_address(service_info.public_endp)
event = factory.EventFactory().new_event(
eventType=cadftype.EVENTTYPE_ACTIVITY,
outcome=taxonomy.OUTCOME_PENDING,
action=action,
initiator=initiator,
target=target,
observer=resource.Resource(id='target'))
event.requestPath = req.path_qs
event.add_tag(tag.generate_name_value_tag('correlation_id',
correlation_id))
return event
def append_audit_event(self, req):
"""Append a CADF event to req.environ['CADF_EVENT']
Also, stores model in request for future process and includes a
CADF correlation id.
"""
correlation_id = identifier.generate_uuid()
req.environ['CADF_EVENT_CORRELATION_ID'] = correlation_id
event = self.create_event(req, correlation_id)
setattr(req, 'cadf_model', event)
req.environ['CADF_EVENT'] = event.as_dict()
def mod_audit_event(self, req, response):
"""Modifies CADF event in request based on response.
If no event exists, a new event is created.
"""
if response:
if response.status_int >= 200 and response.status_int < 400:
result = taxonomy.OUTCOME_SUCCESS
else:
result = taxonomy.OUTCOME_FAILURE
else:
result = taxonomy.UNKNOWN
if hasattr(req, 'cadf_model'):
req.cadf_model.add_reporterstep(
reporterstep.Reporterstep(
role=cadftype.REPORTER_ROLE_MODIFIER,
reporter=resource.Resource(id='target'),
reporterTime=timestamp.get_utc_now()))
else:
self.append_audit_event(req)
req.cadf_model.outcome = result
if response:
req.cadf_model.reason = \
reason.Reason(reasonType='HTTP',
reasonCode=str(response.status_int))
req.environ['CADF_EVENT'] = req.cadf_model.as_dict()
| citrix-openstack-build/pycadf | pycadf/audit/api.py | Python | apache-2.0 | 11,858 |
# -*- coding: utf-8 -*-
"""django_treensl URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^ads/', include('myapp.urls')),
url(r'^admin/', include(admin.site.urls)),
]
| EvgeniyBurdin/django_treensl | django_treensl/urls.py | Python | bsd-3-clause | 830 |
# -*- coding: utf-8
# pylint: disable=line-too-long
"""Simple KMers class to compute kmer frequecies
This module should not be used for k > 4.
"""
import itertools
from collections import Counter
import anvio
from anvio.constants import complements
__author__ = "Developers of anvi'o (see AUTHORS.txt)"
__copyright__ = "Copyleft 2015-2018, the Meren Lab (http://merenlab.org/)"
__credits__ = []
__license__ = "GPL 3.0"
__version__ = anvio.__version__
__maintainer__ = "A. Murat Eren"
__email__ = "[email protected]"
__status__ = "Development"
def rev_comp(seq):
return seq.translate(complements)[::-1]
class KMers:
def __init__(self, k=4, alphabet='ATCG', consider_rev_comps=True):
self.kmers = {}
self.alphabet = alphabet
self.consider_rev_comps = consider_rev_comps
self.k = k
self.get_kmers()
def get_kmers(self):
k = self.k
arg = [self.alphabet] * k
kmers = set()
for item in itertools.product(*arg):
kmer = ''.join(item)
if self.consider_rev_comps:
if rev_comp(kmer) not in kmers:
kmers.add(kmer)
else:
kmers.add(kmer)
self.kmers[k] = kmers
def get_kmer_frequency(self, sequence, dist_metric_safe=False):
k = self.k
sequence = sequence.upper()
if len(sequence) < k:
return None
if k not in self.kmers:
            self.get_kmers()
kmers = self.kmers[k]
frequencies = Counter({})
for i in range(0, len(sequence) - (k - 1)):
kmer = sequence[i:i + k]
if self.consider_rev_comps:
if kmer in kmers:
frequencies[kmer] += 1
else:
frequencies[rev_comp(kmer)] += 1
else:
frequencies[kmer] += 1
if dist_metric_safe:
            # we don't want all kmer freq values to be zero, because that
            # would make the distance metrics go crazy. Instead we fill the
            # dict with 1s, which doesn't affect relative distances.
if not len(frequencies):
frequencies = dict(list(zip(kmers, [1] * len(kmers))))
return frequencies
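# Illustrative usage (a sketch, not part of the original module):
#   kmers = KMers(k=2)
#   freqs = kmers.get_kmer_frequency('ATCGATCG')
#   # 'freqs' is a Counter of observed 2-mers, with reverse complements
#   # folded together because consider_rev_comps defaults to True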
| meren/anvio | anvio/kmers.py | Python | gpl-3.0 | 2,263 |
class Mover(object):
def __init__(self):
self.location = PVector(random(width), random(height))
self.velocity = PVector(0, 0)
self.topspeed = 4
self.r = 15
def update(self):
mouse = PVector(mouseX, mouseY)
        direction = PVector.sub(mouse, self.location)
        direction.normalize()
        direction.mult(0.05)
        self.acceleration = direction
self.velocity.add(self.acceleration)
self.velocity.limit(self.topspeed)
self.location.add(self.velocity)
def display(self):
stroke(0)
fill(255, 100, 255)
ellipse(self.location.x, self.location.y, 2*self.r, 2*self.r)
def checkBoundaries(self):
if (self.location.x > width + self.r):
self.location.x = -self.r
elif (self.location.x < -self.r):
self.location.x = width + self.r
if (self.location.y > height + self.r):
self.location.y = -self.r
elif (self.location.y < -self.r):
self.location.y = height + self.r
| kantel/processingpy | sketches/natureOfCode/chapter01/mover5/mover.py | Python | mit | 1,075 |
# Copyright 2014 Mellanox Technologies, Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import itertools
import socket
import sys
import time
from neutron_lib import constants as n_constants
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging
from oslo_service import loopingcall
from osprofiler import profiler
import six
from neutron._i18n import _, _LE, _LI, _LW
from neutron.agent.l2.extensions import manager as ext_manager
from neutron.agent import rpc as agent_rpc
from neutron.agent import securitygroups_rpc as sg_rpc
from neutron.api.rpc.callbacks import resources
from neutron.common import config as common_config
from neutron.common import profiler as setup_profiler
from neutron.common import topics
from neutron.common import utils as n_utils
from neutron import context
from neutron.extensions import portbindings
from neutron.plugins.ml2.drivers.mech_sriov.agent.common import config
from neutron.plugins.ml2.drivers.mech_sriov.agent.common \
import exceptions as exc
from neutron.plugins.ml2.drivers.mech_sriov.agent import eswitch_manager as esm
LOG = logging.getLogger(__name__)
class SriovNicSwitchRpcCallbacks(sg_rpc.SecurityGroupAgentRpcCallbackMixin):
# Set RPC API version to 1.0 by default.
# history
# 1.1 Support Security Group RPC (works with NoopFirewallDriver)
# 1.2 Support DVR (Distributed Virtual Router) RPC (not supported)
# 1.3 Added param devices_to_update to security_groups_provider_updated
# (works with NoopFirewallDriver)
# 1.4 Added support for network_update
target = oslo_messaging.Target(version='1.4')
def __init__(self, context, agent, sg_agent):
super(SriovNicSwitchRpcCallbacks, self).__init__()
self.context = context
self.agent = agent
self.sg_agent = sg_agent
def port_update(self, context, **kwargs):
LOG.debug("port_update received")
port = kwargs.get('port')
vnic_type = port.get(portbindings.VNIC_TYPE)
if vnic_type and vnic_type == portbindings.VNIC_DIRECT_PHYSICAL:
LOG.debug("The SR-IOV agent doesn't handle %s ports.",
portbindings.VNIC_DIRECT_PHYSICAL)
return
# Put the port mac address in the updated_devices set.
# Do not store port details, as if they're used for processing
# notifications there is no guarantee the notifications are
# processed in the same order as the relevant API requests.
mac = port['mac_address']
pci_slot = None
if port.get(portbindings.PROFILE):
pci_slot = port[portbindings.PROFILE].get('pci_slot')
if pci_slot:
self.agent.updated_devices.add((mac, pci_slot))
LOG.debug("port_update RPC received for port: %(id)s with MAC "
"%(mac)s and PCI slot %(pci_slot)s slot",
{'id': port['id'], 'mac': mac, 'pci_slot': pci_slot})
else:
LOG.debug("No PCI Slot for port %(id)s with MAC %(mac)s; "
"skipping", {'id': port['id'], 'mac': mac,
'pci_slot': pci_slot})
def network_update(self, context, **kwargs):
network_id = kwargs['network']['id']
LOG.debug("network_update message received for network "
"%(network_id)s, with ports: %(ports)s",
{'network_id': network_id,
'ports': self.agent.network_ports[network_id]})
for port_data in self.agent.network_ports[network_id]:
self.agent.updated_devices.add(port_data['device'])
@profiler.trace_cls("rpc")
class SriovNicSwitchAgent(object):
def __init__(self, physical_devices_mappings, exclude_devices,
polling_interval):
self.polling_interval = polling_interval
self.network_ports = collections.defaultdict(list)
self.conf = cfg.CONF
self.setup_eswitch_mgr(physical_devices_mappings,
exclude_devices)
# Stores port update notifications for processing in the main loop
self.updated_devices = set()
self.context = context.get_admin_context_without_session()
self.plugin_rpc = agent_rpc.PluginApi(topics.PLUGIN)
self.sg_plugin_rpc = sg_rpc.SecurityGroupServerRpcApi(topics.PLUGIN)
self.sg_agent = sg_rpc.SecurityGroupAgentRpc(self.context,
self.sg_plugin_rpc)
self._setup_rpc()
self.ext_manager = self._create_agent_extension_manager(
self.connection)
configurations = {'device_mappings': physical_devices_mappings,
'extensions': self.ext_manager.names()}
#TODO(mangelajo): optimize resource_versions (see ovs agent)
self.agent_state = {
'binary': 'neutron-sriov-nic-agent',
'host': self.conf.host,
'topic': n_constants.L2_AGENT_TOPIC,
'configurations': configurations,
'agent_type': n_constants.AGENT_TYPE_NIC_SWITCH,
'resource_versions': resources.LOCAL_RESOURCE_VERSIONS,
'start_flag': True}
# The initialization is complete; we can start receiving messages
self.connection.consume_in_threads()
# Initialize iteration counter
self.iter_num = 0
def _setup_rpc(self):
self.agent_id = 'nic-switch-agent.%s' % socket.gethostname()
LOG.info(_LI("RPC agent_id: %s"), self.agent_id)
self.topic = topics.AGENT
self.state_rpc = agent_rpc.PluginReportStateAPI(topics.REPORTS)
# RPC network init
# Handle updates from service
self.endpoints = [SriovNicSwitchRpcCallbacks(self.context, self,
self.sg_agent)]
# Define the listening consumers for the agent
consumers = [[topics.PORT, topics.UPDATE],
[topics.NETWORK, topics.UPDATE],
[topics.SECURITY_GROUP, topics.UPDATE]]
self.connection = agent_rpc.create_consumers(self.endpoints,
self.topic,
consumers,
start_listening=False)
report_interval = cfg.CONF.AGENT.report_interval
if report_interval:
heartbeat = loopingcall.FixedIntervalLoopingCall(
self._report_state)
heartbeat.start(interval=report_interval)
def _report_state(self):
try:
devices = len(self.eswitch_mgr.get_assigned_devices_info())
self.agent_state.get('configurations')['devices'] = devices
self.state_rpc.report_state(self.context,
self.agent_state)
# we only want to update resource versions on startup
self.agent_state.pop('resource_versions', None)
self.agent_state.pop('start_flag', None)
except Exception:
LOG.exception(_LE("Failed reporting state!"))
def _create_agent_extension_manager(self, connection):
ext_manager.register_opts(self.conf)
mgr = ext_manager.AgentExtensionsManager(self.conf)
mgr.initialize(connection, 'sriov')
return mgr
def setup_eswitch_mgr(self, device_mappings, exclude_devices=None):
exclude_devices = exclude_devices or {}
self.eswitch_mgr = esm.ESwitchManager()
self.eswitch_mgr.discover_devices(device_mappings, exclude_devices)
def scan_devices(self, registered_devices, updated_devices):
curr_devices = self.eswitch_mgr.get_assigned_devices_info()
device_info = {}
device_info['current'] = curr_devices
device_info['added'] = curr_devices - registered_devices
# we need to clean up after devices are removed
device_info['removed'] = registered_devices - curr_devices
# we don't want to process updates for devices that don't exist
device_info['updated'] = (updated_devices & curr_devices -
device_info['removed'])
return device_info
def _device_info_has_changes(self, device_info):
return (device_info.get('added')
or device_info.get('updated')
or device_info.get('removed'))
def process_network_devices(self, device_info):
resync_a = False
resync_b = False
self.sg_agent.prepare_devices_filter(device_info.get('added'))
if device_info.get('updated'):
self.sg_agent.refresh_firewall()
# Updated devices are processed the same as new ones, as their
# admin_state_up may have changed. The set union prevents duplicating
# work when a device is new and updated in the same polling iteration.
devices_added_updated = (set(device_info.get('added'))
| set(device_info.get('updated')))
if devices_added_updated:
resync_a = self.treat_devices_added_updated(devices_added_updated)
if device_info.get('removed'):
resync_b = self.treat_devices_removed(device_info['removed'])
# If one of the above operations fails => resync with plugin
return (resync_a | resync_b)
def treat_device(self, device, pci_slot, admin_state_up, spoofcheck=True):
if self.eswitch_mgr.device_exists(device, pci_slot):
try:
self.eswitch_mgr.set_device_spoofcheck(device, pci_slot,
spoofcheck)
except Exception:
LOG.warning(_LW("Failed to set spoofcheck for device %s"),
device)
LOG.info(_LI("Device %(device)s spoofcheck %(spoofcheck)s"),
{"device": device, "spoofcheck": spoofcheck})
try:
self.eswitch_mgr.set_device_state(device, pci_slot,
admin_state_up)
except exc.IpCommandOperationNotSupportedError:
LOG.warning(_LW("Device %s does not support state change"),
device)
except exc.SriovNicError:
LOG.warning(_LW("Failed to set device %s state"), device)
return
if admin_state_up:
# update plugin about port status
self.plugin_rpc.update_device_up(self.context,
device,
self.agent_id,
cfg.CONF.host)
else:
self.plugin_rpc.update_device_down(self.context,
device,
self.agent_id,
cfg.CONF.host)
else:
LOG.info(_LI("No device with MAC %s defined on agent."), device)
def _update_network_ports(self, network_id, port_id, mac_pci_slot):
self._clean_network_ports(mac_pci_slot)
self.network_ports[network_id].append({
"port_id": port_id,
"device": mac_pci_slot})
def _clean_network_ports(self, mac_pci_slot):
for netid, ports_list in six.iteritems(self.network_ports):
for port_data in ports_list:
if mac_pci_slot == port_data['device']:
ports_list.remove(port_data)
if ports_list == []:
self.network_ports.pop(netid)
return port_data['port_id']
def treat_devices_added_updated(self, devices_info):
try:
macs_list = set([device_info[0] for device_info in devices_info])
devices_details_list = self.plugin_rpc.get_devices_details_list(
self.context, macs_list, self.agent_id)
except Exception as e:
LOG.debug("Unable to get port details for devices "
"with MAC addresses %(devices)s: %(e)s",
{'devices': macs_list, 'e': e})
# resync is needed
return True
for device_details in devices_details_list:
device = device_details['device']
LOG.debug("Port with MAC address %s is added", device)
if 'port_id' in device_details:
LOG.info(_LI("Port %(device)s updated. Details: %(details)s"),
{'device': device, 'details': device_details})
port_id = device_details['port_id']
profile = device_details['profile']
spoofcheck = device_details.get('port_security_enabled', True)
self.treat_device(device,
profile.get('pci_slot'),
device_details['admin_state_up'],
spoofcheck)
self._update_network_ports(device_details['network_id'],
port_id,
(device, profile.get('pci_slot')))
self.ext_manager.handle_port(self.context, device_details)
else:
LOG.info(_LI("Device with MAC %s not defined on plugin"),
device)
return False
def treat_devices_removed(self, devices):
resync = False
for device in devices:
mac, pci_slot = device
LOG.info(_LI("Removing device with MAC address %(mac)s and "
"PCI slot %(pci_slot)s"),
{'mac': mac, 'pci_slot': pci_slot})
try:
port_id = self._clean_network_ports(device)
if port_id:
port = {'port_id': port_id,
'device': mac,
'profile': {'pci_slot': pci_slot}}
self.ext_manager.delete_port(self.context, port)
else:
LOG.warning(_LW("port_id to device with MAC "
"%s not found"), mac)
dev_details = self.plugin_rpc.update_device_down(self.context,
mac,
self.agent_id,
cfg.CONF.host)
except Exception as e:
LOG.debug("Removing port failed for device with MAC address "
"%(mac)s and PCI slot %(pci_slot)s due to %(exc)s",
{'mac': mac, 'pci_slot': pci_slot, 'exc': e})
resync = True
continue
if dev_details['exists']:
LOG.info(_LI("Port with MAC %(mac)s and PCI slot "
"%(pci_slot)s updated."),
{'mac': mac, 'pci_slot': pci_slot})
else:
LOG.debug("Device with MAC %(mac)s and PCI slot "
"%(pci_slot)s not defined on plugin",
{'mac': mac, 'pci_slot': pci_slot})
return resync
def daemon_loop(self):
sync = True
devices = set()
LOG.info(_LI("SRIOV NIC Agent RPC Daemon Started!"))
while True:
start = time.time()
LOG.debug("Agent rpc_loop - iteration:%d started",
self.iter_num)
if sync:
LOG.info(_LI("Agent out of sync with plugin!"))
devices.clear()
sync = False
device_info = {}
# Save updated devices dict to perform rollback in case
# resync would be needed, and then clear self.updated_devices.
# As the greenthread should not yield between these
            # two statements, this should be thread-safe.
updated_devices_copy = self.updated_devices
self.updated_devices = set()
try:
device_info = self.scan_devices(devices, updated_devices_copy)
if self._device_info_has_changes(device_info):
LOG.debug("Agent loop found changes! %s", device_info)
# If treat devices fails - indicates must resync with
# plugin
sync = self.process_network_devices(device_info)
devices = device_info['current']
except Exception:
LOG.exception(_LE("Error in agent loop. Devices info: %s"),
device_info)
sync = True
# Restore devices that were removed from this set earlier
# without overwriting ones that may have arrived since.
self.updated_devices |= updated_devices_copy
# sleep till end of polling interval
elapsed = (time.time() - start)
if (elapsed < self.polling_interval):
time.sleep(self.polling_interval - elapsed)
else:
LOG.debug("Loop iteration exceeded interval "
"(%(polling_interval)s vs. %(elapsed)s)!",
{'polling_interval': self.polling_interval,
'elapsed': elapsed})
self.iter_num = self.iter_num + 1
class SriovNicAgentConfigParser(object):
def __init__(self):
self.device_mappings = {}
self.exclude_devices = {}
def parse(self):
"""Parses device_mappings and exclude_devices.
Parse and validate the consistency in both mappings
"""
self.device_mappings = n_utils.parse_mappings(
cfg.CONF.SRIOV_NIC.physical_device_mappings, unique_keys=False)
self.exclude_devices = config.parse_exclude_devices(
cfg.CONF.SRIOV_NIC.exclude_devices)
self._validate()
def _validate(self):
"""Validate configuration.
        Validate that every network device listed in exclude_devices
        exists in device_mappings.
"""
dev_net_set = set(itertools.chain.from_iterable(
six.itervalues(self.device_mappings)))
for dev_name in self.exclude_devices.keys():
if dev_name not in dev_net_set:
raise ValueError(_("Device name %(dev_name)s is missing from "
"physical_device_mappings") % {'dev_name':
dev_name})
def main():
common_config.init(sys.argv[1:])
common_config.setup_logging()
try:
config_parser = SriovNicAgentConfigParser()
config_parser.parse()
device_mappings = config_parser.device_mappings
exclude_devices = config_parser.exclude_devices
except ValueError:
LOG.exception(_LE("Failed on Agent configuration parse. "
"Agent terminated!"))
raise SystemExit(1)
LOG.info(_LI("Physical Devices mappings: %s"), device_mappings)
LOG.info(_LI("Exclude Devices: %s"), exclude_devices)
polling_interval = cfg.CONF.AGENT.polling_interval
try:
agent = SriovNicSwitchAgent(device_mappings,
exclude_devices,
polling_interval)
except exc.SriovNicError:
LOG.exception(_LE("Agent Initialization Failed"))
raise SystemExit(1)
# Start everything.
setup_profiler.setup("neutron-sriov-nic-agent", cfg.CONF.host)
LOG.info(_LI("Agent initialized successfully, now running... "))
agent.daemon_loop()
| bigswitch/neutron | neutron/plugins/ml2/drivers/mech_sriov/agent/sriov_nic_agent.py | Python | apache-2.0 | 20,317 |
"""
Support for MQTT lights.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/light.mqtt/
"""
import logging
from functools import partial
import voluptuous as vol
import blumate.components.mqtt as mqtt
from blumate.components.light import (
ATTR_BRIGHTNESS, ATTR_RGB_COLOR, Light)
from blumate.const import CONF_NAME, CONF_OPTIMISTIC, CONF_VALUE_TEMPLATE
from blumate.components.mqtt import (
CONF_STATE_TOPIC, CONF_COMMAND_TOPIC, CONF_QOS, CONF_RETAIN)
import blumate.helpers.config_validation as cv
from blumate.helpers.template import render_with_possible_json_value
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['mqtt']
CONF_STATE_VALUE_TEMPLATE = 'state_value_template'
CONF_BRIGHTNESS_STATE_TOPIC = 'brightness_state_topic'
CONF_BRIGHTNESS_COMMAND_TOPIC = 'brightness_command_topic'
CONF_BRIGHTNESS_VALUE_TEMPLATE = 'brightness_value_template'
CONF_RGB_STATE_TOPIC = 'rgb_state_topic'
CONF_RGB_COMMAND_TOPIC = 'rgb_command_topic'
CONF_RGB_VALUE_TEMPLATE = 'rgb_value_template'
CONF_PAYLOAD_ON = 'payload_on'
CONF_PAYLOAD_OFF = 'payload_off'
CONF_BRIGHTNESS_SCALE = 'brightness_scale'
DEFAULT_NAME = 'MQTT Light'
DEFAULT_PAYLOAD_ON = 'ON'
DEFAULT_PAYLOAD_OFF = 'OFF'
DEFAULT_OPTIMISTIC = False
DEFAULT_BRIGHTNESS_SCALE = 255
PLATFORM_SCHEMA = mqtt.MQTT_RW_PLATFORM_SCHEMA.extend({
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_STATE_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_BRIGHTNESS_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_BRIGHTNESS_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_BRIGHTNESS_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_RGB_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_RGB_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_RGB_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_PAYLOAD_ON, default=DEFAULT_PAYLOAD_ON): cv.string,
vol.Optional(CONF_PAYLOAD_OFF, default=DEFAULT_PAYLOAD_OFF): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_BRIGHTNESS_SCALE, default=DEFAULT_BRIGHTNESS_SCALE):
vol.All(vol.Coerce(int), vol.Range(min=1)),
})
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
"""Add MQTT Light."""
config.setdefault(CONF_STATE_VALUE_TEMPLATE,
config.get(CONF_VALUE_TEMPLATE))
add_devices_callback([MqttLight(
hass,
config[CONF_NAME],
{
key: config.get(key) for key in (
CONF_STATE_TOPIC,
CONF_COMMAND_TOPIC,
CONF_BRIGHTNESS_STATE_TOPIC,
CONF_BRIGHTNESS_COMMAND_TOPIC,
CONF_RGB_STATE_TOPIC,
CONF_RGB_COMMAND_TOPIC,
)
},
{
'state': config.get(CONF_STATE_VALUE_TEMPLATE),
'brightness': config.get(CONF_BRIGHTNESS_VALUE_TEMPLATE),
'rgb': config.get(CONF_RGB_VALUE_TEMPLATE)
},
config[CONF_QOS],
config[CONF_RETAIN],
{
'on': config[CONF_PAYLOAD_ON],
'off': config[CONF_PAYLOAD_OFF],
},
config[CONF_OPTIMISTIC],
config[CONF_BRIGHTNESS_SCALE],
)])
class MqttLight(Light):
"""MQTT light."""
# pylint: disable=too-many-arguments,too-many-instance-attributes
def __init__(self, hass, name, topic, templates, qos, retain, payload,
optimistic, brightness_scale):
"""Initialize MQTT light."""
self._hass = hass
self._name = name
self._topic = topic
self._qos = qos
self._retain = retain
self._payload = payload
self._optimistic = optimistic or topic["state_topic"] is None
self._optimistic_rgb = optimistic or topic["rgb_state_topic"] is None
self._optimistic_brightness = (optimistic or
topic["brightness_state_topic"] is None)
self._brightness_scale = brightness_scale
self._state = False
templates = {key: ((lambda value: value) if tpl is None else
partial(render_with_possible_json_value, hass, tpl))
for key, tpl in templates.items()}
def state_received(topic, payload, qos):
"""A new MQTT message has been received."""
payload = templates['state'](payload)
if payload == self._payload["on"]:
self._state = True
elif payload == self._payload["off"]:
self._state = False
self.update_ha_state()
if self._topic["state_topic"] is not None:
mqtt.subscribe(self._hass, self._topic["state_topic"],
state_received, self._qos)
def brightness_received(topic, payload, qos):
"""A new MQTT message for the brightness has been received."""
device_value = float(templates['brightness'](payload))
percent_bright = device_value / self._brightness_scale
self._brightness = int(percent_bright * 255)
self.update_ha_state()
if self._topic["brightness_state_topic"] is not None:
mqtt.subscribe(self._hass, self._topic["brightness_state_topic"],
brightness_received, self._qos)
self._brightness = 255
elif self._topic["brightness_command_topic"] is not None:
self._brightness = 255
else:
self._brightness = None
def rgb_received(topic, payload, qos):
"""A new MQTT message has been received."""
self._rgb = [int(val) for val in
templates['rgb'](payload).split(',')]
self.update_ha_state()
if self._topic["rgb_state_topic"] is not None:
mqtt.subscribe(self._hass, self._topic["rgb_state_topic"],
rgb_received, self._qos)
self._rgb = [255, 255, 255]
if self._topic["rgb_command_topic"] is not None:
self._rgb = [255, 255, 255]
else:
self._rgb = None
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return self._brightness
@property
def rgb_color(self):
"""Return the RGB color value."""
return self._rgb
@property
def should_poll(self):
"""No polling needed for a MQTT light."""
return False
@property
def name(self):
"""Return the name of the device if any."""
return self._name
@property
def is_on(self):
"""Return true if device is on."""
return self._state
@property
def assumed_state(self):
"""Return true if we do optimistic updates."""
return self._optimistic
def turn_on(self, **kwargs):
"""Turn the device on."""
should_update = False
if ATTR_RGB_COLOR in kwargs and \
self._topic["rgb_command_topic"] is not None:
mqtt.publish(self._hass, self._topic["rgb_command_topic"],
"{},{},{}".format(*kwargs[ATTR_RGB_COLOR]),
self._qos, self._retain)
if self._optimistic_rgb:
self._rgb = kwargs[ATTR_RGB_COLOR]
should_update = True
if ATTR_BRIGHTNESS in kwargs and \
self._topic["brightness_command_topic"] is not None:
percent_bright = float(kwargs[ATTR_BRIGHTNESS]) / 255
device_brightness = int(percent_bright * self._brightness_scale)
mqtt.publish(self._hass, self._topic["brightness_command_topic"],
device_brightness, self._qos, self._retain)
if self._optimistic_brightness:
self._brightness = kwargs[ATTR_BRIGHTNESS]
should_update = True
mqtt.publish(self._hass, self._topic["command_topic"],
self._payload["on"], self._qos, self._retain)
if self._optimistic:
# Optimistically assume that switch has changed state.
self._state = True
should_update = True
if should_update:
self.update_ha_state()
def turn_off(self, **kwargs):
"""Turn the device off."""
mqtt.publish(self._hass, self._topic["command_topic"],
self._payload["off"], self._qos, self._retain)
if self._optimistic:
# Optimistically assume that switch has changed state.
self._state = False
self.update_ha_state()
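# Illustrative YAML configuration this platform would accept (topic names
# and values are assumptions):
#   light:
#     - platform: mqtt
#       name: "Office light"
#       state_topic: "office/light1/status"
#       command_topic: "office/light1/switch"
#       brightness_state_topic: "office/light1/brightness"
#       brightness_command_topic: "office/light1/brightness/set"
#       optimistic: false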
| bdfoster/blumate | blumate/components/light/mqtt.py | Python | mit | 8,685 |
# Copyright 2017 Huawei Technologies Co.,LTD.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Cyborg common internal object model"""
import netaddr
from oslo_utils import versionutils
from oslo_versionedobjects import base as object_base
from cyborg import objects
from cyborg.objects import fields as object_fields
class CyborgObjectRegistry(object_base.VersionedObjectRegistry):
def registration_hook(self, cls, index):
# NOTE(jroll): blatantly stolen from nova
# NOTE(danms): This is called when an object is registered,
# and is responsible for maintaining cyborg.objects.$OBJECT
# as the highest-versioned implementation of a given object.
version = versionutils.convert_version_to_tuple(cls.VERSION)
if not hasattr(objects, cls.obj_name()):
setattr(objects, cls.obj_name(), cls)
else:
cur_version = versionutils.convert_version_to_tuple(
getattr(objects, cls.obj_name()).VERSION)
if version >= cur_version:
setattr(objects, cls.obj_name(), cls)
class CyborgObject(object_base.VersionedObject):
"""Base class and object factory.
This forms the base of all objects that can be remoted or instantiated
via RPC. Simply defining a class that inherits from this base class
will make it remotely instantiatable. Objects should implement the
necessary "get" classmethod routines as well as "save" object methods
as appropriate.
"""
OBJ_SERIAL_NAMESPACE = 'cyborg_object'
OBJ_PROJECT_NAMESPACE = 'cyborg'
fields = {
'created_at': object_fields.DateTimeField(nullable=True),
'updated_at': object_fields.DateTimeField(nullable=True),
}
def as_dict(self):
return dict((k, getattr(self, k))
for k in self.fields
if hasattr(self, k))
@staticmethod
def _from_db_object(obj, db_obj):
"""Converts a database entity to a formal object.
:param obj: An object of the class.
:param db_obj: A DB model of the object
:return: The object of the class with the database entity added
"""
for field in obj.fields:
obj[field] = db_obj[field]
obj.obj_reset_changes()
return obj
@classmethod
def _from_db_object_list(cls, db_objs, context):
"""Converts a list of database entities to a list of formal objects."""
objs = []
for db_obj in db_objs:
objs.append(cls._from_db_object(cls(context), db_obj))
return objs
class CyborgObjectSerializer(object_base.VersionedObjectSerializer):
# Base class to use for object hydration
OBJ_BASE_CLASS = CyborgObject
CyborgObjectDictCompat = object_base.VersionedObjectDictCompat
class CyborgPersistentObject(object):
"""Mixin class for Persistent objects.
This adds the fields that we use in common for most persistent objects.
"""
fields = {
'created_at': object_fields.DateTimeField(nullable=True),
'updated_at': object_fields.DateTimeField(nullable=True),
'deleted_at': object_fields.DateTimeField(nullable=True),
'deleted': object_fields.BooleanField(default=False),
}
class ObjectListBase(object_base.ObjectListBase):
@classmethod
def _obj_primitive_key(cls, field):
return 'cyborg_object.%s' % field
@classmethod
def _obj_primitive_field(cls, primitive, field,
default=object_fields.UnspecifiedDefault):
key = cls._obj_primitive_key(field)
if default == object_fields.UnspecifiedDefault:
return primitive[key]
else:
return primitive.get(key, default)
def obj_to_primitive(obj):
"""Recursively turn an object into a python primitive.
A CyborgObject becomes a dict, and anything that implements ObjectListBase
becomes a list.
"""
if isinstance(obj, ObjectListBase):
return [obj_to_primitive(x) for x in obj]
elif isinstance(obj, CyborgObject):
result = {}
for key in obj.obj_fields:
if obj.obj_attr_is_set(key) or key in obj.obj_extra_fields:
result[key] = obj_to_primitive(getattr(obj, key))
return result
elif isinstance(obj, netaddr.IPAddress):
return str(obj)
elif isinstance(obj, netaddr.IPNetwork):
return str(obj)
else:
return obj
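# Illustrative behaviour (hypothetical objects): a list-like object of
# CyborgObjects becomes a plain list of dicts, e.g.
#   obj_to_primitive(accelerator_list) -> [{'uuid': ..., 'created_at': ...}]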
def obj_equal_prims(obj_1, obj_2, ignore=None):
"""Compare two primitives for equivalence ignoring some keys.
This operation tests the primitives of two objects for equivalence.
Object primitives may contain a list identifying fields that have been
changed - this is ignored in the comparison. The ignore parameter lists
any other keys to be ignored.
    :param obj_1: The first object in the comparison
    :param obj_2: The second object in the comparison
    :param ignore: A list of fields to ignore
:returns: True if the primitives are equal ignoring changes
and specified fields, otherwise False.
"""
def _strip(prim, keys):
if isinstance(prim, dict):
for k in keys:
prim.pop(k, None)
for v in prim.values():
_strip(v, keys)
if isinstance(prim, list):
for v in prim:
_strip(v, keys)
return prim
if ignore is not None:
keys = ['cyborg_object.changes'] + ignore
else:
keys = ['cyborg_object.changes']
prim_1 = _strip(obj_1.obj_to_primitive(), keys)
prim_2 = _strip(obj_2.obj_to_primitive(), keys)
return prim_1 == prim_2
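# Illustrative comparison (hypothetical objects): two objects that differ
# only in their tracked change lists and a timestamp compare as equal:
#   obj_equal_prims(dep_1, dep_2, ignore=['updated_at'])  # -> True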
class DriverObjectBase(CyborgObject):
@staticmethod
def _from_db_object(obj, db_obj):
fields = obj.fields
fields.pop("updated_at")
fields.pop("created_at")
for field in fields:
obj[field] = db_obj[field]
obj.obj_reset_changes()
return obj
| openstack/nomad | cyborg/objects/base.py | Python | apache-2.0 | 6,515 |